// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X87

#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/cpu-profiler.h"
#include "src/debug.h"
#include "src/isolate-inl.h"
#include "src/runtime.h"
#include "src/serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    // TODO(titzer): should we just use a null handle here instead?
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsx_b(dst, src);
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);
  } else {
    mov(dst, src);
  }
}


void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    mov(dst, src);
  }
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, value);
    return;
  }
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(destination, Immediate(index));
  mov(destination, Operand::StaticArray(destination,
                                        times_pointer_size,
                                        roots_array_start));
}


void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}


void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


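// Masking an object's address with ~Page::kPageAlignmentMask yields the start
// of the MemoryChunk (page) containing it; the chunk's flags word then says
// whether the page is in the young generation's from-space or to-space.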
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  DCHECK(MemoryChunk::IN_FROM_SPACE < 8);
  DCHECK(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}


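// Record 'addr' in the store buffer, a bump-pointer array of slot addresses
// that the GC scans later. Once the buffer top crosses the overflow bit, the
// StoreBufferOverflowStub is called so the runtime can process the buffer.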
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr, Register scratch, SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}


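// Clamp the double on top of the x87 stack to [0, 255], leaving the result
// in result_reg. For an int32 conversion result r, the out-of-range handling
// below amounts to the C expression
//   if ((uint32_t)r > 255) r = (r < 0) ? 0 : 255;
// while inputs that do not convert at all (NaN, |x| too large) raise #IA and
// take the conv_failure path.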
void MacroAssembler::ClampTOSToUint8(Register result_reg) {
  Label done, conv_failure;
  sub(esp, Immediate(kPointerSize));
  fnclex();
  fist_s(Operand(esp, 0));
  pop(result_reg);
  X87CheckIA();
  j(equal, &conv_failure, Label::kNear);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(sign, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  fnclex();
  fldz();
  fld(1);
  FCmp();
  setcc(below, result_reg);  // 1 if negative, 0 if positive.
  dec_b(result_reg);         // 0 if negative, 255 if positive.
  bind(&done);
}


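// Clamp an int32 in 'reg' to [0, 255]; in C terms:
//   if ((uint32_t)reg > 255) reg = (reg < 0) ? 0 : 255;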
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);            // 0 if negative, 255 if positive.
  bind(&done);
}


void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
  sub(esp, Immediate(kDoubleSize));
  fst_d(MemOperand(esp, 0));
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
}


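// Convert the double on top of the x87 stack to an int32 in result_reg,
// branching to the given labels on failure. The fist_s/fild_s round trip
// converts to int32 and back; FCmp against the original value then flags NaN
// (parity_even) and any inexact conversion (not_equal, i.e. lost precision).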
void MacroAssembler::X87TOSToI(Register result_reg,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  Label done;
  sub(esp, Immediate(kPointerSize));
  fld(0);
  fist_s(MemOperand(esp, 0));
  fild_s(MemOperand(esp, 0));
  pop(result_reg);
  FCmp();
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    // To check for minus zero, we load the value again as float, and check
    // if that is still 0.
    sub(esp, Immediate(kPointerSize));
    fst_s(MemOperand(esp, 0));
    pop(result_reg);
    test(result_reg, Operand(result_reg));
    j(not_zero, minus_zero, dst);
  }
  bind(&done);
}


void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  SlowTruncateToI(result_reg, input_reg);
  bind(&done);
}


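// Load the uint32 in 'src' onto the x87 stack. fild_s reads the bits as a
// signed int32, so when the top bit is set the loaded value is 2^32 too
// small and is corrected by adding the uint32 bias (2^32) stored at a fixed
// external address.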
void MacroAssembler::LoadUint32NoSSE2(Register src) {
  Label done;
  push(src);
  fild_s(Operand(esp, 0));
  cmp(src, Immediate(0));
  j(not_sign, &done, Label::kNear);
  ExternalReference uint32_bias =
      ExternalReference::address_of_uint32_bias();
  fld_d(Operand::StaticVariable(uint32_bias));
  faddp(1);
  bind(&done);
  add(esp, Immediate(kPointerSize));
}


void MacroAssembler::RecordWriteArray(
    Register object, Register value, Register index, SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteField(
    Register object, int offset, Register value, Register dst,
    SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action,
    SmiCheck smi_check, PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteForMap(Register object, Handle<Map> map,
                                       Register scratch1, Register scratch2,
                                       SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map page's interesting flag suffices, since that
  // flag is only set during incremental collection, and in that case the
  // writing object's page has its interesting flag set as well. This
  // optimization relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(
    Register object, Register address, Register value, SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}


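// Large, attacker-chosen immediates embedded in generated code are a classic
// JIT-spraying vector. When a jit cookie is active, SafeMove and SafePush
// never emit such an immediate directly: they emit it XOR-ed with the cookie
// and undo the XOR at run time, so the raw constant never appears in the
// instruction stream.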
bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (!RelocInfo::IsNone(x.rmode_)) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Move(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Move(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  fstp_d(FieldOperand(elements, key, times_4,
                      FixedDoubleArray::kHeaderSize - elements_offset));
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  fld_d(Operand::StaticVariable(canonical_nan_reference));
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch, maybe_number);
  SmiUntag(scratch);
  push(scratch);
  fild_s(Operand(esp, 0));
  pop(scratch);
  fstp_d(FieldOperand(elements, key, times_4,
                      FixedDoubleArray::kHeaderSize - elements_offset));
  bind(&done);
}


void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}


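// Compare ST(0) with ST(1) and pop both. fucompp only sets the FPU condition
// codes, so the status word is copied into AX (fnstsw_ax) and sahf moves
// C0/C2/C3 into CF/PF/ZF; callers can then use ordinary integer conditions
// (below, equal, and parity_even for unordered). eax is preserved.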
void MacroAssembler::FCmp() {
  fucompp();
  push(eax);
  fnstsw_ax();
  sahf();
  pop(eax);
}


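// fxam classifies ST(0) via the status-word condition codes: C3 (0x4000),
// C2 (0x0400) and C0 (0x0100) encode the class, and C1 (0x0200) holds the
// sign. The mask 0x4700 keeps all four bits; C3=1, C2=0, C0=0 means "zero",
// which combined with C1=1 (negative) identifies -0.0, hence the compare
// against 0x4200.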
void MacroAssembler::FXamMinusZero() {
  fxam();
  push(eax);
  fnstsw_ax();
  and_(eax, Immediate(0x4700));
  // For minus zero, C3 == 1 && C1 == 1.
  cmp(eax, Immediate(0x4200));
  pop(eax);
  fstp(0);
}


void MacroAssembler::FXamSign() {
  fxam();
  push(eax);
  fnstsw_ax();
  // For negative value (including -0.0), C1 == 1.
  and_(eax, Immediate(0x0200));
  pop(eax);
  fstp(0);
}


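// Test whether the last x87 operation signalled an invalid-arithmetic-operand
// (#IA) exception: in the status word, IE is bit 0 (0x0001) and the
// stack-fault bit SF is bit 6 (0x0040). #IA corresponds to IE=1 with SF=0,
// so callers check the 'equal' condition afterwards.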
void MacroAssembler::X87CheckIA() {
  push(eax);
  fnstsw_ax();
  // For #IA, IE == 1 && SF == 0.
  and_(eax, Immediate(0x0041));
  cmp(eax, Immediate(0x0001));
  pop(eax);
}


// rc=00B, round to nearest.
// rc=01B, round down.
// rc=10B, round up.
// rc=11B, round toward zero.
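// The RC field occupies bits 10-11 of the x87 control word (hence the 0xF3FF
// mask below); rc is assumed to be passed already shifted into that position
// (e.g. 0x0400 for round down, 0x0C00 for round toward zero).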
void MacroAssembler::X87SetRC(int rc) {
  sub(esp, Immediate(kPointerSize));
  fnstcw(MemOperand(esp, 0));
  and_(MemOperand(esp, 0), Immediate(0xF3FF));
  or_(MemOperand(esp, 0), Immediate(rc));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
}


void MacroAssembler::X87SetFPUCW(int cw) {
  push(Immediate(cw));
  fldcw(MemOperand(esp, 0));
  add(esp, Immediate(kPointerSize));
}


void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}


void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}


void MacroAssembler::StubPrologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(Immediate(Smi::FromInt(StackFrame::STUB)));
}


void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictable_code_size_scope(
      this, kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
  } else {
    push(ebp);  // Caller's frame pointer.
    mov(ebp, esp);
    push(esi);  // Callee's context.
    push(edi);  // Callee's JS function.
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save FPU state.
  if (save_doubles) {
    // Store FPU state to m108byte.
    int space = 108 + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;  // entry fp + code object.
    fnsave(MemOperand(ebp, offset - 108));
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


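// On entry eax holds the argument count. A copy is kept in callee-saved edi,
// and esi is pointed into the caller's argument area (ebp + eax * 4 + offset)
// so that LeaveExitFrame can later restore esp from it.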
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore FPU state.
  if (save_doubles) {
    const int offset = -2 * kPointerSize;
    frstor(MemOperand(ebp, offset - 108));
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue(true);
}


void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}


void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch1));
  DCHECK(!holder_reg.is(scratch2));
  DCHECK(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, offset));
  mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map().
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key. This must be kept in sync with
// ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
// code-stub-hydrogen.cc
//
// Note: r0 will contain hash code
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}



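// Probe the number dictionary for 'key'. The probe sequence is quadratic:
// probe i looks at entry (hash + GetProbeOffset(i)) & mask, and the loop is
// unrolled kNumberDictionaryProbes times before giving up and jumping to the
// miss label.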
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


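// The Allocate() family below implements the inline bump-pointer fast path
// over the space's top and limit words. Ignoring alignment and tagging, the
// generated code is equivalent to this C sketch:
//   if (top + object_size overflows || top + object_size > limit)
//     goto gc_required;
//   result = top;
//   top += object_size;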
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}


void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}


void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}

UndoAllocationInNewSpace(Register object)1598 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
1599 ExternalReference new_space_allocation_top =
1600 ExternalReference::new_space_allocation_top_address(isolate());
1601
1602 // Make sure the object has no tag before resetting top.
1603 and_(object, Immediate(~kHeapObjectTagMask));
1604 #ifdef DEBUG
1605 cmp(object, Operand::StaticVariable(new_space_allocation_top));
1606 Check(below, kUndoAllocationOfNonAllocatedMemory);
1607 #endif
1608 mov(Operand::StaticVariable(new_space_allocation_top), object);
1609 }


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        MutableMode mode) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  Handle<Map> map = mode == MUTABLE
      ? isolate()->factory()->mutable_heap_number_map()
      : isolate()->factory()->heap_number_map();

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
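
// A worked example of the size computation above (assuming, as on this
// 32-bit target, kObjectAlignmentMask == 3): for length == 3 the lea
// produces 2 * 3 + 3 == 9, and masking with ~3 rounds the 6 bytes of UC16
// character data up to 8, the next object-aligned size.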


void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  DCHECK(kCharSize == 1);
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateOneByteString(Register result, int length,
                                           Register scratch1, Register scratch2,
                                           Label* gc_required) {
  DCHECK(length > 0);

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}


void MacroAssembler::AllocateOneByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_one_byte_string_map()));
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}


void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}


// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  DCHECK(source.is(esi));
  DCHECK(destination.is(edi));
  DCHECK(length.is(ecx));
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done, Label::kNear);

  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
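
// An illustrative call sequence (a sketch, not code from this file): the
// register assignments are fixed by the DCHECKs above, so a caller does
//   masm->cld();                          // set the direction flag first
//   masm->CopyBytes(esi, edi, ecx, edx);  // edx is an arbitrary scratch
// after which esi and edi point just past the copied ranges and the
// contents of ecx and edx are destroyed.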


void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry);
  bind(&loop);
  mov(Operand(start_offset, 0), filler);
  add(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmp(start_offset, end_offset);
  j(less, &loop);
}
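
// For example (a sketch, assuming the field range was computed by the
// caller), zapping the fields of a freshly allocated object with undefined
// might look like
//   masm->mov(edx, Immediate(isolate->factory()->undefined_value()));
//   masm->InitializeFieldsWithFiller(eax, ebx, edx);
// where eax holds the first field address and ebx one past the last.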


void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  bit_index += kSmiTagSize + kSmiShiftSize;
  DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}
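
// A worked example of the index arithmetic above: on this 32-bit target
// kSmiTagSize == 1 and kSmiShiftSize == 0, so testing logical bit 6 of a
// smi-encoded bit field becomes bit_index == 7, which lands in byte 0 at
// byte_bit_index 7, i.e. a test_b with the mask 0x80.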


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  test(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  Label non_instance;
  if (miss_on_bound_function) {
    // Check that the receiver isn't a smi.
    JumpIfSmi(function, miss);

    // Check that the function really is a function.
    CmpObjectType(function, JS_FUNCTION_TYPE, result);
    j(not_equal, miss);

    // If a bound function, go to miss label.
    mov(scratch,
        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
                   SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);

    // Make sure that the function has an instance prototype.
    movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
    test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
    j(not_zero, &non_instance);
  }

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  if (miss_on_bound_function) {
    jmp(&done);

    // Non-instance prototype: Fetch prototype from constructor field
    // in initial map.
    bind(&non_instance);
    mov(result, FieldOperand(result, Map::kConstructorOffset));
  }

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!index.is(hash)) {
    mov(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


Operand ApiParameterOperand(int index) {
  return Operand(esp, index * kPointerSize);
}


void MacroAssembler::PrepareCallApiFunction(int argc) {
  EnterApiExitFrame(argc);
  if (emit_debug_code()) {
    mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
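
// A typical pairing (a sketch, not code from this file): reserve the
// argument slots, then address them with ApiParameterOperand before the
// call is made through CallApiFunctionAndReturn below, e.g.
//   masm->PrepareCallApiFunction(2);
//   masm->mov(ApiParameterOperand(0), eax);
//   masm->mov(ApiParameterOperand(1), Immediate(0));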


void MacroAssembler::CallApiFunctionAndReturn(
    Register function_address,
    ExternalReference thunk_ref,
    Operand thunk_last_arg,
    int stack_space,
    Operand return_value_operand,
    Operand* context_restore_operand) {
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate());
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate());

  DCHECK(edx.is(function_address));
  // Allocate HandleScope in callee-save registers.
  mov(ebx, Operand::StaticVariable(next_address));
  mov(edi, Operand::StaticVariable(limit_address));
  add(Operand::StaticVariable(level_address), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, eax);
    mov(Operand(esp, 0),
        Immediate(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  mov(eax, Immediate(ExternalReference::is_profiling_address(isolate())));
  cmpb(Operand(eax, 0), 0);
  j(zero, &profiler_disabled);

  // Additional parameter is the address of the actual getter function.
  mov(thunk_last_arg, function_address);
  // Call the api function.
  mov(eax, Immediate(thunk_ref));
  call(eax);
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  // Call the api function.
  call(function_address);
  bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, eax);
    mov(Operand(esp, 0),
        Immediate(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue.
  mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, kInvalidHandleScopeLevel);
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
  bind(&exception_handled);

#if ENABLE_EXTRA_CHECKS
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);

  CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  j(above_equal, &ok, Label::kNear);

  cmp(map, isolate()->factory()->heap_number_map());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->undefined_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->true_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->false_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->null_value());
  j(equal, &ok, Label::kNear);

  Abort(kAPICallReturnedInvalidObject);

  bind(&ok);
#endif

  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    mov(esi, *context_restore_operand);
  }
  LeaveApiExitFrame(!restore_context);
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  {
    FrameScope frame(this, StackFrame::INTERNAL);
    CallRuntime(Runtime::kPromoteScheduledException, 0);
  }
  jmp(&exception_handled);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);
  mov(Operand(esp, 0),
      Immediate(ExternalReference::isolate_address(isolate())));
  mov(eax, Immediate(delete_extensions));
  call(eax);
  mov(eax, edi);
  jmp(&leave_exit_frame);
}


void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
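
// A summary of the register contract implied above (derived from the code,
// stated here for reference): when the counts can mismatch, the
// ArgumentsAdaptorTrampoline is entered with eax holding the actual
// argument count, ebx the expected count, and edx the code entry to
// invoke once the arguments have been adapted.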


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag, Label::kNear,
                 call_wrapper);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  DCHECK(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, Operand(scratch,
                       Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  mov(map_in_out, FieldOperand(scratch, offset));
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  mov(function,
      FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}


// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
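
// For example (assuming kNumSafepointRegisters == 8 on this target): eax
// has encoding 0 and is pushed first, so it sits furthest from esp and
// gets stack index 8 - 0 - 1 == 7, while edi (encoding 7) is pushed last
// and gets index 0.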


void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  AllowDeferredHandleDereference embedding_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    mov(result, Operand::ForCell(cell));
  } else {
    mov(result, object);
  }
}


void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    Push(object);
  }
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
  // Turn off the stack depth check when serializer is enabled to reduce the
  // code size.
  if (serializer_enabled()) return;
  // Make sure the floating point stack has exactly |depth| items.
  DCHECK(depth <= 7);
  // This is very expensive.
  DCHECK(FLAG_debug_code && FLAG_enable_slow_asserts);

  // The top-of-stack (tos) is 7 if there is one item pushed.
  int tos = (8 - depth) % 8;
  const int kTopMask = 0x3800;
  push(eax);
  fwait();
  fnstsw_ax();
  and_(eax, kTopMask);
  shr(eax, 11);
  cmp(eax, Immediate(tos));
  Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
  fnclex();
  pop(eax);
}
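
// A worked example of the check above: the x87 TOP field occupies bits
// 11-13 of the FPU status word, hence kTopMask == 0x3800 and the shift by
// 11. With depth == 1 the expected TOP is (8 - 1) % 8 == 7, matching the
// comment that one pushed item leaves the top-of-stack pointer at 7.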


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::Move(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // will not return here
  int3();
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::LookupNumberStringCache(Register object,
                                             Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* not_found) {
  // Register usage: result is used as a temporary until the final load.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  shr(mask, kSmiTagSize + 1);  // Untag length and divide it by two.
  sub(mask, Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label smi_hash_calculated;
  Label load_result_from_cache;
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  JumpIfNotSmi(object, &not_smi, Label::kNear);
  mov(scratch, object);
  SmiUntag(scratch);
  jmp(&smi_hash_calculated, Label::kNear);
  bind(&not_smi);
  cmp(FieldOperand(object, HeapObject::kMapOffset),
      isolate()->factory()->heap_number_map());
  j(not_equal, not_found);
  STATIC_ASSERT(8 == kDoubleSize);
  mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
  xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
  // Object is heap number and hash is now in scratch. Calculate cache index.
  and_(scratch, mask);
  Register index = scratch;
  Register probe = mask;
  mov(probe,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  fld_d(FieldOperand(object, HeapNumber::kValueOffset));
  fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
  FCmp();
  j(parity_even, not_found);  // Bail out if NaN is involved.
  j(not_equal, not_found);  // The cache did not contain this value.
  jmp(&load_result_from_cache, Label::kNear);

  bind(&smi_hash_calculated);
  // Object is smi and hash is now in scratch. Calculate cache index.
  and_(scratch, mask);
  // Check if the entry is the smi we are looking for.
  cmp(object,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize));
  j(not_equal, not_found);

  // Get the result from the cache.
  bind(&load_result_from_cache);
  mov(result,
      FieldOperand(number_string_cache,
                   index,
                   times_twice_pointer_size,
                   FixedArray::kHeaderSize + kPointerSize));
  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
}
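
// A worked example of the mask computation above: a cache backed by a
// FixedArray of 128 elements holds 64 (number, string) pairs. The length
// smi is untagged and halved by the single shr (kSmiTagSize + 1 == 2 on
// this target), giving 64, and subtracting 1 yields the index mask 0x3f.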


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
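
// A sketch of why the single compare above is sound: the DCHECK_EQ
// guarantees that the masked bits and the same bits shifted left by 3 do
// not overlap, so
//   lea(scratch1, Operand(scratch1, scratch2, times_8, 0))
// computes scratch1 + (scratch2 << 3), which here equals the bitwise OR of
// the two masked instance types, and one cmp against the similarly
// interleaved expected tag checks both strings at once.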


void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}


void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in; tag it to compare with
  // the string length without using a temp register. It is restored at the
  // end of this function.
  SmiTag(index);
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index.
  SmiUntag(index);
}


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
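
// The resulting stack layout, for example with num_arguments == 2 and a
// 16-byte frame alignment: esp is aligned down, [esp + 0] and [esp + 4]
// are the argument slots, and [esp + 8] holds the original esp, which is
// what CallCFunction reloads to pop the frame on aligned platforms.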


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}


#ifdef DEBUG
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CpuFeatures::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    mov(scratch, map);
    mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    and_(scratch, Immediate(Map::Deprecated::kMask));
    j(not_zero, if_deprecated);
  }
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
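
// A worked example of the address arithmetic above (assuming 32-bit mark
// bitmap cells, so kBitsPerCellLog2 == 5 and kBytesPerCellLog2 == 2, with
// kPointerSizeLog2 == 2): shift == 5 + 2 - 2 == 5. For a word at page
// offset 0x100, shifting right by 5 and masking down to a multiple of 4
// gives cell byte offset 8 (cell index 2), while 0x100 >> 2 masked with
// 31 gives bit 0, so mask_reg becomes 1 << 0.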
3138
3139
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // Shift left (via add); may overflow, making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, isolate()->factory()->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  DCHECK_EQ(0, kSeqStringTag & kExternalStringTag);
  DCHECK_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either Latin1 or UC16.
  DCHECK(kOneByteStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if Latin1) or 8 (if UC16), i.e., char-size shifted
  // by 2. If we multiply the string length as smi by this, it still
  // won't overflow a 32-bit value.
  DCHECK_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
  DCHECK(SeqOneByteString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, kLiveBytesCountOverflowChunkSize);
  }

  bind(&done);
}


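// Extract the EnumLength field from |map|'s bit field 3 into |dst| and
// smi-tag it.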
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}


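// Walk the prototype chain of the object in eax, jumping to |call_runtime|
// unless the receiver's map has a valid enum cache, every other map's enum
// cache is empty, and no object on the chain has elements (beyond the empty
// fixed array or the empty slow element dictionary) -- callers use this to
// decide whether for-in can take the fast path. Clobbers ebx, ecx and edx.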
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  Label no_elements;
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);

  bind(&no_elements);
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}


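// Check whether an AllocationMemento immediately follows the JSArray in
// |receiver_reg|. Jumps to |no_memento_found| when the word after the array
// lies outside the active new-space region; otherwise compares that word's
// map against the allocation-memento map and leaves the flags set so the
// caller can test |equal| for "memento found".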
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
}


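// Walk up the prototype chain of |object| and jump to |found| if any map on
// the chain has dictionary (slow) elements.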
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again;

  // |current| starts at the object itself and is advanced along the
  // prototype chain below.
  mov(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);
}


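// Compute the truncated quotient |dividend| / |divisor| into edx without a
// div instruction: multiply by a precomputed "magic" multiplier (Granlund &
// Montgomery), correct the high half for the multiplier's sign, shift, and
// add the dividend's sign bit so the result rounds toward zero. For
// instance, divisor == 3 yields multiplier 0x55555556 and shift 0.
// Clobbers eax and edx; the result is left in edx.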
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  mov(eax, Immediate(mag.multiplier));
  imul(dividend);
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  if (mag.shift > 0) sar(edx, mag.shift);
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X87