1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_X87
6
7 #include "src/codegen.h"
8 #include "src/ic/ic.h"
9 #include "src/ic/ic-compiler.h"
10 #include "src/ic/stub-cache.h"
11
12 namespace v8 {
13 namespace internal {
14
15 // ----------------------------------------------------------------------------
16 // Static IC stub generators.
17 //
18
19 #define __ ACCESS_MASM(masm)
20
// Helper function used to load a property from a dictionary backing
// storage. This function may fail to load a property even though it is
// in the dictionary, so code at miss_label must always call a backup
// property load that is complete. This function is safe to call if
// name is not internalized, and will jump to the miss_label in that
// case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
                                   Register elements, Register name,
                                   Register r0, Register r1, Register result) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is unchanged.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - used for the index into the property dictionary
  //
  // r1 - used to hold the capacity of the property dictionary.
  //
  // result - holds the result on exit.

  Label done;

  // Probe the dictionary. On success falls through to |done| with the
  // found entry's index in r0; on failure jumps to |miss_label|.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);

  // If probing finds an entry in the dictionary, r0 contains the
  // index into the dictionary. Check that the value is a normal
  // property.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  // Each dictionary entry is (key, value, details); details is the third
  // word of the entry, hence the 2 * kPointerSize offset.
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // The details word is a smi, so the type-field mask must be shifted by
  // kSmiTagSize before testing. A normal property has type field zero.
  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
          Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ j(not_zero, miss_label);

  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ mov(result, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
}
67
68
// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not internalized, and will jump to the miss_label in
// that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss_label,
                                    Register elements, Register name,
                                    Register value, Register r0, Register r1) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is clobbered.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // value - holds the value to store and is unchanged.
  //
  // r0 - used for index into the property dictionary and is clobbered.
  //
  // r1 - used to hold the capacity of the property dictionary and is clobbered.
  Label done;


  // Probe the dictionary. On success falls through to |done| with the
  // found entry's index in r0; on failure jumps to |miss_label|.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);

  // If probing finds an entry in the dictionary, r0 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  // Details word is the third word of the (key, value, details) entry.
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Reject the store if the property is not "normal" or is read-only.
  // The details word is a smi, hence the kSmiTagSize shift of the mask.
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY))
      << kSmiTagSize;
  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
          Immediate(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);

  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
  __ mov(Operand(r0, 0), value);

  // Update write barrier. Make sure not to clobber the value.
  __ mov(r1, value);
  __ RecordWrite(elements, r0, r1, kDontSaveFPRegs);
}
122
// Emits the fast-path keyed store for the megamorphic stub. The caller
// binds control flow to the two entry labels: |fast_object| for stores
// into FAST_(SMI_)ELEMENTS backing stores and |fast_double| for
// FAST_DOUBLE_ELEMENTS. |check_map| selects whether the elements map is
// (re)validated on entry, and |increment_length| selects whether the
// JSArray length is bumped by one (the array-grow case). Any condition
// this code cannot handle inline escapes to |slow|.
static void KeyedStoreGenerateMegamorphicHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  // key is a smi.
  // ebx: FixedArray receiver->elements
  // edi: receiver map
  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  if (check_map == kCheckMap) {
    __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
    __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
    // Not a plain FixedArray: try the double-elements fast path instead.
    __ j(not_equal, fast_double);
  }

  // HOLECHECK: guards "A[i] = V"
  // We have to go to the runtime if the current value is the hole because
  // there may be a callback on the element.
  Label holecheck_passed1;
  __ cmp(FixedArrayElementOperand(ebx, key),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, &holecheck_passed1);
  // Overwriting a hole: only safe if no prototype is a dictionary-mode
  // object (which could carry an element accessor); otherwise go slow.
  __ JumpIfDictionaryInPrototypeChain(receiver, ebx, edi, slow);
  // JumpIfDictionaryInPrototypeChain clobbered ebx; reload the elements.
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));

  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ mov(FixedArrayElementOperand(ebx, key), value);
  __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset));
  __ CheckFastObjectElements(edi, &transition_smi_elements);

  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  __ mov(FixedArrayElementOperand(ebx, key), value);
  // Update write barrier for the elements array address.
  __ mov(edx, value);  // Preserve the value which is returned.
  __ RecordWriteArray(ebx, edx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
    __ j(not_equal, slow);
    // If the value is a number, store it as a double in the FastDoubleElements
    // array.
  }

  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so
  // go to the runtime.
  // The double hole is encoded in the upper 32 bits of the element, hence
  // the sizeof(kHoleNanLower32) offset and the kHoleNanUpper32 comparison.
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(ebx, key, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(not_equal, &fast_double_without_map_check);
  __ JumpIfDictionaryInPrototypeChain(receiver, ebx, edi, slow);
  // ebx was clobbered above; reload the elements pointer.
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));

  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, ebx, key, edi,
                                 &transition_double_elements, false);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);

  __ bind(&transition_smi_elements);
  __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset));

  // Transition the array appropriately depending on the value type.
  __ CheckMap(value, masm->isolate()->factory()->heap_number_map(),
              &non_double_value, DONT_DO_SMI_CHECK);

  // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
  // and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS, ebx, edi, slow);
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
                                                   ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS, ebx,
                                         edi, slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
  __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
                                         ebx, edi, slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, receiver, key,
                                                      value, ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}
259
260
// Generates the megamorphic keyed store stub: dispatches on receiver and
// key kind (smi index into JSObject/JSArray elements, unique name probe
// into the stub cache, or runtime fallback) and emits the fast-path store
// bodies via KeyedStoreGenerateMegamorphicHelper.
void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                       LanguageMode language_mode) {
  typedef StoreWithVectorDescriptor Descriptor;
  // Return address is on the stack.
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array, maybe_name_key, miss;
  Register receiver = Descriptor::ReceiverRegister();
  Register key = Descriptor::NameRegister();
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map from the receiver.
  __ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.
  // The generic stub does not perform map checks.
  __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow);

  // The value is passed on the stack (see Descriptor layout), not in a
  // register; load it now that the register is safe to use.
  __ LoadParameterFromStack<Descriptor>(Descriptor::ValueRegister(),
                                        Descriptor::kValue);

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &maybe_name_key);
  __ CmpInstanceType(edi, JS_ARRAY_TYPE);
  __ j(equal, &array);
  // Check that the object is some kind of JS object EXCEPT JS Value type. In
  // the case that the object is a value-wrapper object, we enter the runtime
  // system to make sure that indexing into string objects works as intended.
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  __ CmpInstanceType(edi, JS_OBJECT_TYPE);
  __ j(below, &slow);

  // Object case: Check key against length in the elements array.
  // Key is a smi.
  // edi: receiver map
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ cmp(key, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ j(below, &fast_object);
  // Out-of-bounds store on a non-array object falls through to slow.

  // Slow case: call runtime.
  __ bind(&slow);
  PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode);
  // Never returns to here.

  __ bind(&maybe_name_key);
  // Key is not a smi: treat it as a property name if it is a unique name,
  // otherwise go slow.
  __ mov(ebx, FieldOperand(key, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ JumpIfNotUniqueNameInstanceType(ebx, &slow);

  // Probe the store stub cache; on a hit this tail-calls the handler.
  masm->isolate()->store_stub_cache()->GenerateProbe(masm, receiver, key, edi,
                                                     no_reg);

  // Cache miss.
  __ jmp(&miss);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // receiver is a JSArray.
  // key is a smi.
  // ebx: receiver->elements, a FixedArray
  // edi: receiver map
  // flags: compare (key, receiver.length()) -- set before the jump to
  // &extra below; consumed by the first j() here.
  // do not leave holes in the array:
  __ j(not_equal, &slow);  // only store array[length], never beyond it
  __ cmp(key, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ j(above_equal, &slow);  // no spare capacity in the backing store
  __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  // receiver is a JSArray.
  // key is a smi.
  // edi: receiver map
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array and fall through to the
  // common store code.
  __ cmp(key, FieldOperand(receiver, JSArray::kLengthOffset));  // Compare smis.
  __ j(above_equal, &extra);

  // Emit both helper bodies: in-bounds stores (no length change) and the
  // grow-by-one stores (map already validated on the &extra path).
  KeyedStoreGenerateMegamorphicHelper(masm, &fast_object, &fast_double, &slow,
                                      kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateMegamorphicHelper(masm, &fast_object_grow,
                                      &fast_double_grow, &slow, kDontCheckMap,
                                      kIncrementLength);

  __ bind(&miss);
  GenerateMiss(masm);
}
367
// Generates the "normal" (dictionary-mode receiver) load stub: probes the
// receiver's property dictionary and returns the value on a hit; on a
// failed probe falls back to the runtime (not a miss).
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = eax;
  // eax must not alias the receiver or name registers, which the
  // dictionary probe still needs.
  DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));
  DCHECK(!dictionary.is(LoadDescriptor::NameRegister()));

  Label slow;

  __ mov(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(),
                                  JSObject::kPropertiesOffset));
  // Result lands in eax, the conventional return register.
  GenerateDictionaryLoad(masm, &slow, dictionary,
                         LoadDescriptor::NameRegister(), edi, ebx, eax);
  __ ret(0);

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  GenerateRuntimeGetProperty(masm);
}
385
386
LoadIC_PushArgs(MacroAssembler * masm)387 static void LoadIC_PushArgs(MacroAssembler* masm) {
388 Register receiver = LoadDescriptor::ReceiverRegister();
389 Register name = LoadDescriptor::NameRegister();
390
391 Register slot = LoadDescriptor::SlotRegister();
392 Register vector = LoadWithVectorDescriptor::VectorRegister();
393 DCHECK(!edi.is(receiver) && !edi.is(name) && !edi.is(slot) &&
394 !edi.is(vector));
395
396 __ pop(edi);
397 __ push(receiver);
398 __ push(name);
399 __ push(slot);
400 __ push(vector);
401 __ push(edi);
402 }
403
404
// Load IC miss handler: bumps the miss counter and tail-calls the
// LoadIC_Miss runtime entry with (receiver, name, slot, vector) on the
// stack.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  __ IncrementCounter(masm->isolate()->counters()->ic_load_miss(), 1);
  LoadIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadIC_Miss);
}
413
GenerateRuntimeGetProperty(MacroAssembler * masm)414 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
415 // Return address is on the stack.
416 Register receiver = LoadDescriptor::ReceiverRegister();
417 Register name = LoadDescriptor::NameRegister();
418 DCHECK(!ebx.is(receiver) && !ebx.is(name));
419
420 __ pop(ebx);
421 __ push(receiver);
422 __ push(name);
423 __ push(ebx);
424
425 // Do tail-call to runtime routine.
426 __ TailCallRuntime(Runtime::kGetProperty);
427 }
428
429
// Keyed load IC miss handler: bumps the keyed-miss counter and tail-calls
// the KeyedLoadIC_Miss runtime entry with (receiver, name, slot, vector)
// on the stack.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  __ IncrementCounter(masm->isolate()->counters()->ic_keyed_load_miss(), 1);

  LoadIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedLoadIC_Miss);
}
439
GenerateRuntimeGetProperty(MacroAssembler * masm)440 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
441 // Return address is on the stack.
442 Register receiver = LoadDescriptor::ReceiverRegister();
443 Register name = LoadDescriptor::NameRegister();
444 DCHECK(!ebx.is(receiver) && !ebx.is(name));
445
446 __ pop(ebx);
447 __ push(receiver);
448 __ push(name);
449 __ push(ebx);
450
451 // Do tail-call to runtime routine.
452 __ TailCallRuntime(Runtime::kKeyedGetProperty);
453 }
454
StoreIC_PushArgs(MacroAssembler * masm)455 static void StoreIC_PushArgs(MacroAssembler* masm) {
456 Register receiver = StoreWithVectorDescriptor::ReceiverRegister();
457 Register name = StoreWithVectorDescriptor::NameRegister();
458
459 STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
460 // Current stack layout:
461 // - esp[12] -- value
462 // - esp[8] -- slot
463 // - esp[4] -- vector
464 // - esp[0] -- return address
465
466 Register return_address = StoreWithVectorDescriptor::SlotRegister();
467 __ pop(return_address);
468 __ push(receiver);
469 __ push(name);
470 __ push(return_address);
471 }
472
473
// Store IC miss handler: tail-calls the StoreIC_Miss runtime entry with
// (receiver, name, value, slot, vector) on the stack.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kStoreIC_Miss);
}
481
482
// Generates the "normal" (dictionary-mode receiver) store stub: probes
// the receiver's property dictionary and stores the value on a hit;
// otherwise restores the receiver and falls back to the miss handler.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  typedef StoreWithVectorDescriptor Descriptor;
  Label restore_miss;
  Register receiver = Descriptor::ReceiverRegister();
  Register name = Descriptor::NameRegister();
  Register value = Descriptor::ValueRegister();
  // Since the slot and vector values are passed on the stack we can use
  // respective registers as scratch registers.
  Register scratch1 = Descriptor::VectorRegister();
  Register scratch2 = Descriptor::SlotRegister();

  // The value is passed on the stack in this descriptor; load it.
  __ LoadParameterFromStack<Descriptor>(value, Descriptor::kValue);

  // A lot of registers are needed for storing to slow case objects.
  // Push and restore receiver but rely on GenerateDictionaryStore preserving
  // the value and name.
  __ push(receiver);

  // The receiver register is then reused to hold the property dictionary.
  Register dictionary = receiver;
  __ mov(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
  GenerateDictionaryStore(masm, &restore_miss, dictionary, name, value,
                          scratch1, scratch2);
  // Success: drop the saved receiver and return, popping the three
  // stack-passed arguments (value, slot, vector).
  __ Drop(1);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->ic_store_normal_hit(), 1);
  __ ret(Descriptor::kStackArgumentsCount * kPointerSize);

  // Probe failed: restore the receiver (clobbered by the dictionary load
  // above) before taking the miss path.
  __ bind(&restore_miss);
  __ pop(receiver);
  __ IncrementCounter(counters->ic_store_normal_miss(), 1);
  GenerateMiss(masm);
}
515
516
// Keyed store IC miss handler: tail-calls the KeyedStoreIC_Miss runtime
// entry with (receiver, name, value, slot, vector) on the stack.
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
}
524
// Keyed store IC slow path: tail-calls the KeyedStoreIC_Slow runtime
// entry (generic store, no IC state update) with the same argument
// layout as the miss handler.
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}
532
533 #undef __
534
535
ComputeCondition(Token::Value op)536 Condition CompareIC::ComputeCondition(Token::Value op) {
537 switch (op) {
538 case Token::EQ_STRICT:
539 case Token::EQ:
540 return equal;
541 case Token::LT:
542 return less;
543 case Token::GT:
544 return greater;
545 case Token::LTE:
546 return less_equal;
547 case Token::GTE:
548 return greater_equal;
549 default:
550 UNREACHABLE();
551 return no_condition;
552 }
553 }
554
555
HasInlinedSmiCode(Address address)556 bool CompareIC::HasInlinedSmiCode(Address address) {
557 // The address of the instruction following the call.
558 Address test_instruction_address =
559 address + Assembler::kCallTargetAddressOffset;
560
561 // If the instruction following the call is not a test al, nothing
562 // was inlined.
563 return *test_instruction_address == Assembler::kTestAlByte;
564 }
565
566
// Patches the inlined smi check at a compare IC call site, toggling it
// between enabled and disabled by rewriting the short conditional jump
// that guards the map check.
void PatchInlinedSmiCode(Isolate* isolate, Address address,
                         InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  if (*test_instruction_address != Assembler::kTestAlByte) {
    DCHECK(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  // The delta is encoded as the immediate operand of the "test al"
  // instruction, i.e. the byte right after the opcode.
  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, test=%p, delta=%d\n",
           static_cast<void*>(address),
           static_cast<void*>(test_instruction_address), delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  // Sanity-check that the byte being patched is one of the two expected
  // short-jump opcodes for the current state.
  DCHECK((check == ENABLE_INLINED_SMI_CHECK)
             ? (*jmp_address == Assembler::kJncShortOpcode ||
                *jmp_address == Assembler::kJcShortOpcode)
             : (*jmp_address == Assembler::kJnzShortOpcode ||
                *jmp_address == Assembler::kJzShortOpcode));
  // Preserve the jump's polarity while swapping carry <-> zero conditions.
  Condition cc =
      (check == ENABLE_INLINED_SMI_CHECK)
          ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
          : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
  // Rewrite only the opcode byte; the jump displacement is unchanged.
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
605 } // namespace internal
606 } // namespace v8
607
608 #endif // V8_TARGET_ARCH_X87
609