// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/codegen.h"
#include "src/ic/ic.h"
#include "src/ic/ic-compiler.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static IC stub generators.
//

#define __ ACCESS_MASM(masm)

static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
  __ j(equal, global_object);
  __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
  __ j(equal, global_object);
}

// Helper function used to load a property from a dictionary backing storage.
// This function may return false negatives, so code at miss_label
// must always call a backup property load that is complete.
// This function is safe to call if name is not an internalized string,
// and will jump to the miss_label in that case.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
                                   Register elements, Register name,
                                   Register r0, Register r1, Register result) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is unchanged.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // r0   - used to hold the capacity of the property dictionary.
  //
  // r1   - used to hold the index into the property dictionary.
  //
  // result - holds the result on exit if the load succeeded.

  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);

  // If probing finds an entry in the dictionary, r1 contains the
  // index into the dictionary. Check that the value is a normal
  // property.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
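  // Note: each NameDictionary entry is a (key, value, details) triple, so the
  // details word sits two pointers past the entry start and the value one
  // pointer past it (see kValueOffset below).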
  __ Test(Operand(elements, r1, times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(PropertyDetails::TypeField::kMask));
  __ j(not_zero, miss_label);

  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ movp(result, Operand(elements, r1, times_pointer_size,
                          kValueOffset - kHeapObjectTag));
}

// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not an internalized string, and will jump to the miss_label
// in that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss_label,
                                    Register elements, Register name,
                                    Register value, Register scratch0,
                                    Register scratch1) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is clobbered.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // value - holds the value to store and is unchanged.
  //
  // scratch0 - used during the positive dictionary lookup and is clobbered.
  //
  // scratch1 - used for index into the property dictionary and is clobbered.
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(
      masm, miss_label, &done, elements, name, scratch0, scratch1);

  // If probing finds an entry in the dictionary, scratch1 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      PropertyDetails::TypeField::kMask |
      PropertyDetails::AttributesField::encode(READ_ONLY);
  __ Test(Operand(elements, scratch1, times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);

  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ leap(scratch1, Operand(elements, scratch1, times_pointer_size,
                            kValueOffset - kHeapObjectTag));
  __ movp(Operand(scratch1, 0), value);

  // Update write barrier. Make sure not to clobber the value.
  __ movp(scratch0, value);
  __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
}


// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           int interceptor_bit, Label* slow) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  // Scratch registers:
  //   map - used to hold the map of the receiver.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);

  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing
  // into string objects works as intended.
  DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
  __ j(below, slow);

  // Check bit field.
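  // If either the "access check needed" bit or the requested interceptor bit
  // is set, the fast path cannot be used, so bail out to the slow path.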
  __ testb(
      FieldOperand(map, Map::kBitFieldOffset),
      Immediate((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ j(not_zero, slow);
}


// Loads an indexed element from a fast case array.
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register elements,
                                  Register scratch, Register result,
                                  Label* slow, LanguageMode language_mode) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  // elements - holds the elements of the receiver and its prototypes.
  //
  // scratch  - used to hold maps, prototypes, and the loaded value.
  Label check_prototypes, check_next_prototype;
  Label done, in_bounds, absent;

  __ movp(elements, FieldOperand(receiver, JSObject::kElementsOffset));
  __ AssertFastElements(elements);
  // Check that the key (index) is within bounds.
  __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
  // Unsigned comparison rejects negative indices.
  __ j(below, &in_bounds);

  // Out-of-bounds. Check the prototype chain to see if we can just return
  // 'undefined'.
  __ SmiCompare(key, Smi::FromInt(0));
  __ j(less, slow);  // Negative keys can't take the fast OOB path.
  __ bind(&check_prototypes);
  __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ bind(&check_next_prototype);
  __ movp(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
  // scratch: current prototype
  __ CompareRoot(scratch, Heap::kNullValueRootIndex);
  __ j(equal, &absent);
  __ movp(elements, FieldOperand(scratch, JSObject::kElementsOffset));
  __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
  // elements: elements of current prototype
  // scratch: map of current prototype
  __ CmpInstanceType(scratch, JS_OBJECT_TYPE);
  __ j(below, slow);
  __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
           Immediate((1 << Map::kIsAccessCheckNeeded) |
                     (1 << Map::kHasIndexedInterceptor)));
  __ j(not_zero, slow);
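  // Only a prototype whose elements are the empty fixed array can be skipped;
  // anything else might supply the element, so defer to the slow path.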
  __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
  __ j(not_equal, slow);
  __ jmp(&check_next_prototype);

  __ bind(&absent);
  if (is_strong(language_mode)) {
    // Strong mode accesses must throw in this case, so call the runtime.
    __ jmp(slow);
  } else {
    __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
    __ jmp(&done);
  }

  __ bind(&in_bounds);
  // Fast case: Do the load.
  SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
  __ movp(scratch, FieldOperand(elements, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
  // In case the loaded value is the_hole we have to check the prototype chain.
  __ j(equal, &check_prototypes);
  __ Move(result, scratch);
  __ bind(&done);
}


// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name.
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // Register use:
  //   key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  //   map - used to hold the map of the key.
  //   hash - used to hold the hash of the key.
  Label unique;
  __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
  __ j(above, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
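  // Equality at the boundary means the key is a symbol, which is always a
  // unique name, so jump straight to the unique case.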
  __ j(equal, &unique);

  // Is the string an array index, with cached numeric value?
  __ movl(hash, FieldOperand(key, Name::kHashFieldOffset));
  __ testl(hash, Immediate(Name::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);  // The value in hash is used at jump target.

  // Is the string internalized? We already know it's a string so a single
  // bit test is enough.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
           Immediate(kIsNotInternalizedMask));
  __ j(not_zero, not_unique);

  __ bind(&unique);
}

void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm,
                                      LanguageMode language_mode) {
  // The return address is on the stack.
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(masm, receiver, rax,
                                 Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(rax, &check_number_dictionary);

  GenerateFastArrayLoad(masm, receiver, key, rax, rbx, rax, &slow,
                        language_mode);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ SmiToInteger32(rbx, key);
  __ movp(rax, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check whether the elements object is a number dictionary.
  // rbx: key as untagged int32
  // rax: elements
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow);
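  // Probe the number dictionary; on success the loaded value ends up in rax,
  // otherwise control jumps to the slow path.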
  __ LoadFromNumberDictionary(&slow, rax, key, rbx, r9, rdi, rax);
  __ ret(0);

  __ bind(&slow);
  // Slow case: Jump to runtime.
  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
  KeyedLoadIC::GenerateRuntimeGetProperty(masm, language_mode);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, rax, rbx, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(masm, receiver, rax, Map::kHasNamedInterceptor,
                                 &slow);

  // If the receiver is a fast-case object, check the stub cache. Otherwise
  // probe the dictionary.
  __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &probe_dictionary);

  Register megamorphic_scratch = rdi;
  // The handlers in the stub cache expect a vector and slot. Since we won't
  // change the IC from any downstream misses, a dummy vector can be used.
  Register vector = LoadWithVectorDescriptor::VectorRegister();
  Register slot = LoadDescriptor::SlotRegister();
  DCHECK(!AreAliased(megamorphic_scratch, vector, slot));
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(masm->isolate());
  int slot_index = dummy_vector->GetIndex(
      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot));
  __ Move(vector, dummy_vector);
  __ Move(slot, Smi::FromInt(slot_index));

  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags,
                                               receiver, key,
                                               megamorphic_scratch, no_reg);
  // Cache miss.
  GenerateMiss(masm);

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // rbx: property dictionary

  __ movp(rax, FieldOperand(receiver, JSObject::kMapOffset));
  __ movb(rax, FieldOperand(rax, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, rax, &slow);

  GenerateDictionaryLoad(masm, &slow, rbx, key, rax, rdi, rax);
  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
  __ ret(0);

  __ bind(&index_name);
  __ IndexFromHash(rbx, key);
  __ jmp(&index_smi);
}

static void KeyedStoreGenerateMegamorphicHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));
  DCHECK(value.is(rax));
  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  // rbx: receiver's elements array (a FixedArray)
  // receiver is a JSArray.
  // r9: map of receiver
  if (check_map == kCheckMap) {
    __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
    __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, fast_double);
  }

  // HOLECHECK: guards "A[i] = V"
  // We have to go to the runtime if the current value is the hole, because
  // there may be a callback on the element.
  Label holecheck_passed1;
  __ movp(kScratchRegister,
          FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize));
  __ CompareRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
  __ j(not_equal, &holecheck_passed1);
  __ JumpIfDictionaryInPrototypeChain(receiver, rdi, kScratchRegister, slow);

  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ movp(FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize),
          value);
  __ ret(0);

  __ bind(&non_smi_value);
  // Writing a non-smi, check whether array allows non-smi elements.
  // r9: receiver's map
  __ CheckFastObjectElements(r9, &transition_smi_elements);

  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
  }
  __ movp(FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize),
          value);
  __ movp(rdx, value);  // Preserve the value which is returned.
  __ RecordWriteArray(rbx, rdx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ ret(0);

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    // rdi: elements array's map
    __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
    __ j(not_equal, slow);
  }

  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so,
  // go to the runtime.
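  // The hole in a double array is encoded as a specific NaN bit pattern, so
  // checking the upper 32 bits of the element is enough to detect it.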
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmpl(FieldOperand(rbx, key, times_8, offset), Immediate(kHoleNanUpper32));
  __ j(not_equal, &fast_double_without_map_check);
  __ JumpIfDictionaryInPrototypeChain(receiver, rdi, kScratchRegister, slow);

  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, rbx, key, xmm0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
  }
  __ ret(0);

  __ bind(&transition_smi_elements);
  __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));

  // Transition the array appropriately depending on the value type.
  __ movp(r9, FieldOperand(value, HeapObject::kMapOffset));
  __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &non_double_value);

  // Value is a double. Transition FAST_SMI_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS, rbx, rdi, slow);
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
                                                   rbx, mode, slow);
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS, rbx,
                                         rdi, slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, rbx, mode, slow);
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS
  // and transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
  __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
                                         rbx, rdi, slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, receiver, key,
                                                      value, rbx, mode, slow);
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}

void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                       LanguageMode language_mode) {
  // Return address is on the stack.
  Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array, maybe_name_key, miss;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow_with_tagged_index);
  // Get the map from the receiver.
  __ movp(r9, FieldOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks and is not observed.
  // The generic stub does not perform map checks or handle observed objects.
  __ testb(FieldOperand(r9, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
  __ j(not_zero, &slow_with_tagged_index);
  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &maybe_name_key);
  __ SmiToInteger32(key, key);

  __ CmpInstanceType(r9, JS_ARRAY_TYPE);
  __ j(equal, &array);
  // Check that the object is some kind of JS object EXCEPT JS Value type. In
  // the case that the object is a value-wrapper object, we enter the runtime
  // system to make sure that indexing into string objects works as intended.
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  __ CmpInstanceType(r9, JS_OBJECT_TYPE);
  __ j(below, &slow);

  // Object case: Check key against length in the elements array.
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds.
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), key);
  // rbx: FixedArray
  __ j(above, &fast_object);
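  // 'above' means length > key, i.e. the store is within bounds; otherwise
  // fall through to the slow case.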

  // Slow case: call runtime.
  __ bind(&slow);
  __ Integer32ToSmi(key, key);
  __ bind(&slow_with_tagged_index);
  PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode);
  // Never returns to here.

  __ bind(&maybe_name_key);
  __ movp(r9, FieldOperand(key, HeapObject::kMapOffset));
  __ movzxbp(r9, FieldOperand(r9, Map::kInstanceTypeOffset));
  __ JumpIfNotUniqueNameInstanceType(r9, &slow_with_tagged_index);

  Register vector = VectorStoreICDescriptor::VectorRegister();
  Register slot = VectorStoreICDescriptor::SlotRegister();
  // The handlers in the stub cache expect a vector and slot. Since we won't
  // change the IC from any downstream misses, a dummy vector can be used.
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(masm->isolate());
  int slot_index = dummy_vector->GetIndex(
      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot));
  __ Move(vector, dummy_vector);
  __ Move(slot, Smi::FromInt(slot_index));

  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, flags,
                                               receiver, key, r9, no_reg);
  // Cache miss.
  __ jmp(&miss);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // receiver is a JSArray.
  // rbx: receiver's elements array (a FixedArray)
  // flags: smicompare (receiver.length(), rbx)
  __ j(not_equal, &slow);  // do not leave holes in the array
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), key);
  __ j(below_equal, &slow);
  // Increment index to get new length.
  __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  // rdi: elements array's map
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is, the length is always a smi.
  __ bind(&array);
  // receiver is a JSArray.
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ SmiCompareInteger32(FieldOperand(receiver, JSArray::kLengthOffset), key);
  __ j(below_equal, &extra);

  KeyedStoreGenerateMegamorphicHelper(masm, &fast_object, &fast_double, &slow,
                                      kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateMegamorphicHelper(masm, &fast_object_grow,
                                      &fast_double_grow, &slow, kDontCheckMap,
                                      kIncrementLength);

  __ bind(&miss);
  GenerateMiss(masm);
}

void LoadIC::GenerateNormal(MacroAssembler* masm, LanguageMode language_mode) {
  Register dictionary = rax;
  DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));
  DCHECK(!dictionary.is(LoadDescriptor::NameRegister()));

  Label slow;

  __ movp(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(),
                                   JSObject::kPropertiesOffset));
  GenerateDictionaryLoad(masm, &slow, dictionary,
                         LoadDescriptor::NameRegister(), rbx, rdi, rax);
  __ ret(0);

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  LoadIC::GenerateRuntimeGetProperty(masm, language_mode);
}

static void LoadIC_PushArgs(MacroAssembler* masm) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();
  DCHECK(!rdi.is(receiver) && !rdi.is(name) && !rdi.is(slot) &&
         !rdi.is(vector));

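  // The return address sits on top of the stack; pop it into a scratch
  // register so the arguments can be pushed beneath it, then push it back.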
  __ PopReturnAddressTo(rdi);
  __ Push(receiver);
  __ Push(name);
  __ Push(slot);
  __ Push(vector);
  __ PushReturnAddressFrom(rdi);
}


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is on the stack.

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->load_miss(), 1);

  LoadIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadIC_Miss);
}

void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm,
                                        LanguageMode language_mode) {
  // The return address is on the stack.
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();

  DCHECK(!rbx.is(receiver) && !rbx.is(name));

  __ PopReturnAddressTo(rbx);
  __ Push(receiver);
  __ Push(name);
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(is_strong(language_mode) ? Runtime::kGetPropertyStrong
                                              : Runtime::kGetProperty);
}


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is on the stack.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);

  LoadIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedLoadIC_Miss);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm,
                                             LanguageMode language_mode) {
  // The return address is on the stack.
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();

  DCHECK(!rbx.is(receiver) && !rbx.is(name));

  __ PopReturnAddressTo(rbx);
  __ Push(receiver);
  __ Push(name);
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(is_strong(language_mode) ? Runtime::kKeyedGetPropertyStrong
                                              : Runtime::kKeyedGetProperty);
}


void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // This shouldn't be called.
  __ int3();
}

static void StoreIC_PushArgs(MacroAssembler* masm) {
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  Register temp = r11;
  DCHECK(!temp.is(receiver) && !temp.is(name) && !temp.is(value));

  __ PopReturnAddressTo(temp);
  __ Push(receiver);
  __ Push(name);
  __ Push(value);
  Register slot = VectorStoreICDescriptor::SlotRegister();
  Register vector = VectorStoreICDescriptor::VectorRegister();
  DCHECK(!temp.is(slot) && !temp.is(vector));
  __ Push(slot);
  __ Push(vector);
  __ PushReturnAddressFrom(temp);
}


void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kStoreIC_Miss);
}


void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  Register dictionary = r11;
  DCHECK(!AreAliased(dictionary, VectorStoreICDescriptor::VectorRegister(),
                     VectorStoreICDescriptor::SlotRegister()));

  Label miss;

  __ movp(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
  GenerateDictionaryStore(masm, &miss, dictionary, name, value, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
}


#undef __

Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return equal;
    case Token::LT:
      return less;
    case Token::GT:
      return greater;
    case Token::LTE:
      return less_equal;
    case Token::GTE:
      return greater_equal;
    default:
      UNREACHABLE();
      return no_condition;
  }
}


bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  return *test_instruction_address == Assembler::kTestAlByte;
}

void PatchInlinedSmiCode(Isolate* isolate, Address address,
                         InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  if (*test_instruction_address != Assembler::kTestAlByte) {
    DCHECK(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code used at the patched jump.
  uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[  patching ic at %p, test=%p, delta=%d\n", address,
           test_instruction_address, delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  DCHECK((check == ENABLE_INLINED_SMI_CHECK)
             ? (*jmp_address == Assembler::kJncShortOpcode ||
                *jmp_address == Assembler::kJcShortOpcode)
             : (*jmp_address == Assembler::kJnzShortOpcode ||
                *jmp_address == Assembler::kJzShortOpcode));
  Condition cc =
      (check == ENABLE_INLINED_SMI_CHECK)
          ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
          : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64