// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/js-native-context-specialization.h"

#include "src/accessors.h"
#include "src/api-inl.h"
#include "src/code-factory.h"
#include "src/compiler/access-builder.h"
#include "src/compiler/access-info.h"
#include "src/compiler/allocation-builder.h"
#include "src/compiler/compilation-dependencies.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/property-access-builder.h"
#include "src/compiler/type-cache.h"
#include "src/feedback-vector.h"
#include "src/field-index-inl.h"
#include "src/isolate-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/templates.h"
#include "src/vector-slot-pair.h"

namespace v8 {
namespace internal {
namespace compiler {

// This is needed for gc_mole which will compile this file without the full set
// of GN defined macros.
#ifndef V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP
#define V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP 64
#endif

namespace {

bool HasNumberMaps(MapHandles const& maps) {
  for (auto map : maps) {
    if (map->instance_type() == HEAP_NUMBER_TYPE) return true;
  }
  return false;
}

bool HasOnlyJSArrayMaps(MapHandles const& maps) {
  for (auto map : maps) {
    if (!map->IsJSArrayMap()) return false;
  }
  return true;
}

}  // namespace

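// Result of a lookup in the script context table: the script context that
// holds the binding, the slot index within that context, and whether the
// binding is immutable (e.g. a lexical 'const').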
struct JSNativeContextSpecialization::ScriptContextTableLookupResult {
  Handle<Context> context;
  bool immutable;
  int index;
};

JSNativeContextSpecialization::JSNativeContextSpecialization(
    Editor* editor, JSGraph* jsgraph, JSHeapBroker* js_heap_broker, Flags flags,
    Handle<Context> native_context, CompilationDependencies* dependencies,
    Zone* zone)
    : AdvancedReducer(editor),
      jsgraph_(jsgraph),
      js_heap_broker_(js_heap_broker),
      flags_(flags),
      global_object_(native_context->global_object(), jsgraph->isolate()),
      global_proxy_(JSGlobalProxy::cast(native_context->global_proxy()),
                    jsgraph->isolate()),
      native_context_(js_heap_broker, native_context),
      dependencies_(dependencies),
      zone_(zone),
      type_cache_(TypeCache::Get()) {}

Reduction JSNativeContextSpecialization::Reduce(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kJSAdd:
      return ReduceJSAdd(node);
    case IrOpcode::kJSGetSuperConstructor:
      return ReduceJSGetSuperConstructor(node);
    case IrOpcode::kJSInstanceOf:
      return ReduceJSInstanceOf(node);
    case IrOpcode::kJSHasInPrototypeChain:
      return ReduceJSHasInPrototypeChain(node);
    case IrOpcode::kJSOrdinaryHasInstance:
      return ReduceJSOrdinaryHasInstance(node);
    case IrOpcode::kJSPromiseResolve:
      return ReduceJSPromiseResolve(node);
    case IrOpcode::kJSResolvePromise:
      return ReduceJSResolvePromise(node);
    case IrOpcode::kJSLoadContext:
      return ReduceJSLoadContext(node);
    case IrOpcode::kJSLoadGlobal:
      return ReduceJSLoadGlobal(node);
    case IrOpcode::kJSStoreGlobal:
      return ReduceJSStoreGlobal(node);
    case IrOpcode::kJSLoadNamed:
      return ReduceJSLoadNamed(node);
    case IrOpcode::kJSStoreNamed:
      return ReduceJSStoreNamed(node);
    case IrOpcode::kJSLoadProperty:
      return ReduceJSLoadProperty(node);
    case IrOpcode::kJSStoreProperty:
      return ReduceJSStoreProperty(node);
    case IrOpcode::kJSStoreNamedOwn:
      return ReduceJSStoreNamedOwn(node);
    case IrOpcode::kJSStoreDataPropertyInLiteral:
      return ReduceJSStoreDataPropertyInLiteral(node);
    case IrOpcode::kJSStoreInArrayLiteral:
      return ReduceJSStoreInArrayLiteral(node);
    case IrOpcode::kJSToObject:
      return ReduceJSToObject(node);
    default:
      break;
  }
  return NoChange();
}

Reduction JSNativeContextSpecialization::ReduceJSAdd(Node* node) {
  // TODO(turbofan): This has to run together with the inlining and
  // native context specialization to be able to leverage the string
  // constant-folding for optimizing property access, but we should
  // nevertheless find a better home for this at some point.
  DCHECK_EQ(IrOpcode::kJSAdd, node->opcode());

  // Constant-fold string concatenation.
  HeapObjectBinopMatcher m(node);
  if (m.left().HasValue() && m.left().Value()->IsString() &&
      m.right().HasValue() && m.right().Value()->IsString()) {
    Handle<String> left = Handle<String>::cast(m.left().Value());
    Handle<String> right = Handle<String>::cast(m.right().Value());
    if (left->length() + right->length() <= String::kMaxLength) {
      Handle<String> result =
          factory()->NewConsString(left, right).ToHandleChecked();
      Node* value = jsgraph()->HeapConstant(result);
      ReplaceWithValue(node, value);
      return Replace(value);
    }
  }
  return NoChange();
}

Reduction JSNativeContextSpecialization::ReduceJSGetSuperConstructor(
    Node* node) {
  DCHECK_EQ(IrOpcode::kJSGetSuperConstructor, node->opcode());
  Node* constructor = NodeProperties::GetValueInput(node, 0);

  // Check if the input is a known JSFunction.
  HeapObjectMatcher m(constructor);
  if (!m.HasValue()) return NoChange();
  Handle<JSFunction> function = Handle<JSFunction>::cast(m.Value());
  Handle<Map> function_map(function->map(), isolate());
  Handle<Object> function_prototype(function_map->prototype(), isolate());

  // We can constant-fold the super constructor access if the
  // {function}s map is stable, i.e. we can use a code dependency
  // to guard against [[Prototype]] changes of {function}.
  if (function_map->is_stable() && function_prototype->IsConstructor()) {
    dependencies()->DependOnStableMap(MapRef(js_heap_broker(), function_map));
    Node* value = jsgraph()->Constant(function_prototype);
    ReplaceWithValue(node, value);
    return Replace(value);
  }

  return NoChange();
}

Reduction JSNativeContextSpecialization::ReduceJSInstanceOf(Node* node) {
  DCHECK_EQ(IrOpcode::kJSInstanceOf, node->opcode());
  FeedbackParameter const& p = FeedbackParameterOf(node->op());
  Node* object = NodeProperties::GetValueInput(node, 0);
  Node* constructor = NodeProperties::GetValueInput(node, 1);
  Node* context = NodeProperties::GetContextInput(node);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* frame_state = NodeProperties::GetFrameStateInput(node);
  Node* control = NodeProperties::GetControlInput(node);

  // Check if the right hand side is a known {receiver}, or
  // we have feedback from the InstanceOfIC.
  Handle<JSObject> receiver;
  HeapObjectMatcher m(constructor);
  if (m.HasValue() && m.Value()->IsJSObject()) {
    receiver = Handle<JSObject>::cast(m.Value());
  } else if (p.feedback().IsValid()) {
    FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
    if (!nexus.GetConstructorFeedback().ToHandle(&receiver)) return NoChange();
  } else {
    return NoChange();
  }
  Handle<Map> receiver_map(receiver->map(), isolate());

  // Compute property access info for @@hasInstance on {receiver}.
  PropertyAccessInfo access_info;
  AccessInfoFactory access_info_factory(js_heap_broker(), dependencies(),
                                        native_context().object<Context>(),
                                        graph()->zone());
  if (!access_info_factory.ComputePropertyAccessInfo(
          receiver_map, factory()->has_instance_symbol(), AccessMode::kLoad,
          &access_info)) {
    return NoChange();
  }

  PropertyAccessBuilder access_builder(jsgraph(), js_heap_broker(),
                                       dependencies());

  if (access_info.IsNotFound()) {
    // If there's no @@hasInstance handler, the OrdinaryHasInstance operation
    // takes over, but that requires the {receiver} to be callable.
    if (receiver->IsCallable()) {
      // Determine actual holder and perform prototype chain checks.
      Handle<JSObject> holder;
      if (access_info.holder().ToHandle(&holder)) {
        dependencies()->DependOnStablePrototypeChains(
            js_heap_broker(), native_context().object<Context>(),
            access_info.receiver_maps(), holder);
      }

      // Check that {constructor} is actually {receiver}.
      constructor = access_builder.BuildCheckValue(constructor, &effect,
                                                   control, receiver);

      // Monomorphic property access.
      access_builder.BuildCheckMaps(constructor, &effect, control,
                                    access_info.receiver_maps());

      // Lower to OrdinaryHasInstance(C, O).
      NodeProperties::ReplaceValueInput(node, constructor, 0);
      NodeProperties::ReplaceValueInput(node, object, 1);
      NodeProperties::ReplaceEffectInput(node, effect);
      NodeProperties::ChangeOp(node, javascript()->OrdinaryHasInstance());
      Reduction const reduction = ReduceJSOrdinaryHasInstance(node);
      return reduction.Changed() ? reduction : Changed(node);
    }
  } else if (access_info.IsDataConstant() ||
             access_info.IsDataConstantField()) {
    // Determine actual holder and perform prototype chain checks.
    Handle<JSObject> holder;
    if (access_info.holder().ToHandle(&holder)) {
      dependencies()->DependOnStablePrototypeChains(
          js_heap_broker(), native_context().object<Context>(),
          access_info.receiver_maps(), holder);
    } else {
      holder = receiver;
    }

    Handle<Object> constant;
    if (access_info.IsDataConstant()) {
      DCHECK(!FLAG_track_constant_fields);
      constant = access_info.constant();
    } else {
      DCHECK(FLAG_track_constant_fields);
      DCHECK(access_info.IsDataConstantField());
      // The value must be callable therefore tagged.
      DCHECK(CanBeTaggedPointer(access_info.field_representation()));
      FieldIndex field_index = access_info.field_index();
      constant = JSObject::FastPropertyAt(holder, Representation::Tagged(),
                                          field_index);
    }
    DCHECK(constant->IsCallable());

    // Check that {constructor} is actually {receiver}.
    constructor =
        access_builder.BuildCheckValue(constructor, &effect, control, receiver);

    // Monomorphic property access.
    access_builder.BuildCheckMaps(constructor, &effect, control,
                                  access_info.receiver_maps());

    // Create a nested frame state inside the current method's most-recent frame
    // state that will ensure that deopts that happen after this point will not
    // fall back to the last Checkpoint--which would completely re-execute the
    // instanceof logic--but rather create an activation of a version of the
    // ToBoolean stub that finishes the remaining work of instanceof and returns
    // to the caller without duplicating side-effects upon a lazy deopt.
    Node* continuation_frame_state = CreateStubBuiltinContinuationFrameState(
        jsgraph(), Builtins::kToBooleanLazyDeoptContinuation, context, nullptr,
        0, frame_state, ContinuationFrameStateMode::LAZY);

    // Call the @@hasInstance handler.
    Node* target = jsgraph()->Constant(constant);
    node->InsertInput(graph()->zone(), 0, target);
    node->ReplaceInput(1, constructor);
    node->ReplaceInput(2, object);
    node->ReplaceInput(4, continuation_frame_state);
    node->ReplaceInput(5, effect);
    NodeProperties::ChangeOp(
        node, javascript()->Call(3, CallFrequency(), VectorSlotPair(),
                                 ConvertReceiverMode::kNotNullOrUndefined));

    // Rewire the value uses of {node} to ToBoolean conversion of the result.
    Node* value = graph()->NewNode(simplified()->ToBoolean(), node);
    for (Edge edge : node->use_edges()) {
      if (NodeProperties::IsValueEdge(edge) && edge.from() != value) {
        edge.UpdateTo(value);
        Revisit(edge.from());
      }
    }
    return Changed(node);
  }

  return NoChange();
}

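// Determines whether {prototype} is definitely, definitely not, or possibly
// contained in the prototype chains of the maps inferred for {receiver}.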
JSNativeContextSpecialization::InferHasInPrototypeChainResult
JSNativeContextSpecialization::InferHasInPrototypeChain(
    Node* receiver, Node* effect, Handle<HeapObject> prototype) {
  ZoneHandleSet<Map> receiver_maps;
  NodeProperties::InferReceiverMapsResult result =
      NodeProperties::InferReceiverMaps(isolate(), receiver, effect,
                                        &receiver_maps);
  if (result == NodeProperties::kNoReceiverMaps) return kMayBeInPrototypeChain;

  // Check if either all or none of the {receiver_maps} have the given
  // {prototype} in their prototype chain.
  bool all = true;
  bool none = true;
  for (size_t i = 0; i < receiver_maps.size(); ++i) {
    Handle<Map> receiver_map = receiver_maps[i];
    if (receiver_map->instance_type() <= LAST_SPECIAL_RECEIVER_TYPE) {
      return kMayBeInPrototypeChain;
    }
    if (result == NodeProperties::kUnreliableReceiverMaps) {
      // In case of an unreliable {result} we need to ensure that all
      // {receiver_maps} are stable, because otherwise we cannot trust
      // the {receiver_maps} information, since arbitrary side-effects
      // may have happened.
      if (!receiver_map->is_stable()) {
        return kMayBeInPrototypeChain;
      }
    }
    for (PrototypeIterator j(isolate(), receiver_map);; j.Advance()) {
      if (j.IsAtEnd()) {
        all = false;
        break;
      }
      Handle<HeapObject> const current =
          PrototypeIterator::GetCurrent<HeapObject>(j);
      if (current.is_identical_to(prototype)) {
        none = false;
        break;
      }
      if (!current->map()->is_stable() ||
          current->map()->instance_type() <= LAST_SPECIAL_RECEIVER_TYPE) {
        return kMayBeInPrototypeChain;
      }
    }
  }
  DCHECK_IMPLIES(all, !none);
  DCHECK_IMPLIES(none, !all);

  if (all) return kIsInPrototypeChain;
  if (none) return kIsNotInPrototypeChain;
  return kMayBeInPrototypeChain;
}

Reduction JSNativeContextSpecialization::ReduceJSHasInPrototypeChain(
    Node* node) {
  DCHECK_EQ(IrOpcode::kJSHasInPrototypeChain, node->opcode());
  Node* value = NodeProperties::GetValueInput(node, 0);
  Node* prototype = NodeProperties::GetValueInput(node, 1);
  Node* effect = NodeProperties::GetEffectInput(node);

  // Check if we can constant-fold the prototype chain walk
  // for the given {value} and the {prototype}.
  HeapObjectMatcher m(prototype);
  if (m.HasValue()) {
    InferHasInPrototypeChainResult result =
        InferHasInPrototypeChain(value, effect, m.Value());
    if (result != kMayBeInPrototypeChain) {
      Node* value = jsgraph()->BooleanConstant(result == kIsInPrototypeChain);
      ReplaceWithValue(node, value);
      return Replace(value);
    }
  }

  return NoChange();
}

Reduction JSNativeContextSpecialization::ReduceJSOrdinaryHasInstance(
    Node* node) {
  DCHECK_EQ(IrOpcode::kJSOrdinaryHasInstance, node->opcode());
  Node* constructor = NodeProperties::GetValueInput(node, 0);
  Node* object = NodeProperties::GetValueInput(node, 1);

  // Check if the {constructor} is known at compile time.
  HeapObjectMatcher m(constructor);
  if (!m.HasValue()) return NoChange();

  // Check if the {constructor} is a JSBoundFunction.
  if (m.Value()->IsJSBoundFunction()) {
    // OrdinaryHasInstance on bound functions turns into a recursive
    // invocation of the instanceof operator again.
    // ES6 section 7.3.19 OrdinaryHasInstance (C, O) step 2.
    Handle<JSBoundFunction> function = Handle<JSBoundFunction>::cast(m.Value());
    Handle<JSReceiver> bound_target_function(function->bound_target_function(),
                                             isolate());
    NodeProperties::ReplaceValueInput(node, object, 0);
    NodeProperties::ReplaceValueInput(
        node, jsgraph()->HeapConstant(bound_target_function), 1);
    NodeProperties::ChangeOp(node, javascript()->InstanceOf(VectorSlotPair()));
    Reduction const reduction = ReduceJSInstanceOf(node);
    return reduction.Changed() ? reduction : Changed(node);
  }

  // Check if the {constructor} is a JSFunction.
  if (m.Value()->IsJSFunction()) {
    // Check if the {function} is a constructor and has an instance "prototype".
    Handle<JSFunction> function = Handle<JSFunction>::cast(m.Value());
    if (function->IsConstructor() && function->has_prototype_slot() &&
        function->has_instance_prototype() &&
        function->prototype()->IsJSReceiver()) {
      // We need {function}'s initial map so that we can depend on it for the
      // prototype constant-folding below.
      if (!function->has_initial_map()) return NoChange();
      MapRef initial_map = dependencies()->DependOnInitialMap(
          JSFunctionRef(js_heap_broker(), function));
      Node* prototype = jsgraph()->Constant(
          handle(initial_map.object<Map>()->prototype(), isolate()));

      // Lower the {node} to JSHasInPrototypeChain.
      NodeProperties::ReplaceValueInput(node, object, 0);
      NodeProperties::ReplaceValueInput(node, prototype, 1);
      NodeProperties::ChangeOp(node, javascript()->HasInPrototypeChain());
      Reduction const reduction = ReduceJSHasInPrototypeChain(node);
      return reduction.Changed() ? reduction : Changed(node);
    }
  }

  return NoChange();
}

// ES section #sec-promise-resolve
Reduction JSNativeContextSpecialization::ReduceJSPromiseResolve(Node* node) {
  DCHECK_EQ(IrOpcode::kJSPromiseResolve, node->opcode());
  Node* constructor = NodeProperties::GetValueInput(node, 0);
  Node* value = NodeProperties::GetValueInput(node, 1);
  Node* context = NodeProperties::GetContextInput(node);
  Node* frame_state = NodeProperties::GetFrameStateInput(node);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);

  // Check if the {constructor} is the %Promise% function.
  HeapObjectMatcher m(constructor);
  if (!m.Is(handle(native_context().object<Context>()->promise_function(),
                   isolate())))
    return NoChange();

  // Check if we know something about the {value}.
  ZoneHandleSet<Map> value_maps;
  NodeProperties::InferReceiverMapsResult result =
      NodeProperties::InferReceiverMaps(isolate(), value, effect, &value_maps);
  if (result == NodeProperties::kNoReceiverMaps) return NoChange();
  DCHECK_NE(0, value_maps.size());

  // Check that the {value} cannot be a JSPromise.
  for (Handle<Map> const value_map : value_maps) {
    if (value_map->IsJSPromiseMap()) return NoChange();
  }

  // Create a %Promise% instance and resolve it with {value}.
  Node* promise = effect =
      graph()->NewNode(javascript()->CreatePromise(), context, effect);
  effect = graph()->NewNode(javascript()->ResolvePromise(), promise, value,
                            context, frame_state, effect, control);
  ReplaceWithValue(node, promise, effect, control);
  return Replace(promise);
}

// ES section #sec-promise-resolve-functions
Reduction JSNativeContextSpecialization::ReduceJSResolvePromise(Node* node) {
  DCHECK_EQ(IrOpcode::kJSResolvePromise, node->opcode());
  Node* promise = NodeProperties::GetValueInput(node, 0);
  Node* resolution = NodeProperties::GetValueInput(node, 1);
  Node* context = NodeProperties::GetContextInput(node);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);

  // Check if we know something about the {resolution}.
  ZoneHandleSet<Map> resolution_maps;
  NodeProperties::InferReceiverMapsResult result =
      NodeProperties::InferReceiverMaps(isolate(), resolution, effect,
                                        &resolution_maps);
  if (result != NodeProperties::kReliableReceiverMaps) return NoChange();
  DCHECK_NE(0, resolution_maps.size());

  // Compute property access info for "then" on {resolution}.
  PropertyAccessInfo access_info;
  AccessInfoFactory access_info_factory(js_heap_broker(), dependencies(),
                                        native_context().object<Context>(),
                                        graph()->zone());
  if (!access_info_factory.ComputePropertyAccessInfo(
          MapHandles(resolution_maps.begin(), resolution_maps.end()),
          factory()->then_string(), AccessMode::kLoad, &access_info)) {
    return NoChange();
  }

  // We can further optimize the case where {resolution}
  // definitely doesn't have a "then" property.
  if (!access_info.IsNotFound()) return NoChange();
  PropertyAccessBuilder access_builder(jsgraph(), js_heap_broker(),
                                       dependencies());

  // Add proper dependencies on the {resolution}s [[Prototype]]s.
  Handle<JSObject> holder;
  if (access_info.holder().ToHandle(&holder)) {
    dependencies()->DependOnStablePrototypeChains(
        js_heap_broker(), native_context().object<Context>(),
        access_info.receiver_maps(), holder);
  }

  // Simply fulfill the {promise} with the {resolution}.
  Node* value = effect =
      graph()->NewNode(javascript()->FulfillPromise(), promise, resolution,
                       context, effect, control);
  ReplaceWithValue(node, value, effect, control);
  return Replace(value);
}

Reduction JSNativeContextSpecialization::ReduceJSLoadContext(Node* node) {
  DCHECK_EQ(IrOpcode::kJSLoadContext, node->opcode());
  ContextAccess const& access = ContextAccessOf(node->op());
  // Specialize JSLoadContext(NATIVE_CONTEXT_INDEX) to the known native
  // context (if any), so we can constant-fold those fields, which is
  // safe, since the NATIVE_CONTEXT_INDEX slot is always immutable.
  if (access.index() == Context::NATIVE_CONTEXT_INDEX) {
    Node* value = jsgraph()->Constant(native_context());
    ReplaceWithValue(node, value);
    return Replace(value);
  }
  return NoChange();
}

namespace {

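// Builds a FieldAccess descriptor for the value slot of a PropertyCell,
// choosing the write barrier kind that matches the given representation.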
FieldAccess ForPropertyCellValue(MachineRepresentation representation,
                                 Type type, MaybeHandle<Map> map,
                                 Handle<Name> name) {
  WriteBarrierKind kind = kFullWriteBarrier;
  if (representation == MachineRepresentation::kTaggedSigned) {
    kind = kNoWriteBarrier;
  } else if (representation == MachineRepresentation::kTaggedPointer) {
    kind = kPointerWriteBarrier;
  }
  MachineType r = MachineType::TypeForRepresentation(representation);
  FieldAccess access = {
      kTaggedBase, PropertyCell::kValueOffset, name, map, type, r, kind};
  return access;
}

}  // namespace

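// Tries to lower a load from or store to a data property of the global object
// by operating directly on the underlying PropertyCell, constant-folding and
// installing code dependencies where the cell's property details allow it.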
Reduction JSNativeContextSpecialization::ReduceGlobalAccess(
    Node* node, Node* receiver, Node* value, Handle<Name> name,
    AccessMode access_mode, Node* index) {
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);

  // Lookup on the global object. We only deal with own data properties
  // of the global object here (represented as PropertyCell).
  LookupIterator it(isolate(), global_object(), name, LookupIterator::OWN);
  it.TryLookupCachedProperty();
  if (it.state() != LookupIterator::DATA) return NoChange();
  if (!it.GetHolder<JSObject>()->IsJSGlobalObject()) return NoChange();
  Handle<PropertyCell> property_cell = it.GetPropertyCell();
  PropertyDetails property_details = property_cell->property_details();
  Handle<Object> property_cell_value(property_cell->value(), isolate());
  PropertyCellType property_cell_type = property_details.cell_type();

  // We have additional constraints for stores.
  if (access_mode == AccessMode::kStore) {
    if (property_details.IsReadOnly()) {
      // Don't even bother trying to lower stores to read-only data properties.
      return NoChange();
    } else if (property_cell_type == PropertyCellType::kUndefined) {
      // There's no fast-path for dealing with undefined property cells.
      return NoChange();
    } else if (property_cell_type == PropertyCellType::kConstantType) {
      // There's also no fast-path to store to a global cell which pretended
      // to be stable, but is no longer stable now.
      if (property_cell_value->IsHeapObject() &&
          !Handle<HeapObject>::cast(property_cell_value)->map()->is_stable()) {
        return NoChange();
      }
    }
  }

  // Ensure that {index} matches the specified {name} (if {index} is given).
  if (index != nullptr) {
    effect = BuildCheckEqualsName(name, index, effect, control);
  }

  // Check if we have a {receiver} to validate. If so, we need to check that
  // the {receiver} is actually the JSGlobalProxy for the native context that
  // we are specializing to.
  if (receiver != nullptr) {
    Node* check = graph()->NewNode(simplified()->ReferenceEqual(), receiver,
                                   jsgraph()->HeapConstant(global_proxy()));
    effect = graph()->NewNode(
        simplified()->CheckIf(DeoptimizeReason::kReceiverNotAGlobalProxy),
        check, effect, control);
  }

  if (access_mode == AccessMode::kLoad) {
    // Load from non-configurable, read-only data property on the global
    // object can be constant-folded, even without deoptimization support.
    if (!property_details.IsConfigurable() && property_details.IsReadOnly()) {
      value = jsgraph()->Constant(property_cell_value);
    } else {
      // Record a code dependency on the cell if we can benefit from the
      // additional feedback, or the global property is configurable (i.e.
      // can be deleted or reconfigured to an accessor property).
      if (property_details.cell_type() != PropertyCellType::kMutable ||
          property_details.IsConfigurable()) {
        dependencies()->DependOnGlobalProperty(
            PropertyCellRef(js_heap_broker(), property_cell));
      }

      // Load from constant/undefined global property can be constant-folded.
      if (property_details.cell_type() == PropertyCellType::kConstant ||
          property_details.cell_type() == PropertyCellType::kUndefined) {
        value = jsgraph()->Constant(property_cell_value);
      } else {
        // Load from constant type cell can benefit from type feedback.
        MaybeHandle<Map> map;
        Type property_cell_value_type = Type::NonInternal();
        MachineRepresentation representation = MachineRepresentation::kTagged;
        if (property_details.cell_type() == PropertyCellType::kConstantType) {
          // Compute proper type based on the current value in the cell.
          if (property_cell_value->IsSmi()) {
            property_cell_value_type = Type::SignedSmall();
            representation = MachineRepresentation::kTaggedSigned;
          } else if (property_cell_value->IsNumber()) {
            property_cell_value_type = Type::Number();
            representation = MachineRepresentation::kTaggedPointer;
          } else {
            Handle<Map> property_cell_value_map(
                Handle<HeapObject>::cast(property_cell_value)->map(),
                isolate());
            property_cell_value_type =
                Type::For(js_heap_broker(), property_cell_value_map);
            representation = MachineRepresentation::kTaggedPointer;

            // We can only use the property cell value map for map check
            // elimination if it's stable, i.e. the HeapObject wasn't
            // mutated without the cell state being updated.
            if (property_cell_value_map->is_stable()) {
              dependencies()->DependOnStableMap(
                  MapRef(js_heap_broker(), property_cell_value_map));
              map = property_cell_value_map;
            }
          }
        }
        value = effect = graph()->NewNode(
            simplified()->LoadField(ForPropertyCellValue(
                representation, property_cell_value_type, map, name)),
            jsgraph()->HeapConstant(property_cell), effect, control);
      }
    }
  } else {
    DCHECK_EQ(AccessMode::kStore, access_mode);
    DCHECK(!property_details.IsReadOnly());
    switch (property_details.cell_type()) {
      case PropertyCellType::kUndefined: {
        UNREACHABLE();
        break;
      }
      case PropertyCellType::kConstant: {
        // Record a code dependency on the cell, and just deoptimize if the new
        // value doesn't match the previous value stored inside the cell.
        dependencies()->DependOnGlobalProperty(
            PropertyCellRef(js_heap_broker(), property_cell));
        Node* check =
            graph()->NewNode(simplified()->ReferenceEqual(), value,
                             jsgraph()->Constant(property_cell_value));
        effect = graph()->NewNode(
            simplified()->CheckIf(DeoptimizeReason::kValueMismatch), check,
            effect, control);
        break;
      }
      case PropertyCellType::kConstantType: {
        // Record a code dependency on the cell, and just deoptimize if the new
        // values' type doesn't match the type of the previous value in the
        // cell.
        dependencies()->DependOnGlobalProperty(
            PropertyCellRef(js_heap_broker(), property_cell));
        Type property_cell_value_type;
        MachineRepresentation representation = MachineRepresentation::kTagged;
        if (property_cell_value->IsHeapObject()) {
          // We cannot do anything if the {property_cell_value}s map is no
          // longer stable.
          Handle<Map> property_cell_value_map(
              Handle<HeapObject>::cast(property_cell_value)->map(), isolate());
          DCHECK(property_cell_value_map->is_stable());
          dependencies()->DependOnStableMap(
              MapRef(js_heap_broker(), property_cell_value_map));

          // Check that the {value} is a HeapObject.
          value = effect = graph()->NewNode(simplified()->CheckHeapObject(),
                                            value, effect, control);

          // Check {value} map against the {property_cell} map.
          effect =
              graph()->NewNode(simplified()->CheckMaps(
                                   CheckMapsFlag::kNone,
                                   ZoneHandleSet<Map>(property_cell_value_map)),
                               value, effect, control);
          property_cell_value_type = Type::OtherInternal();
          representation = MachineRepresentation::kTaggedPointer;
        } else {
          // Check that the {value} is a Smi.
          value = effect = graph()->NewNode(
              simplified()->CheckSmi(VectorSlotPair()), value, effect, control);
          property_cell_value_type = Type::SignedSmall();
          representation = MachineRepresentation::kTaggedSigned;
        }
        effect = graph()->NewNode(simplified()->StoreField(ForPropertyCellValue(
                                      representation, property_cell_value_type,
                                      MaybeHandle<Map>(), name)),
                                  jsgraph()->HeapConstant(property_cell), value,
                                  effect, control);
        break;
      }
      case PropertyCellType::kMutable: {
        // Record a code dependency on the cell, and just deoptimize if the
        // property ever becomes read-only.
        dependencies()->DependOnGlobalProperty(
            PropertyCellRef(js_heap_broker(), property_cell));
        effect = graph()->NewNode(
            simplified()->StoreField(ForPropertyCellValue(
                MachineRepresentation::kTagged, Type::NonInternal(),
                MaybeHandle<Map>(), name)),
            jsgraph()->HeapConstant(property_cell), value, effect, control);
        break;
      }
    }
  }

  ReplaceWithValue(node, value, effect, control);
  return Replace(value);
}

Reduction JSNativeContextSpecialization::ReduceJSLoadGlobal(Node* node) {
  DCHECK_EQ(IrOpcode::kJSLoadGlobal, node->opcode());
  NameRef name(js_heap_broker(), LoadGlobalParametersOf(node->op()).name());
  Node* effect = NodeProperties::GetEffectInput(node);

  // Try to look up the name in the script context table first (lexical
  // scoping).
  base::Optional<ScriptContextTableRef::LookupResult> result =
      native_context().script_context_table().lookup(name);
  if (result) {
    ObjectRef contents = result->context.get(result->index);
    OddballType oddball_type = contents.oddball_type();
    if (oddball_type == OddballType::kHole) {
      return NoChange();
    }
    Node* context = jsgraph()->Constant(result->context);
    Node* value = effect = graph()->NewNode(
        javascript()->LoadContext(0, result->index, result->immutable), context,
        effect);
    ReplaceWithValue(node, value, effect);
    return Replace(value);
  }

  // Look up the {name} on the global object instead.
  return ReduceGlobalAccess(node, nullptr, nullptr, name.object<Name>(),
                            AccessMode::kLoad);
}

Reduction JSNativeContextSpecialization::ReduceJSStoreGlobal(Node* node) {
  DCHECK_EQ(IrOpcode::kJSStoreGlobal, node->opcode());
  NameRef name(js_heap_broker(), StoreGlobalParametersOf(node->op()).name());
  Node* value = NodeProperties::GetValueInput(node, 0);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);

  // Try to look up the name in the script context table first (lexical
  // scoping).
  base::Optional<ScriptContextTableRef::LookupResult> result =
      native_context().script_context_table().lookup(name);
  if (result) {
    ObjectRef contents = result->context.get(result->index);
    OddballType oddball_type = contents.oddball_type();
    if (oddball_type == OddballType::kHole || result->immutable) {
      return NoChange();
    }
    Node* context = jsgraph()->Constant(result->context);
    effect = graph()->NewNode(javascript()->StoreContext(0, result->index),
                              value, context, effect, control);
    ReplaceWithValue(node, value, effect, control);
    return Replace(value);
  }

  // Look up the {name} on the global object instead.
  return ReduceGlobalAccess(node, nullptr, value, name.object<Name>(),
                            AccessMode::kStore);
}

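// Lowers a named property access based on the given {receiver_maps}:
// monomorphic accesses get a single map check, polymorphic accesses branch
// explicitly on the receiver map and join the results with Merge/Phi/EffectPhi.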
Reduction JSNativeContextSpecialization::ReduceNamedAccess(
    Node* node, Node* value, MapHandles const& receiver_maps, Handle<Name> name,
    AccessMode access_mode, Node* index) {
  DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
         node->opcode() == IrOpcode::kJSStoreNamed ||
         node->opcode() == IrOpcode::kJSLoadProperty ||
         node->opcode() == IrOpcode::kJSStoreProperty ||
         node->opcode() == IrOpcode::kJSStoreNamedOwn);
  Node* receiver = NodeProperties::GetValueInput(node, 0);
  Node* context = NodeProperties::GetContextInput(node);
  Node* frame_state = NodeProperties::GetFrameStateInput(node);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);

  // Check if we have an access o.x or o.x=v where o is the current
  // native context's global proxy, and turn that into a direct access
  // to the current native context's global object instead.
  if (receiver_maps.size() == 1) {
    Handle<Map> receiver_map = receiver_maps.front();
    if (receiver_map->IsJSGlobalProxyMap()) {
      Object* maybe_constructor = receiver_map->GetConstructor();
      // Detached global proxies have |null| as their constructor.
      if (maybe_constructor->IsJSFunction() &&
          JSFunction::cast(maybe_constructor)->native_context() ==
              *native_context().object<Context>()) {
        return ReduceGlobalAccess(node, receiver, value, name, access_mode,
                                  index);
      }
    }
  }

  // Compute property access infos for the receiver maps.
  AccessInfoFactory access_info_factory(js_heap_broker(), dependencies(),
                                        native_context().object<Context>(),
                                        graph()->zone());
  ZoneVector<PropertyAccessInfo> access_infos(zone());
  if (!access_info_factory.ComputePropertyAccessInfos(
          receiver_maps, name, access_mode, &access_infos)) {
    return NoChange();
  }

  // Nothing to do if we have no non-deprecated maps.
  if (access_infos.empty()) {
    return ReduceSoftDeoptimize(
        node, DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
  }

  // Ensure that {index} matches the specified {name} (if {index} is given).
  if (index != nullptr) {
    effect = BuildCheckEqualsName(name, index, effect, control);
  }

  // Collect call nodes to rewire exception edges.
  ZoneVector<Node*> if_exception_nodes(zone());
  ZoneVector<Node*>* if_exceptions = nullptr;
  Node* if_exception = nullptr;
  if (NodeProperties::IsExceptionalCall(node, &if_exception)) {
    if_exceptions = &if_exception_nodes;
  }

  PropertyAccessBuilder access_builder(jsgraph(), js_heap_broker(),
                                       dependencies());

  // Check for the monomorphic cases.
  if (access_infos.size() == 1) {
    PropertyAccessInfo access_info = access_infos.front();
    // Try to build string check or number check if possible.
    // Otherwise build a map check.
    if (!access_builder.TryBuildStringCheck(access_info.receiver_maps(),
                                            &receiver, &effect, control) &&
        !access_builder.TryBuildNumberCheck(access_info.receiver_maps(),
                                            &receiver, &effect, control)) {
      if (HasNumberMaps(access_info.receiver_maps())) {
        // We need to also let Smi {receiver}s through in this case, so
        // we construct a diamond, guarded by the Sminess of the {receiver}
        // and if {receiver} is not a Smi just emit a sequence of map checks.
        Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
        Node* branch = graph()->NewNode(common()->Branch(), check, control);

        Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
        Node* etrue = effect;

        Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
        Node* efalse = effect;
        {
          access_builder.BuildCheckMaps(receiver, &efalse, if_false,
                                        access_info.receiver_maps());
        }

        control = graph()->NewNode(common()->Merge(2), if_true, if_false);
        effect =
            graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
      } else {
        receiver =
            access_builder.BuildCheckHeapObject(receiver, &effect, control);
        access_builder.BuildCheckMaps(receiver, &effect, control,
                                      access_info.receiver_maps());
      }
    }

    // Generate the actual property access.
    ValueEffectControl continuation = BuildPropertyAccess(
        receiver, value, context, frame_state, effect, control, name,
        if_exceptions, access_info, access_mode);
    value = continuation.value();
    effect = continuation.effect();
    control = continuation.control();
  } else {
    // The final states for every polymorphic branch. We join them with
    // Merge+Phi+EffectPhi at the bottom.
    ZoneVector<Node*> values(zone());
    ZoneVector<Node*> effects(zone());
    ZoneVector<Node*> controls(zone());

    // Check if {receiver} may be a number.
    bool receiverissmi_possible = false;
    for (PropertyAccessInfo const& access_info : access_infos) {
      if (HasNumberMaps(access_info.receiver_maps())) {
        receiverissmi_possible = true;
        break;
      }
    }

    // Ensure that {receiver} is a heap object.
    Node* receiverissmi_control = nullptr;
    Node* receiverissmi_effect = effect;
    if (receiverissmi_possible) {
      Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
      Node* branch = graph()->NewNode(common()->Branch(), check, control);
      control = graph()->NewNode(common()->IfFalse(), branch);
      receiverissmi_control = graph()->NewNode(common()->IfTrue(), branch);
      receiverissmi_effect = effect;
    } else {
      receiver =
          access_builder.BuildCheckHeapObject(receiver, &effect, control);
    }

    // Generate code for the various different property access patterns.
    Node* fallthrough_control = control;
    for (size_t j = 0; j < access_infos.size(); ++j) {
      PropertyAccessInfo const& access_info = access_infos[j];
      Node* this_value = value;
      Node* this_receiver = receiver;
      Node* this_effect = effect;
      Node* this_control = fallthrough_control;

      // Perform map check on {receiver}.
      MapHandles const& receiver_maps = access_info.receiver_maps();
      {
        // Whether to insert a dedicated MapGuard node into the
        // effect to be able to learn from the control flow.
        bool insert_map_guard = true;

        // Check maps for the {receiver}s.
        if (j == access_infos.size() - 1) {
          // Last map check on the fallthrough control path, do a
          // conditional eager deoptimization exit here.
          access_builder.BuildCheckMaps(receiver, &this_effect, this_control,
                                        receiver_maps);
          fallthrough_control = nullptr;

          // Don't insert a MapGuard in this case, as the CheckMaps
          // node already gives you all the information you need
          // along the effect chain.
          insert_map_guard = false;
        } else {
          // Explicitly branch on the {receiver_maps}.
          ZoneHandleSet<Map> maps;
          for (Handle<Map> map : receiver_maps) {
            maps.insert(map, graph()->zone());
          }
          Node* check = this_effect =
              graph()->NewNode(simplified()->CompareMaps(maps), receiver,
                               this_effect, this_control);
          Node* branch =
              graph()->NewNode(common()->Branch(), check, this_control);
          fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
          this_control = graph()->NewNode(common()->IfTrue(), branch);
        }

        // The Number case requires special treatment to also deal with Smis.
        if (HasNumberMaps(receiver_maps)) {
          // Join this check with the "receiver is smi" check above.
          DCHECK_NOT_NULL(receiverissmi_effect);
          DCHECK_NOT_NULL(receiverissmi_control);
          this_control = graph()->NewNode(common()->Merge(2), this_control,
                                          receiverissmi_control);
          this_effect = graph()->NewNode(common()->EffectPhi(2), this_effect,
                                         receiverissmi_effect, this_control);
          receiverissmi_effect = receiverissmi_control = nullptr;

          // The {receiver} can also be a Smi in this case, so
          // a MapGuard doesn't make sense for this at all.
          insert_map_guard = false;
        }

        // Introduce a MapGuard to learn from this on the effect chain.
        if (insert_map_guard) {
          ZoneHandleSet<Map> maps;
          for (auto receiver_map : receiver_maps) {
            maps.insert(receiver_map, graph()->zone());
          }
          this_effect = graph()->NewNode(simplified()->MapGuard(maps), receiver,
                                         this_effect, this_control);
        }
      }

      // Generate the actual property access.
      ValueEffectControl continuation = BuildPropertyAccess(
          this_receiver, this_value, context, frame_state, this_effect,
          this_control, name, if_exceptions, access_info, access_mode);
      values.push_back(continuation.value());
      effects.push_back(continuation.effect());
      controls.push_back(continuation.control());
    }

    DCHECK_NULL(fallthrough_control);

    // Generate the final merge point for all (polymorphic) branches.
    int const control_count = static_cast<int>(controls.size());
    if (control_count == 0) {
      value = effect = control = jsgraph()->Dead();
    } else if (control_count == 1) {
      value = values.front();
      effect = effects.front();
      control = controls.front();
    } else {
      control = graph()->NewNode(common()->Merge(control_count), control_count,
                                 &controls.front());
      values.push_back(control);
      value = graph()->NewNode(
          common()->Phi(MachineRepresentation::kTagged, control_count),
          control_count + 1, &values.front());
      effects.push_back(control);
      effect = graph()->NewNode(common()->EffectPhi(control_count),
                                control_count + 1, &effects.front());
    }
  }

  // Properly rewire IfException edges if {node} is inside a try-block.
  if (!if_exception_nodes.empty()) {
    DCHECK_NOT_NULL(if_exception);
    DCHECK_EQ(if_exceptions, &if_exception_nodes);
    int const if_exception_count = static_cast<int>(if_exceptions->size());
    Node* merge = graph()->NewNode(common()->Merge(if_exception_count),
                                   if_exception_count, &if_exceptions->front());
    if_exceptions->push_back(merge);
    Node* ephi =
        graph()->NewNode(common()->EffectPhi(if_exception_count),
                         if_exception_count + 1, &if_exceptions->front());
    Node* phi = graph()->NewNode(
        common()->Phi(MachineRepresentation::kTagged, if_exception_count),
        if_exception_count + 1, &if_exceptions->front());
    ReplaceWithValue(if_exception, phi, ephi, merge);
  }

  ReplaceWithValue(node, value, effect, control);
  return Replace(value);
}

Reduction JSNativeContextSpecialization::ReduceNamedAccessFromNexus(
    Node* node, Node* value, FeedbackNexus const& nexus, Handle<Name> name,
    AccessMode access_mode) {
  DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
         node->opcode() == IrOpcode::kJSStoreNamed ||
         node->opcode() == IrOpcode::kJSStoreNamedOwn);
  Node* const receiver = NodeProperties::GetValueInput(node, 0);
  Node* const effect = NodeProperties::GetEffectInput(node);

  // Check if we are accessing the current native context's global proxy.
  HeapObjectMatcher m(receiver);
  if (m.HasValue() && m.Value().is_identical_to(global_proxy())) {
    // Optimize accesses to the current native context's global proxy.
    return ReduceGlobalAccess(node, nullptr, value, name, access_mode);
  }

  // Extract receiver maps from the IC using the {nexus}.
  MapHandles receiver_maps;
  if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
    return NoChange();
  } else if (receiver_maps.empty()) {
    if (flags() & kBailoutOnUninitialized) {
      return ReduceSoftDeoptimize(
          node,
          DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
    }
    return NoChange();
  }
  DCHECK(!nexus.IsUninitialized());

  // Try to lower the named access based on the {receiver_maps}.
  return ReduceNamedAccess(node, value, receiver_maps, name, access_mode);
}

Reduction JSNativeContextSpecialization::ReduceJSLoadNamed(Node* node) {
  DCHECK_EQ(IrOpcode::kJSLoadNamed, node->opcode());
  NamedAccess const& p = NamedAccessOf(node->op());
  Node* const receiver = NodeProperties::GetValueInput(node, 0);
  Node* const value = jsgraph()->Dead();

  // Check if we have a constant receiver.
  HeapObjectMatcher m(receiver);
  if (m.HasValue()) {
    if (m.Value()->IsJSFunction() &&
        p.name().is_identical_to(factory()->prototype_string())) {
      // Optimize "prototype" property of functions.
      JSFunctionRef function = m.Ref(js_heap_broker()).AsJSFunction();
      // TODO(neis): Remove the has_prototype_slot condition once the broker is
      // always enabled.
      if (!function.map().has_prototype_slot() || !function.has_prototype() ||
          function.PrototypeRequiresRuntimeLookup()) {
        return NoChange();
      }
      ObjectRef prototype = dependencies()->DependOnPrototypeProperty(function);
      Node* value = jsgraph()->Constant(prototype);
      ReplaceWithValue(node, value);
      return Replace(value);
    } else if (m.Value()->IsString() &&
               p.name().is_identical_to(factory()->length_string())) {
      // Constant-fold "length" property on constant strings.
      Handle<String> string = Handle<String>::cast(m.Value());
      Node* value = jsgraph()->Constant(string->length());
      ReplaceWithValue(node, value);
      return Replace(value);
    }
  }

  // Extract receiver maps from the load IC using the FeedbackNexus.
  if (!p.feedback().IsValid()) return NoChange();
  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());

  // Try to lower the named access based on the {receiver_maps}.
  return ReduceNamedAccessFromNexus(node, value, nexus, p.name(),
                                    AccessMode::kLoad);
}


Reduction JSNativeContextSpecialization::ReduceJSStoreNamed(Node* node) {
  DCHECK_EQ(IrOpcode::kJSStoreNamed, node->opcode());
  NamedAccess const& p = NamedAccessOf(node->op());
  Node* const value = NodeProperties::GetValueInput(node, 1);

  // Extract receiver maps from the store IC using the FeedbackNexus.
  if (!p.feedback().IsValid()) return NoChange();
  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());

  // Try to lower the named access based on the {receiver_maps}.
  return ReduceNamedAccessFromNexus(node, value, nexus, p.name(),
                                    AccessMode::kStore);
}

Reduction JSNativeContextSpecialization::ReduceJSStoreNamedOwn(Node* node) {
  DCHECK_EQ(IrOpcode::kJSStoreNamedOwn, node->opcode());
  StoreNamedOwnParameters const& p = StoreNamedOwnParametersOf(node->op());
  Node* const value = NodeProperties::GetValueInput(node, 1);

  // Extract receiver maps from the IC using the FeedbackNexus.
  if (!p.feedback().IsValid()) return NoChange();
  FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());

  // Try to lower the creation of a named property based on the {receiver_maps}.
  return ReduceNamedAccessFromNexus(node, value, nexus, p.name(),
                                    AccessMode::kStoreInLiteral);
}

Reduction JSNativeContextSpecialization::ReduceElementAccess(
    Node* node, Node* index, Node* value, MapHandles const& receiver_maps,
    AccessMode access_mode, KeyedAccessLoadMode load_mode,
    KeyedAccessStoreMode store_mode) {
  DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
         node->opcode() == IrOpcode::kJSStoreProperty ||
         node->opcode() == IrOpcode::kJSStoreInArrayLiteral);
  Node* receiver = NodeProperties::GetValueInput(node, 0);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);
  Node* frame_state = NodeProperties::FindFrameStateBefore(node);

  // Check for keyed access to strings.
  if (HasOnlyStringMaps(receiver_maps)) {
    // Strings are immutable in JavaScript.
    if (access_mode == AccessMode::kStore) return NoChange();

    // Ensure that the {receiver} is actually a String.
    receiver = effect = graph()->NewNode(
        simplified()->CheckString(VectorSlotPair()), receiver, effect, control);

    // Determine the {receiver} length.
    Node* length = graph()->NewNode(simplified()->StringLength(), receiver);

    // Load the single character string from {receiver} or yield undefined
    // if the {index} is out of bounds (depending on the {load_mode}).
    value = BuildIndexedStringLoad(receiver, index, length, &effect, &control,
                                   load_mode);
  } else {
    // Retrieve the native context from the given {node}.
    // Compute element access infos for the receiver maps.
    AccessInfoFactory access_info_factory(js_heap_broker(), dependencies(),
                                          native_context().object<Context>(),
                                          graph()->zone());
    ZoneVector<ElementAccessInfo> access_infos(zone());
    if (!access_info_factory.ComputeElementAccessInfos(
            receiver_maps, access_mode, &access_infos)) {
      return NoChange();
    }

    // Nothing to do if we have no non-deprecated maps.
    if (access_infos.empty()) {
      return ReduceSoftDeoptimize(
          node,
          DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
    }

    // For holey stores or growing stores, we need to check that the prototype
    // chain contains no setters for elements, and we need to guard those checks
    // via code dependencies on the relevant prototype maps.
    if (access_mode == AccessMode::kStore) {
      // TODO(turbofan): We could have a fast path here, that checks for the
      // common case of Array or Object prototype only and therefore avoids
      // the zone allocation of this vector.
      ZoneVector<Handle<Map>> prototype_maps(zone());
      for (ElementAccessInfo const& access_info : access_infos) {
        for (Handle<Map> receiver_map : access_info.receiver_maps()) {
          // If the {receiver_map} has a prototype and its elements backing
          // store is either holey, or we have a potentially growing store,
          // then we need to check that all prototypes have stable maps with
          // fast elements (and we need to guard against changes to that below).
          if (IsHoleyOrDictionaryElementsKind(receiver_map->elements_kind()) ||
              IsGrowStoreMode(store_mode)) {
            // Make sure all prototypes are stable and have fast elements.
            for (Handle<Map> map = receiver_map;;) {
              Handle<Object> map_prototype(map->prototype(), isolate());
              if (map_prototype->IsNull(isolate())) break;
              if (!map_prototype->IsJSObject()) return NoChange();
              map = handle(Handle<JSObject>::cast(map_prototype)->map(),
                           isolate());
              if (!map->is_stable()) return NoChange();
              if (!IsFastElementsKind(map->elements_kind())) return NoChange();
              prototype_maps.push_back(map);
            }
          }
        }
      }

      // Install dependencies on the relevant prototype maps.
      for (Handle<Map> prototype_map : prototype_maps) {
        dependencies()->DependOnStableMap(
            MapRef(js_heap_broker(), prototype_map));
      }
    }

    // Ensure that {receiver} is a heap object.
    PropertyAccessBuilder access_builder(jsgraph(), js_heap_broker(),
                                         dependencies());
    receiver = access_builder.BuildCheckHeapObject(receiver, &effect, control);

    // Check for the monomorphic case.
    if (access_infos.size() == 1) {
      ElementAccessInfo access_info = access_infos.front();

      // Perform possible elements kind transitions.
      for (auto transition : access_info.transitions()) {
        Handle<Map> const transition_source = transition.first;
        Handle<Map> const transition_target = transition.second;
        effect = graph()->NewNode(
            simplified()->TransitionElementsKind(ElementsTransition(
                IsSimpleMapChangeTransition(transition_source->elements_kind(),
                                            transition_target->elements_kind())
                    ? ElementsTransition::kFastTransition
                    : ElementsTransition::kSlowTransition,
                transition_source, transition_target)),
            receiver, effect, control);
      }

      // TODO(turbofan): The effect/control linearization will not find a
      // FrameState after the StoreField or Call that is generated for the
      // elements kind transition above. This is because those operators
      // don't have the kNoWrite flag on it, even though they are not
      // observable by JavaScript.
      effect = graph()->NewNode(common()->Checkpoint(), frame_state, effect,
                                control);

      // Perform map check on the {receiver}.
      access_builder.BuildCheckMaps(receiver, &effect, control,
                                    access_info.receiver_maps());

      // Access the actual element.
      ValueEffectControl continuation =
          BuildElementAccess(receiver, index, value, effect, control,
                             access_info, access_mode, load_mode, store_mode);
      value = continuation.value();
      effect = continuation.effect();
      control = continuation.control();
    } else {
      // The final states for every polymorphic branch. We join them with
      // Merge+Phi+EffectPhi at the bottom.
      ZoneVector<Node*> values(zone());
      ZoneVector<Node*> effects(zone());
      ZoneVector<Node*> controls(zone());

      // Generate code for the various different element access patterns.
      Node* fallthrough_control = control;
      for (size_t j = 0; j < access_infos.size(); ++j) {
        ElementAccessInfo const& access_info = access_infos[j];
        Node* this_receiver = receiver;
        Node* this_value = value;
        Node* this_index = index;
        Node* this_effect = effect;
        Node* this_control = fallthrough_control;

        // Perform possible elements kind transitions.
        for (auto transition : access_info.transitions()) {
          Handle<Map> const transition_source = transition.first;
          Handle<Map> const transition_target = transition.second;
          this_effect = graph()->NewNode(
              simplified()->TransitionElementsKind(
                  ElementsTransition(IsSimpleMapChangeTransition(
                                         transition_source->elements_kind(),
                                         transition_target->elements_kind())
                                         ? ElementsTransition::kFastTransition
                                         : ElementsTransition::kSlowTransition,
                                     transition_source, transition_target)),
              receiver, this_effect, this_control);
        }

        // Perform map check(s) on {receiver}.
        MapHandles const& receiver_maps = access_info.receiver_maps();
        if (j == access_infos.size() - 1) {
          // Last map check on the fallthrough control path, do a
          // conditional eager deoptimization exit here.
          access_builder.BuildCheckMaps(receiver, &this_effect, this_control,
                                        receiver_maps);
          fallthrough_control = nullptr;
        } else {
          // Explicitly branch on the {receiver_maps}.
          ZoneHandleSet<Map> maps;
          for (Handle<Map> map : receiver_maps) {
            maps.insert(map, graph()->zone());
          }
          Node* check = this_effect =
              graph()->NewNode(simplified()->CompareMaps(maps), receiver,
                               this_effect, fallthrough_control);
          Node* branch =
              graph()->NewNode(common()->Branch(), check, fallthrough_control);
          fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
          this_control = graph()->NewNode(common()->IfTrue(), branch);

          // Introduce a MapGuard to learn from this on the effect chain.
          this_effect = graph()->NewNode(simplified()->MapGuard(maps), receiver,
                                         this_effect, this_control);
        }

        // Access the actual element.
        ValueEffectControl continuation = BuildElementAccess(
            this_receiver, this_index, this_value, this_effect, this_control,
            access_info, access_mode, load_mode, store_mode);
        values.push_back(continuation.value());
        effects.push_back(continuation.effect());
        controls.push_back(continuation.control());
      }

      DCHECK_NULL(fallthrough_control);

      // Generate the final merge point for all (polymorphic) branches.
      int const control_count = static_cast<int>(controls.size());
      if (control_count == 0) {
        value = effect = control = jsgraph()->Dead();
      } else if (control_count == 1) {
        value = values.front();
        effect = effects.front();
        control = controls.front();
      } else {
        control = graph()->NewNode(common()->Merge(control_count),
                                   control_count, &controls.front());
        values.push_back(control);
        value = graph()->NewNode(
            common()->Phi(MachineRepresentation::kTagged, control_count),
            control_count + 1, &values.front());
        effects.push_back(control);
        effect = graph()->NewNode(common()->EffectPhi(control_count),
                                  control_count + 1, &effects.front());
      }
    }
  }

  ReplaceWithValue(node, value, effect, control);
  return Replace(value);
}

1388 Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
1389 Node* node, Node* index, Node* value, FeedbackNexus const& nexus,
1390 AccessMode access_mode, KeyedAccessLoadMode load_mode,
1391 KeyedAccessStoreMode store_mode) {
1392 DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
1393 node->opcode() == IrOpcode::kJSStoreProperty);
1394 Node* receiver = NodeProperties::GetValueInput(node, 0);
1395 Node* effect = NodeProperties::GetEffectInput(node);
1396 Node* control = NodeProperties::GetControlInput(node);
1397
1398 // Optimize the case where we load from a constant {receiver}.
1399 if (access_mode == AccessMode::kLoad) {
1400 HeapObjectMatcher mreceiver(receiver);
1401 if (mreceiver.HasValue() && !mreceiver.Value()->IsTheHole(isolate()) &&
1402 !mreceiver.Value()->IsNullOrUndefined(isolate())) {
1403 // Check whether we're accessing a known element on the {receiver}
1404 // that is non-configurable, non-writable (i.e. the {receiver} was
1405 // frozen using Object.freeze).
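// An illustrative (hypothetical) example of code this covers:
//
//   const a = Object.freeze([1, 2, 3]);
//   a[0]  // the element is non-writable, so this folds to the constant 1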
1406 NumberMatcher mindex(index);
1407 if (mindex.IsInteger() && mindex.IsInRange(0.0, kMaxUInt32 - 1.0)) {
1408 LookupIterator it(isolate(), mreceiver.Value(),
1409 static_cast<uint32_t>(mindex.Value()),
1410 LookupIterator::OWN);
1411 if (it.state() == LookupIterator::DATA) {
1412 if (it.IsReadOnly() && !it.IsConfigurable()) {
1413 // We can safely constant-fold the {index} access to {receiver},
1414 // since the element is non-configurable, non-writable and thus
1415 // cannot change anymore.
1416 value = jsgraph()->Constant(it.GetDataValue());
1417 ReplaceWithValue(node, value, effect, control);
1418 return Replace(value);
1419 }
1420
1421 // Check if the {receiver} is a known constant with a copy-on-write
1422 // backing store, and whether {index} is within the appropriate
1423 // bounds. In that case we can constant-fold the access and only
1424 // check that the {elements} didn't change. This is sufficient as
1425 // the backing store of a copy-on-write JSArray is defensively copied
1426 // whenever the length or the elements (might) change.
1427 //
1428 // What's interesting here is that we don't need to map check the
1429 // {receiver}, since JSArrays will always have their elements in
1430 // the backing store.
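// A sketch (hypothetical) of a pattern this covers:
//
//   const table = [10, 20, 30];   // literal with a COW backing store
//   table[1]  // folds to 20, guarded only by an elements identity check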
1431 if (mreceiver.Value()->IsJSArray()) {
1432 Handle<JSArray> array = Handle<JSArray>::cast(mreceiver.Value());
1433 if (array->elements()->IsCowArray()) {
1434 Node* elements = effect = graph()->NewNode(
1435 simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
1436 receiver, effect, control);
1437 Handle<FixedArray> array_elements(
1438 FixedArray::cast(array->elements()), isolate());
1439 Node* check =
1440 graph()->NewNode(simplified()->ReferenceEqual(), elements,
1441 jsgraph()->HeapConstant(array_elements));
1442 effect = graph()->NewNode(
1443 simplified()->CheckIf(
1444 DeoptimizeReason::kCowArrayElementsChanged),
1445 check, effect, control);
1446 value = jsgraph()->Constant(it.GetDataValue());
1447 ReplaceWithValue(node, value, effect, control);
1448 return Replace(value);
1449 }
1450 }
1451 }
1452 }
1453
1454 // For constant Strings we can eagerly strength-reduce the keyed
1455 // accesses using the known length, which doesn't change.
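// For instance (hypothetically), with a constant receiver like
//
//   const s = "hello";
//
// s[i] can be lowered to an indexed character load against the known
// length 5, without consulting the string again.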
1456 if (mreceiver.Value()->IsString()) {
1457 Handle<String> string = Handle<String>::cast(mreceiver.Value());
1458
1459 // We can only assume that the {index} is a valid array index if the IC
1460 // is in element access mode and not MEGAMORPHIC, otherwise there's no
1461 // guard for the bounds check below.
1462 if (nexus.ic_state() != MEGAMORPHIC && nexus.GetKeyType() == ELEMENT) {
1463 // Ensure that {index} is less than {receiver} length.
1464 Node* length = jsgraph()->Constant(string->length());
1465
1466 // Load the single character string from {receiver} or yield undefined
1467 // if the {index} is out of bounds (depending on the {load_mode}).
1468 value = BuildIndexedStringLoad(receiver, index, length, &effect,
1469 &control, load_mode);
1470 ReplaceWithValue(node, value, effect, control);
1471 return Replace(value);
1472 }
1473 }
1474 }
1475 }
1476
1477 // Extract receiver maps from the {nexus}.
1478 MapHandles receiver_maps;
1479 if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
1480 return NoChange();
1481 } else if (receiver_maps.empty()) {
1482 if (flags() & kBailoutOnUninitialized) {
1483 return ReduceSoftDeoptimize(
1484 node,
1485 DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
1486 }
1487 return NoChange();
1488 }
1489 DCHECK(!nexus.IsUninitialized());
1490
1491 // Optimize access for constant {index}.
1492 HeapObjectMatcher mindex(index);
1493 if (mindex.HasValue() && mindex.Value()->IsPrimitive()) {
1494 // Keyed access requires a ToPropertyKey on the {index} first before
1495 // looking up the property on the object (see ES6 section 12.3.2.1).
1496 // We can only do this for non-observable ToPropertyKey invocations,
1497 // so we limit the constant indices to primitives at this point.
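// E.g. (hypothetically) o[1] or o["foo"] can be handled here, whereas
// o[{ toString() { ... } }] cannot, since its ToPropertyKey conversion
// is observable.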
1498 Handle<Name> name;
1499 if (Object::ToName(isolate(), mindex.Value()).ToHandle(&name)) {
1500 uint32_t array_index;
1501 if (name->AsArrayIndex(&array_index)) {
1502 // Use the constant array index.
1503 index = jsgraph()->Constant(static_cast<double>(array_index));
1504 } else {
1505 name = factory()->InternalizeName(name);
1506 return ReduceNamedAccess(node, value, receiver_maps, name, access_mode);
1507 }
1508 }
1509 }
1510
1511 // Check if we have feedback for a named access.
1512 if (Name* name = nexus.FindFirstName()) {
1513 return ReduceNamedAccess(node, value, receiver_maps,
1514 handle(name, isolate()), access_mode, index);
1515 } else if (nexus.GetKeyType() != ELEMENT) {
1516 // The KeyedLoad/StoreIC has seen non-element accesses, so we cannot
1517 // assume that the {index} is a valid array index; just let the IC
1518 // continue to deal with this load/store.
1519 return NoChange();
1520 } else if (nexus.ic_state() == MEGAMORPHIC) {
1521 // The KeyedLoad/StoreIC uses the MEGAMORPHIC state to guard the assumption
1522 // that a numeric {index} is within the valid bounds for {receiver}, i.e.
1523 // it transitions to MEGAMORPHIC once it sees an out-of-bounds access. Thus
1524 // we cannot continue here if the IC state is MEGAMORPHIC.
1525 return NoChange();
1526 }
1527
1528 // Try to lower the element access based on the {receiver_maps}.
1529 return ReduceElementAccess(node, index, value, receiver_maps, access_mode,
1530 load_mode, store_mode);
1531 }
1532
1533 Reduction JSNativeContextSpecialization::ReduceSoftDeoptimize(
1534 Node* node, DeoptimizeReason reason) {
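// Descriptive note: replace {node} by an unconditional soft deoptimization
// exit, wire the Deoptimize node into the graph's end, and turn {node}
// itself into a Dead node.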
1535 Node* effect = NodeProperties::GetEffectInput(node);
1536 Node* control = NodeProperties::GetControlInput(node);
1537 Node* frame_state = NodeProperties::FindFrameStateBefore(node);
1538 Node* deoptimize = graph()->NewNode(
1539 common()->Deoptimize(DeoptimizeKind::kSoft, reason, VectorSlotPair()),
1540 frame_state, effect, control);
1541 // TODO(bmeurer): This should be on the AdvancedReducer somehow.
1542 NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
1543 Revisit(graph()->end());
1544 node->TrimInputCount(0);
1545 NodeProperties::ChangeOp(node, common()->Dead());
1546 return Changed(node);
1547 }
1548
1549 Reduction JSNativeContextSpecialization::ReduceJSLoadProperty(Node* node) {
1550 DCHECK_EQ(IrOpcode::kJSLoadProperty, node->opcode());
1551 PropertyAccess const& p = PropertyAccessOf(node->op());
1552 Node* receiver = NodeProperties::GetValueInput(node, 0);
1553 Node* name = NodeProperties::GetValueInput(node, 1);
1554 Node* value = jsgraph()->Dead();
1555 Node* effect = NodeProperties::GetEffectInput(node);
1556 Node* control = NodeProperties::GetControlInput(node);
1557
1558 // We can optimize a property load if it's being used inside a for..in,
1559 // so for code like this:
1560 //
1561 // for (name in receiver) {
1562 // value = receiver[name];
1563 // ...
1564 // }
1565 //
1566 // If the for..in is in fast-mode, we know that the {receiver} has {name}
1567 // as own property, otherwise the enumeration wouldn't include it. The graph
1568 // constructed by the BytecodeGraphBuilder in this case looks like this:
1569
1570 // receiver
1571 // ^ ^
1572 // | |
1573 // | +-+
1574 // | |
1575 // | JSToObject
1576 // | ^
1577 // | |
1578 // | |
1579 // | JSForInNext
1580 // | ^
1581 // | |
1582 // +----+ |
1583 // | |
1584 // | |
1585 // JSLoadProperty
1586
1587 // If the for..in has only seen maps with enum cache consisting of keys
1588 // and indices so far, we can turn the {JSLoadProperty} into a map check
1589 // on the {receiver} and then just load the field value dynamically via
1590 // the {LoadFieldByIndex} operator. The map check is only necessary when
1591 // TurboFan cannot prove that there is no observable side effect between
1592 // the {JSForInNext} and the {JSLoadProperty} node.
1593 //
1594 // Also note that it's safe to look through the {JSToObject}, since the
1595 // [[Get]] operation does an implicit ToObject anyway, and these operations
1596 // are not observable.
1597 if (name->opcode() == IrOpcode::kJSForInNext) {
1598 ForInMode const mode = ForInModeOf(name->op());
1599 if (mode == ForInMode::kUseEnumCacheKeysAndIndices) {
1600 Node* object = NodeProperties::GetValueInput(name, 0);
1601 Node* enumerator = NodeProperties::GetValueInput(name, 2);
1602 Node* index = NodeProperties::GetValueInput(name, 3);
1603 if (object->opcode() == IrOpcode::kJSToObject) {
1604 object = NodeProperties::GetValueInput(object, 0);
1605 }
1606 if (object == receiver) {
1607 // No need to repeat the map check if we can prove that there's no
1608 // observable side effect between {effect} and {name}.
1609 if (!NodeProperties::NoObservableSideEffectBetween(effect, name)) {
1610 // Check that the {receiver} map is still valid.
1611 Node* receiver_map = effect =
1612 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
1613 receiver, effect, control);
1614 Node* check = graph()->NewNode(simplified()->ReferenceEqual(),
1615 receiver_map, enumerator);
1616 effect = graph()->NewNode(
1617 simplified()->CheckIf(DeoptimizeReason::kWrongMap), check, effect,
1618 control);
1619 }
1620
1621 // Load the enum cache indices from the {enumerator} map.
1622 Node* descriptor_array = effect = graph()->NewNode(
1623 simplified()->LoadField(AccessBuilder::ForMapDescriptors()),
1624 enumerator, effect, control);
1625 Node* enum_cache = effect =
1626 graph()->NewNode(simplified()->LoadField(
1627 AccessBuilder::ForDescriptorArrayEnumCache()),
1628 descriptor_array, effect, control);
1629 Node* enum_indices = effect = graph()->NewNode(
1630 simplified()->LoadField(AccessBuilder::ForEnumCacheIndices()),
1631 enum_cache, effect, control);
1632
1633 // Ensure that the {enum_indices} are valid.
1634 Node* check = graph()->NewNode(
1635 simplified()->BooleanNot(),
1636 graph()->NewNode(simplified()->ReferenceEqual(), enum_indices,
1637 jsgraph()->EmptyFixedArrayConstant()));
1638 effect = graph()->NewNode(
1639 simplified()->CheckIf(DeoptimizeReason::kWrongEnumIndices), check,
1640 effect, control);
1641
1642 // Determine the index from the {enum_indices}.
1643 index = effect = graph()->NewNode(
1644 simplified()->LoadElement(
1645 AccessBuilder::ForFixedArrayElement(PACKED_SMI_ELEMENTS)),
1646 enum_indices, index, effect, control);
1647
1648 // Load the actual field value.
1649 Node* value = effect = graph()->NewNode(
1650 simplified()->LoadFieldByIndex(), receiver, index, effect, control);
1651 ReplaceWithValue(node, value, effect, control);
1652 return Replace(value);
1653 }
1654 }
1655 }
1656
1657 // Extract receiver maps from the keyed load IC using the FeedbackNexus.
1658 if (!p.feedback().IsValid()) return NoChange();
1659 FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
1660
1661 // Extract the keyed access load mode from the keyed load IC.
1662 KeyedAccessLoadMode load_mode = nexus.GetKeyedAccessLoadMode();
1663
1664 // Try to lower the keyed access based on the {nexus}.
1665 return ReduceKeyedAccess(node, name, value, nexus, AccessMode::kLoad,
1666 load_mode, STANDARD_STORE);
1667 }
1668
1669 Reduction JSNativeContextSpecialization::ReduceJSStoreProperty(Node* node) {
1670 DCHECK_EQ(IrOpcode::kJSStoreProperty, node->opcode());
1671 PropertyAccess const& p = PropertyAccessOf(node->op());
1672 Node* const index = NodeProperties::GetValueInput(node, 1);
1673 Node* const value = NodeProperties::GetValueInput(node, 2);
1674
1675 // Extract receiver maps from the keyed store IC using the FeedbackNexus.
1676 if (!p.feedback().IsValid()) return NoChange();
1677 FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
1678
1679 // Extract the keyed access store mode from the keyed store IC.
1680 KeyedAccessStoreMode store_mode = nexus.GetKeyedAccessStoreMode();
1681
1682 // Try to lower the keyed access based on the {nexus}.
1683 return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kStore,
1684 STANDARD_LOAD, store_mode);
1685 }
1686
1687 Node* JSNativeContextSpecialization::InlinePropertyGetterCall(
1688 Node* receiver, Node* context, Node* frame_state, Node** effect,
1689 Node** control, ZoneVector<Node*>* if_exceptions,
1690 PropertyAccessInfo const& access_info) {
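// The getter is a known constant (from the PropertyAccessInfo), so the
// property load can be lowered to a direct call to it; e.g.
// (hypothetically) o.x where 'x' was defined via
// Object.defineProperty(o, 'x', { get() { ... } }).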
1691 Node* target = jsgraph()->Constant(access_info.constant());
1692 FrameStateInfo const& frame_info = FrameStateInfoOf(frame_state->op());
1693 Handle<SharedFunctionInfo> shared_info =
1694 frame_info.shared_info().ToHandleChecked();
1695 // Introduce the call to the getter function.
1696 Node* value;
1697 if (access_info.constant()->IsJSFunction()) {
1698 value = *effect = *control = graph()->NewNode(
1699 jsgraph()->javascript()->Call(2, CallFrequency(), VectorSlotPair(),
1700 ConvertReceiverMode::kNotNullOrUndefined),
1701 target, receiver, context, frame_state, *effect, *control);
1702 } else {
1703 DCHECK(access_info.constant()->IsFunctionTemplateInfo());
1704 Handle<FunctionTemplateInfo> function_template_info(
1705 Handle<FunctionTemplateInfo>::cast(access_info.constant()));
1706 DCHECK(!function_template_info->call_code()->IsUndefined(isolate()));
1707 Node* holder =
1708 access_info.holder().is_null()
1709 ? receiver
1710 : jsgraph()->Constant(access_info.holder().ToHandleChecked());
1711 value = InlineApiCall(receiver, holder, frame_state, nullptr, effect,
1712 control, shared_info, function_template_info);
1713 }
1714 // Remember to rewire the IfException edge if this is inside a try-block.
1715 if (if_exceptions != nullptr) {
1716 // Create the appropriate IfException/IfSuccess projections.
1717 Node* const if_exception =
1718 graph()->NewNode(common()->IfException(), *control, *effect);
1719 Node* const if_success = graph()->NewNode(common()->IfSuccess(), *control);
1720 if_exceptions->push_back(if_exception);
1721 *control = if_success;
1722 }
1723 return value;
1724 }
1725
1726 void JSNativeContextSpecialization::InlinePropertySetterCall(
1727 Node* receiver, Node* value, Node* context, Node* frame_state,
1728 Node** effect, Node** control, ZoneVector<Node*>* if_exceptions,
1729 PropertyAccessInfo const& access_info) {
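// Analogous to the getter case above: the setter is a known constant, so
// o.x = v (hypothetically) becomes a direct call to the setter function
// or API callback.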
1730 Node* target = jsgraph()->Constant(access_info.constant());
1731 FrameStateInfo const& frame_info = FrameStateInfoOf(frame_state->op());
1732 Handle<SharedFunctionInfo> shared_info =
1733 frame_info.shared_info().ToHandleChecked();
1734 // Introduce the call to the setter function.
1735 if (access_info.constant()->IsJSFunction()) {
1736 *effect = *control = graph()->NewNode(
1737 jsgraph()->javascript()->Call(3, CallFrequency(), VectorSlotPair(),
1738 ConvertReceiverMode::kNotNullOrUndefined),
1739 target, receiver, value, context, frame_state, *effect, *control);
1740 } else {
1741 DCHECK(access_info.constant()->IsFunctionTemplateInfo());
1742 Handle<FunctionTemplateInfo> function_template_info(
1743 Handle<FunctionTemplateInfo>::cast(access_info.constant()));
1744 DCHECK(!function_template_info->call_code()->IsUndefined(isolate()));
1745 Node* holder =
1746 access_info.holder().is_null()
1747 ? receiver
1748 : jsgraph()->Constant(access_info.holder().ToHandleChecked());
1749 InlineApiCall(receiver, holder, frame_state, value, effect, control,
1750 shared_info, function_template_info);
1751 }
1752 // Remember to rewire the IfException edge if this is inside a try-block.
1753 if (if_exceptions != nullptr) {
1754 // Create the appropriate IfException/IfSuccess projections.
1755 Node* const if_exception =
1756 graph()->NewNode(common()->IfException(), *control, *effect);
1757 Node* const if_success = graph()->NewNode(common()->IfSuccess(), *control);
1758 if_exceptions->push_back(if_exception);
1759 *control = if_success;
1760 }
1761 }
1762
1763 Node* JSNativeContextSpecialization::InlineApiCall(
1764 Node* receiver, Node* holder, Node* frame_state, Node* value, Node** effect,
1765 Node** control, Handle<SharedFunctionInfo> shared_info,
1766 Handle<FunctionTemplateInfo> function_template_info) {
1767 Handle<CallHandlerInfo> call_handler_info = handle(
1768 CallHandlerInfo::cast(function_template_info->call_code()), isolate());
1769 Handle<Object> call_data_object(call_handler_info->data(), isolate());
1770
1771 // Only setters have a value.
1772 int const argc = value == nullptr ? 0 : 1;
1773 // The stub always expects the receiver as the first param on the stack.
1774 Callable call_api_callback = CodeFactory::CallApiCallback(isolate(), argc);
1775 CallInterfaceDescriptor call_interface_descriptor =
1776 call_api_callback.descriptor();
1777 auto call_descriptor = Linkage::GetStubCallDescriptor(
1778 graph()->zone(), call_interface_descriptor,
1779 call_interface_descriptor.GetStackParameterCount() + argc +
1780 1 /* implicit receiver */,
1781 CallDescriptor::kNeedsFrameState);
1782
1783 Node* data = jsgraph()->Constant(call_data_object);
1784 ApiFunction function(v8::ToCData<Address>(call_handler_info->callback()));
1785 Node* function_reference =
1786 graph()->NewNode(common()->ExternalConstant(ExternalReference::Create(
1787 &function, ExternalReference::DIRECT_API_CALL)));
1788 Node* code = jsgraph()->HeapConstant(call_api_callback.code());
1789
1790 // Add CallApiCallbackStub's register argument as well.
1791 Node* context = jsgraph()->Constant(native_context());
1792 Node* inputs[10] = {code, context, data, holder, function_reference,
1793 receiver};
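// Layout of {inputs}: slots 0-5 hold code, context, data, holder,
// function_reference and receiver; for setters (argc == 1) slot 6 holds
// the {value}, filled in below; frame state, effect and control follow
// starting at index 6 + argc.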
1794 int index = 6 + argc;
1795 inputs[index++] = frame_state;
1796 inputs[index++] = *effect;
1797 inputs[index++] = *control;
1798 // This needs to stay here because of the edge case described in
1799 // http://crbug.com/675648.
1800 if (value != nullptr) {
1801 inputs[6] = value;
1802 }
1803
1804 return *effect = *control =
1805 graph()->NewNode(common()->Call(call_descriptor), index, inputs);
1806 }
1807
1808 JSNativeContextSpecialization::ValueEffectControl
1809 JSNativeContextSpecialization::BuildPropertyLoad(
1810 Node* receiver, Node* context, Node* frame_state, Node* effect,
1811 Node* control, Handle<Name> name, ZoneVector<Node*>* if_exceptions,
1812 PropertyAccessInfo const& access_info) {
1813 // Determine actual holder and perform prototype chain checks.
1814 Handle<JSObject> holder;
1815 PropertyAccessBuilder access_builder(jsgraph(), js_heap_broker(),
1816 dependencies());
1817 if (access_info.holder().ToHandle(&holder)) {
1818 dependencies()->DependOnStablePrototypeChains(
1819 js_heap_broker(), native_context().object<Context>(),
1820 access_info.receiver_maps(), holder);
1821 }
1822
1823 // Generate the actual property access.
1824 Node* value;
1825 if (access_info.IsNotFound()) {
1826 value = jsgraph()->UndefinedConstant();
1827 } else if (access_info.IsDataConstant()) {
1828 DCHECK(!FLAG_track_constant_fields);
1829 value = jsgraph()->Constant(access_info.constant());
1830 } else if (access_info.IsAccessorConstant()) {
1831 value = InlinePropertyGetterCall(receiver, context, frame_state, &effect,
1832 &control, if_exceptions, access_info);
1833 } else if (access_info.IsModuleExport()) {
1834 Node* cell = jsgraph()->Constant(access_info.export_cell());
1835 value = effect =
1836 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForCellValue()),
1837 cell, effect, control);
1838 } else {
1839 DCHECK(access_info.IsDataField() || access_info.IsDataConstantField());
1840 value = access_builder.BuildLoadDataField(name, access_info, receiver,
1841 &effect, &control);
1842 }
1843
1844 return ValueEffectControl(value, effect, control);
1845 }
1846
1847 JSNativeContextSpecialization::ValueEffectControl
1848 JSNativeContextSpecialization::BuildPropertyAccess(
1849 Node* receiver, Node* value, Node* context, Node* frame_state, Node* effect,
1850 Node* control, Handle<Name> name, ZoneVector<Node*>* if_exceptions,
1851 PropertyAccessInfo const& access_info, AccessMode access_mode) {
1852 switch (access_mode) {
1853 case AccessMode::kLoad:
1854 return BuildPropertyLoad(receiver, context, frame_state, effect, control,
1855 name, if_exceptions, access_info);
1856 case AccessMode::kStore:
1857 case AccessMode::kStoreInLiteral:
1858 return BuildPropertyStore(receiver, value, context, frame_state, effect,
1859 control, name, if_exceptions, access_info,
1860 access_mode);
1861 }
1862 UNREACHABLE();
1863 return ValueEffectControl();
1864 }
1865
1866 JSNativeContextSpecialization::ValueEffectControl
1867 JSNativeContextSpecialization::BuildPropertyStore(
1868 Node* receiver, Node* value, Node* context, Node* frame_state, Node* effect,
1869 Node* control, Handle<Name> name, ZoneVector<Node*>* if_exceptions,
1870 PropertyAccessInfo const& access_info, AccessMode access_mode) {
1871 // Determine actual holder and perform prototype chain checks.
1872 Handle<JSObject> holder;
1873 PropertyAccessBuilder access_builder(jsgraph(), js_heap_broker(),
1874 dependencies());
1875 if (access_info.holder().ToHandle(&holder)) {
1876 DCHECK_NE(AccessMode::kStoreInLiteral, access_mode);
1877 dependencies()->DependOnStablePrototypeChains(
1878 js_heap_broker(), native_context().object<Context>(),
1879 access_info.receiver_maps(), holder);
1880 }
1881
1882 DCHECK(!access_info.IsNotFound());
1883
1884 // Generate the actual property access.
1885 if (access_info.IsDataConstant()) {
1886 DCHECK(!FLAG_track_constant_fields);
1887 Node* constant_value = jsgraph()->Constant(access_info.constant());
1888 Node* check =
1889 graph()->NewNode(simplified()->ReferenceEqual(), value, constant_value);
1890 effect =
1891 graph()->NewNode(simplified()->CheckIf(DeoptimizeReason::kWrongValue),
1892 check, effect, control);
1893 value = constant_value;
1894 } else if (access_info.IsAccessorConstant()) {
1895 InlinePropertySetterCall(receiver, value, context, frame_state, &effect,
1896 &control, if_exceptions, access_info);
1897 } else {
1898 DCHECK(access_info.IsDataField() || access_info.IsDataConstantField());
1899 FieldIndex const field_index = access_info.field_index();
1900 Type const field_type = access_info.field_type();
1901 MachineRepresentation const field_representation =
1902 access_info.field_representation();
1903 Node* storage = receiver;
1904 if (!field_index.is_inobject()) {
1905 storage = effect = graph()->NewNode(
1906 simplified()->LoadField(AccessBuilder::ForJSObjectPropertiesOrHash()),
1907 storage, effect, control);
1908 }
1909 FieldAccess field_access = {
1910 kTaggedBase,
1911 field_index.offset(),
1912 name,
1913 MaybeHandle<Map>(),
1914 field_type,
1915 MachineType::TypeForRepresentation(field_representation),
1916 kFullWriteBarrier};
1917 bool store_to_constant_field = FLAG_track_constant_fields &&
1918 (access_mode == AccessMode::kStore) &&
1919 access_info.IsDataConstantField();
1920
1921 DCHECK(access_mode == AccessMode::kStore ||
1922 access_mode == AccessMode::kStoreInLiteral);
1923 switch (field_representation) {
1924 case MachineRepresentation::kFloat64: {
1925 value = effect =
1926 graph()->NewNode(simplified()->CheckNumber(VectorSlotPair()), value,
1927 effect, control);
1928 if (!field_index.is_inobject() || field_index.is_hidden_field() ||
1929 !FLAG_unbox_double_fields) {
1930 if (access_info.HasTransitionMap()) {
1931 // Allocate a MutableHeapNumber for the new property.
1932 AllocationBuilder a(jsgraph(), effect, control);
1933 a.Allocate(HeapNumber::kSize, NOT_TENURED, Type::OtherInternal());
1934 a.Store(AccessBuilder::ForMap(),
1935 factory()->mutable_heap_number_map());
1936 a.Store(AccessBuilder::ForHeapNumberValue(), value);
1937 value = effect = a.Finish();
1938
1939 field_access.type = Type::Any();
1940 field_access.machine_type = MachineType::TaggedPointer();
1941 field_access.write_barrier_kind = kPointerWriteBarrier;
1942 } else {
1943 // We just store directly to the MutableHeapNumber.
1944 FieldAccess const storage_access = {kTaggedBase,
1945 field_index.offset(),
1946 name,
1947 MaybeHandle<Map>(),
1948 Type::OtherInternal(),
1949 MachineType::TaggedPointer(),
1950 kPointerWriteBarrier};
1951 storage = effect =
1952 graph()->NewNode(simplified()->LoadField(storage_access),
1953 storage, effect, control);
1954 field_access.offset = HeapNumber::kValueOffset;
1955 field_access.name = MaybeHandle<Name>();
1956 field_access.machine_type = MachineType::Float64();
1957 }
1958 }
1959 if (store_to_constant_field) {
1960 DCHECK(!access_info.HasTransitionMap());
1961 // If the field is constant, check that the value we are going
1962 // to store matches the current value.
1963 Node* current_value = effect = graph()->NewNode(
1964 simplified()->LoadField(field_access), storage, effect, control);
1965
1966 Node* check = graph()->NewNode(simplified()->NumberEqual(),
1967 current_value, value);
1968 effect = graph()->NewNode(
1969 simplified()->CheckIf(DeoptimizeReason::kWrongValue), check,
1970 effect, control);
1971 return ValueEffectControl(value, effect, control);
1972 }
1973 break;
1974 }
1975 case MachineRepresentation::kTaggedSigned:
1976 case MachineRepresentation::kTaggedPointer:
1977 case MachineRepresentation::kTagged:
1978 if (store_to_constant_field) {
1979 DCHECK(!access_info.HasTransitionMap());
1980 // If the field is constant, check that the value we are going
1981 // to store matches the current value.
1982 Node* current_value = effect = graph()->NewNode(
1983 simplified()->LoadField(field_access), storage, effect, control);
1984
1985 Node* check = graph()->NewNode(simplified()->ReferenceEqual(),
1986 current_value, value);
1987 effect = graph()->NewNode(
1988 simplified()->CheckIf(DeoptimizeReason::kWrongValue), check,
1989 effect, control);
1990 return ValueEffectControl(value, effect, control);
1991 }
1992
1993 if (field_representation == MachineRepresentation::kTaggedSigned) {
1994 value = effect = graph()->NewNode(
1995 simplified()->CheckSmi(VectorSlotPair()), value, effect, control);
1996 field_access.write_barrier_kind = kNoWriteBarrier;
1997
1998 } else if (field_representation ==
1999 MachineRepresentation::kTaggedPointer) {
2000 // Ensure that {value} is a HeapObject.
2001 value = access_builder.BuildCheckHeapObject(value, &effect, control);
2002 Handle<Map> field_map;
2003 if (access_info.field_map().ToHandle(&field_map)) {
2004 // Emit a map check for the value.
2005 effect = graph()->NewNode(
2006 simplified()->CheckMaps(CheckMapsFlag::kNone,
2007 ZoneHandleSet<Map>(field_map)),
2008 value, effect, control);
2009 }
2010 field_access.write_barrier_kind = kPointerWriteBarrier;
2011
2012 } else {
2013 DCHECK_EQ(MachineRepresentation::kTagged, field_representation);
2014 }
2015 break;
2016 case MachineRepresentation::kNone:
2017 case MachineRepresentation::kBit:
2018 case MachineRepresentation::kWord8:
2019 case MachineRepresentation::kWord16:
2020 case MachineRepresentation::kWord32:
2021 case MachineRepresentation::kWord64:
2022 case MachineRepresentation::kFloat32:
2023 case MachineRepresentation::kSimd128:
2024 UNREACHABLE();
2025 break;
2026 }
2027 // Check if we need to perform a transitioning store.
2028 Handle<Map> transition_map;
2029 if (access_info.transition_map().ToHandle(&transition_map)) {
2030 // Check if we need to grow the properties backing store
2031 // with this transitioning store.
2032 Handle<Map> original_map(Map::cast(transition_map->GetBackPointer()),
2033 isolate());
2034 if (original_map->UnusedPropertyFields() == 0) {
2035 DCHECK(!field_index.is_inobject());
2036
2037 // Reallocate the properties {storage}.
2038 storage = effect = BuildExtendPropertiesBackingStore(
2039 original_map, storage, effect, control);
2040
2041 // Perform the actual store.
2042 effect = graph()->NewNode(simplified()->StoreField(field_access),
2043 storage, value, effect, control);
2044
2045 // Atomically switch to the new properties below.
2046 field_access = AccessBuilder::ForJSObjectPropertiesOrHash();
2047 value = storage;
2048 storage = receiver;
2049 }
2050 effect = graph()->NewNode(
2051 common()->BeginRegion(RegionObservability::kObservable), effect);
2052 effect = graph()->NewNode(
2053 simplified()->StoreField(AccessBuilder::ForMap()), receiver,
2054 jsgraph()->Constant(transition_map), effect, control);
2055 effect = graph()->NewNode(simplified()->StoreField(field_access), storage,
2056 value, effect, control);
2057 effect = graph()->NewNode(common()->FinishRegion(),
2058 jsgraph()->UndefinedConstant(), effect);
2059 } else {
2060 // Regular non-transitioning field store.
2061 effect = graph()->NewNode(simplified()->StoreField(field_access), storage,
2062 value, effect, control);
2063 }
2064 }
2065
2066 return ValueEffectControl(value, effect, control);
2067 }
2068
2069 Reduction JSNativeContextSpecialization::ReduceJSStoreDataPropertyInLiteral(
2070 Node* node) {
2071 DCHECK_EQ(IrOpcode::kJSStoreDataPropertyInLiteral, node->opcode());
2072
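// JSStoreDataPropertyInLiteral corresponds to defining a data property
// inside an object literal, e.g. (hypothetically) the [name] entry in
// o = { [name]: value }.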
2073 FeedbackParameter const& p = FeedbackParameterOf(node->op());
2074
2075 if (!p.feedback().IsValid()) return NoChange();
2076
2077 FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
2078 if (nexus.IsUninitialized()) {
2079 return NoChange();
2080 }
2081
2082 if (nexus.ic_state() == MEGAMORPHIC) {
2083 return NoChange();
2084 }
2085
2086 DCHECK_EQ(MONOMORPHIC, nexus.ic_state());
2087
2088 Map* map = nexus.FindFirstMap();
2089 if (map == nullptr) {
2090 // Maps are weakly held in the type feedback vector; we may not have one.
2091 return NoChange();
2092 }
2093
2094 Handle<Map> receiver_map(map, isolate());
2095 if (!Map::TryUpdate(isolate(), receiver_map).ToHandle(&receiver_map))
2096 return NoChange();
2097
2098 Handle<Name> cached_name = handle(
2099 Name::cast(nexus.GetFeedbackExtra()->ToStrongHeapObject()), isolate());
2100
2101 PropertyAccessInfo access_info;
2102 AccessInfoFactory access_info_factory(js_heap_broker(), dependencies(),
2103 native_context().object<Context>(),
2104 graph()->zone());
2105 if (!access_info_factory.ComputePropertyAccessInfo(
2106 receiver_map, cached_name, AccessMode::kStoreInLiteral,
2107 &access_info)) {
2108 return NoChange();
2109 }
2110
2111 Node* receiver = NodeProperties::GetValueInput(node, 0);
2112 Node* effect = NodeProperties::GetEffectInput(node);
2113 Node* control = NodeProperties::GetControlInput(node);
2114
2115 // Monomorphic property access.
2116 PropertyAccessBuilder access_builder(jsgraph(), js_heap_broker(),
2117 dependencies());
2118 receiver = access_builder.BuildCheckHeapObject(receiver, &effect, control);
2119 access_builder.BuildCheckMaps(receiver, &effect, control,
2120 access_info.receiver_maps());
2121
2122 // Ensure that {name} matches the cached name.
2123 Node* name = NodeProperties::GetValueInput(node, 1);
2124 Node* check = graph()->NewNode(simplified()->ReferenceEqual(), name,
2125 jsgraph()->HeapConstant(cached_name));
2126 effect = graph()->NewNode(simplified()->CheckIf(DeoptimizeReason::kWrongName),
2127 check, effect, control);
2128
2129 Node* value = NodeProperties::GetValueInput(node, 2);
2130 Node* context = NodeProperties::GetContextInput(node);
2131 Node* frame_state_lazy = NodeProperties::GetFrameStateInput(node);
2132
2133 // Generate the actual property access.
2134 ValueEffectControl continuation = BuildPropertyAccess(
2135 receiver, value, context, frame_state_lazy, effect, control, cached_name,
2136 nullptr, access_info, AccessMode::kStoreInLiteral);
2137 value = continuation.value();
2138 effect = continuation.effect();
2139 control = continuation.control();
2140
2141 ReplaceWithValue(node, value, effect, control);
2142 return Replace(value);
2143 }
2144
2145 Reduction JSNativeContextSpecialization::ReduceJSStoreInArrayLiteral(
2146 Node* node) {
2147 DCHECK_EQ(IrOpcode::kJSStoreInArrayLiteral, node->opcode());
2148 FeedbackParameter const& p = FeedbackParameterOf(node->op());
2149 Node* const receiver = NodeProperties::GetValueInput(node, 0);
2150 Node* const index = NodeProperties::GetValueInput(node, 1);
2151 Node* const value = NodeProperties::GetValueInput(node, 2);
2152 Node* const effect = NodeProperties::GetEffectInput(node);
2153
2154 // Extract receiver maps from the keyed store IC using the FeedbackNexus.
2155 if (!p.feedback().IsValid()) return NoChange();
2156 FeedbackNexus nexus(p.feedback().vector(), p.feedback().slot());
2157
2158 // Extract the keyed access store mode from the keyed store IC.
2159 KeyedAccessStoreMode store_mode = nexus.GetKeyedAccessStoreMode();
2160
2161 // Extract receiver maps from the {nexus}.
2162 MapHandles receiver_maps;
2163 if (!ExtractReceiverMaps(receiver, effect, nexus, &receiver_maps)) {
2164 return NoChange();
2165 } else if (receiver_maps.empty()) {
2166 if (flags() & kBailoutOnUninitialized) {
2167 return ReduceSoftDeoptimize(
2168 node,
2169 DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
2170 }
2171 return NoChange();
2172 }
2173 DCHECK(!nexus.IsUninitialized());
2174 DCHECK_EQ(ELEMENT, nexus.GetKeyType());
2175
2176 if (nexus.ic_state() == MEGAMORPHIC) return NoChange();
2177
2178 // Try to lower the element access based on the {receiver_maps}.
2179 return ReduceElementAccess(node, index, value, receiver_maps,
2180 AccessMode::kStoreInLiteral, STANDARD_LOAD,
2181 store_mode);
2182 }
2183
2184 Reduction JSNativeContextSpecialization::ReduceJSToObject(Node* node) {
2185 DCHECK_EQ(IrOpcode::kJSToObject, node->opcode());
2186 Node* receiver = NodeProperties::GetValueInput(node, 0);
2187 Node* effect = NodeProperties::GetEffectInput(node);
2188
2189 ZoneHandleSet<Map> receiver_maps;
2190 NodeProperties::InferReceiverMapsResult result =
2191 NodeProperties::InferReceiverMaps(isolate(), receiver, effect,
2192 &receiver_maps);
2193 if (result == NodeProperties::kNoReceiverMaps) return NoChange();
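// JSToObject is a no-op when the {receiver} is already a JSReceiver, so
// if every inferred map is a JSReceiver map we can simply replace the
// node with the {receiver} itself.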
2194
2195 for (size_t i = 0; i < receiver_maps.size(); ++i) {
2196 if (!receiver_maps[i]->IsJSReceiverMap()) return NoChange();
2197 }
2198
2199 ReplaceWithValue(node, receiver, effect);
2200 return Replace(receiver);
2201 }
2202
2203 namespace {
2204
2205 ExternalArrayType GetArrayTypeFromElementsKind(ElementsKind kind) {
2206 switch (kind) {
2207 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
2208 case TYPE##_ELEMENTS: \
2209 return kExternal##Type##Array;
2210 TYPED_ARRAYS(TYPED_ARRAY_CASE)
2211 #undef TYPED_ARRAY_CASE
2212 default:
2213 break;
2214 }
2215 UNREACHABLE();
2216 }
2217
2218 } // namespace
2219
2220 JSNativeContextSpecialization::ValueEffectControl
2221 JSNativeContextSpecialization::BuildElementAccess(
2222 Node* receiver, Node* index, Node* value, Node* effect, Node* control,
2223 ElementAccessInfo const& access_info, AccessMode access_mode,
2224 KeyedAccessLoadMode load_mode, KeyedAccessStoreMode store_mode) {
2225
2226 // TODO(bmeurer): We currently specialize based on elements kind. We should
2227 // also be able to properly support strings and other JSObjects here.
2228 ElementsKind elements_kind = access_info.elements_kind();
2229 MapHandles const& receiver_maps = access_info.receiver_maps();
2230
2231 if (IsFixedTypedArrayElementsKind(elements_kind)) {
2232 Node* buffer;
2233 Node* length;
2234 Node* base_pointer;
2235 Node* external_pointer;
2236
2237 // Check if we can constant-fold information about the {receiver} (i.e.
2238 // for asm.js-like code patterns).
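// A hypothetical example of such a pattern:
//
//   var i8 = new Int8Array(heap_buffer);
//   function load(i) { return i8[i | 0]; }
//
// where {i8} refers to a known JSTypedArray, so its length and backing
// store pointers can be embedded as constants below.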
2239 HeapObjectMatcher m(receiver);
2240 if (m.HasValue() && m.Value()->IsJSTypedArray()) {
2241 Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(m.Value());
2242
2243 // Determine the {receiver}s (known) length.
2244 length =
2245 jsgraph()->Constant(static_cast<double>(typed_array->length_value()));
2246
2247 // Check if the {receiver}s buffer was neutered.
2248 buffer = jsgraph()->HeapConstant(typed_array->GetBuffer());
2249
2250 // Load the (known) base and external pointer for the {receiver}. The
2251 // {external_pointer} might be invalid if the {buffer} was neutered, so
2252 // we need to make sure that any access is properly guarded.
2253 base_pointer = jsgraph()->ZeroConstant();
2254 external_pointer = jsgraph()->PointerConstant(
2255 FixedTypedArrayBase::cast(typed_array->elements())
2256 ->external_pointer());
2257 } else {
2258 // Load the {receiver}s length.
2259 length = effect = graph()->NewNode(
2260 simplified()->LoadField(AccessBuilder::ForJSTypedArrayLength()),
2261 receiver, effect, control);
2262
2263 // Load the buffer for the {receiver}.
2264 buffer = effect = graph()->NewNode(
2265 simplified()->LoadField(AccessBuilder::ForJSArrayBufferViewBuffer()),
2266 receiver, effect, control);
2267
2268 // Load the elements for the {receiver}.
2269 Node* elements = effect = graph()->NewNode(
2270 simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
2271 receiver, effect, control);
2272
2273 // Load the base pointer for the {receiver}. This will always be Smi
2274 // zero unless we allow on-heap TypedArrays, which is only the case
2275 // for Chrome. Node and Electron both set this limit to 0. Setting
2276 // the base to Smi zero here allows the EffectControlLinearizer to
2277 // optimize away the tricky part of the access later.
2278 if (V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP == 0) {
2279 base_pointer = jsgraph()->ZeroConstant();
2280 } else {
2281 base_pointer = effect = graph()->NewNode(
2282 simplified()->LoadField(
2283 AccessBuilder::ForFixedTypedArrayBaseBasePointer()),
2284 elements, effect, control);
2285 }
2286
2287 // Load the external pointer for the {receiver}s {elements}.
2288 external_pointer = effect = graph()->NewNode(
2289 simplified()->LoadField(
2290 AccessBuilder::ForFixedTypedArrayBaseExternalPointer()),
2291 elements, effect, control);
2292 }
2293
2294 // See if we can skip the neutering check.
2295 if (isolate()->IsArrayBufferNeuteringIntact()) {
2296 // Add a code dependency so we are deoptimized in case an ArrayBuffer
2297 // gets neutered.
2298 dependencies()->DependOnProtector(PropertyCellRef(
2299 js_heap_broker(), factory()->array_buffer_neutering_protector()));
2300 } else {
2301 // Default to zero if the {receiver}s buffer was neutered.
2302 Node* check = effect = graph()->NewNode(
2303 simplified()->ArrayBufferWasNeutered(), buffer, effect, control);
2304 length = graph()->NewNode(
2305 common()->Select(MachineRepresentation::kTagged, BranchHint::kFalse),
2306 check, jsgraph()->ZeroConstant(), length);
2307 }
2308
2309 if (load_mode == LOAD_IGNORE_OUT_OF_BOUNDS ||
2310 store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2311 // Only check that the {index} is in SignedSmall range. We do the actual
2312 // bounds check below and just skip the property access if it's out of
2313 // bounds for the {receiver}.
2314 index = effect = graph()->NewNode(
2315 simplified()->CheckSmi(VectorSlotPair()), index, effect, control);
2316
2317 // Cast the {index} to Unsigned32 range, so that the bounds checks
2318 // below are performed on unsigned values, which means that all the
2319 // Negative32 values are treated as out-of-bounds.
2320 index = graph()->NewNode(simplified()->NumberToUint32(), index);
2321 } else {
2322 // Check that the {index} is in the valid range for the {receiver}.
2323 index = effect =
2324 graph()->NewNode(simplified()->CheckBounds(VectorSlotPair()), index,
2325 length, effect, control);
2326 }
2327
2328 // Access the actual element.
2329 ExternalArrayType external_array_type =
2330 GetArrayTypeFromElementsKind(elements_kind);
2331 switch (access_mode) {
2332 case AccessMode::kLoad: {
2333 // Check if we can return undefined for out-of-bounds loads.
2334 if (load_mode == LOAD_IGNORE_OUT_OF_BOUNDS) {
2335 Node* check =
2336 graph()->NewNode(simplified()->NumberLessThan(), index, length);
2337 Node* branch = graph()->NewNode(
2338 common()->Branch(BranchHint::kTrue,
2339 IsSafetyCheck::kCriticalSafetyCheck),
2340 check, control);
2341
2342 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
2343 Node* etrue = effect;
2344 Node* vtrue;
2345 {
2346 // Perform the actual load
2347 vtrue = etrue = graph()->NewNode(
2348 simplified()->LoadTypedElement(external_array_type), buffer,
2349 base_pointer, external_pointer, index, etrue, if_true);
2350 }
2351
2352 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
2353 Node* efalse = effect;
2354 Node* vfalse;
2355 {
2356 // Materialize undefined for out-of-bounds loads.
2357 vfalse = jsgraph()->UndefinedConstant();
2358 }
2359
2360 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
2361 effect =
2362 graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
2363 value =
2364 graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
2365 vtrue, vfalse, control);
2366 } else {
2367 // Perform the actual load.
2368 value = effect = graph()->NewNode(
2369 simplified()->LoadTypedElement(external_array_type), buffer,
2370 base_pointer, external_pointer, index, effect, control);
2371 }
2372 break;
2373 }
2374 case AccessMode::kStoreInLiteral:
2375 UNREACHABLE();
2376 break;
2377 case AccessMode::kStore: {
2378 // Ensure that the {value} is actually a Number or an Oddball,
2379 // and truncate it to a Number appropriately.
2380 value = effect = graph()->NewNode(
2381 simplified()->SpeculativeToNumber(
2382 NumberOperationHint::kNumberOrOddball, VectorSlotPair()),
2383 value, effect, control);
2384
2385 // Introduce the appropriate truncation for {value}. Currently we
2386 // only need to do this for Uint8ClampedArray {receiver}s, as the
2387 // other truncations are implicit in the StoreTypedElement, but we
2388 // might want to change that at some point.
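// (For example, storing 300 into a Uint8ClampedArray element yields 255,
// and negative values clamp to 0.)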
2389 if (external_array_type == kExternalUint8ClampedArray) {
2390 value = graph()->NewNode(simplified()->NumberToUint8Clamped(), value);
2391 }
2392
2393 // Check if we can skip the out-of-bounds store.
2394 if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2395 Node* check =
2396 graph()->NewNode(simplified()->NumberLessThan(), index, length);
2397 Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
2398 check, control);
2399
2400 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
2401 Node* etrue = effect;
2402 {
2403 // Perform the actual store.
2404 etrue = graph()->NewNode(
2405 simplified()->StoreTypedElement(external_array_type), buffer,
2406 base_pointer, external_pointer, index, value, etrue, if_true);
2407 }
2408
2409 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
2410 Node* efalse = effect;
2411 {
2412 // Just ignore the out-of-bounds write.
2413 }
2414
2415 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
2416 effect =
2417 graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
2418 } else {
2419 // Perform the actual store
2420 effect = graph()->NewNode(
2421 simplified()->StoreTypedElement(external_array_type), buffer,
2422 base_pointer, external_pointer, index, value, effect, control);
2423 }
2424 break;
2425 }
2426 }
2427 } else {
2428 // Load the elements for the {receiver}.
2429 Node* elements = effect = graph()->NewNode(
2430 simplified()->LoadField(AccessBuilder::ForJSObjectElements()), receiver,
2431 effect, control);
2432
2433 // Don't try to store to a copy-on-write backing store (unless supported by
2434 // the store mode).
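// (Copy-on-write backing stores use the fixed_cow_array_map, so the
// CheckMaps against the plain fixed_array_map below will deoptimize on
// them.)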
2435 if (access_mode == AccessMode::kStore &&
2436 IsSmiOrObjectElementsKind(elements_kind) &&
2437 !IsCOWHandlingStoreMode(store_mode)) {
2438 effect = graph()->NewNode(
2439 simplified()->CheckMaps(
2440 CheckMapsFlag::kNone,
2441 ZoneHandleSet<Map>(factory()->fixed_array_map())),
2442 elements, effect, control);
2443 }
2444
2445 // Check if the {receiver} is a JSArray.
2446 bool receiver_is_jsarray = HasOnlyJSArrayMaps(receiver_maps);
2447
2448 // Load the length of the {receiver}.
2449 Node* length = effect =
2450 receiver_is_jsarray
2451 ? graph()->NewNode(
2452 simplified()->LoadField(
2453 AccessBuilder::ForJSArrayLength(elements_kind)),
2454 receiver, effect, control)
2455 : graph()->NewNode(
2456 simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
2457 elements, effect, control);
2458
2459 // Check if we might need to grow the {elements} backing store.
2460 if (IsGrowStoreMode(store_mode)) {
2461 // For growing stores we validate the {index} below.
2462 DCHECK(access_mode == AccessMode::kStore ||
2463 access_mode == AccessMode::kStoreInLiteral);
2464 } else if (load_mode == LOAD_IGNORE_OUT_OF_BOUNDS &&
2465 CanTreatHoleAsUndefined(receiver_maps)) {
2466 // Check that the {index} is a valid array index; we do the actual
2467 // bounds check below and just skip the load if it's out of
2468 // bounds for the {receiver}.
2469 index = effect = graph()->NewNode(
2470 simplified()->CheckBounds(VectorSlotPair()), index,
2471 jsgraph()->Constant(Smi::kMaxValue), effect, control);
2472 } else {
2473 // Check that the {index} is in the valid range for the {receiver}.
2474 index = effect =
2475 graph()->NewNode(simplified()->CheckBounds(VectorSlotPair()), index,
2476 length, effect, control);
2477 }
2478
2479 // Compute the element access.
2480 Type element_type = Type::NonInternal();
2481 MachineType element_machine_type = MachineType::AnyTagged();
2482 if (IsDoubleElementsKind(elements_kind)) {
2483 element_type = Type::Number();
2484 element_machine_type = MachineType::Float64();
2485 } else if (IsSmiElementsKind(elements_kind)) {
2486 element_type = Type::SignedSmall();
2487 element_machine_type = MachineType::TaggedSigned();
2488 }
2489 ElementAccess element_access = {
2490 kTaggedBase, FixedArray::kHeaderSize,
2491 element_type, element_machine_type,
2492 kFullWriteBarrier, LoadSensitivity::kCritical};
2493
2494 // Access the actual element.
2495 if (access_mode == AccessMode::kLoad) {
2496 // Compute the real element access type, which includes the hole in case
2497 // of holey backing stores.
2498 if (IsHoleyElementsKind(elements_kind)) {
2499 element_access.type =
2500 Type::Union(element_type, Type::Hole(), graph()->zone());
2501 }
2502 if (elements_kind == HOLEY_ELEMENTS ||
2503 elements_kind == HOLEY_SMI_ELEMENTS) {
2504 element_access.machine_type = MachineType::AnyTagged();
2505 }
2506
2507 // Check if we can return undefined for out-of-bounds loads.
2508 if (load_mode == LOAD_IGNORE_OUT_OF_BOUNDS &&
2509 CanTreatHoleAsUndefined(receiver_maps)) {
2510 Node* check =
2511 graph()->NewNode(simplified()->NumberLessThan(), index, length);
2512 Node* branch = graph()->NewNode(
2513 common()->Branch(BranchHint::kTrue,
2514 IsSafetyCheck::kCriticalSafetyCheck),
2515 check, control);
2516
2517 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
2518 Node* etrue = effect;
2519 Node* vtrue;
2520 {
2521 // Perform the actual load
2522 vtrue = etrue =
2523 graph()->NewNode(simplified()->LoadElement(element_access),
2524 elements, index, etrue, if_true);
2525
2526 // Handle loading from holey backing stores correctly, by either
2527 // mapping the hole to undefined if possible, or deoptimizing
2528 // otherwise.
2529 if (elements_kind == HOLEY_ELEMENTS ||
2530 elements_kind == HOLEY_SMI_ELEMENTS) {
2531 // Turn the hole into undefined.
2532 vtrue = graph()->NewNode(
2533 simplified()->ConvertTaggedHoleToUndefined(), vtrue);
2534 } else if (elements_kind == HOLEY_DOUBLE_ELEMENTS) {
2535 // Return the signaling NaN hole directly if all uses are
2536 // truncating.
2537 vtrue = etrue = graph()->NewNode(
2538 simplified()->CheckFloat64Hole(
2539 CheckFloat64HoleMode::kAllowReturnHole, VectorSlotPair()),
2540 vtrue, etrue, if_true);
2541 }
2542 }
2543
2544 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
2545 Node* efalse = effect;
2546 Node* vfalse;
2547 {
2548 // Materialize undefined for out-of-bounds loads.
2549 vfalse = jsgraph()->UndefinedConstant();
2550 }
2551
2552 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
2553 effect =
2554 graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
2555 value =
2556 graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
2557 vtrue, vfalse, control);
2558 } else {
2559 // Perform the actual load.
2560 value = effect =
2561 graph()->NewNode(simplified()->LoadElement(element_access),
2562 elements, index, effect, control);
2563
2564 // Handle loading from holey backing stores correctly, by either mapping
2565 // the hole to undefined if possible, or deoptimizing otherwise.
2566 if (elements_kind == HOLEY_ELEMENTS ||
2567 elements_kind == HOLEY_SMI_ELEMENTS) {
2568 // Check if we are allowed to turn the hole into undefined.
2569 if (CanTreatHoleAsUndefined(receiver_maps)) {
2570 // Turn the hole into undefined.
2571 value = graph()->NewNode(
2572 simplified()->ConvertTaggedHoleToUndefined(), value);
2573 } else {
2574 // Bailout if we see the hole.
2575 value = effect = graph()->NewNode(
2576 simplified()->CheckNotTaggedHole(), value, effect, control);
2577 }
2578 } else if (elements_kind == HOLEY_DOUBLE_ELEMENTS) {
2579 // Perform the hole check on the result.
2580 CheckFloat64HoleMode mode = CheckFloat64HoleMode::kNeverReturnHole;
2581 // Check if we are allowed to return the hole directly.
2582 if (CanTreatHoleAsUndefined(receiver_maps)) {
2583 // Return the signaling NaN hole directly if all uses are
2584 // truncating.
2585 mode = CheckFloat64HoleMode::kAllowReturnHole;
2586 }
2587 value = effect = graph()->NewNode(
2588 simplified()->CheckFloat64Hole(mode, VectorSlotPair()), value,
2589 effect, control);
2590 }
2591 }
2592 } else {
2593 DCHECK(access_mode == AccessMode::kStore ||
2594 access_mode == AccessMode::kStoreInLiteral);
2595 if (IsSmiElementsKind(elements_kind)) {
2596 value = effect = graph()->NewNode(
2597 simplified()->CheckSmi(VectorSlotPair()), value, effect, control);
2598 } else if (IsDoubleElementsKind(elements_kind)) {
2599 value = effect =
2600 graph()->NewNode(simplified()->CheckNumber(VectorSlotPair()), value,
2601 effect, control);
2602 // Make sure we do not store signaling NaNs into double arrays.
2603 value = graph()->NewNode(simplified()->NumberSilenceNaN(), value);
2604 }
2605
2606 // Ensure that copy-on-write backing store is writable.
2607 if (IsSmiOrObjectElementsKind(elements_kind) &&
2608 store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
2609 elements = effect =
2610 graph()->NewNode(simplified()->EnsureWritableFastElements(),
2611 receiver, elements, effect, control);
2612 } else if (IsGrowStoreMode(store_mode)) {
2613 // Determine the length of the {elements} backing store.
2614 Node* elements_length = effect = graph()->NewNode(
2615 simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
2616 elements, effect, control);
2617
2618 // Validate the {index} depending on holeyness:
2619 //
2620 // For HOLEY_*_ELEMENTS the {index} must not exceed the {elements}
2621 // backing store capacity plus the maximum allowed gap, as otherwise
2622 // the (potential) backing store growth would normalize and thus
2623 // the elements kind of the {receiver} would change to slow mode.
2624 //
2625 // For PACKED_*_ELEMENTS the {index} must be within the range
2626 // [0,length+1[ to be valid. In case {index} equals {length},
2627 // the {receiver} will be extended, but kept packed.
2628 Node* limit =
2629 IsHoleyElementsKind(elements_kind)
2630 ? graph()->NewNode(simplified()->NumberAdd(), elements_length,
2631 jsgraph()->Constant(JSObject::kMaxGap))
2632 : graph()->NewNode(simplified()->NumberAdd(), length,
2633 jsgraph()->OneConstant());
2634 index = effect =
2635 graph()->NewNode(simplified()->CheckBounds(VectorSlotPair()), index,
2636 limit, effect, control);
2637
2638 // Grow {elements} backing store if necessary.
2639 GrowFastElementsMode mode =
2640 IsDoubleElementsKind(elements_kind)
2641 ? GrowFastElementsMode::kDoubleElements
2642 : GrowFastElementsMode::kSmiOrObjectElements;
2643 elements = effect = graph()->NewNode(
2644 simplified()->MaybeGrowFastElements(mode, VectorSlotPair()),
2645 receiver, elements, index, elements_length, effect, control);
2646
2647 // If we didn't grow {elements}, it might still be COW, in which case we
2648 // copy it now.
2649 if (IsSmiOrObjectElementsKind(elements_kind) &&
2650 store_mode == STORE_AND_GROW_NO_TRANSITION_HANDLE_COW) {
2651 elements = effect =
2652 graph()->NewNode(simplified()->EnsureWritableFastElements(),
2653 receiver, elements, effect, control);
2654 }
2655
2656 // Also update the "length" property if {receiver} is a JSArray.
2657 if (receiver_is_jsarray) {
2658 Node* check =
2659 graph()->NewNode(simplified()->NumberLessThan(), index, length);
2660 Node* branch = graph()->NewNode(common()->Branch(), check, control);
2661
2662 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
2663 Node* etrue = effect;
2664 {
2665 // We don't need to do anything, the {index} is within
2666 // the valid bounds for the JSArray {receiver}.
2667 }
2668
2669 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
2670 Node* efalse = effect;
2671 {
2672 // Update the JSArray::length field. Since this is observable,
2673 // there must be no other check after this.
2674 Node* new_length = graph()->NewNode(
2675 simplified()->NumberAdd(), index, jsgraph()->OneConstant());
2676 efalse = graph()->NewNode(
2677 simplified()->StoreField(
2678 AccessBuilder::ForJSArrayLength(elements_kind)),
2679 receiver, new_length, efalse, if_false);
2680 }
2681
2682 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
2683 effect =
2684 graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
2685 }
2686 }
2687
2688 // Perform the actual element access.
2689 effect = graph()->NewNode(simplified()->StoreElement(element_access),
2690 elements, index, value, effect, control);
2691 }
2692 }
2693
2694 return ValueEffectControl(value, effect, control);
2695 }
2696
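// Builds the graph for an indexed load from a String {receiver}, e.g. the
// element access in "abc"[i]. When out-of-bounds reads may be ignored and the
// "no elements" protector is intact, out-of-range indices yield undefined
// (and a dependency on the protector is recorded); otherwise {index} is
// checked against {length} and a failing check deoptimizes.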
Node* JSNativeContextSpecialization::BuildIndexedStringLoad(
    Node* receiver, Node* index, Node* length, Node** effect, Node** control,
    KeyedAccessLoadMode load_mode) {
  if (load_mode == LOAD_IGNORE_OUT_OF_BOUNDS &&
      isolate()->IsNoElementsProtectorIntact()) {
    dependencies()->DependOnProtector(
        PropertyCellRef(js_heap_broker(), factory()->no_elements_protector()));

    // Ensure that the {index} is a valid String length.
    index = *effect = graph()->NewNode(
        simplified()->CheckBounds(VectorSlotPair()), index,
        jsgraph()->Constant(String::kMaxLength), *effect, *control);

    // Load the single character string from {receiver} or yield
    // undefined if the {index} is not within the valid bounds.
    Node* check =
        graph()->NewNode(simplified()->NumberLessThan(), index, length);
    Node* branch =
        graph()->NewNode(common()->Branch(BranchHint::kTrue,
                                          IsSafetyCheck::kCriticalSafetyCheck),
                         check, *control);

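    // Mask the {index} before the speculative character load below.
    // PoisonIndex is part of the Spectre mitigations and should keep an
    // out-of-bounds index from being used under misspeculation (it is
    // expected to reduce to a no-op when the mitigations are disabled).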
    Node* masked_index = graph()->NewNode(simplified()->PoisonIndex(), index);

    Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
    Node* etrue;
    Node* vtrue = etrue =
        graph()->NewNode(simplified()->StringCharCodeAt(), receiver,
                         masked_index, *effect, if_true);
    vtrue = graph()->NewNode(simplified()->StringFromSingleCharCode(), vtrue);

    Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
    Node* vfalse = jsgraph()->UndefinedConstant();

    *control = graph()->NewNode(common()->Merge(2), if_true, if_false);
    *effect =
        graph()->NewNode(common()->EffectPhi(2), etrue, *effect, *control);
    return graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
                            vtrue, vfalse, *control);
  } else {
    // Ensure that {index} is less than {receiver} length.
    index = *effect =
        graph()->NewNode(simplified()->CheckBounds(VectorSlotPair()), index,
                         length, *effect, *control);

    Node* masked_index = graph()->NewNode(simplified()->PoisonIndex(), index);

    // Return the character from the {receiver} as single character string.
    Node* value = *effect =
        graph()->NewNode(simplified()->StringCharCodeAt(), receiver,
                         masked_index, *effect, *control);
    value = graph()->NewNode(simplified()->StringFromSingleCharCode(), value);
    return value;
  }
}

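// Copies the out-of-object {properties} backing store of an object with the
// given {map} into a new PropertyArray with JSObject::kFieldsAdded additional
// slots (initialized to undefined), preserving the hash, and returns the new
// backing store.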
Node* JSNativeContextSpecialization::BuildExtendPropertiesBackingStore(
    Handle<Map> map, Node* properties, Node* effect, Node* control) {
  // TODO(bmeurer/jkummerow): Property deletions can undo map transitions
  // while keeping the backing store around, meaning that even though the
  // map might believe that objects have no unused property fields, there
  // might actually be some. It would be nice to not create a new backing
  // store in that case (i.e. when properties->length() >= new_length).
  // However, introducing branches and Phi nodes here would make it more
  // difficult for escape analysis to get rid of the backing stores used
  // for intermediate states of chains of property additions. That makes
  // it unclear what the best approach is here.
  DCHECK_EQ(0, map->UnusedPropertyFields());
  // Compute the length of the old {properties} and the new properties.
  int length = map->NextFreePropertyIndex() - map->GetInObjectProperties();
  int new_length = length + JSObject::kFieldsAdded;
  // Collect the field values from the {properties}.
  ZoneVector<Node*> values(zone());
  values.reserve(new_length);
  for (int i = 0; i < length; ++i) {
    Node* value = effect = graph()->NewNode(
        simplified()->LoadField(AccessBuilder::ForFixedArraySlot(i)),
        properties, effect, control);
    values.push_back(value);
  }
  // Initialize the new fields to undefined.
  for (int i = 0; i < JSObject::kFieldsAdded; ++i) {
    values.push_back(jsgraph()->UndefinedConstant());
  }

  // Compute new length and hash.
  Node* hash;
  if (length == 0) {
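    // There is no PropertyArray yet, so the properties slot holds either a
    // Smi hash or a hash-less placeholder (typically the empty fixed array).
    // Reuse the hash when present, otherwise fall back to kNoHashSentinel,
    // and shift it into the hash bits of the length-and-hash word.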
    hash = graph()->NewNode(
        common()->Select(MachineRepresentation::kTaggedSigned),
        graph()->NewNode(simplified()->ObjectIsSmi(), properties), properties,
        jsgraph()->SmiConstant(PropertyArray::kNoHashSentinel));
    hash = effect = graph()->NewNode(common()->TypeGuard(Type::SignedSmall()),
                                     hash, effect, control);
    hash =
        graph()->NewNode(simplified()->NumberShiftLeft(), hash,
                         jsgraph()->Constant(PropertyArray::HashField::kShift));
  } else {
    hash = effect = graph()->NewNode(
        simplified()->LoadField(AccessBuilder::ForPropertyArrayLengthAndHash()),
        properties, effect, control);
    hash =
        graph()->NewNode(simplified()->NumberBitwiseAnd(), hash,
                         jsgraph()->Constant(PropertyArray::HashField::kMask));
  }
  Node* new_length_and_hash = graph()->NewNode(
      simplified()->NumberBitwiseOr(), jsgraph()->Constant(new_length), hash);
  // TODO(jarin): Fix the typer to infer tighter bound for NumberBitwiseOr.
  new_length_and_hash = effect =
      graph()->NewNode(common()->TypeGuard(Type::SignedSmall()),
                       new_length_and_hash, effect, control);

  // Allocate and initialize the new properties.
  AllocationBuilder a(jsgraph(), effect, control);
  a.Allocate(PropertyArray::SizeFor(new_length), NOT_TENURED,
             Type::OtherInternal());
  a.Store(AccessBuilder::ForMap(), jsgraph()->PropertyArrayMapConstant());
  a.Store(AccessBuilder::ForPropertyArrayLengthAndHash(), new_length_and_hash);
  for (int i = 0; i < new_length; ++i) {
    a.Store(AccessBuilder::ForFixedArraySlot(i), values[i]);
  }
  return a.Finish();
}

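// Emits a check that {value} is equal to the unique {name}, using the
// appropriate operator for symbols vs. internalized strings, and returns the
// resulting check node.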
Node* JSNativeContextSpecialization::BuildCheckEqualsName(Handle<Name> name,
                                                          Node* value,
                                                          Node* effect,
                                                          Node* control) {
  DCHECK(name->IsUniqueName());
  Operator const* const op =
      name->IsSymbol() ? simplified()->CheckEqualsSymbol()
                       : simplified()->CheckEqualsInternalizedString();
  return graph()->NewNode(op, jsgraph()->HeapConstant(name), value, effect,
                          control);
}

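// Returns true if a hole loaded from one of the {receiver_maps} may simply be
// turned into undefined rather than triggering a deoptimization. This is only
// sound when every receiver prototype is an initial Array.prototype or
// Object.prototype and the "no elements" protector is intact; in that case a
// dependency on the protector is recorded.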
bool JSNativeContextSpecialization::CanTreatHoleAsUndefined(
    MapHandles const& receiver_maps) {
  // Check if all {receiver_maps} have either the initial Array.prototype or
  // the initial Object.prototype object as their prototype (in any of the
  // current native contexts, as the global Array protector works
  // isolate-wide).
  for (Handle<Map> receiver_map : receiver_maps) {
    DisallowHeapAllocation no_gc;
    Object* const receiver_prototype = receiver_map->prototype();
    if (!isolate()->IsInAnyContext(receiver_prototype,
                                   Context::INITIAL_ARRAY_PROTOTYPE_INDEX) &&
        !isolate()->IsInAnyContext(receiver_prototype,
                                   Context::INITIAL_OBJECT_PROTOTYPE_INDEX)) {
      return false;
    }
  }

  // Check if the array prototype chain is intact.
  if (!isolate()->IsNoElementsProtectorIntact()) return false;

  dependencies()->DependOnProtector(
      PropertyCellRef(js_heap_broker(), factory()->no_elements_protector()));
  return true;
}

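// Collects the possible maps of {receiver} into {receiver_maps}. Map inference
// from the graph is preferred where it is safe (not for keyed stores or
// store-in-array-literal, which may rely on transitions recorded in the
// feedback); otherwise the maps recorded in the feedback {nexus} are used,
// filtered against the receiver's inferred root map. Returns true on success
// (an uninitialized {nexus} yields an empty map list).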
bool JSNativeContextSpecialization::ExtractReceiverMaps(
    Node* receiver, Node* effect, FeedbackNexus const& nexus,
    MapHandles* receiver_maps) {
  DCHECK_EQ(0, receiver_maps->size());
  if (nexus.IsUninitialized()) return true;

  // See if we can infer a concrete type for the {receiver}. Solely relying on
  // the inference is not safe for keyed stores, because we would potentially
  // miss out on transitions that need to be performed.
  {
    FeedbackSlotKind kind = nexus.kind();
    bool use_inference =
        !IsKeyedStoreICKind(kind) && !IsStoreInArrayLiteralICKind(kind);
    if (use_inference && InferReceiverMaps(receiver, effect, receiver_maps)) {
      // We can assume that {receiver} still has the inferred {receiver_maps}.
      return true;
    }
  }

  // Try to extract some maps from the {nexus}.
  if (nexus.ExtractMaps(receiver_maps) != 0) {
    // Try to filter impossible candidates based on inferred root map.
    Handle<Map> receiver_map;
    if (InferReceiverRootMap(receiver).ToHandle(&receiver_map)) {
      DCHECK(!receiver_map->is_abandoned_prototype_map());
      Isolate* isolate = this->isolate();
      receiver_maps->erase(
          std::remove_if(receiver_maps->begin(), receiver_maps->end(),
                         [receiver_map, isolate](const Handle<Map>& map) {
                           return map->is_abandoned_prototype_map() ||
                                  map->FindRootMap(isolate) != *receiver_map;
                         }),
          receiver_maps->end());
    }
    return true;
  }

  return false;
}

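// Infers the maps of {receiver} at {effect} from the graph. Returns true when
// the inference is reliable, or when it is unreliable but every inferred map
// is stable; returns false otherwise.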
bool JSNativeContextSpecialization::InferReceiverMaps(
    Node* receiver, Node* effect, MapHandles* receiver_maps) {
  ZoneHandleSet<Map> maps;
  NodeProperties::InferReceiverMapsResult result =
      NodeProperties::InferReceiverMaps(isolate(), receiver, effect, &maps);
  if (result == NodeProperties::kReliableReceiverMaps) {
    for (size_t i = 0; i < maps.size(); ++i) {
      receiver_maps->push_back(maps[i]);
    }
    return true;
  } else if (result == NodeProperties::kUnreliableReceiverMaps) {
    // For untrusted receiver maps, we can still use the information
    // if the maps are stable.
    for (size_t i = 0; i < maps.size(); ++i) {
      if (!maps[i]->is_stable()) return false;
    }
    for (size_t i = 0; i < maps.size(); ++i) {
      receiver_maps->push_back(maps[i]);
    }
    return true;
  }
  return false;
}

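// Tries to determine the root map of {receiver}: either from the map of a
// constant receiver, or from the initial map of the constructor when
// {receiver} is a JSCreate whose known target matches new.target.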
MaybeHandle<Map> JSNativeContextSpecialization::InferReceiverRootMap(
    Node* receiver) {
  HeapObjectMatcher m(receiver);
  if (m.HasValue()) {
    return handle(m.Value()->map()->FindRootMap(isolate()), isolate());
  } else if (m.IsJSCreate()) {
    HeapObjectMatcher mtarget(m.InputAt(0));
    HeapObjectMatcher mnewtarget(m.InputAt(1));
    if (mtarget.HasValue() && mnewtarget.HasValue()) {
      Handle<JSFunction> constructor =
          Handle<JSFunction>::cast(mtarget.Value());
      if (constructor->has_initial_map()) {
        Handle<Map> initial_map(constructor->initial_map(), isolate());
        if (initial_map->constructor_or_backpointer() == *mnewtarget.Value()) {
          DCHECK_EQ(*initial_map, initial_map->FindRootMap(isolate()));
          return initial_map;
        }
      }
    }
  }
  return MaybeHandle<Map>();
}

Graph* JSNativeContextSpecialization::graph() const {
  return jsgraph()->graph();
}

Isolate* JSNativeContextSpecialization::isolate() const {
  return jsgraph()->isolate();
}

Factory* JSNativeContextSpecialization::factory() const {
  return isolate()->factory();
}

CommonOperatorBuilder* JSNativeContextSpecialization::common() const {
  return jsgraph()->common();
}

JSOperatorBuilder* JSNativeContextSpecialization::javascript() const {
  return jsgraph()->javascript();
}

SimplifiedOperatorBuilder* JSNativeContextSpecialization::simplified() const {
  return jsgraph()->simplified();
}

#undef V8_TYPED_ARRAY_MAX_SIZE_IN_HEAP

}  // namespace compiler
}  // namespace internal
}  // namespace v8