// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/bailout-reason.h"
#include "src/code-stubs.h"
#include "src/field-index.h"
#include "src/hydrogen.h"
#include "src/lithium.h"

namespace v8 {
namespace internal {

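// Runs the Hydrogen optimization pipeline on |graph| and lowers it to a
// Lithium chunk. Any bailout during stub compilation is fatal, since a stub
// has no unoptimized code to fall back on.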
static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  DCHECK(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  }
  LChunk* chunk = LChunk::NewChunk(graph);
  if (chunk == NULL) {
    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
  }
  return chunk;
}


class CodeStubGraphBuilderBase : public HGraphBuilder {
 public:
  CodeStubGraphBuilderBase(Isolate* isolate, HydrogenCodeStub* stub)
      : HGraphBuilder(&info_),
        arguments_length_(NULL),
        info_(stub, isolate),
        descriptor_(stub),
        context_(NULL) {
    int parameter_count = descriptor_.GetEnvironmentParameterCount();
    parameters_.Reset(new HParameter*[parameter_count]);
  }
  virtual bool BuildGraph();

 protected:
  virtual HValue* BuildCodeStub() = 0;
  HParameter* GetParameter(int parameter) {
    DCHECK(parameter < descriptor_.GetEnvironmentParameterCount());
    return parameters_[parameter];
  }
  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph().
    DCHECK(arguments_length_ != NULL);
    return arguments_length_;
  }
  CompilationInfo* info() { return &info_; }
  HydrogenCodeStub* stub() { return info_.code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_.isolate(); }

  HLoadNamedField* BuildLoadNamedField(HValue* object, FieldIndex index);
  void BuildStoreNamedField(HValue* object, HValue* value, FieldIndex index,
                            Representation representation);

  enum ArgumentClass {
    NONE,
    SINGLE,
    MULTIPLE
  };

  HValue* UnmappedCase(HValue* elements, HValue* key);

  HValue* BuildArrayConstructor(ElementsKind kind,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // BuildCheckAndInstallOptimizedCode emits code to install the optimized
  // function found in the optimized code map at map_index in js_function, if
  // the function at map_index matches the given native_context. The builder
  // is left in the "Then()" state after the install.
  void BuildCheckAndInstallOptimizedCode(HValue* js_function,
                                         HValue* native_context,
                                         IfBuilder* builder,
                                         HValue* optimized_map,
                                         HValue* map_index);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);

  HInstruction* LoadFromOptimizedCodeMap(HValue* optimized_map,
                                         HValue* iterator,
                                         int field_offset);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* shared_info,
                                        HValue* native_context);

 private:
  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
                                          ElementsKind kind);

  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfoWithZone info_;
  CodeStubDescriptor descriptor_;
  HContext* context_;
};

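// Builds the graph skeleton shared by all Hydrogen stubs: creates the entry
// block, binds the environment parameters, invokes the stub-specific
// BuildCodeStub(), and finishes with an HReturn that pops the right number
// of stack slots.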
bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(&info_);
  }

  int param_count = descriptor_.GetEnvironmentParameterCount();
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);
  Goto(next_block);
  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  bool runtime_stack_params = descriptor_.stack_parameter_count().is_valid();
  HInstruction* stack_parameter_count = NULL;
  for (int i = 0; i < param_count; ++i) {
    Representation r = descriptor_.GetEnvironmentParameterRepresentation(i);
    HParameter* param = Add<HParameter>(i,
                                        HParameter::REGISTER_PARAMETER, r);
    start_environment->Bind(i, param);
    parameters_[i] = param;
    if (descriptor_.IsEnvironmentParameterCountRegister(i)) {
      param->set_type(HType::Smi());
      stack_parameter_count = param;
      arguments_length_ = stack_parameter_count;
    }
  }

  DCHECK(!runtime_stack_params || arguments_length_ != NULL);
  if (!runtime_stack_params) {
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();
  }

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

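  // Record an environment simulation for the stub entry so that any deopt
  // emitted below has a frame state to fall back on.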
  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  // arguments above.
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_.function_mode() == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_.hint_stack_parameter_count() < 0) {
      HInstruction* constant_one = graph()->GetConstant1();
      stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
      // smi.
    } else {
      int count = descriptor_.hint_stack_parameter_count();
      stack_pop_count = Add<HConstant>(count);
    }
  }

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
                                                stack_pop_count);
    FinishCurrentBlock(hreturn_instruction);
  }
  return true;
}


template <class Stub>
class CodeStubGraphBuilder : public CodeStubGraphBuilderBase {
 public:
  CodeStubGraphBuilder(Isolate* isolate, Stub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    } else {
      return BuildCodeInitializedStub();
    }
  }

  virtual HValue* BuildCodeInitializedStub() {
    UNIMPLEMENTED();
    return NULL;
  }

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.Then();
    builder.ElseDeopt("Forced deopt to runtime");
    return undefined;
  }

  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
};


Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(),
      GetICState(),
      GetExtraICState(),
      GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


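// Compiles |stub|. Uninitialized stubs that have a miss handler get a
// lightweight trampoline into the runtime; all others go through the full
// Hydrogen/Lithium pipeline.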
template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  CodeStubGraphBuilder<Stub> builder(isolate, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  // TODO(yangguo) remove this once the code serializer handles code stubs.
  if (FLAG_serialize_toplevel) chunk->info()->PrepareForSerializing();
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);

  // Check if the parameter is already a SMI or heap number.
  IfBuilder if_number(this);
  if_number.If<HIsSmiAndBranch>(value);
  if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());
  if_number.Then();

  // Return the number.
  Push(value);

  if_number.Else();

  // Convert the parameter to number using the builtin.
  HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
  Add<HPushArguments>(value);
  Push(Add<HInvokeFunction>(function, 1));

  if_number.End();

  return Pop();
}


Handle<Code> ToNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  // This stub is very performance sensitive; the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(
      allocation_site, static_cast<HValue*>(NULL), access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt("Uninitialized boilerplate literals");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  GetParameter(1),
                                                  static_cast<HValue*>(NULL),
                                                  FAST_ELEMENTS);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(
      allocation_site, static_cast<HValue*>(NULL), access);

  int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }
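  // At this point |object_size| is the size of the cloned object itself,
  // while |size| additionally reserves room for the AllocationMemento that is
  // appended behind the object when allocation-site pretenuring is enabled.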

  HValue* boilerplate_map = Add<HLoadNamedField>(
      boilerplate, static_cast<HValue*>(NULL),
      HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, static_cast<HValue*>(NULL),
      HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(
        object, access, Add<HLoadNamedField>(
            boilerplate, static_cast<HValue*>(NULL), access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt("Uninitialized boilerplate in fast clone");
  checker.End();

  return environment()->Pop();
}


Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
                                        JS_OBJECT_TYPE);

  // Store the map.
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind).
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Pretenuring calculation field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureDataOffset),
                        graph()->GetConstant0());

  // Pretenuring memento creation count field.
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kPretenureCreateCountOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),
      empty_fixed_array);

  // Link the object to the allocation site list.
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(
      site_list, static_cast<HValue*>(NULL),
      HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add<HStoreNamedField>(
      object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
      site);
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
                        object);

  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS,
                   INITIALIZING_STORE);
  return feedback_vector;
}


Handle<Code> CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      NEVER_RETURN_HOLE, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double()) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, static_cast<HValue*>(NULL),
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, static_cast<HValue*>(NULL), access);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, static_cast<HValue*>(NULL),
                              value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


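// Loads |key| from the unmapped (regular) argument backing store that sloppy
// arguments objects keep at elements[1], deopting if the key is out of range.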
HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key) {
  HValue* result;
  HInstruction* backing_store = Add<HLoadKeyed>(
      elements, graph()->GetConstant1(), static_cast<HValue*>(NULL),
      FAST_ELEMENTS, ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length =
      Add<HLoadNamedField>(backing_store, static_cast<HValue*>(NULL),
                           HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    result = Add<HLoadKeyed>(backing_store, key, static_cast<HValue*>(NULL),
                             FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE);
  }
  in_unmapped_range.ElseDeopt("Outside of range");
  in_unmapped_range.End();
  return result;
}


template <>
HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped
  // arguments are stored in the context at indexes given by elements[key + 2].
  // Unmapped arguments are stored as regular indexed properties in the
  // arguments array, held at elements[1]. See NewSloppyArguments() in
  // runtime.cc for a detailed look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key,
  // missing to the runtime if the unmapped arguments array is not a fixed
  // array or if key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
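  //
  // For illustration (a hypothetical layout, not something this stub emits):
  // for a sloppy-mode function f(a, b) whose formals are context-allocated,
  // the elements of arguments in a call f(1, 2) would look roughly like
  //   [<context>, <unmapped FixedArray>, Smi(index_of_a), Smi(index_of_b)],
  // so reading arguments[0] yields context[index_of_a], while reading
  // arguments[5] falls through to the unmapped array and then the runtime.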

  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
                                            Token::LT);
  positive_smi.ThenDeopt("key is negative");
  positive_smi.End();

  HValue* constant_two = Add<HConstant>(2);
  HValue* elements = AddLoadElements(receiver, static_cast<HValue*>(NULL));
  HValue* elements_length =
      Add<HLoadNamedField>(elements, static_cast<HValue*>(NULL),
                           HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted<HAdd>(key, constant_two);
    HInstruction* mapped_index =
        Add<HLoadKeyed>(elements, index, static_cast<HValue*>(NULL),
                        FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
                                              graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context =
          Add<HLoadKeyed>(elements, graph()->GetConstant0(),
                          static_cast<HValue*>(NULL), FAST_ELEMENTS);
      DCHECK(Context::kHeaderSize == FixedArray::kHeaderSize);
      HValue* result =
          Add<HLoadKeyed>(the_context, mapped_index, static_cast<HValue*>(NULL),
                          FAST_ELEMENTS, ALLOW_RETURN_HOLE);
      environment()->Push(result);
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key);
      environment()->Push(result);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key);
    environment()->Push(result);
  }
  in_range.End();

  return environment()->Pop();
}


Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, static_cast<HValue*>(NULL),
        access.WithRepresentation(Representation::Tagged()));
    // Store the double value into it.
    access = HObjectAccess::ForHeapNumberValue();
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add<HStoreNamedField>(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation());
  return GetParameter(2);
}


Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
  HValue* string = BuildLoadNamedField(GetParameter(0),
      FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
  return BuildLoadNamedField(string,
      FieldIndex::ForInObjectOffset(String::kLengthOffset));
}


Handle<Code> StringLengthStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive; the generated code must be
      // tuned so that it doesn't build an eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      context(),
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstant0());
  }
  if_nil.End();
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            Type::String(zone()), right_type,
            result_type, state.fixed_right_arg(),
            allocation_mode));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, Type::String(zone()),
            result_type, state.fixed_right_arg(),
            allocation_mode));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right,
            left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right,
        left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode);
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  // Reuse the double box of one of the operands if we are allowed to (i.e.
  // chained binops).
  if (state.CanReuseDoubleBox()) {
    HValue* operand = (state.mode() == OVERWRITE_LEFT) ? left : right;
    IfBuilder if_heap_number(this);
    if_heap_number.If<HHasInstanceTypeAndBranch>(operand, HEAP_NUMBER_TYPE);
    if_heap_number.Then();
    Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
    Push(operand);
    if_heap_number.Else();
    Push(result);
    if_heap_number.End();
    result = Pop();
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType(zone());
  Type* right_type = state.GetRightType(zone());
  Type* result_type = state.GetResultType(zone());
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right,
                              left_type, right_type, result_type,
                              state.fixed_right_arg(), allocation_mode);
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildCheckString(left);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildCheckString(right);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  HValue* true_value = NULL;
  HValue* false_value = NULL;

  switch (stub->mode()) {
    case ToBooleanStub::RESULT_AS_SMI:
      true_value = graph()->GetConstant1();
      false_value = graph()->GetConstant0();
      break;
    case ToBooleanStub::RESULT_AS_ODDBALL:
      true_value = graph()->GetConstantTrue();
      false_value = graph()->GetConstantFalse();
      break;
    case ToBooleanStub::RESULT_AS_INVERSE_ODDBALL:
      true_value = graph()->GetConstantFalse();
      false_value = graph()->GetConstantTrue();
      break;
  }

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(true_value);
  if_true.Else();
  if_true.End();
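  // The Then() branch returned above, so control only reaches this point via
  // the (empty) Else() path, i.e. when the value is not truthy.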
  return false_value;
}


Handle<Code> ToBooleanStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  Handle<Object> placeholder_value(Smi::FromInt(0), isolate());
  Handle<PropertyCell> placeholder_cell =
      isolate()->factory()->NewPropertyCell(placeholder_value);

  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);

  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    Handle<Map> placeholder_map = isolate()->factory()->meta_map();
    HValue* global = Add<HConstant>(
        StoreGlobalStub::global_placeholder(isolate()));
    Add<HCheckMaps>(global, placeholder_map);
  }

  HValue* cell = Add<HConstant>(placeholder_cell);
  HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
  HValue* cell_contents = Add<HLoadNamedField>(
      cell, static_cast<HValue*>(NULL), access);

  if (stub->is_constant()) {
    IfBuilder builder(this);
    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
    builder.Then();
    builder.ElseDeopt("Unexpected cell contents in constant global store");
    builder.End();
  } else {
    // Load the payload of the global parameter cell. A hole indicates that the
    // property has been deleted and that the store must be handled by the
    // runtime.
    IfBuilder builder(this);
    HValue* hole_value = graph()->GetConstantHole();
    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
    builder.Then();
    builder.Deopt("Unexpected cell contents in global store");
    builder.Else();
    Add<HStoreNamedField>(cell, access, value);
    builder.End();
  }

  return value;
}


Handle<Code> StoreGlobalStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* value = GetParameter(ElementsTransitionAndStoreStub::kValueIndex);
  HValue* map = GetParameter(ElementsTransitionAndStoreStub::kMapIndex);
  HValue* key = GetParameter(ElementsTransitionAndStoreStub::kKeyIndex);
  HValue* object = GetParameter(ElementsTransitionAndStoreStub::kObjectIndex);

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER);
  } else {
    info()->MarkAsSavesCallerDoubles();

    BuildTransitionElementsKind(object, map,
                                casted_stub()->from_kind(),
                                casted_stub()->to_kind(),
                                casted_stub()->is_jsarray());

    BuildUncheckedMonomorphicElementAccess(object, key, value,
                                           casted_stub()->is_jsarray(),
                                           casted_stub()->to_kind(),
                                           STORE, ALLOW_RETURN_HOLE,
                                           casted_stub()->store_mode());
  }

  return value;
}


Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
  return DoGenerateCode(this);
}


void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    IfBuilder* builder,
    HValue* optimized_map,
    HValue* map_index) {
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->Then();
  HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kCachedCodeOffset);
  // ...and the literals.
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);

  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        literals);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
      js_function);

  // The builder continues in the "then" state after this function.
}


void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
                                                HValue* shared_info) {
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        graph()->GetConstantUndefined());
  HValue* code_object = Add<HLoadNamedField>(
      shared_info, static_cast<HValue*>(NULL), HObjectAccess::ForCodeOffset());
  Add<HStoreCodeEntry>(js_function, code_object);
}


HInstruction* CodeStubGraphBuilderBase::LoadFromOptimizedCodeMap(
    HValue* optimized_map,
    HValue* iterator,
    int field_offset) {
  // By making sure to express these loads in the form [<hvalue> + constant]
  // the keyed load can be hoisted.
  DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength);
  HValue* field_slot = iterator;
  if (field_offset > 0) {
    HValue* field_offset_value = Add<HConstant>(field_offset);
    field_slot = AddUncasted<HAdd>(iterator, field_offset_value);
  }
  HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot,
      static_cast<HValue*>(NULL), FAST_ELEMENTS);
  return field_entry;
}


void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* js_function,
    HValue* shared_info,
    HValue* native_context) {
  Counters* counters = isolate()->counters();
  IfBuilder is_optimized(this);
  HInstruction* optimized_map = Add<HLoadNamedField>(
      shared_info, static_cast<HValue*>(NULL),
      HObjectAccess::ForOptimizedCodeMap());
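  // A SharedFunctionInfo with no cached optimized code stores the Smi zero in
  // its optimized code map field, hence the comparison against a zero
  // constant below (despite the variable's name).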
  HValue* null_constant = Add<HConstant>(0);
  is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
  is_optimized.Then();
  {
    BuildInstallCode(js_function, shared_info);
  }
  is_optimized.Else();
  {
    AddIncrementCounter(counters->fast_new_closure_try_optimized());
    // optimized_map points to a fixed array of entries of kEntryLength
    // elements each (native context, optimized code, literals, osr ast id).
    // The map must never be empty, so check the first entry.
    HValue* first_entry_index =
        Add<HConstant>(SharedFunctionInfo::kEntriesStart);
    IfBuilder already_in(this);
    BuildCheckAndInstallOptimizedCode(js_function, native_context, &already_in,
                                      optimized_map, first_entry_index);
    already_in.Else();
    {
      // Iterate through the rest of the map backwards. Do not double check
      // the first entry. After the loop, if no matching optimized code was
      // found, install unoptimized code.
      // for (i = map.length() - SharedFunctionInfo::kEntryLength;
      //      i > SharedFunctionInfo::kEntriesStart;
      //      i -= SharedFunctionInfo::kEntryLength) { .. }
      HValue* shared_function_entry_length =
          Add<HConstant>(SharedFunctionInfo::kEntryLength);
      LoopBuilder loop_builder(this,
                               context(),
                               LoopBuilder::kPostDecrement,
                               shared_function_entry_length);
      HValue* array_length = Add<HLoadNamedField>(
          optimized_map, static_cast<HValue*>(NULL),
          HObjectAccess::ForFixedArrayLength());
      HValue* start_pos = AddUncasted<HSub>(array_length,
                                            shared_function_entry_length);
      HValue* slot_iterator = loop_builder.BeginBody(start_pos,
                                                     first_entry_index,
                                                     Token::GT);
      {
        IfBuilder done_check(this);
        BuildCheckAndInstallOptimizedCode(js_function, native_context,
                                          &done_check,
                                          optimized_map,
                                          slot_iterator);
        // Fall out of the loop.
        loop_builder.Break();
      }
      loop_builder.EndBody();

      // If slot_iterator equals the first entry index, then we failed to find
      // and install optimized code.
      IfBuilder no_optimized_code_check(this);
      no_optimized_code_check.If<HCompareNumericAndBranch>(
          slot_iterator, first_entry_index, Token::EQ);
      no_optimized_code_check.Then();
      {
        // Store the unoptimized code.
        BuildInstallCode(js_function, shared_info);
      }
    }
  }
}


template<>
HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
  Counters* counters = isolate()->counters();
  Factory* factory = isolate()->factory();
  HInstruction* empty_fixed_array =
      Add<HConstant>(factory->empty_fixed_array());
  HValue* shared_info = GetParameter(0);

  AddIncrementCounter(counters->fast_new_closure_total());

  // Create a new closure from the given function info in new space.
  HValue* size = Add<HConstant>(JSFunction::kSize);
  HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
                                             NOT_TENURED, JS_FUNCTION_TYPE);

  int map_index = Context::FunctionMapIndex(casted_stub()->strict_mode(),
                                            casted_stub()->kind());

  // Compute the function map in the current native context and set that
  // as the map of the allocated object.
  HInstruction* native_context = BuildGetNativeContext();
  HInstruction* map_slot_value = Add<HLoadNamedField>(
      native_context, static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(map_index));
  Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);

  // Initialize the rest of the function.
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
                        empty_fixed_array);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
                        graph()->GetConstantHole());
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForSharedFunctionInfoPointer(),
                        shared_info);
  Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                        context());

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  // But first check if there is an optimized version for our context.
  if (FLAG_cache_optimized_code) {
    BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
  } else {
    BuildInstallCode(js_function, shared_info);
  }

  return js_function;
}


Handle<Code> FastNewClosureStub::GenerateCode() {
  return DoGenerateCode(this);
}


template<>
HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
  int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;

  // Get the function.
  HParameter* function = GetParameter(FastNewContextStub::kFunction);

  // Allocate the context in new space.
  HAllocate* function_context = Add<HAllocate>(
      Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
      HType::HeapObject(), NOT_TENURED, FIXED_ARRAY_TYPE);

  // Set up the object header.
  AddStoreMapConstant(function_context,
                      isolate()->factory()->function_context_map());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForFixedArrayLength(),
                        Add<HConstant>(length));

  // Set up the fixed slots.
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
                        function);
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
                        context());
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
                        graph()->GetConstant0());

  // Copy the global object from the previous context.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  Add<HStoreNamedField>(function_context,
                        HObjectAccess::ForContextSlot(
                            Context::GLOBAL_OBJECT_INDEX),
                        global_object);

  // Initialize the rest of the slots to undefined.
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
    Add<HStoreNamedField>(function_context,
                          HObjectAccess::ForContextSlot(i),
                          graph()->GetConstantUndefined());
  }

  return function_context;
}


Handle<Code> FastNewContextStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  Add<HCheckSmi>(key);

  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
}


Handle<Code> LoadDictionaryElementStub::GenerateCode() {
  return DoGenerateCode(this);
}

1581
1582 template<>
BuildCodeStub()1583 HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
1584 // Determine the parameters.
1585 HValue* length = GetParameter(RegExpConstructResultStub::kLength);
1586 HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
1587 HValue* input = GetParameter(RegExpConstructResultStub::kInput);
1588
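  // This stub must not set up an eager stack frame.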
  info()->MarkMustNotHaveEagerFrame();

  return BuildRegExpConstructResult(length, index, input);
}


Handle<Code> RegExpConstructResultStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
    : public CodeStubGraphBuilderBase {
 public:
  CodeStubGraphBuilder(Isolate* isolate, KeyedLoadGenericStub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
                                HValue* receiver,
                                HValue* key,
                                HValue* instance_type,
                                HValue* bit_field2,
                                ElementsKind kind);

  KeyedLoadGenericStub* casted_stub() {
    return static_cast<KeyedLoadGenericStub*>(stub());
  }
};


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildElementsKindLimitCheck(
    HGraphBuilder::IfBuilder* if_builder, HValue* bit_field2,
    ElementsKind kind) {
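  // Comparing the whole bit_field2 byte against encode(kind + 1) relies on
  // ElementsKindBits occupying the topmost bits of bit_field2, so that the
  // numeric comparison orders maps by elements kind (an assumption about the
  // bit layout, not asserted here).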
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(!IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

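  // JSArray receivers bound the key by the array's length rather than by the
  // backing store's capacity, hence the separate paths below (the boolean
  // argument is the is_js_array flag).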
  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}


void CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildExternalElementLoad(
    HGraphBuilder::IfBuilder* if_builder, HValue* receiver, HValue* key,
    HValue* instance_type, HValue* bit_field2, ElementsKind kind) {
  DCHECK(IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
}


HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // Split into a smi/integer case and a unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number).
    key = Pop();

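    // Receivers that need an access check or have an indexed interceptor are
    // rejected by BuildJSObjectCheck.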
    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                       HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                             HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 = Add<HLoadNamedField>(map,
                                              static_cast<HValue*>(NULL),
                                              HObjectAccess::ForMapBitField2());

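    // Dispatch on the receiver's elements kind. The branches are ordered by
    // increasing ElementsKind value, so each limit check also covers every
    // kind below the one named; the first branch handles all fast smi/object
    // kinds up to FAST_HOLEY_ELEMENTS.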
    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then".
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then".
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOPPY_ARGUMENTS_ELEMENTS);
    // Sloppy (non-strict) arguments elements are not handled here;
    // deoptimize eagerly instead.
    Add<HDeoptimize>("non-strict elements in KeyedLoadGenericStub",
                     Deoptimizer::EAGER);
    Push(graph()->GetConstant0());

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_INT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_UINT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_INT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_UINT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_INT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_UINT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_FLOAT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_FLOAT64_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type,
                             bit_field2, EXTERNAL_UINT8_CLAMPED_ELEMENTS);

    kind_if.ElseDeopt("ElementsKind unhandled in KeyedLoadGenericStub");

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
                         (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is a string, properties are in dictionary mode.
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, static_cast<HValue*>(NULL),
          HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                               HObjectAccess::ForNameHashField());

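      // The raw hash field carries flag bits below the hash; shift them out
      // to get the name's hash value.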
      hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));

      HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
                                                          properties,
                                                          key,
                                                          hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // Key is a string, properties are in fast mode.
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                         HObjectAccess::ForMap());
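      // Each KeyedLookupCache entry is a (map, key) pair, so scale the hash
      // by the entry length (2 here) to get the bucket's base index.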
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);

      HIfContinuation inline_or_runtime_continuation(
          graph()->CreateBasicBlock(), graph()->CreateBasicBlock());
      {
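        // Probe the bucket's entries with chained IfBuilders: a probe whose
        // (map, key) pair matches pushes the cached field offset; each Else
        // falls through to the next probe, and all branches join in
        // inline_or_runtime_continuation.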
        IfBuilder lookup_ifs[KeyedLookupCache::kEntriesPerBucket];
        for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
             ++probe) {
          IfBuilder* lookup_if = &lookup_ifs[probe];
          lookup_if->Initialize(this);
          int probe_base = probe * KeyedLookupCache::kEntryLength;
          HValue* map_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
          map_index->ClearFlag(HValue::kCanOverflow);
          HValue* key_index = AddUncasted<HAdd>(
              base_index,
              Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
          key_index->ClearFlag(HValue::kCanOverflow);
          HValue* map_to_check =
              Add<HLoadKeyed>(cache_keys, map_index,
                              static_cast<HValue*>(NULL),
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map);
          lookup_if->And();
          HValue* key_to_check =
              Add<HLoadKeyed>(cache_keys, key_index,
                              static_cast<HValue*>(NULL),
                              FAST_ELEMENTS, NEVER_RETURN_HOLE, 0);
          lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key);
          lookup_if->Then();
          {
            ExternalReference cache_field_offsets_ref =
                ExternalReference::keyed_lookup_cache_field_offsets(isolate());
            HValue* cache_field_offsets =
                Add<HConstant>(cache_field_offsets_ref);
            HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe));
            index->ClearFlag(HValue::kCanOverflow);
            HValue* property_index = Add<HLoadKeyed>(
                cache_field_offsets, index, static_cast<HValue*>(NULL),
                EXTERNAL_INT32_ELEMENTS, NEVER_RETURN_HOLE, 0);
            Push(property_index);
          }
          lookup_if->Else();
        }
        for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) {
          lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation);
        }
      }

      IfBuilder inline_or_runtime(this, &inline_or_runtime_continuation);
      inline_or_runtime.Then();
      {
        // Found a cached index, load property inline.
        Push(Add<HLoadFieldByIndex>(receiver, Pop()));
      }
      inline_or_runtime.Else();
      {
        // KeyedLookupCache miss; call runtime.
        Add<HPushArguments>(receiver, key);
        Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
      }
      inline_or_runtime.End();
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}


Handle<Code> KeyedLoadGenericStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<VectorLoadStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(VectorLoadICDescriptor::kReceiverIndex);
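  // There is no Hydrogen fast path for vector-based load ICs here; the stub
  // unconditionally deoptimizes.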
  Add<HDeoptimize>("Always deopt", Deoptimizer::EAGER);
  return receiver;
}


Handle<Code> VectorLoadStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<VectorKeyedLoadStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(VectorLoadICDescriptor::kReceiverIndex);
  // As above, this stub unconditionally deoptimizes.
  Add<HDeoptimize>("Always deopt", Deoptimizer::EAGER);
  return receiver;
}


Handle<Code> VectorKeyedLoadStub::GenerateCode() {
  return DoGenerateCode(this);
}


Handle<Code> MegamorphicLoadStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<MegamorphicLoadStub>::BuildCodeStub() {
  // The return address is on the stack.
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* name = GetParameter(LoadDescriptor::kNameIndex);

  // Probe the stub cache.
  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::LOAD_IC));
  Add<HTailCallThroughMegamorphicCache>(receiver, name, flags);
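  // The probe tail-calls out of this stub, so execution never falls through;
  // the constant returned below only satisfies the graph builder.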

  // We never continue.
  return graph()->GetConstant0();
}
} }  // namespace v8::internal