// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/lithium.h"

#include "src/ast/scopes.h"
#include "src/codegen.h"

#if V8_TARGET_ARCH_IA32
#include "src/crankshaft/ia32/lithium-ia32.h"  // NOLINT
#include "src/crankshaft/ia32/lithium-codegen-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/crankshaft/x64/lithium-x64.h"  // NOLINT
#include "src/crankshaft/x64/lithium-codegen-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/crankshaft/arm/lithium-arm.h"  // NOLINT
#include "src/crankshaft/arm/lithium-codegen-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_PPC
#include "src/crankshaft/ppc/lithium-ppc.h"  // NOLINT
#include "src/crankshaft/ppc/lithium-codegen-ppc.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/crankshaft/mips/lithium-mips.h"  // NOLINT
#include "src/crankshaft/mips/lithium-codegen-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/crankshaft/arm64/lithium-arm64.h"  // NOLINT
#include "src/crankshaft/arm64/lithium-codegen-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/crankshaft/mips64/lithium-mips64.h"  // NOLINT
#include "src/crankshaft/mips64/lithium-codegen-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/crankshaft/x87/lithium-x87.h"  // NOLINT
#include "src/crankshaft/x87/lithium-codegen-x87.h"  // NOLINT
#elif V8_TARGET_ARCH_S390
#include "src/crankshaft/s390/lithium-s390.h"  // NOLINT
#include "src/crankshaft/s390/lithium-codegen-s390.h"  // NOLINT
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {

const auto GetRegConfig = RegisterConfiguration::Crankshaft;

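// Prints a short, human-readable description of this operand, e.g. "v5(=3S)"
// for an unallocated operand fixed to spill slot 3, "v5(R)" for one that must
// get a register, "[stack:7]" for an allocated stack slot, or "[rax|R]" for an
// allocated register (register names depend on the target architecture).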
void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
            stream->Add("(=invalid_reg#%d)", reg_index);
          } else {
            const char* register_name =
                GetRegConfig()->GetGeneralRegisterName(reg_index);
            stream->Add("(=%s)", register_name);
          }
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
            stream->Add("(=invalid_double_reg#%d)", reg_index);
          } else {
            const char* double_register_name =
                GetRegConfig()->GetDoubleRegisterName(reg_index);
            stream->Add("(=%s)", double_register_name);
          }
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
        stream->Add("(=invalid_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]",
                    GetRegConfig()->GetGeneralRegisterName(reg_index));
      }
      break;
    }
    case DOUBLE_REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
        stream->Add("(=invalid_double_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", GetRegConfig()->GetDoubleRegisterName(reg_index));
      }
      break;
    }
  }
}


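// Each operand kind in LITHIUM_OPERAND_LIST keeps a small preallocated cache:
// SetUpCache() fills slot i with an operand of that kind and index i (so low,
// frequently used indices can be handed out as shared instances), and
// TearDownCache() releases the array.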
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
LSubKindOperand<kOperandKind, kNumCachedOperands>*
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
  if (cache) return;
  cache = new LSubKindOperand[kNumCachedOperands];
  for (int i = 0; i < kNumCachedOperands; i++) {
    cache[i].ConvertTo(kOperandKind, i);
  }
}


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
  delete[] cache;
  cache = NULL;
}


void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}


void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}


bool LParallelMove::IsRedundant() const {
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsRedundant()) return false;
  }
  return true;
}


void LParallelMove::PrintDataTo(StringStream* stream) const {
  bool first = true;
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsEliminated()) {
      LOperand* source = move_operands_[i].source();
      LOperand* destination = move_operands_[i].destination();
      if (!first) stream->Add(" ");
      first = false;
      if (source->Equals(destination)) {
        destination->PrintTo(stream);
      } else {
        destination->PrintTo(stream);
        stream->Add(" = ");
        source->PrintTo(stream);
      }
      stream->Add(";");
    }
  }
}


void LEnvironment::PrintTo(StringStream* stream) {
  stream->Add("[id=%d|", ast_id().ToInt());
  if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
    stream->Add("deopt_id=%d|", deoptimization_index());
  }
  stream->Add("parameters=%d|", parameter_count());
  stream->Add("arguments_stack_height=%d|", arguments_stack_height());
  for (int i = 0; i < values_.length(); ++i) {
    if (i != 0) stream->Add(";");
    if (values_[i] == NULL) {
      stream->Add("[hole]");
    } else {
      values_[i]->PrintTo(stream);
    }
  }
  stream->Add("]");
}


void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}


void LPointerMap::RemovePointer(LOperand* op) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (pointer_operands_[i]->Equals(op)) {
      pointer_operands_.Remove(i);
      --i;
    }
  }
}


void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}


void LPointerMap::PrintTo(StringStream* stream) {
  stream->Add("{");
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (i != 0) stream->Add(";");
    pointer_operands_[i]->PrintTo(stream);
  }
  stream->Add("}");
}

LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : base_frame_slots_(info->IsStub()
                            ? TypedFrameConstants::kFixedSlotCount
                            : StandardFrameConstants::kFixedSlotCount),
      current_frame_slots_(base_frame_slots_),
      info_(info),
      graph_(graph),
      instructions_(32, info->zone()),
      pointer_maps_(8, info->zone()),
      deprecation_dependencies_(32, info->zone()),
      stability_dependencies_(8, info->zone()) {}

LLabel* LChunk::GetLabel(int block_id) const {
  HBasicBlock* block = graph_->blocks()->at(block_id);
  int first_instruction = block->first_instruction_index();
  return LLabel::cast(instructions_[first_instruction]);
}


int LChunk::LookupDestination(int block_id) const {
  LLabel* cur = GetLabel(block_id);
  while (cur->replacement() != NULL) {
    cur = cur->replacement();
  }
  return cur->block_id();
}

Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  DCHECK(!label->HasReplacement());
  return label->label();
}


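// Marks basic blocks that contain nothing but their label, redundant gaps and
// a final goto: such a label gets a replacement so that jumps to the block can
// be retargeted directly to the goto's destination.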
void LChunk::MarkEmptyBlocks() {
  LPhase phase("L_Mark empty blocks", this);
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    int first = block->first_instruction_index();
    int last = block->last_instruction_index();
    LInstruction* first_instr = instructions()->at(first);
    LInstruction* last_instr = instructions()->at(last);

    LLabel* label = LLabel::cast(first_instr);
    if (last_instr->IsGoto()) {
      LGoto* goto_instr = LGoto::cast(last_instr);
      if (label->IsRedundant() &&
          !label->is_loop_header()) {
        bool can_eliminate = true;
        for (int i = first + 1; i < last && can_eliminate; ++i) {
          LInstruction* cur = instructions()->at(i);
          if (cur->IsGap()) {
            LGap* gap = LGap::cast(cur);
            if (!gap->IsRedundant()) {
              can_eliminate = false;
            }
          } else {
            can_eliminate = false;
          }
        }
        if (can_eliminate) {
          label->set_replacement(GetLabel(goto_instr->block_id()));
        }
      }
    }
  }
}


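// Appends |instr| together with a fresh gap: the gap goes before control
// instructions and after all others, and the instruction's position is
// recorded on its pointer map, if it has one.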
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new (zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}

LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}


int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
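  // For example, with two declared parameters the receiver (index 0) maps to
  // slot -3, the first parameter to -2 and the second to -1.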
  int result = index - info()->num_parameters() - 1;

  DCHECK(result < 0);
  return result;
}


// A parameter relative to ebp in the arguments stub.
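// For example, with two parameters the receiver (index -1) is at
// 4 * kPointerSize and the first parameter (index 0) at 3 * kPointerSize.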
int LChunk::ParameterAt(int index) {
  DCHECK(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}


LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}


void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}


HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}


Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}


void LChunk::CommitDependencies(Handle<Code> code) const {
  if (!code->is_optimized_code()) return;
  HandleScope scope(isolate());

  for (Handle<Map> map : deprecation_dependencies_) {
    DCHECK(!map->is_deprecated());
    DCHECK(map->CanBeDeprecated());
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (Handle<Map> map : stability_dependencies_) {
    DCHECK(map->is_stable());
    DCHECK(map->CanTransition());
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->dependencies()->Commit(code);
}


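// Builds the Lithium chunk for |graph| and runs the register allocator on it.
// Returns NULL (and aborts the optimization) if the graph has too many virtual
// registers or if chunk building or allocation fails.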
LChunk* LChunk::NewChunk(HGraph* graph) {
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}


Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0,
                           CodeObjectRequired::kYes);
  // Code serializer only takes unoptimized code.
  DCHECK(!info()->will_serialize());
  LCodeGen generator(this, &assembler, info());

  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    generator.CheckEnvironmentUsage();
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Handle<Code> code = CodeGenerator::MakeCodeEpilogue(
        &assembler, nullptr, info(), assembler.CodeObject());
    generator.FinishCode(code);
    CommitDependencies(code);
    Handle<ByteArray> source_positions =
        generator.source_position_table_builder()->ToSourcePositionTable(
            info()->isolate(), Handle<AbstractCode>::cast(code));
    code->set_source_position_table(*source_positions);
    code->set_is_crankshafted(true);

    CodeGenerator::PrintCode(code, info());
    return code;
  }
  assembler.AbortedCodeGeneration();
  return Handle<Code>::null();
}


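// Records which double registers the allocator assigned and, when the code
// saves caller doubles, reserves frame slots for each of them (two slots per
// double when kDoubleSize == 2 * kPointerSize, e.g. on 32-bit targets).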
void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
  allocated_double_registers_ = allocated_registers;
  BitVector* doubles = allocated_double_registers();
  BitVector::Iterator iterator(doubles);
  while (!iterator.Done()) {
    if (info()->saves_caller_doubles()) {
      if (kDoubleSize == kPointerSize * 2) {
        current_frame_slots_ += 2;
      } else {
        current_frame_slots_++;
      }
    }
    iterator.Advance();
  }
}


void LChunkBuilderBase::Abort(BailoutReason reason) {
  info()->AbortOptimization(reason);
  status_ = ABORTED;
}


void LChunkBuilderBase::Retry(BailoutReason reason) {
  info()->RetryOptimization(reason);
  status_ = ABORTED;
}

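// For call instructions, appends an LLazyBailout carrying the environment the
// deoptimizer needs if the called code triggers a lazy deoptimization.
// Syntactic tail calls need special handling because the current frame (and
// any adaptor or tail-caller frames on top of it) is gone by the time the
// call returns.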
void LChunkBuilderBase::CreateLazyBailoutForCall(HBasicBlock* current_block,
                                                 LInstruction* instr,
                                                 HInstruction* hydrogen_val) {
  if (!instr->IsCall()) return;

  HEnvironment* hydrogen_env = current_block->last_environment();
  HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
  DCHECK_NOT_NULL(hydrogen_env);
  if (instr->IsSyntacticTailCall()) {
    // If it was a syntactic tail call we need to drop the current frame and
    // all the frames on top of it that are either an arguments adaptor frame
    // or a tail caller frame.
    hydrogen_env = hydrogen_env->outer();
    while (hydrogen_env != nullptr &&
           (hydrogen_env->frame_type() == ARGUMENTS_ADAPTOR ||
            hydrogen_env->frame_type() == TAIL_CALLER_FUNCTION)) {
      hydrogen_env = hydrogen_env->outer();
    }
    if (hydrogen_env != nullptr) {
      if (hydrogen_env->frame_type() == JS_FUNCTION) {
        // In case an outer frame is a function frame we have to replay the
        // environment manually because
        // 1) it does not contain the result of the inlined function yet,
        // 2) we can't find the proper simulate that corresponds to the point
        //    after the inlined call to do a ReplayEnvironment() on.
        // So we push the return value on top of the outer environment.
        // As for JS_GETTER/JS_SETTER/JS_CONSTRUCT nothing has to be done here:
        // the deoptimizer ensures that the result of the callee is correctly
        // propagated to the result register during deoptimization.
        hydrogen_env = hydrogen_env->Copy();
        hydrogen_env->Push(hydrogen_val);
      }
    } else {
      // Although we don't need this lazy bailout for normal execution
      // (because when we tail call from the outermost function we should pop
      // its frame) we still need it when the debugger is on.
      hydrogen_env = current_block->last_environment();
    }
  } else {
    if (hydrogen_val->HasObservableSideEffects()) {
      HSimulate* sim = HSimulate::cast(hydrogen_val->next());
      sim->ReplayEnvironment(hydrogen_env);
      hydrogen_value_for_lazy_bailout = sim;
    }
  }
  LInstruction* bailout = LChunkBuilderBase::AssignEnvironment(
      new (zone()) LLazyBailout(), hydrogen_env);
  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
  chunk_->AddInstruction(bailout, current_block);
}

LInstruction* LChunkBuilderBase::AssignEnvironment(LInstruction* instr,
                                                   HEnvironment* hydrogen_env) {
  int argument_index_accumulator = 0;
  ZoneList<HValue*> objects_to_materialize(0, zone());
  DCHECK_NE(TAIL_CALLER_FUNCTION, hydrogen_env->frame_type());
  instr->set_environment(CreateEnvironment(
      hydrogen_env, &argument_index_accumulator, &objects_to_materialize));
  return instr;
}

LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env, int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  BailoutId ast_id = hydrogen_env->ast_id();
  DCHECK(!ast_id.IsNone() ||
         (hydrogen_env->frame_type() != JS_FUNCTION &&
          hydrogen_env->frame_type() != TAIL_CALLER_FUNCTION));

  if (hydrogen_env->frame_type() == TAIL_CALLER_FUNCTION) {
    // Skip potential outer arguments adaptor frame.
    HEnvironment* outer_hydrogen_env = hydrogen_env->outer();
    if (outer_hydrogen_env != nullptr &&
        outer_hydrogen_env->frame_type() == ARGUMENTS_ADAPTOR) {
      outer_hydrogen_env = outer_hydrogen_env->outer();
    }
    LEnvironment* outer = CreateEnvironment(
        outer_hydrogen_env, argument_index_accumulator, objects_to_materialize);
    return new (zone())
        LEnvironment(hydrogen_env->closure(), hydrogen_env->frame_type(),
                     ast_id, 0, 0, 0, outer, hydrogen_env->entry(), zone());
  }

  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
                        objects_to_materialize);

  int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
                          ? 0
                          : hydrogen_env->specials_count();

  int value_count = hydrogen_env->length() - omitted_count;
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Store the environment description into the environment
  // (with holes for nested objects)
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i) &&
        hydrogen_env->frame_type() != JS_FUNCTION) {
      continue;
    }
    LOperand* op;
    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArguments());  // Do not deopt outgoing arguments
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      op = LEnvironment::materialization_marker();
    } else {
      op = UseAny(value);
    }
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Recursively store the nested objects into the environment
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}


// Add an object to the supplied environment and object materialization list.
//
// Notes:
//
// We are building three lists here:
//
// 1. In the result->object_mapping_ list (added to by the
//    LEnvironment::Add*Object methods), we store the lengths (number
//    of fields) of the captured objects in depth-first traversal order, or
//    in case of duplicated objects, we store the index to the duplicate object
//    (with a tag to differentiate between captured and duplicated objects).
//
// 2. The object fields are stored in the result->values_ list
//    (added to by the LEnvironment.AddValue method) sequentially as lists
//    of fields with holes for nested objects (the holes will be expanded
//    later by LCodegen::AddToTranslation according to the
//    LEnvironment.object_mapping_ list).
//
// 3. The auxiliary objects_to_materialize array stores the hydrogen values
//    in the same order as result->object_mapping_ list. This is used
//    to detect duplicate values and calculate the corresponding object index.
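//
// For example, materializing a captured object C with fields {a, D}, where D
// is a captured object that already appears earlier in the list, adds C's
// field count to result->object_mapping_ and a's operand plus a hole for D to
// result->values_; the recursive call for D then finds its earlier entry and
// records a duplicate-object index instead of emitting D's fields again.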
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
    ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
  int object_index = objects_to_materialize->length();
  // Store the hydrogen value into the de-duplication array
  objects_to_materialize->Add(value, zone());
  // Find out whether we are storing a duplicated value
  int previously_materialized_object = -1;
  for (int prev = 0; prev < object_index; ++prev) {
    if (objects_to_materialize->at(prev) == value) {
      previously_materialized_object = prev;
      break;
    }
  }
  // Store the captured object length (or duplicated object index)
  // into the environment. For duplicated objects, we stop here.
  int length = value->OperandCount();
  bool is_arguments = value->IsArgumentsObject();
  if (previously_materialized_object >= 0) {
    result->AddDuplicateObject(previously_materialized_object);
    return;
  } else {
    result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
  }
  // Store the captured object's fields into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    LOperand* op;
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      // Insert a hole for nested objects
      op = LEnvironment::materialization_marker();
    } else {
      DCHECK(!arg_value->IsPushArguments());
      // For ordinary values, tell the register allocator we need the value
      // to be alive here
      op = UseAny(arg_value);
    }
    result->AddValue(op,
                     arg_value->representation(),
                     arg_value->CheckFlag(HInstruction::kUint32));
  }
  // Recursively store all the nested captured objects into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      AddObjectToMaterialize(arg_value, objects_to_materialize, result);
    }
  }
}


LPhase::~LPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
  }
}


}  // namespace internal
}  // namespace v8