// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/address-map.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"
#include "src/frames-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

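// A chain of jump tables created via AddJumpTable; the tables themselves are
// emitted en bloc at the end of the generated code.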
class CodeGenerator::JumpTable final : public ZoneObject {
 public:
  JumpTable(JumpTable* next, Label** targets, size_t target_count)
      : next_(next), targets_(targets), target_count_(target_count) {}

  Label* label() { return &label_; }
  JumpTable* next() const { return next_; }
  Label** targets() const { return targets_; }
  size_t target_count() const { return target_count_; }

 private:
  Label label_;
  JumpTable* const next_;
  Label** const targets_;
  size_t const target_count_;
};


CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                             InstructionSequence* code, CompilationInfo* info)
    : frame_access_state_(new (code->zone()) FrameAccessState(frame)),
      linkage_(linkage),
      code_(code),
      info_(info),
      labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
      current_block_(RpoNumber::Invalid()),
      current_source_position_(SourcePosition::Unknown()),
      masm_(info->isolate(), nullptr, 0, CodeObjectRequired::kYes),
      resolver_(this),
      safepoints_(code->zone()),
      handlers_(code->zone()),
      deoptimization_states_(code->zone()),
      deoptimization_literals_(code->zone()),
      inlined_function_count_(0),
      translations_(code->zone()),
      last_lazy_deopt_pc_(0),
      jump_tables_(nullptr),
      ools_(nullptr),
      osr_pc_offset_(-1) {
  for (int i = 0; i < code->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
  if (code->ContainsCall()) {
    frame->MarkNeedsFrame();
  }
}


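// Generates the final code object: the prologue, all instruction blocks in
// assembly order (non-deferred blocks first, deferred blocks last),
// out-of-line code, and jump tables, then attaches the safepoint table,
// handler table, and deoptimization data to the resulting code.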
Handle<Code> CodeGenerator::GenerateCode() {
  CompilationInfo* info = this->info();

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in AssemblePrologue).
  FrameScope frame_scope(masm(), StackFrame::MANUAL);

  // Emit a code line info recording start event.
  PositionsRecorder* recorder = masm()->positions_recorder();
  LOG_CODE_EVENT(isolate(), CodeStartLinePosInfoRecordEvent(recorder));

  // Place function entry hook if requested to do so.
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm());
  }

  // Architecture-specific, linkage-specific prologue.
  info->set_prologue_offset(masm()->pc_offset());
  AssemblePrologue();

  // Define deoptimization literals for all inlined functions.
  DCHECK_EQ(0u, deoptimization_literals_.size());
  for (auto& inlined : info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      DefineDeoptimizationLiteral(inlined.shared_info);
    }
  }
  inlined_function_count_ = deoptimization_literals_.size();

  // Define deoptimization literals for all unoptimized code objects of inlined
  // functions. This ensures unoptimized code is kept alive by optimized code.
  for (auto& inlined : info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      DefineDeoptimizationLiteral(inlined.inlined_code_object_root);
    }
  }

  // Assemble all non-deferred blocks, followed by deferred ones.
  for (int deferred = 0; deferred < 2; ++deferred) {
    for (auto const block : code()->instruction_blocks()) {
      if (block->IsDeferred() == (deferred == 0)) {
        continue;
      }
      // Align loop headers on 16-byte boundaries.
      if (block->IsLoopHeader()) masm()->Align(16);
      // Ensure lazy deopt doesn't patch handler entry points.
      if (block->IsHandler()) EnsureSpaceForLazyDeopt();
      // Bind a label for a block.
      current_block_ = block->rpo_number();
      if (FLAG_code_comments) {
        // TODO(titzer): these code comments are a giant memory leak.
        Vector<char> buffer = Vector<char>::New(200);
        char* buffer_start = buffer.start();

        int next = SNPrintF(
            buffer, "-- B%d start%s%s%s%s", block->rpo_number().ToInt(),
            block->IsDeferred() ? " (deferred)" : "",
            block->needs_frame() ? "" : " (no frame)",
            block->must_construct_frame() ? " (construct frame)" : "",
            block->must_deconstruct_frame() ? " (deconstruct frame)" : "");

        buffer = buffer.SubVector(next, buffer.length());

        if (block->IsLoopHeader()) {
          next =
              SNPrintF(buffer, " (loop up to %d)", block->loop_end().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        if (block->loop_header().IsValid()) {
          next =
              SNPrintF(buffer, " (in loop %d)", block->loop_header().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        SNPrintF(buffer, " --");
        masm()->RecordComment(buffer_start);
      }
      masm()->bind(GetLabel(current_block_));
      for (int i = block->code_start(); i < block->code_end(); ++i) {
        AssembleInstruction(code()->InstructionAt(i));
      }
    }
  }

  // Assemble all out-of-line code.
  if (ools_) {
    masm()->RecordComment("-- Out of line code --");
    for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
      masm()->bind(ool->entry());
      ool->Generate();
      if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
    }
  }

  // Ensure there is space for lazy deoptimization in the code.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
    while (masm()->pc_offset() < target_offset) {
      masm()->nop();
    }
  }

  FinishCode(masm());

  // Emit the jump tables.
  if (jump_tables_) {
    masm()->Align(kPointerSize);
    for (JumpTable* table = jump_tables_; table; table = table->next()) {
      masm()->bind(table->label());
      AssembleJumpTable(table->targets(), table->target_count());
    }
  }

  safepoints()->Emit(masm(), frame()->GetSpillSlotCount());

  Handle<Code> result =
      v8::internal::CodeGenerator::MakeCodeEpilogue(masm(), info);
  result->set_is_turbofanned(true);
  result->set_stack_slots(frame()->GetSpillSlotCount());
  result->set_safepoint_table_offset(safepoints()->GetCodeOffset());

  // Emit exception handler table.
  if (!handlers_.empty()) {
    Handle<HandlerTable> table =
        Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
            HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
            TENURED));
    for (size_t i = 0; i < handlers_.size(); ++i) {
      int position = handlers_[i].handler->pos();
      HandlerTable::CatchPrediction prediction = handlers_[i].caught_locally
                                                     ? HandlerTable::CAUGHT
                                                     : HandlerTable::UNCAUGHT;
      table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
      table->SetReturnHandler(static_cast<int>(i), position, prediction);
    }
    result->set_handler_table(*table);
  }

  PopulateDeoptimizationData(result);

  // Ensure there is space for lazy deoptimization in the relocation info.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
  }

  // Emit a code line info recording stop event.
  void* line_info = recorder->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate(), CodeEndLinePosInfoRecordEvent(*result, line_info));

  return result;
}


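// Returns true if |block| immediately follows the current block in assembly
// order, so that a jump to it can be elided in favor of fallthrough.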
bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
  return code()
      ->InstructionBlockAt(current_block_)
      ->ao_number()
      .IsNext(code()->InstructionBlockAt(block)->ao_number());
}


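// Records a safepoint at the current masm() position, marking every tagged
// stack slot (and, for kWithRegisters safepoints, every tagged register) in
// the given reference map so the GC can find these pointers.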
void CodeGenerator::RecordSafepoint(ReferenceMap* references,
                                    Safepoint::Kind kind, int arguments,
                                    Safepoint::DeoptMode deopt_mode) {
  Safepoint safepoint =
      safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
  int stackSlotToSpillSlotDelta =
      frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
  for (auto& operand : references->reference_operands()) {
    if (operand.IsStackSlot()) {
      int index = LocationOperand::cast(operand).index();
      DCHECK(index >= 0);
      // Safepoint table indices are 0-based from the beginning of the spill
      // slot area, adjust appropriately.
      index -= stackSlotToSpillSlotDelta;
      safepoint.DefinePointerSlot(index, zone());
    } else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
      Register reg = LocationOperand::cast(operand).GetRegister();
      safepoint.DefinePointerRegister(reg, zone());
    }
  }
}


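// Checks whether a heap constant can be rematerialized from the current
// stack frame (the context or closure slot of a JS frame) instead of being
// embedded in the code; on success returns the frame offset to load from.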
bool CodeGenerator::IsMaterializableFromFrame(Handle<HeapObject> object,
                                              int* offset_return) {
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    if (info()->has_context() && object.is_identical_to(info()->context()) &&
        !info()->is_osr()) {
      *offset_return = StandardFrameConstants::kContextOffset;
      return true;
    } else if (object.is_identical_to(info()->closure())) {
      *offset_return = JavaScriptFrameConstants::kFunctionOffset;
      return true;
    }
  }
  return false;
}


bool CodeGenerator::IsMaterializableFromRoot(
    Handle<HeapObject> object, Heap::RootListIndex* index_return) {
  const CallDescriptor* incoming_descriptor =
      linkage()->GetIncomingDescriptor();
  if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
    RootIndexMap map(isolate());
    int root_index = map.Lookup(*object);
    if (root_index != RootIndexMap::kInvalidRootIndex) {
      *index_return = static_cast<Heap::RootListIndex>(root_index);
      return true;
    }
  }
  return false;
}


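// Assembles a single instruction: first any parallel moves in its gap
// positions, then the source position, then the architecture-specific code,
// and finally any branch or boolean materialization requested by the
// instruction's flags mode.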
void CodeGenerator::AssembleInstruction(Instruction* instr) {
  AssembleGaps(instr);
  AssembleSourcePosition(instr);
  // Assemble architecture-specific code for the instruction.
  AssembleArchInstruction(instr);

  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
  if (mode == kFlags_branch) {
    // Assemble a branch after this instruction.
    InstructionOperandConverter i(this, instr);
    RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
    RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);

    if (true_rpo == false_rpo) {
      // Redundant branch: both targets are the same block, so only emit a
      // jump if we cannot fall through.
      if (!IsNextInAssemblyOrder(true_rpo)) {
        AssembleArchJump(true_rpo);
      }
      return;
    }
    if (IsNextInAssemblyOrder(true_rpo)) {
      // The true block is next in assembly order; negate the condition so
      // that we can fall through to it.
      std::swap(true_rpo, false_rpo);
      condition = NegateFlagsCondition(condition);
    }
    BranchInfo branch;
    branch.condition = condition;
    branch.true_label = GetLabel(true_rpo);
    branch.false_label = GetLabel(false_rpo);
    branch.fallthru = IsNextInAssemblyOrder(false_rpo);
    // Assemble architecture-specific branch.
    AssembleArchBranch(instr, &branch);
  } else if (mode == kFlags_set) {
    // Assemble a boolean materialization after this instruction.
    AssembleArchBoolean(instr, condition);
  }
}


void CodeGenerator::AssembleSourcePosition(Instruction* instr) {
  SourcePosition source_position;
  if (!code()->GetSourcePosition(instr, &source_position)) return;
  if (source_position == current_source_position_) return;
  current_source_position_ = source_position;
  if (source_position.IsUnknown()) return;
  int code_pos = source_position.raw();
  masm()->positions_recorder()->RecordPosition(code_pos);
  masm()->positions_recorder()->WriteRecordedPositions();
  if (FLAG_code_comments) {
    Vector<char> buffer = Vector<char>::New(256);
    CompilationInfo* info = this->info();
    int ln = Script::GetLineNumber(info->script(), code_pos);
    int cn = Script::GetColumnNumber(info->script(), code_pos);
    if (info->script()->name()->IsString()) {
      Handle<String> file(String::cast(info->script()->name()));
      base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --",
                         file->ToCString().get(), ln, cn);
    } else {
      base::OS::SNPrintF(buffer.start(), buffer.length(),
                         "-- <unknown>:%d:%d --", ln, cn);
    }
    masm()->RecordComment(buffer.start());
  }
}


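// Resolves the parallel moves recorded in the instruction's gap positions
// into actual machine moves via the gap resolver.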
void CodeGenerator::AssembleGaps(Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    ParallelMove* move = instr->GetParallelMove(inner_pos);
    if (move != nullptr) resolver()->Resolve(move);
  }
}


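// Builds the DeoptimizationInputData for the code object: the translation
// byte array, the literal array, OSR information, and one entry (AST id,
// translation index, pc offset) per recorded deoptimization state.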
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
  CompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_states_.size());
  if (deopt_count == 0 && !info->is_osr()) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), deopt_count, TENURED);

  Handle<ByteArray> translation_array =
      translations_.CreateByteArray(isolate()->factory());

  data->SetTranslationByteArray(*translation_array);
  data->SetInlinedFunctionCount(
      Smi::FromInt(static_cast<int>(inlined_function_count_)));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));

  if (info->has_shared_info()) {
    data->SetSharedFunctionInfo(*info->shared_info());
  } else {
    data->SetSharedFunctionInfo(Smi::FromInt(0));
  }

  Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
      static_cast<int>(deoptimization_literals_.size()), TENURED);
  {
    AllowDeferredHandleDereference copy_handles;
    for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
      literals->set(i, *deoptimization_literals_[i]);
    }
    data->SetLiteralArray(*literals);
  }

  if (info->is_osr()) {
    DCHECK(osr_pc_offset_ >= 0);
    data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
  } else {
    BailoutId osr_ast_id = BailoutId::None();
    data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(-1));
  }

  // Populate deoptimization entries.
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
    CHECK(deoptimization_state);
    data->SetAstId(i, deoptimization_state->bailout_id());
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_state->translation_id()));
    data->SetArgumentsStackHeight(i, Smi::FromInt(0));
    data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
  }

  code_object->set_deoptimization_data(*data);
}


Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
  jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
  return jump_tables_->label();
}


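// Records the bookkeeping for a call instruction: the safepoint, an
// exception handler entry if the call can throw, and a lazy deoptimization
// translation if the call carries a frame state.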
void CodeGenerator::RecordCallPosition(Instruction* instr) {
  CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));

  bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);

  RecordSafepoint(
      instr->reference_map(), Safepoint::kSimple, 0,
      needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

  if (flags & CallDescriptor::kHasExceptionHandler) {
    InstructionOperandConverter i(this, instr);
    bool caught = flags & CallDescriptor::kHasLocalCatchHandler;
    RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
    handlers_.push_back({caught, GetLabel(handler_rpo), masm()->pc_offset()});
  }

  if (flags & CallDescriptor::kNeedsNopAfterCall) {
    AddNopForSmiCodeInlining();
  }

  if (needs_frame_state) {
    MarkLazyDeoptSite();
    // If the frame state is present, it starts at argument 1 (just after the
    // code address).
    size_t frame_state_offset = 1;
    FrameStateDescriptor* descriptor =
        GetFrameStateDescriptor(instr, frame_state_offset);
    int pc_offset = masm()->pc_offset();
    int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
                                          descriptor->state_combine());
    // If the pre-call frame state differs from the post-call one, produce the
    // pre-call frame state, too.
    // TODO(jarin) We might want to avoid building the pre-call frame state
    // because it is only used to get locals and arguments (by the debugger and
    // f.arguments), and those are the same in the pre-call and post-call
    // states.
    if (!descriptor->state_combine().IsOutputIgnored()) {
      deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
                                        OutputFrameStateCombine::Ignore());
    }
#if DEBUG
    // Make sure all the values live in stack slots or are immediates.
    // (The values should not live in registers because registers are
    // clobbered by calls.)
    for (size_t i = 0; i < descriptor->GetSize(); i++) {
      InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
      CHECK(op->IsStackSlot() || op->IsDoubleStackSlot() || op->IsImmediate());
    }
#endif
    safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
  }
}


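// Returns the index of the literal in the deoptimization literal array,
// appending it first if it has not been seen before.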
int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = static_cast<int>(deoptimization_literals_.size());
  for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.push_back(literal);
  return result;
}


FrameStateDescriptor* CodeGenerator::GetFrameStateDescriptor(
    Instruction* instr, size_t frame_state_offset) {
  InstructionOperandConverter i(this, instr);
  InstructionSequence::StateId state_id =
      InstructionSequence::StateId::FromInt(i.InputInt32(frame_state_offset));
  return code()->GetFrameStateDescriptor(state_id);
}


void CodeGenerator::TranslateStateValueDescriptor(
    StateValueDescriptor* desc, Translation* translation,
    InstructionOperandIterator* iter) {
  if (desc->IsNested()) {
    translation->BeginCapturedObject(static_cast<int>(desc->size()));
    for (size_t index = 0; index < desc->fields().size(); index++) {
      TranslateStateValueDescriptor(&desc->fields()[index], translation, iter);
    }
  } else if (desc->IsDuplicate()) {
    translation->DuplicateObject(static_cast<int>(desc->id()));
  } else {
    DCHECK(desc->IsPlain());
    AddTranslationForOperand(translation, iter->instruction(), iter->Advance(),
                             desc->type());
  }
}


void CodeGenerator::TranslateFrameStateDescriptorOperands(
    FrameStateDescriptor* desc, InstructionOperandIterator* iter,
    OutputFrameStateCombine combine, Translation* translation) {
  for (size_t index = 0; index < desc->GetSize(combine); index++) {
    switch (combine.kind()) {
      case OutputFrameStateCombine::kPushOutput: {
        DCHECK(combine.GetPushCount() <= iter->instruction()->OutputCount());
        size_t size_without_output =
            desc->GetSize(OutputFrameStateCombine::Ignore());
        // If the index is past the existing stack items in values_.
        if (index >= size_without_output) {
          // Materialize the result of the call instruction in this slot.
          AddTranslationForOperand(
              translation, iter->instruction(),
              iter->instruction()->OutputAt(index - size_without_output),
              MachineType::AnyTagged());
          continue;
        }
        break;
      }
      case OutputFrameStateCombine::kPokeAt:
        // The result of the call should be placed at position
        // [index_from_top] in the stack (overwriting whatever was
        // previously there).
        size_t index_from_top =
            desc->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
        if (index >= index_from_top &&
            index < index_from_top + iter->instruction()->OutputCount()) {
          AddTranslationForOperand(
              translation, iter->instruction(),
              iter->instruction()->OutputAt(index - index_from_top),
              MachineType::AnyTagged());
          iter->Advance();  // We do not use this input, but we need to
                            // advance, as the input got replaced.
          continue;
        }
        break;
    }
    StateValueDescriptor* value_desc = desc->GetStateValueDescriptor();
    TranslateStateValueDescriptor(&value_desc->fields()[index], translation,
                                  iter);
  }
}


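// Recursively emits translation frames, outermost state first, then the
// frame for this descriptor (JS, interpreted, arguments adaptor, or
// construct stub), followed by its operands.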
void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
    Translation* translation, OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != nullptr) {
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), iter,
                                            translation,
                                            OutputFrameStateCombine::Ignore());
  }

  Handle<SharedFunctionInfo> shared_info;
  if (!descriptor->shared_info().ToHandle(&shared_info)) {
    if (!info()->has_shared_info()) {
      return;  // Stub with no SharedFunctionInfo.
    }
    shared_info = info()->shared_info();
  }
  int shared_info_id = DefineDeoptimizationLiteral(shared_info);

  switch (descriptor->type()) {
    case FrameStateType::kJavaScriptFunction:
      translation->BeginJSFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    (1 + descriptor->parameters_count())));
      break;
    case FrameStateType::kInterpretedFunction:
      translation->BeginInterpretedFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->locals_count()));
      break;
    case FrameStateType::kArgumentsAdaptor:
      translation->BeginArgumentsAdaptorFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kConstructStub:
      translation->BeginConstructStubFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
  }

  TranslateFrameStateDescriptorOperands(descriptor, iter, state_combine,
                                        translation);
}


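// Builds a complete translation for the instruction's frame state and
// registers a new deoptimization state for it; returns the deoptimization
// id that identifies this state.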
int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
                                    size_t frame_state_offset,
                                    OutputFrameStateCombine state_combine) {
  FrameStateDescriptor* descriptor =
      GetFrameStateDescriptor(instr, frame_state_offset);
  frame_state_offset++;

  Translation translation(
      &translations_, static_cast<int>(descriptor->GetFrameCount()),
      static_cast<int>(descriptor->GetJSFrameCount()), zone());
  InstructionOperandIterator iter(instr, frame_state_offset);
  BuildTranslationForFrameStateDescriptor(descriptor, &iter, &translation,
                                          state_combine);

  int deoptimization_id = static_cast<int>(deoptimization_states_.size());

  deoptimization_states_.push_back(new (zone()) DeoptimizationState(
      descriptor->bailout_id(), translation.index(), pc_offset));

  return deoptimization_id;
}


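// Emits a single translation entry for an operand, dispatching on the
// operand's location (stack slot, register, or immediate) and its machine
// type.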
void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
    } else if (type.representation() == MachineRepresentation::kTagged) {
      translation->StoreStackSlot(LocationOperand::cast(op)->index());
    } else {
      CHECK(false);
    }
  } else if (op->IsDoubleStackSlot()) {
    DCHECK(IsFloatingPoint(type.representation()));
    translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
  } else if (op->IsRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolRegister(converter.ToRegister(op));
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else if (type.representation() == MachineRepresentation::kTagged) {
      translation->StoreRegister(converter.ToRegister(op));
    } else {
      CHECK(false);
    }
  } else if (op->IsDoubleRegister()) {
    DCHECK(IsFloatingPoint(type.representation()));
    InstructionOperandConverter converter(this, instr);
    translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
  } else if (op->IsImmediate()) {
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    Handle<Object> constant_object;
    switch (constant.type()) {
      case Constant::kInt32:
        DCHECK(type == MachineType::Int32() || type == MachineType::Uint32() ||
               type.representation() == MachineRepresentation::kBit);
        constant_object =
            isolate()->factory()->NewNumberFromInt(constant.ToInt32());
        break;
      case Constant::kFloat32:
        DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat32());
        break;
      case Constant::kFloat64:
        DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
        break;
      case Constant::kHeapObject:
        DCHECK(type.representation() == MachineRepresentation::kTagged);
        constant_object = constant.ToHeapObject();
        break;
      default:
        CHECK(false);
    }
    if (constant_object.is_identical_to(info()->closure())) {
      translation->StoreJSFrameFunction();
    } else {
      int literal_id = DefineDeoptimizationLiteral(constant_object);
      translation->StoreLiteral(literal_id);
    }
  } else {
    CHECK(false);
  }
}


void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = masm()->pc_offset();
}


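// Computes, in stack slots, how far the stack pointer must be adjusted
// before a tail call: the whole frame is discarded (minus the return
// address on architectures that keep it on the stack), adjusted by the
// delta in stack parameters.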
int CodeGenerator::TailCallFrameStackSlotDelta(int stack_param_delta) {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  int spill_slots = frame()->GetSpillSlotCount();
  bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0;
  // Leave the PC on the stack on platforms that have that as part of their
  // ABI.
  int pc_slots = V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
  int sp_slot_delta =
      has_frame ? (frame()->GetTotalFrameSlotCount() - pc_slots) : 0;
  // Discard only slots that won't be used by new parameters.
  sp_slot_delta += stack_param_delta;
  return sp_slot_delta;
}


OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}


OutOfLineCode::~OutOfLineCode() {}

}  // namespace compiler
}  // namespace internal
}  // namespace v8