1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/deoptimizer.h"
6
7 #include "src/accessors.h"
8 #include "src/ast/prettyprinter.h"
9 #include "src/codegen.h"
10 #include "src/disasm.h"
11 #include "src/frames-inl.h"
12 #include "src/full-codegen/full-codegen.h"
13 #include "src/global-handles.h"
14 #include "src/macro-assembler.h"
15 #include "src/profiler/cpu-profiler.h"
16 #include "src/v8.h"
17
18
19 namespace v8 {
20 namespace internal {
21
AllocateCodeChunk(MemoryAllocator * allocator)22 static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
23 return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
24 base::OS::CommitPageSize(),
25 #if defined(__native_client__)
26 // The Native Client port of V8 uses an interpreter,
27 // so code pages don't need PROT_EXEC.
28 NOT_EXECUTABLE,
29 #else
30 EXECUTABLE,
31 #endif
32 NULL);
33 }
34
35
DeoptimizerData(MemoryAllocator * allocator)36 DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
37 : allocator_(allocator),
38 deoptimized_frame_info_(NULL),
39 current_(NULL) {
40 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
41 deopt_entry_code_entries_[i] = -1;
42 deopt_entry_code_[i] = AllocateCodeChunk(allocator);
43 }
44 }
45
46
~DeoptimizerData()47 DeoptimizerData::~DeoptimizerData() {
48 for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) {
49 allocator_->Free(deopt_entry_code_[i]);
50 deopt_entry_code_[i] = NULL;
51 }
52 }
53
54
Iterate(ObjectVisitor * v)55 void DeoptimizerData::Iterate(ObjectVisitor* v) {
56 if (deoptimized_frame_info_ != NULL) {
57 deoptimized_frame_info_->Iterate(v);
58 }
59 }
60
61
FindDeoptimizingCode(Address addr)62 Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
63 if (function_->IsHeapObject()) {
64 // Search all deoptimizing code in the native context of the function.
65 Context* native_context = function_->context()->native_context();
66 Object* element = native_context->DeoptimizedCodeListHead();
67 while (!element->IsUndefined()) {
68 Code* code = Code::cast(element);
69 CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
70 if (code->contains(addr)) return code;
71 element = code->next_code_link();
72 }
73 }
74 return NULL;
75 }
76
77
78 // We rely on this function not causing a GC. It is called from generated code
79 // without having a real stack frame in place.
New(JSFunction * function,BailoutType type,unsigned bailout_id,Address from,int fp_to_sp_delta,Isolate * isolate)80 Deoptimizer* Deoptimizer::New(JSFunction* function,
81 BailoutType type,
82 unsigned bailout_id,
83 Address from,
84 int fp_to_sp_delta,
85 Isolate* isolate) {
86 Deoptimizer* deoptimizer = new Deoptimizer(isolate,
87 function,
88 type,
89 bailout_id,
90 from,
91 fp_to_sp_delta,
92 NULL);
93 CHECK(isolate->deoptimizer_data()->current_ == NULL);
94 isolate->deoptimizer_data()->current_ = deoptimizer;
95 return deoptimizer;
96 }
97
98
// Upper bound on the code emitted after the deopt table entries themselves;
// no larger than 2K on all platforms.
static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;
101
102
GetMaxDeoptTableSize()103 size_t Deoptimizer::GetMaxDeoptTableSize() {
104 int entries_size =
105 Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
106 int commit_page_size = static_cast<int>(base::OS::CommitPageSize());
107 int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
108 commit_page_size) + 1;
109 return static_cast<size_t>(commit_page_size * page_count);
110 }
111
112
Grab(Isolate * isolate)113 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
114 Deoptimizer* result = isolate->deoptimizer_data()->current_;
115 CHECK_NOT_NULL(result);
116 result->DeleteFrameDescriptions();
117 isolate->deoptimizer_data()->current_ = NULL;
118 return result;
119 }
120
121
ConvertJSFrameIndexToFrameIndex(int jsframe_index)122 int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
123 if (jsframe_index == 0) return 0;
124
125 int frame_index = 0;
126 while (jsframe_index >= 0) {
127 FrameDescription* frame = output_[frame_index];
128 if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
129 jsframe_index--;
130 }
131 frame_index++;
132 }
133
134 return frame_index - 1;
135 }
136
137
// Builds a GC-safe description of the |jsframe_index|-th inlined JS frame of
// the optimized |frame| so the debugger can inspect its parameters and
// expression stack. The result is registered with the isolate's deoptimizer
// data and must be released via DeleteDebuggerInspectableFrame().
DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
    JavaScriptFrame* frame,
    int jsframe_index,
    Isolate* isolate) {
  CHECK(frame->is_optimized());
  // Only one inspectable frame may be registered at a time.
  CHECK(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);

  // Get the function and code from the frame.
  JSFunction* function = frame->function();
  Code* code = frame->LookupCode();

  // Locate the deoptimization point in the code. As we are at a call the
  // return address must be at a place in the code with deoptimization support.
  SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc());
  int deoptimization_index = safepoint_entry.deoptimization_index();
  CHECK_NE(deoptimization_index, Safepoint::kNoDeoptimizationIndex);

  // Always use the actual stack slots when calculating the fp to sp
  // delta adding two for the function and context.
  unsigned stack_slots = code->stack_slots();
  unsigned arguments_stack_height =
      Deoptimizer::ComputeOutgoingArgumentSize(code, deoptimization_index);
  unsigned fp_to_sp_delta = (stack_slots * kPointerSize) +
                            StandardFrameConstants::kFixedFrameSizeFromFp +
                            arguments_stack_height;

  // A DEBUGGER-type deoptimizer works on the caller-supplied code object and
  // does not mutate the live frame.
  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
                                             function,
                                             Deoptimizer::DEBUGGER,
                                             deoptimization_index,
                                             frame->pc(),
                                             fp_to_sp_delta,
                                             code);
  Address tos = frame->fp() - fp_to_sp_delta;
  deoptimizer->FillInputFrame(tos, frame);

  // Calculate the output frames.
  Deoptimizer::ComputeOutputFrames(deoptimizer);

  // Create the GC safe output frame information and register it for GC
  // handling.
  CHECK_LT(jsframe_index, deoptimizer->jsframe_count());

  // Convert JS frame index into frame index.
  int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index);

  // An arguments adaptor frame, if present, sits directly below the JS frame.
  bool has_arguments_adaptor =
      frame_index > 0 &&
      deoptimizer->output_[frame_index - 1]->GetFrameType() ==
          StackFrame::ARGUMENTS_ADAPTOR;

  // A construct stub frame sits below the JS frame (and below the adaptor
  // frame too, when one is present).
  int construct_offset = has_arguments_adaptor ? 2 : 1;
  bool has_construct_stub =
      frame_index >= construct_offset &&
      deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
          StackFrame::CONSTRUCT;

  DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
                                                        frame_index,
                                                        has_arguments_adaptor,
                                                        has_construct_stub);
  isolate->deoptimizer_data()->deoptimized_frame_info_ = info;

  // Done with the GC-unsafe frame descriptions. This re-enables allocation.
  deoptimizer->DeleteFrameDescriptions();

  // Allocate a heap number for the doubles belonging to this frame.
  deoptimizer->MaterializeHeapNumbersForDebuggerInspectableFrame(
      frame_index, info->parameters_count(), info->expression_count(), info);

  // Finished using the deoptimizer instance.
  delete deoptimizer;

  return info;
}
213
214
DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo * info,Isolate * isolate)215 void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
216 Isolate* isolate) {
217 CHECK_EQ(isolate->deoptimizer_data()->deoptimized_frame_info_, info);
218 delete info;
219 isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL;
220 }
221
222
GenerateDeoptimizationEntries(MacroAssembler * masm,int count,BailoutType type)223 void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
224 int count,
225 BailoutType type) {
226 TableEntryGenerator generator(masm, type, count);
227 generator.Generate();
228 }
229
230
// Applies |visitor| to every function on |context|'s optimized functions
// list, unlinking any entry whose code is not (or is no longer, after the
// visit) optimized.
void Deoptimizer::VisitAllOptimizedFunctionsForContext(
    Context* context, OptimizedFunctionVisitor* visitor) {
  DisallowHeapAllocation no_allocation;

  CHECK(context->IsNativeContext());

  visitor->EnterContext(context);

  // Visit the list of optimized functions, removing elements that
  // no longer refer to optimized code.
  JSFunction* prev = NULL;
  Object* element = context->OptimizedFunctionsListHead();
  while (!element->IsUndefined()) {
    JSFunction* function = JSFunction::cast(element);
    Object* next = function->next_function_link();
    // The comma expression lets the visitor see the function first, then
    // re-checks the code kind, because the visitor itself may have replaced
    // the function's optimized code.
    if (function->code()->kind() != Code::OPTIMIZED_FUNCTION ||
        (visitor->VisitFunction(function),
         function->code()->kind() != Code::OPTIMIZED_FUNCTION)) {
      // The function no longer refers to optimized code, or the visitor
      // changed the code to which it refers to no longer be optimized code.
      // Remove the function from this list.
      if (prev != NULL) {
        prev->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
      } else {
        context->SetOptimizedFunctionsListHead(next);
      }
      // The visitor should not alter the link directly.
      CHECK_EQ(function->next_function_link(), next);
      // Set the next function link to undefined to indicate it is no longer
      // in the optimized functions list.
      function->set_next_function_link(context->GetHeap()->undefined_value(),
                                       SKIP_WRITE_BARRIER);
    } else {
      // The visitor should not alter the link directly.
      CHECK_EQ(function->next_function_link(), next);
      // preserve this element.
      prev = function;
    }
    element = next;
  }

  visitor->LeaveContext(context);
}
274
275
VisitAllOptimizedFunctions(Isolate * isolate,OptimizedFunctionVisitor * visitor)276 void Deoptimizer::VisitAllOptimizedFunctions(
277 Isolate* isolate,
278 OptimizedFunctionVisitor* visitor) {
279 DisallowHeapAllocation no_allocation;
280
281 // Run through the list of all native contexts.
282 Object* context = isolate->heap()->native_contexts_list();
283 while (!context->IsUndefined()) {
284 VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
285 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
286 }
287 }
288
289
// Unlink functions referring to code marked for deoptimization, then move
// marked code from the optimized code list to the deoptimized code list,
// and patch code for lazy deopt.
void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
  DisallowHeapAllocation no_allocation;

  // A "closure" that unlinks optimized code that is going to be
  // deoptimized from the functions that refer to it.
  class SelectedCodeUnlinker: public OptimizedFunctionVisitor {
   public:
    virtual void EnterContext(Context* context) { }  // Don't care.
    virtual void LeaveContext(Context* context) { }  // Don't care.
    virtual void VisitFunction(JSFunction* function) {
      Code* code = function->code();
      if (!code->marked_for_deoptimization()) return;

      // Unlink this function and evict from optimized code map.
      // The function falls back to the shared (unoptimized) code.
      SharedFunctionInfo* shared = function->shared();
      function->set_code(shared->code());

      if (FLAG_trace_deopt) {
        CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer unlinked: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
    }
  };

  // Unlink all functions that refer to marked code.
  SelectedCodeUnlinker unlinker;
  VisitAllOptimizedFunctionsForContext(context, &unlinker);

  Isolate* isolate = context->GetHeap()->isolate();
#ifdef DEBUG
  Code* topmost_optimized_code = NULL;
  bool safe_to_deopt_topmost_optimized_code = false;
  // Make sure all activations of optimized code can deopt at their current PC.
  // The topmost optimized code has special handling because it cannot be
  // deoptimized due to weak object dependency.
  for (StackFrameIterator it(isolate, isolate->thread_local_top());
       !it.done(); it.Advance()) {
    StackFrame::Type type = it.frame()->type();
    if (type == StackFrame::OPTIMIZED) {
      Code* code = it.frame()->LookupCode();
      JSFunction* function =
          static_cast<OptimizedFrame*>(it.frame())->function();
      if (FLAG_trace_deopt) {
        CodeTracer::Scope scope(isolate->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer found activation of function: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
      SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
      int deopt_index = safepoint.deoptimization_index();
      // Turbofan deopt is checked when we are patching addresses on stack.
      bool turbofanned = code->is_turbofanned() &&
                         function->shared()->asm_function() &&
                         !FLAG_turbo_asm_deoptimization;
      bool safe_to_deopt =
          deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned;
      CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned);
      if (topmost_optimized_code == NULL) {
        topmost_optimized_code = code;
        safe_to_deopt_topmost_optimized_code = safe_to_deopt;
      }
    }
  }
#endif

  // Move marked code from the optimized code list to the deoptimized
  // code list, collecting them into a ZoneList.
  Zone zone;
  ZoneList<Code*> codes(10, &zone);

  // Walk over all optimized code objects in this native context.
  Code* prev = NULL;
  Object* element = context->OptimizedCodeListHead();
  while (!element->IsUndefined()) {
    Code* code = Code::cast(element);
    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    Object* next = code->next_code_link();

    if (code->marked_for_deoptimization()) {
      // Put the code into the list for later patching.
      codes.Add(code, &zone);

      if (prev != NULL) {
        // Skip this code in the optimized code list.
        prev->set_next_code_link(next);
      } else {
        // There was no previous node, the next node is the new head.
        context->SetOptimizedCodeListHead(next);
      }

      // Move the code to the _deoptimized_ code list.
      code->set_next_code_link(context->DeoptimizedCodeListHead());
      context->SetDeoptimizedCodeListHead(code);
    } else {
      // Not marked; preserve this element.
      prev = code;
    }
    element = next;
  }

  // TODO(titzer): we need a handle scope only because of the macro assembler,
  // which is only used in EnsureCodeForDeoptimizationEntry.
  HandleScope scope(isolate);

  // Now patch all the codes for deoptimization.
  for (int i = 0; i < codes.length(); i++) {
#ifdef DEBUG
    if (codes[i] == topmost_optimized_code) {
      DCHECK(safe_to_deopt_topmost_optimized_code);
    }
#endif
    // It is finally time to die, code object.

    // Remove the code from optimized code map.
    DeoptimizationInputData* deopt_data =
        DeoptimizationInputData::cast(codes[i]->deoptimization_data());
    SharedFunctionInfo* shared =
        SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo());
    shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code");

    // Do platform-specific patching to force any activations to lazy deopt.
    PatchCodeForDeoptimization(isolate, codes[i]);

    // We might be in the middle of incremental marking with compaction.
    // Tell collector to treat this code object in a special way and
    // ignore all slots that might have been recorded on it.
    isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]);
  }
}
426
427
DeoptimizeAll(Isolate * isolate)428 void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
429 if (FLAG_trace_deopt) {
430 CodeTracer::Scope scope(isolate->GetCodeTracer());
431 PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
432 }
433 DisallowHeapAllocation no_allocation;
434 // For all contexts, mark all code, then deoptimize.
435 Object* context = isolate->heap()->native_contexts_list();
436 while (!context->IsUndefined()) {
437 Context* native_context = Context::cast(context);
438 MarkAllCodeForContext(native_context);
439 DeoptimizeMarkedCodeForContext(native_context);
440 context = native_context->get(Context::NEXT_CONTEXT_LINK);
441 }
442 }
443
444
DeoptimizeMarkedCode(Isolate * isolate)445 void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
446 if (FLAG_trace_deopt) {
447 CodeTracer::Scope scope(isolate->GetCodeTracer());
448 PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
449 }
450 DisallowHeapAllocation no_allocation;
451 // For all contexts, deoptimize code already marked.
452 Object* context = isolate->heap()->native_contexts_list();
453 while (!context->IsUndefined()) {
454 Context* native_context = Context::cast(context);
455 DeoptimizeMarkedCodeForContext(native_context);
456 context = native_context->get(Context::NEXT_CONTEXT_LINK);
457 }
458 }
459
460
MarkAllCodeForContext(Context * context)461 void Deoptimizer::MarkAllCodeForContext(Context* context) {
462 Object* element = context->OptimizedCodeListHead();
463 while (!element->IsUndefined()) {
464 Code* code = Code::cast(element);
465 CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
466 code->set_marked_for_deoptimization(true);
467 element = code->next_code_link();
468 }
469 }
470
471
DeoptimizeFunction(JSFunction * function)472 void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
473 Code* code = function->code();
474 if (code->kind() == Code::OPTIMIZED_FUNCTION) {
475 // Mark the code for deoptimization and unlink any functions that also
476 // refer to that code. The code cannot be shared across native contexts,
477 // so we only need to search one.
478 code->set_marked_for_deoptimization(true);
479 DeoptimizeMarkedCodeForContext(function->context()->native_context());
480 }
481 }
482
483
// Static dispatch helper: translates the deoptimizer's input frame into its
// unoptimized output frames.
void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
  deoptimizer->DoComputeOutputFrames();
}
487
488
TraceEnabledFor(BailoutType deopt_type,StackFrame::Type frame_type)489 bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
490 StackFrame::Type frame_type) {
491 switch (deopt_type) {
492 case EAGER:
493 case SOFT:
494 case LAZY:
495 case DEBUGGER:
496 return (frame_type == StackFrame::STUB)
497 ? FLAG_trace_stub_failures
498 : FLAG_trace_deopt;
499 }
500 FATAL("Unsupported deopt type");
501 return false;
502 }
503
504
MessageFor(BailoutType type)505 const char* Deoptimizer::MessageFor(BailoutType type) {
506 switch (type) {
507 case EAGER: return "eager";
508 case SOFT: return "soft";
509 case LAZY: return "lazy";
510 case DEBUGGER: return "debugger";
511 }
512 FATAL("Unsupported deopt type");
513 return NULL;
514 }
515
516
// Constructs a deoptimizer for |function| leaving optimized code at bailout
// |bailout_id|. Resolves the code object being left, updates the function's
// deopt counters, sets up tracing, and allocates the (GC-unsafe) input frame
// description.
Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction* function,
                         BailoutType type, unsigned bailout_id, Address from,
                         int fp_to_sp_delta, Code* optimized_code)
    : isolate_(isolate),
      function_(function),
      bailout_id_(bailout_id),
      bailout_type_(type),
      from_(from),
      fp_to_sp_delta_(fp_to_sp_delta),
      has_alignment_padding_(0),
      input_(nullptr),
      output_count_(0),
      jsframe_count_(0),
      output_(nullptr),
      trace_scope_(nullptr) {
  // For COMPILED_STUBs called from builtins, the function pointer is a SMI
  // indicating an internal frame.
  if (function->IsSmi()) {
    function = nullptr;
  }
  DCHECK(from != nullptr);
  if (function != nullptr && function->IsOptimized()) {
    function->shared()->increment_deopt_count();
    if (bailout_type_ == Deoptimizer::SOFT) {
      isolate->counters()->soft_deopts_executed()->Increment();
      // Soft deopts shouldn't count against the overall re-optimization count
      // that can eventually lead to disabling optimization for a function.
      int opt_count = function->shared()->opt_count();
      if (opt_count > 0) opt_count--;
      function->shared()->set_opt_count(opt_count);
    }
  }
  compiled_code_ = FindOptimizedCode(function, optimized_code);
// NOTE(review): '#if DEBUG' (not '#ifdef') — relies on DEBUG expanding to a
// nonzero value in debug builds; confirm this matches the build config.
#if DEBUG
  DCHECK(compiled_code_ != NULL);
  if (type == EAGER || type == SOFT || type == LAZY) {
    DCHECK(compiled_code_->kind() != Code::FUNCTION);
  }
#endif

  // |function| was reset to nullptr above for internal (stub) frames.
  StackFrame::Type frame_type = function == NULL
      ? StackFrame::STUB
      : StackFrame::JAVA_SCRIPT;
  trace_scope_ = TraceEnabledFor(type, frame_type) ?
      new CodeTracer::Scope(isolate->GetCodeTracer()) : NULL;
#ifdef DEBUG
  // Frame descriptions hold raw stack data; forbid heap allocation while they
  // are live. The scope is disposed again in DeleteFrameDescriptions().
  CHECK(AllowHeapAllocation::IsAllowed());
  disallow_heap_allocation_ = new DisallowHeapAllocation();
#endif  // DEBUG
  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    PROFILE(isolate_, CodeDeoptEvent(compiled_code_, from_, fp_to_sp_delta_));
  }
  unsigned size = ComputeInputFrameSize();
  // FrameDescription provides a size-taking operator new so the variable-size
  // frame data is allocated inline.
  input_ = new(size) FrameDescription(size, function);
  input_->SetFrameType(frame_type);
}
573
574
FindOptimizedCode(JSFunction * function,Code * optimized_code)575 Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
576 Code* optimized_code) {
577 switch (bailout_type_) {
578 case Deoptimizer::SOFT:
579 case Deoptimizer::EAGER:
580 case Deoptimizer::LAZY: {
581 Code* compiled_code = FindDeoptimizingCode(from_);
582 return (compiled_code == NULL)
583 ? static_cast<Code*>(isolate_->FindCodeObject(from_))
584 : compiled_code;
585 }
586 case Deoptimizer::DEBUGGER:
587 DCHECK(optimized_code->contains(from_));
588 return optimized_code;
589 }
590 FATAL("Could not find code for optimized function");
591 return NULL;
592 }
593
594
PrintFunctionName()595 void Deoptimizer::PrintFunctionName() {
596 if (function_->IsJSFunction()) {
597 function_->ShortPrint(trace_scope_->file());
598 } else {
599 PrintF(trace_scope_->file(),
600 "%s", Code::Kind2String(compiled_code_->kind()));
601 }
602 }
603
604
Deoptimizer::~Deoptimizer() {
  // DeleteFrameDescriptions() must have run already: it frees input_/output_
  // and (in DEBUG builds) disposes the DisallowHeapAllocation scope.
  DCHECK(input_ == NULL && output_ == NULL);
  DCHECK(disallow_heap_allocation_ == NULL);
  delete trace_scope_;
}
610
611
// Frees the GC-unsafe input/output frame descriptions and, in DEBUG builds,
// re-enables heap allocation.
void Deoptimizer::DeleteFrameDescriptions() {
  delete input_;
  for (int i = 0; i < output_count_; ++i) {
    // The input frame may also appear in output_; avoid a double delete.
    if (output_[i] != input_) delete output_[i];
  }
  delete[] output_;
  input_ = NULL;
  output_ = NULL;
#ifdef DEBUG
  // The DisallowHeapAllocation scope created in the constructor must still be
  // active; dispose it now that no raw frame data remains.
  CHECK(!AllowHeapAllocation::IsAllowed());
  CHECK(disallow_heap_allocation_ != NULL);
  delete disallow_heap_allocation_;
  disallow_heap_allocation_ = NULL;
#endif  // DEBUG
}
627
628
GetDeoptimizationEntry(Isolate * isolate,int id,BailoutType type,GetEntryMode mode)629 Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
630 int id,
631 BailoutType type,
632 GetEntryMode mode) {
633 CHECK_GE(id, 0);
634 if (id >= kMaxNumberOfEntries) return NULL;
635 if (mode == ENSURE_ENTRY_CODE) {
636 EnsureCodeForDeoptimizationEntry(isolate, type, id);
637 } else {
638 CHECK_EQ(mode, CALCULATE_ENTRY_ADDRESS);
639 }
640 DeoptimizerData* data = isolate->deoptimizer_data();
641 CHECK_LT(type, kBailoutTypesWithCodeEntry);
642 MemoryChunk* base = data->deopt_entry_code_[type];
643 return base->area_start() + (id * table_entry_size_);
644 }
645
646
GetDeoptimizationId(Isolate * isolate,Address addr,BailoutType type)647 int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
648 Address addr,
649 BailoutType type) {
650 DeoptimizerData* data = isolate->deoptimizer_data();
651 MemoryChunk* base = data->deopt_entry_code_[type];
652 Address start = base->area_start();
653 if (addr < start ||
654 addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
655 return kNotDeoptimizationEntry;
656 }
657 DCHECK_EQ(0,
658 static_cast<int>(addr - start) % table_entry_size_);
659 return static_cast<int>(addr - start) / table_entry_size_;
660 }
661
662
GetOutputInfo(DeoptimizationOutputData * data,BailoutId id,SharedFunctionInfo * shared)663 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
664 BailoutId id,
665 SharedFunctionInfo* shared) {
666 // TODO(kasperl): For now, we do a simple linear search for the PC
667 // offset associated with the given node id. This should probably be
668 // changed to a binary search.
669 int length = data->DeoptPoints();
670 for (int i = 0; i < length; i++) {
671 if (data->AstId(i) == id) {
672 return data->PcAndState(i)->value();
673 }
674 }
675 OFStream os(stderr);
676 os << "[couldn't find pc offset for node=" << id.ToInt() << "]\n"
677 << "[method: " << shared->DebugName()->ToCString().get() << "]\n"
678 << "[source:\n" << SourceCodeOf(shared) << "\n]" << std::endl;
679
680 shared->GetHeap()->isolate()->PushStackTraceAndDie(0xfefefefe, data, shared,
681 0xfefefeff);
682 FATAL("unable to find pc offset during deoptimization");
683 return -1;
684 }
685
686
GetDeoptimizedCodeCount(Isolate * isolate)687 int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
688 int length = 0;
689 // Count all entries in the deoptimizing code list of every context.
690 Object* context = isolate->heap()->native_contexts_list();
691 while (!context->IsUndefined()) {
692 Context* native_context = Context::cast(context);
693 Object* element = native_context->DeoptimizedCodeListHead();
694 while (!element->IsUndefined()) {
695 Code* code = Code::cast(element);
696 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
697 length++;
698 element = code->next_code_link();
699 }
700 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
701 }
702 return length;
703 }
704
705
// We rely on this function not causing a GC. It is called from generated code
// without having a real stack frame in place.
// Decodes the deoptimization translation for the current bailout and
// materializes one FrameDescription per unoptimized frame into output_.
void Deoptimizer::DoComputeOutputFrames() {
  base::ElapsedTimer timer;

  // Determine basic deoptimization information. The optimized frame is
  // described by the input data.
  DeoptimizationInputData* input_data =
      DeoptimizationInputData::cast(compiled_code_->deoptimization_data());

  if (trace_scope_ != NULL) {
    timer.Start();
    PrintF(trace_scope_->file(), "[deoptimizing (DEOPT %s): begin ",
           MessageFor(bailout_type_));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " (opt #%d) @%d, FP to SP delta: %d]\n",
           input_data->OptimizationId()->value(),
           bailout_id_,
           fp_to_sp_delta_);
    if (bailout_type_ == EAGER || bailout_type_ == SOFT ||
        (compiled_code_->is_hydrogen_stub())) {
      compiled_code_->PrintDeoptLocation(trace_scope_->file(), from_);
    }
  }

  BailoutId node_id = input_data->AstId(bailout_id_);
  ByteArray* translations = input_data->TranslationByteArray();
  unsigned translation_index =
      input_data->TranslationIndex(bailout_id_)->value();

  // Decode the translation into a TranslatedState describing every frame
  // that has to be rebuilt.
  TranslationIterator state_iterator(translations, translation_index);
  translated_state_.Init(
      input_->GetFramePointerAddress(), &state_iterator,
      input_data->LiteralArray(), input_->GetRegisterValues(),
      trace_scope_ == nullptr ? nullptr : trace_scope_->file());

  // Do the input frame to output frame(s) translation.
  size_t count = translated_state_.frames().size();
  DCHECK(output_ == NULL);
  output_ = new FrameDescription*[count];
  for (size_t i = 0; i < count; ++i) {
    output_[i] = NULL;
  }
  output_count_ = static_cast<int>(count);

  Register fp_reg = JavaScriptFrame::fp_register();
  stack_fp_ = reinterpret_cast<Address>(
      input_->GetRegister(fp_reg.code()) +
      has_alignment_padding_ * kPointerSize);

  // Translate each output frame.
  for (size_t i = 0; i < count; ++i) {
    // Read the ast node id, function, and frame height for this output frame.
    int frame_index = static_cast<int>(i);
    switch (translated_state_.frames()[i].kind()) {
      case TranslatedFrame::kFunction:
        DoComputeJSFrame(frame_index);
        jsframe_count_++;
        break;
      case TranslatedFrame::kInterpretedFunction:
        DoComputeInterpretedFrame(frame_index);
        jsframe_count_++;
        break;
      case TranslatedFrame::kArgumentsAdaptor:
        DoComputeArgumentsAdaptorFrame(frame_index);
        break;
      case TranslatedFrame::kConstructStub:
        DoComputeConstructStubFrame(frame_index);
        break;
      case TranslatedFrame::kGetter:
        DoComputeAccessorStubFrame(frame_index, false);
        break;
      case TranslatedFrame::kSetter:
        DoComputeAccessorStubFrame(frame_index, true);
        break;
      case TranslatedFrame::kCompiledStub:
        DoComputeCompiledStubFrame(frame_index);
        break;
      case TranslatedFrame::kInvalid:
        FATAL("invalid frame");
        break;
    }
  }

  // Print some helpful diagnostic information.
  if (trace_scope_ != NULL) {
    double ms = timer.Elapsed().InMillisecondsF();
    int index = output_count_ - 1;  // Index of the topmost frame.
    PrintF(trace_scope_->file(), "[deoptimizing (%s): end ",
           MessageFor(bailout_type_));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " @%d => node=%d, pc=0x%08" V8PRIxPTR ", state=%s, alignment=%s,"
           " took %0.3f ms]\n",
           bailout_id_,
           node_id.ToInt(),
           output_[index]->GetPc(),
           FullCodeGenerator::State2String(
               static_cast<FullCodeGenerator::State>(
                   output_[index]->GetState()->value())),
           has_alignment_padding_ ? "with padding" : "no padding",
           ms);
  }
}
811
812
DoComputeJSFrame(int frame_index)813 void Deoptimizer::DoComputeJSFrame(int frame_index) {
814 TranslatedFrame* translated_frame =
815 &(translated_state_.frames()[frame_index]);
816 TranslatedFrame::iterator value_iterator = translated_frame->begin();
817 int input_index = 0;
818
819 BailoutId node_id = translated_frame->node_id();
820 unsigned height =
821 translated_frame->height() - 1; // Do not count the context.
822 unsigned height_in_bytes = height * kPointerSize;
823 JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
824 value_iterator++;
825 input_index++;
826 if (trace_scope_ != NULL) {
827 PrintF(trace_scope_->file(), " translating frame ");
828 function->PrintName(trace_scope_->file());
829 PrintF(trace_scope_->file(),
830 " => node=%d, height=%d\n", node_id.ToInt(), height_in_bytes);
831 }
832
833 // The 'fixed' part of the frame consists of the incoming parameters and
834 // the part described by JavaScriptFrameConstants.
835 unsigned fixed_frame_size = ComputeJavascriptFixedSize(function);
836 unsigned input_frame_size = input_->GetFrameSize();
837 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
838
839 // Allocate and store the output frame description.
840 FrameDescription* output_frame =
841 new(output_frame_size) FrameDescription(output_frame_size, function);
842 output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
843
844 bool is_bottommost = (0 == frame_index);
845 bool is_topmost = (output_count_ - 1 == frame_index);
846 CHECK(frame_index >= 0 && frame_index < output_count_);
847 CHECK_NULL(output_[frame_index]);
848 output_[frame_index] = output_frame;
849
850 // The top address for the bottommost output frame can be computed from
851 // the input frame pointer and the output frame's height. For all
852 // subsequent output frames, it can be computed from the previous one's
853 // top address and the current frame's size.
854 Register fp_reg = JavaScriptFrame::fp_register();
855 intptr_t top_address;
856 if (is_bottommost) {
857 // Determine whether the input frame contains alignment padding.
858 has_alignment_padding_ =
859 (!compiled_code_->is_turbofanned() && HasAlignmentPadding(function))
860 ? 1
861 : 0;
862 // 2 = context and function in the frame.
863 // If the optimized frame had alignment padding, adjust the frame pointer
864 // to point to the new position of the old frame pointer after padding
865 // is removed. Subtract 2 * kPointerSize for the context and function slots.
866 top_address = input_->GetRegister(fp_reg.code()) -
867 StandardFrameConstants::kFixedFrameSizeFromFp -
868 height_in_bytes + has_alignment_padding_ * kPointerSize;
869 } else {
870 top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
871 }
872 output_frame->SetTop(top_address);
873
874 // Compute the incoming parameter translation.
875 int parameter_count =
876 function->shared()->internal_formal_parameter_count() + 1;
877 unsigned output_offset = output_frame_size;
878 unsigned input_offset = input_frame_size;
879 for (int i = 0; i < parameter_count; ++i) {
880 output_offset -= kPointerSize;
881 WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
882 output_offset);
883 }
884 input_offset -= (parameter_count * kPointerSize);
885
886 // There are no translation commands for the caller's pc and fp, the
887 // context, and the function. Synthesize their values and set them up
888 // explicitly.
889 //
890 // The caller's pc for the bottommost output frame is the same as in the
891 // input frame. For all subsequent output frames, it can be read from the
892 // previous one. This frame's pc can be computed from the non-optimized
893 // function code and AST id of the bailout.
894 output_offset -= kPCOnStackSize;
895 input_offset -= kPCOnStackSize;
896 intptr_t value;
897 if (is_bottommost) {
898 value = input_->GetFrameSlot(input_offset);
899 } else {
900 value = output_[frame_index - 1]->GetPc();
901 }
902 output_frame->SetCallerPc(output_offset, value);
903 DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");
904
905 // The caller's frame pointer for the bottommost output frame is the same
906 // as in the input frame. For all subsequent output frames, it can be
907 // read from the previous one. Also compute and set this frame's frame
908 // pointer.
909 output_offset -= kFPOnStackSize;
910 input_offset -= kFPOnStackSize;
911 if (is_bottommost) {
912 value = input_->GetFrameSlot(input_offset);
913 } else {
914 value = output_[frame_index - 1]->GetFp();
915 }
916 output_frame->SetCallerFp(output_offset, value);
917 intptr_t fp_value = top_address + output_offset;
918 DCHECK(!is_bottommost || (input_->GetRegister(fp_reg.code()) +
919 has_alignment_padding_ * kPointerSize) == fp_value);
920 output_frame->SetFp(fp_value);
921 if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value);
922 DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");
923 DCHECK(!is_bottommost || !has_alignment_padding_ ||
924 (fp_value & kPointerSize) != 0);
925
926 if (FLAG_enable_embedded_constant_pool) {
927 // For the bottommost output frame the constant pool pointer can be gotten
928 // from the input frame. For subsequent output frames, it can be read from
929 // the previous frame.
930 output_offset -= kPointerSize;
931 input_offset -= kPointerSize;
932 if (is_bottommost) {
933 value = input_->GetFrameSlot(input_offset);
934 } else {
935 value = output_[frame_index - 1]->GetConstantPool();
936 }
937 output_frame->SetCallerConstantPool(output_offset, value);
938 DebugPrintOutputSlot(value, frame_index, output_offset,
939 "caller's constant_pool\n");
940 }
941
942 // For the bottommost output frame the context can be gotten from the input
943 // frame. For all subsequent output frames it can be gotten from the function
944 // so long as we don't inline functions that need local contexts.
945 Register context_reg = JavaScriptFrame::context_register();
946 output_offset -= kPointerSize;
947 input_offset -= kPointerSize;
948 // Read the context from the translations.
949 Object* context = value_iterator->GetRawValue();
950 if (context == isolate_->heap()->undefined_value()) {
951 // If the context was optimized away, just use the context from
952 // the activation. This should only apply to Crankshaft code.
953 CHECK(!compiled_code_->is_turbofanned());
954 context =
955 is_bottommost
956 ? reinterpret_cast<Object*>(input_->GetFrameSlot(input_offset))
957 : function->context();
958 }
959 value = reinterpret_cast<intptr_t>(context);
960 output_frame->SetContext(value);
961 if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
962 WriteValueToOutput(context, input_index, frame_index, output_offset,
963 "context ");
964 if (context == isolate_->heap()->arguments_marker()) {
965 Address output_address =
966 reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
967 output_offset;
968 values_to_materialize_.push_back({output_address, value_iterator});
969 }
970 value_iterator++;
971 input_index++;
972
973 // The function was mentioned explicitly in the BEGIN_FRAME.
974 output_offset -= kPointerSize;
975 input_offset -= kPointerSize;
976 value = reinterpret_cast<intptr_t>(function);
977 // The function for the bottommost output frame should also agree with the
978 // input frame.
979 DCHECK(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
980 WriteValueToOutput(function, 0, frame_index, output_offset, "function ");
981
982 // Translate the rest of the frame.
983 for (unsigned i = 0; i < height; ++i) {
984 output_offset -= kPointerSize;
985 WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
986 output_offset);
987 }
988 CHECK_EQ(0u, output_offset);
989
990 // Compute this frame's PC, state, and continuation.
991 Code* non_optimized_code = function->shared()->code();
992 FixedArray* raw_data = non_optimized_code->deoptimization_data();
993 DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
994 Address start = non_optimized_code->instruction_start();
995 unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
996 unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
997 intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
998 output_frame->SetPc(pc_value);
999
1000 // Update constant pool.
1001 if (FLAG_enable_embedded_constant_pool) {
1002 intptr_t constant_pool_value =
1003 reinterpret_cast<intptr_t>(non_optimized_code->constant_pool());
1004 output_frame->SetConstantPool(constant_pool_value);
1005 if (is_topmost) {
1006 Register constant_pool_reg =
1007 JavaScriptFrame::constant_pool_pointer_register();
1008 output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
1009 }
1010 }
1011
1012 FullCodeGenerator::State state =
1013 FullCodeGenerator::StateField::decode(pc_and_state);
1014 output_frame->SetState(Smi::FromInt(state));
1015
1016 // Set the continuation for the topmost frame.
1017 if (is_topmost && bailout_type_ != DEBUGGER) {
1018 Builtins* builtins = isolate_->builtins();
1019 Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
1020 if (bailout_type_ == LAZY) {
1021 continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
1022 } else if (bailout_type_ == SOFT) {
1023 continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
1024 } else {
1025 CHECK_EQ(bailout_type_, EAGER);
1026 }
1027 output_frame->SetContinuation(
1028 reinterpret_cast<intptr_t>(continuation->entry()));
1029 }
1030 }
1031
1032
// Materializes the output frame for one interpreted (bytecode) frame at
// position |frame_index| of the output frame array.  The frame image is
// filled from high addresses to low: incoming parameters first, then the
// fixed part (caller pc/fp, optional constant pool, context, function,
// new.target, bytecode offset), then the interpreter register file.
// |output_offset| tracks the next slot to fill and must reach exactly 0.
void Deoptimizer::DoComputeInterpretedFrame(int frame_index) {
  TranslatedFrame* translated_frame =
      &(translated_state_.frames()[frame_index]);
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  int input_index = 0;

  BailoutId bytecode_offset = translated_frame->node_id();
  unsigned height = translated_frame->height();
  unsigned height_in_bytes = height * kPointerSize;
  // The first translated value of the frame is the function itself.
  JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
  value_iterator++;
  input_index++;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(), "  translating interpreted frame ");
    function->PrintName(trace_scope_->file());
    PrintF(trace_scope_->file(), " => bytecode_offset=%d, height=%d\n",
           bytecode_offset.ToInt(), height_in_bytes);
  }

  // The 'fixed' part of the frame consists of the incoming parameters and
  // the part described by InterpreterFrameConstants.
  unsigned fixed_frame_size = ComputeInterpretedFixedSize(function);
  unsigned input_frame_size = input_->GetFrameSize();
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new (output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::INTERPRETED);

  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  CHECK(frame_index >= 0 && frame_index < output_count_);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address for the bottommost output frame can be computed from
  // the input frame pointer and the output frame's height. For all
  // subsequent output frames, it can be computed from the previous one's
  // top address and the current frame's size.
  Register fp_reg = InterpretedFrame::fp_register();
  intptr_t top_address;
  if (is_bottommost) {
    // Subtract the interpreter fixed frame size (the context and function
    // slots, new.target and the bytecode offset).
    top_address = input_->GetRegister(fp_reg.code()) -
                  InterpreterFrameConstants::kFixedFrameSizeFromFp -
                  height_in_bytes;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  // +1 accounts for the implicit receiver.
  int parameter_count =
      function->shared()->internal_formal_parameter_count() + 1;
  unsigned output_offset = output_frame_size;
  unsigned input_offset = input_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }
  input_offset -= (parameter_count * kPointerSize);

  // There are no translation commands for the caller's pc and fp, the
  // context, the function, new.target and the bytecode offset. Synthesize
  // their values and set them up
  // explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame. For all subsequent output frames, it can be read from the
  // previous one. This frame's pc can be computed from the non-optimized
  // function code and AST id of the bailout.
  output_offset -= kPCOnStackSize;
  input_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame. For all subsequent output frames, it can be
  // read from the previous one. Also compute and set this frame's frame
  // pointer.
  output_offset -= kFPOnStackSize;
  input_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  DCHECK(!is_bottommost ||
         (input_->GetRegister(fp_reg.code()) +
          has_alignment_padding_ * kPointerSize) == fp_value);
  output_frame->SetFp(fp_value);
  // Only the topmost frame's registers are restored on deopt exit.
  if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");
  DCHECK(!is_bottommost || !has_alignment_padding_ ||
         (fp_value & kPointerSize) != 0);

  if (FLAG_enable_embedded_constant_pool) {
    // For the bottommost output frame the constant pool pointer can be gotten
    // from the input frame. For subsequent output frames, it can be read from
    // the previous frame.
    output_offset -= kPointerSize;
    input_offset -= kPointerSize;
    if (is_bottommost) {
      value = input_->GetFrameSlot(input_offset);
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.
  Register context_reg = InterpretedFrame::context_register();
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  // Read the context from the translations.
  Object* context = value_iterator->GetRawValue();
  // The context should not be a placeholder for a materialized object.
  CHECK(context != isolate_->heap()->arguments_marker());
  value = reinterpret_cast<intptr_t>(context);
  output_frame->SetContext(value);
  if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
  WriteValueToOutput(context, input_index, frame_index, output_offset,
                     "context    ");
  value_iterator++;
  input_index++;

  // The function was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  // The function for the bottommost output frame should also agree with the
  // input frame.
  DCHECK(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
  WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");

  // TODO(rmcilroy): Deal with new.target correctly - currently just set it to
  // undefined.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  Object* new_target = isolate_->heap()->undefined_value();
  WriteValueToOutput(new_target, 0, frame_index, output_offset, "new_target ");

  // The bytecode offset was mentioned explicitly in the BEGIN_FRAME.
  // It is stored as a raw pointer offset into the BytecodeArray object
  // (header size included, tag removed), boxed as a Smi.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  int raw_bytecode_offset =
      BytecodeArray::kHeaderSize - kHeapObjectTag + bytecode_offset.ToInt();
  Smi* smi_bytecode_offset = Smi::FromInt(raw_bytecode_offset);
  WriteValueToOutput(smi_bytecode_offset, 0, frame_index, output_offset,
                     "bytecode offset ");

  // Translate the rest of the interpreter registers in the frame.
  for (unsigned i = 0; i < height; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }
  // Every slot of the output frame must have been written exactly once.
  CHECK_EQ(0u, output_offset);

  // Set the accumulator register.  The accumulator is the next translated
  // value after the register file.
  output_frame->SetRegister(
      kInterpreterAccumulatorRegister.code(),
      reinterpret_cast<intptr_t>(value_iterator->GetRawValue()));
  value_iterator++;

  // Resume execution through the interpreter entry trampoline rather than
  // at a bytecode address directly.
  Builtins* builtins = isolate_->builtins();
  Code* trampoline = builtins->builtin(Builtins::kInterpreterEntryTrampoline);
  output_frame->SetPc(reinterpret_cast<intptr_t>(trampoline->entry()));
  output_frame->SetState(0);

  // Update constant pool.
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(trampoline->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    if (is_topmost) {
      Register constant_pool_reg =
          InterpretedFrame::constant_pool_pointer_register();
      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
    }
  }

  // Set the continuation for the topmost frame.  The continuation notifies
  // the runtime of the deopt; its flavor depends on the bailout type.
  if (is_topmost && bailout_type_ != DEBUGGER) {
    Code* continuation =
        builtins->builtin(Builtins::kInterpreterNotifyDeoptimized);
    if (bailout_type_ == LAZY) {
      continuation =
          builtins->builtin(Builtins::kInterpreterNotifyLazyDeoptimized);
    } else if (bailout_type_ == SOFT) {
      continuation =
          builtins->builtin(Builtins::kInterpreterNotifySoftDeoptimized);
    } else {
      CHECK_EQ(bailout_type_, EAGER);
    }
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->entry()));
  }
}
1247
1248
DoComputeArgumentsAdaptorFrame(int frame_index)1249 void Deoptimizer::DoComputeArgumentsAdaptorFrame(int frame_index) {
1250 TranslatedFrame* translated_frame =
1251 &(translated_state_.frames()[frame_index]);
1252 TranslatedFrame::iterator value_iterator = translated_frame->begin();
1253 int input_index = 0;
1254
1255 unsigned height = translated_frame->height();
1256 unsigned height_in_bytes = height * kPointerSize;
1257 JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
1258 value_iterator++;
1259 input_index++;
1260 if (trace_scope_ != NULL) {
1261 PrintF(trace_scope_->file(),
1262 " translating arguments adaptor => height=%d\n", height_in_bytes);
1263 }
1264
1265 unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
1266 unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1267
1268 // Allocate and store the output frame description.
1269 FrameDescription* output_frame =
1270 new(output_frame_size) FrameDescription(output_frame_size, function);
1271 output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
1272
1273 // Arguments adaptor can not be topmost or bottommost.
1274 CHECK(frame_index > 0 && frame_index < output_count_ - 1);
1275 CHECK(output_[frame_index] == NULL);
1276 output_[frame_index] = output_frame;
1277
1278 // The top address of the frame is computed from the previous
1279 // frame's top and this frame's size.
1280 intptr_t top_address;
1281 top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1282 output_frame->SetTop(top_address);
1283
1284 // Compute the incoming parameter translation.
1285 int parameter_count = height;
1286 unsigned output_offset = output_frame_size;
1287 for (int i = 0; i < parameter_count; ++i) {
1288 output_offset -= kPointerSize;
1289 WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
1290 output_offset);
1291 }
1292
1293 // Read caller's PC from the previous frame.
1294 output_offset -= kPCOnStackSize;
1295 intptr_t callers_pc = output_[frame_index - 1]->GetPc();
1296 output_frame->SetCallerPc(output_offset, callers_pc);
1297 DebugPrintOutputSlot(callers_pc, frame_index, output_offset, "caller's pc\n");
1298
1299 // Read caller's FP from the previous frame, and set this frame's FP.
1300 output_offset -= kFPOnStackSize;
1301 intptr_t value = output_[frame_index - 1]->GetFp();
1302 output_frame->SetCallerFp(output_offset, value);
1303 intptr_t fp_value = top_address + output_offset;
1304 output_frame->SetFp(fp_value);
1305 DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");
1306
1307 if (FLAG_enable_embedded_constant_pool) {
1308 // Read the caller's constant pool from the previous frame.
1309 output_offset -= kPointerSize;
1310 value = output_[frame_index - 1]->GetConstantPool();
1311 output_frame->SetCallerConstantPool(output_offset, value);
1312 DebugPrintOutputSlot(value, frame_index, output_offset,
1313 "caller's constant_pool\n");
1314 }
1315
1316 // A marker value is used in place of the context.
1317 output_offset -= kPointerSize;
1318 intptr_t context = reinterpret_cast<intptr_t>(
1319 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1320 output_frame->SetFrameSlot(output_offset, context);
1321 DebugPrintOutputSlot(context, frame_index, output_offset,
1322 "context (adaptor sentinel)\n");
1323
1324 // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
1325 output_offset -= kPointerSize;
1326 value = reinterpret_cast<intptr_t>(function);
1327 WriteValueToOutput(function, 0, frame_index, output_offset, "function ");
1328
1329 // Number of incoming arguments.
1330 output_offset -= kPointerSize;
1331 value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
1332 output_frame->SetFrameSlot(output_offset, value);
1333 DebugPrintOutputSlot(value, frame_index, output_offset, "argc ");
1334 if (trace_scope_ != nullptr) {
1335 PrintF(trace_scope_->file(), "(%d)\n", height - 1);
1336 }
1337
1338 DCHECK(0 == output_offset);
1339
1340 Builtins* builtins = isolate_->builtins();
1341 Code* adaptor_trampoline =
1342 builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
1343 intptr_t pc_value = reinterpret_cast<intptr_t>(
1344 adaptor_trampoline->instruction_start() +
1345 isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
1346 output_frame->SetPc(pc_value);
1347 if (FLAG_enable_embedded_constant_pool) {
1348 intptr_t constant_pool_value =
1349 reinterpret_cast<intptr_t>(adaptor_trampoline->constant_pool());
1350 output_frame->SetConstantPool(constant_pool_value);
1351 }
1352 }
1353
1354
// Materializes the output frame for a construct stub frame at position
// |frame_index| of the output frame array.  This frame reflects a
// JSConstructStubGeneric activation created for an inlined constructor
// call; it can never be the topmost or bottommost frame.  Slots are
// written from high addresses to low via |output_offset|, which must end
// at exactly 0.
void Deoptimizer::DoComputeConstructStubFrame(int frame_index) {
  TranslatedFrame* translated_frame =
      &(translated_state_.frames()[frame_index]);
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  int input_index = 0;

  Builtins* builtins = isolate_->builtins();
  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
  unsigned height = translated_frame->height();
  unsigned height_in_bytes = height * kPointerSize;
  // The first translated value of the frame is the constructor function.
  JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
  value_iterator++;
  input_index++;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  translating construct stub => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ConstructFrameConstants::kFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::CONSTRUCT);

  // Construct stub can not be topmost or bottommost.
  DCHECK(frame_index > 0 && frame_index < output_count_ - 1);
  DCHECK(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    // The allocated receiver of a construct stub frame is passed as the
    // receiver parameter through the translation. It might be encoding
    // a captured object, override the slot address for a captured object.
    WriteTranslatedValueToOutput(
        &value_iterator, &input_index, frame_index, output_offset, nullptr,
        (i == 0) ? reinterpret_cast<Address>(top_address) : nullptr);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  DebugPrintOutputSlot(callers_pc, frame_index, output_offset, "caller's pc\n");

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    value = output_[frame_index - 1]->GetConstantPool();
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");

  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset,
                       "function (construct sentinel)\n");

  // The output frame reflects a JSConstructStubGeneric frame.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(construct_stub);
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "code object\n");

  // The allocation site.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(isolate_->heap()->undefined_value());
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "allocation site\n");

  // Number of incoming arguments.  height - 1 excludes the receiver.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "argc ");
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "(%d)\n", height - 1);
  }

  // The newly allocated object was passed as receiver in the artificial
  // constructor stub environment created by HEnvironment::CopyForInlining().
  // Re-read it from the receiver slot written by the parameter loop above
  // (the highest-addressed slot of this frame).
  output_offset -= kPointerSize;
  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset,
                       "allocated receiver\n");

  // Every slot of the output frame must have been written exactly once.
  CHECK_EQ(0u, output_offset);

  // Resume at the deopt entry point inside the construct stub.
  intptr_t pc = reinterpret_cast<intptr_t>(
      construct_stub->instruction_start() +
      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
  output_frame->SetPc(pc);
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(construct_stub->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
  }
}
1482
1483
// Materializes the output frame for an inlined accessor (getter/setter)
// stub frame at position |frame_index| of the output frame array.  When
// |is_setter_stub_frame| is true a StoreIC setter frame is built (with one
// extra slot for the implicit return value), otherwise a LoadIC getter
// frame.  Such a frame can never be topmost or bottommost.  Slots are
// written from high addresses to low via |output_offset|, which must end
// at exactly 0.
void Deoptimizer::DoComputeAccessorStubFrame(int frame_index,
                                             bool is_setter_stub_frame) {
  TranslatedFrame* translated_frame =
      &(translated_state_.frames()[frame_index]);
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  int input_index = 0;

  // The first translated value of the frame is the accessor function.
  JSFunction* accessor = JSFunction::cast(value_iterator->GetRawValue());
  value_iterator++;
  input_index++;
  // The receiver (and the implicit return value, if any) are expected in
  // registers by the LoadIC/StoreIC, so they don't belong to the output stack
  // frame. This means that we have to use a height of 0.
  unsigned height = 0;
  unsigned height_in_bytes = height * kPointerSize;
  const char* kind = is_setter_stub_frame ? "setter" : "getter";
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  translating %s stub => height=%u\n", kind, height_in_bytes);
  }

  // We need 1 stack entry for the return address and enough entries for the
  // StackFrame::INTERNAL (FP, context, frame type, code object and constant
  // pool (if enabled)- see MacroAssembler::EnterFrame).
  // For a setter stub frame we need one additional entry for the implicit
  // return value, see StoreStubCompiler::CompileStoreViaSetter.
  unsigned fixed_frame_entries =
      (StandardFrameConstants::kFixedFrameSize / kPointerSize) + 1 +
      (is_setter_stub_frame ? 1 : 0);
  unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, accessor);
  output_frame->SetFrameType(StackFrame::INTERNAL);

  // A frame for an accessor stub can not be the topmost or bottommost one.
  CHECK(frame_index > 0 && frame_index < output_count_ - 1);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  unsigned output_offset = output_frame_size;

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetCallerPc(output_offset, callers_pc);
  DebugPrintOutputSlot(callers_pc, frame_index, output_offset, "caller's pc\n");

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    value = output_[frame_index - 1]->GetConstantPool();
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");

  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "function ");
  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "(%s sentinel)\n", kind);
  }

  // Get Code object from accessor stub.
  output_offset -= kPointerSize;
  Builtins::Name name = is_setter_stub_frame ?
      Builtins::kStoreIC_Setter_ForDeopt :
      Builtins::kLoadIC_Getter_ForDeopt;
  Code* accessor_stub = isolate_->builtins()->builtin(name);
  value = reinterpret_cast<intptr_t>(accessor_stub);
  output_frame->SetFrameSlot(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "code object\n");

  // Skip receiver.  The IC expects it in a register, not on the stack, so
  // advance past its translated value without writing a slot.
  value_iterator++;
  input_index++;

  if (is_setter_stub_frame) {
    // The implicit return value was part of the artificial setter stub
    // environment.
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  // Every slot of the output frame must have been written exactly once.
  CHECK_EQ(0u, output_offset);

  // Resume at the deopt entry point recorded for the accessor stub.
  Smi* offset = is_setter_stub_frame ?
      isolate_->heap()->setter_stub_deopt_pc_offset() :
      isolate_->heap()->getter_stub_deopt_pc_offset();
  intptr_t pc = reinterpret_cast<intptr_t>(
      accessor_stub->instruction_start() + offset->value());
  output_frame->SetPc(pc);
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(accessor_stub->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
  }
}
1607
1608
DoComputeCompiledStubFrame(int frame_index)1609 void Deoptimizer::DoComputeCompiledStubFrame(int frame_index) {
1610 //
1611 // FROM TO
1612 // | .... | | .... |
1613 // +-------------------------+ +-------------------------+
1614 // | JSFunction continuation | | JSFunction continuation |
1615 // +-------------------------+ +-------------------------+
1616 // | | saved frame (FP) | | saved frame (FP) |
1617 // | +=========================+<-fpreg +=========================+<-fpreg
1618 // | |constant pool (if ool_cp)| |constant pool (if ool_cp)|
1619 // | +-------------------------+ +-------------------------|
1620 // | | JSFunction context | | JSFunction context |
1621 // v +-------------------------+ +-------------------------|
1622 // | COMPILED_STUB marker | | STUB_FAILURE marker |
1623 // +-------------------------+ +-------------------------+
1624 // | | | caller args.arguments_ |
1625 // | ... | +-------------------------+
1626 // | | | caller args.length_ |
1627 // |-------------------------|<-spreg +-------------------------+
1628 // | caller args pointer |
1629 // +-------------------------+
1630 // | caller stack param 1 |
1631 // parameters in registers +-------------------------+
1632 // and spilled to stack | .... |
1633 // +-------------------------+
1634 // | caller stack param n |
1635 // +-------------------------+<-spreg
1636 // reg = number of parameters
1637 // reg = failure handler address
1638 // reg = saved frame
1639 // reg = JSFunction context
1640 //
1641 // Caller stack params contain the register parameters to the stub first,
1642 // and then, if the descriptor specifies a constant number of stack
1643 // parameters, the stack parameters as well.
1644
1645 TranslatedFrame* translated_frame =
1646 &(translated_state_.frames()[frame_index]);
1647 TranslatedFrame::iterator value_iterator = translated_frame->begin();
1648 int input_index = 0;
1649
1650 CHECK(compiled_code_->is_hydrogen_stub());
1651 int major_key = CodeStub::GetMajorKey(compiled_code_);
1652 CodeStubDescriptor descriptor(isolate_, compiled_code_->stub_key());
1653
1654 // The output frame must have room for all pushed register parameters
1655 // and the standard stack frame slots. Include space for an argument
1656 // object to the callee and optionally the space to pass the argument
1657 // object to the stub failure handler.
1658 int param_count = descriptor.GetRegisterParameterCount();
1659 int stack_param_count = descriptor.GetStackParameterCount();
1660 CHECK_EQ(translated_frame->height(), param_count);
1661 CHECK_GE(param_count, 0);
1662
1663 int height_in_bytes = kPointerSize * (param_count + stack_param_count) +
1664 sizeof(Arguments) + kPointerSize;
1665 int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
1666 int input_frame_size = input_->GetFrameSize();
1667 int output_frame_size = height_in_bytes + fixed_frame_size;
1668 if (trace_scope_ != NULL) {
1669 PrintF(trace_scope_->file(),
1670 " translating %s => StubFailureTrampolineStub, height=%d\n",
1671 CodeStub::MajorName(static_cast<CodeStub::Major>(major_key)),
1672 height_in_bytes);
1673 }
1674
1675 // The stub failure trampoline is a single frame.
1676 FrameDescription* output_frame =
1677 new(output_frame_size) FrameDescription(output_frame_size, NULL);
1678 output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
1679 CHECK_EQ(frame_index, 0);
1680 output_[frame_index] = output_frame;
1681
1682 // The top address for the output frame can be computed from the input
1683 // frame pointer and the output frame's height. Subtract space for the
1684 // context and function slots.
1685 Register fp_reg = StubFailureTrampolineFrame::fp_register();
1686 intptr_t top_address = input_->GetRegister(fp_reg.code()) -
1687 StandardFrameConstants::kFixedFrameSizeFromFp - height_in_bytes;
1688 output_frame->SetTop(top_address);
1689
1690 // Read caller's PC (JSFunction continuation) from the input frame.
1691 unsigned input_frame_offset = input_frame_size - kPCOnStackSize;
1692 unsigned output_frame_offset = output_frame_size - kFPOnStackSize;
1693 intptr_t value = input_->GetFrameSlot(input_frame_offset);
1694 output_frame->SetCallerPc(output_frame_offset, value);
1695 DebugPrintOutputSlot(value, frame_index, output_frame_offset,
1696 "caller's pc\n");
1697
1698 // Read caller's FP from the input frame, and set this frame's FP.
1699 input_frame_offset -= kFPOnStackSize;
1700 value = input_->GetFrameSlot(input_frame_offset);
1701 output_frame_offset -= kFPOnStackSize;
1702 output_frame->SetCallerFp(output_frame_offset, value);
1703 intptr_t frame_ptr = input_->GetRegister(fp_reg.code());
1704 output_frame->SetRegister(fp_reg.code(), frame_ptr);
1705 output_frame->SetFp(frame_ptr);
1706 DebugPrintOutputSlot(value, frame_index, output_frame_offset,
1707 "caller's fp\n");
1708
1709 if (FLAG_enable_embedded_constant_pool) {
1710 // Read the caller's constant pool from the input frame.
1711 input_frame_offset -= kPointerSize;
1712 value = input_->GetFrameSlot(input_frame_offset);
1713 output_frame_offset -= kPointerSize;
1714 output_frame->SetCallerConstantPool(output_frame_offset, value);
1715 DebugPrintOutputSlot(value, frame_index, output_frame_offset,
1716 "caller's constant_pool\n");
1717 }
1718
1719 // The context can be gotten from the input frame.
1720 Register context_reg = StubFailureTrampolineFrame::context_register();
1721 input_frame_offset -= kPointerSize;
1722 value = input_->GetFrameSlot(input_frame_offset);
1723 output_frame->SetRegister(context_reg.code(), value);
1724 output_frame_offset -= kPointerSize;
1725 output_frame->SetFrameSlot(output_frame_offset, value);
1726 CHECK(reinterpret_cast<Object*>(value)->IsContext());
1727 DebugPrintOutputSlot(value, frame_index, output_frame_offset, "context\n");
1728
1729 // A marker value is used in place of the function.
1730 output_frame_offset -= kPointerSize;
1731 value = reinterpret_cast<intptr_t>(
1732 Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
1733 output_frame->SetFrameSlot(output_frame_offset, value);
1734 DebugPrintOutputSlot(value, frame_index, output_frame_offset,
1735 "function (stub failure sentinel)\n");
1736
1737 intptr_t caller_arg_count = stack_param_count;
1738 bool arg_count_known = !descriptor.stack_parameter_count().is_valid();
1739
1740 // Build the Arguments object for the caller's parameters and a pointer to it.
1741 output_frame_offset -= kPointerSize;
1742 int args_arguments_offset = output_frame_offset;
1743 intptr_t the_hole = reinterpret_cast<intptr_t>(
1744 isolate_->heap()->the_hole_value());
1745 if (arg_count_known) {
1746 value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
1747 (caller_arg_count - 1) * kPointerSize;
1748 } else {
1749 value = the_hole;
1750 }
1751
1752 output_frame->SetFrameSlot(args_arguments_offset, value);
1753 DebugPrintOutputSlot(
1754 value, frame_index, args_arguments_offset,
1755 arg_count_known ? "args.arguments\n" : "args.arguments (the hole)\n");
1756
1757 output_frame_offset -= kPointerSize;
1758 int length_frame_offset = output_frame_offset;
1759 value = arg_count_known ? caller_arg_count : the_hole;
1760 output_frame->SetFrameSlot(length_frame_offset, value);
1761 DebugPrintOutputSlot(
1762 value, frame_index, length_frame_offset,
1763 arg_count_known ? "args.length\n" : "args.length (the hole)\n");
1764
1765 output_frame_offset -= kPointerSize;
1766 value = frame_ptr + StandardFrameConstants::kCallerSPOffset -
1767 (output_frame_size - output_frame_offset) + kPointerSize;
1768 output_frame->SetFrameSlot(output_frame_offset, value);
1769 DebugPrintOutputSlot(value, frame_index, output_frame_offset, "args*\n");
1770
1771 // Copy the register parameters to the failure frame.
1772 int arguments_length_offset = -1;
1773 for (int i = 0; i < param_count; ++i) {
1774 output_frame_offset -= kPointerSize;
1775 WriteTranslatedValueToOutput(&value_iterator, &input_index, 0,
1776 output_frame_offset);
1777
1778 if (!arg_count_known &&
1779 descriptor.GetRegisterParameter(i)
1780 .is(descriptor.stack_parameter_count())) {
1781 arguments_length_offset = output_frame_offset;
1782 }
1783 }
1784
1785 // Copy constant stack parameters to the failure frame. If the number of stack
1786 // parameters is not known in the descriptor, the arguments object is the way
1787 // to access them.
1788 for (int i = 0; i < stack_param_count; i++) {
1789 output_frame_offset -= kPointerSize;
1790 Object** stack_parameter = reinterpret_cast<Object**>(
1791 frame_ptr + StandardFrameConstants::kCallerSPOffset +
1792 (stack_param_count - i - 1) * kPointerSize);
1793 value = reinterpret_cast<intptr_t>(*stack_parameter);
1794 output_frame->SetFrameSlot(output_frame_offset, value);
1795 DebugPrintOutputSlot(value, frame_index, output_frame_offset,
1796 "stack parameter\n");
1797 }
1798
1799 CHECK_EQ(0u, output_frame_offset);
1800
1801 if (!arg_count_known) {
1802 CHECK_GE(arguments_length_offset, 0);
1803 // We know it's a smi because 1) the code stub guarantees the stack
1804 // parameter count is in smi range, and 2) the DoTranslateCommand in the
1805 // parameter loop above translated that to a tagged value.
1806 Smi* smi_caller_arg_count = reinterpret_cast<Smi*>(
1807 output_frame->GetFrameSlot(arguments_length_offset));
1808 caller_arg_count = smi_caller_arg_count->value();
1809 output_frame->SetFrameSlot(length_frame_offset, caller_arg_count);
1810 DebugPrintOutputSlot(caller_arg_count, frame_index, length_frame_offset,
1811 "args.length\n");
1812 value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
1813 (caller_arg_count - 1) * kPointerSize;
1814 output_frame->SetFrameSlot(args_arguments_offset, value);
1815 DebugPrintOutputSlot(value, frame_index, args_arguments_offset,
1816 "args.arguments");
1817 }
1818
1819 // Copy the double registers from the input into the output frame.
1820 CopyDoubleRegisters(output_frame);
1821
1822 // Fill registers containing handler and number of parameters.
1823 SetPlatformCompiledStubRegisters(output_frame, &descriptor);
1824
1825 // Compute this frame's PC, state, and continuation.
1826 Code* trampoline = NULL;
1827 StubFunctionMode function_mode = descriptor.function_mode();
1828 StubFailureTrampolineStub(isolate_, function_mode)
1829 .FindCodeInCache(&trampoline);
1830 DCHECK(trampoline != NULL);
1831 output_frame->SetPc(reinterpret_cast<intptr_t>(
1832 trampoline->instruction_start()));
1833 if (FLAG_enable_embedded_constant_pool) {
1834 Register constant_pool_reg =
1835 StubFailureTrampolineFrame::constant_pool_pointer_register();
1836 intptr_t constant_pool_value =
1837 reinterpret_cast<intptr_t>(trampoline->constant_pool());
1838 output_frame->SetConstantPool(constant_pool_value);
1839 output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
1840 }
1841 output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
1842 Code* notify_failure =
1843 isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
1844 output_frame->SetContinuation(
1845 reinterpret_cast<intptr_t>(notify_failure->entry()));
1846 }
1847
1848
MaterializeHeapObjects(JavaScriptFrameIterator * it)1849 void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
1850 DCHECK_NE(DEBUGGER, bailout_type_);
1851
1852 // Walk to the last JavaScript output frame to find out if it has
1853 // adapted arguments.
1854 for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) {
1855 if (frame_index != 0) it->Advance();
1856 }
1857 translated_state_.Prepare(it->frame()->has_adapted_arguments(), stack_fp_);
1858
1859 for (auto& materialization : values_to_materialize_) {
1860 Handle<Object> value = materialization.value_->GetValue();
1861
1862 if (trace_scope_ != nullptr) {
1863 PrintF("Materialization [0x%08" V8PRIxPTR "] <- 0x%08" V8PRIxPTR " ; ",
1864 reinterpret_cast<intptr_t>(materialization.output_slot_address_),
1865 reinterpret_cast<intptr_t>(*value));
1866 value->ShortPrint(trace_scope_->file());
1867 PrintF(trace_scope_->file(), "\n");
1868 }
1869
1870 *(reinterpret_cast<intptr_t*>(materialization.output_slot_address_)) =
1871 reinterpret_cast<intptr_t>(*value);
1872 }
1873
1874 isolate_->materialized_object_store()->Remove(stack_fp_);
1875 }
1876
1877
void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
    int frame_index, int parameter_count, int expression_count,
    DeoptimizedFrameInfo* info) {
  // Copies the translated parameter and expression-stack values of the frame
  // at 'frame_index' into 'info' for debugger inspection. Only valid for
  // DEBUGGER-triggered deopts.
  CHECK_EQ(DEBUGGER, bailout_type_);

  // No adapted arguments, no stack fp: this translation is only read, not
  // used to build real output frames.
  translated_state_.Prepare(false, nullptr);

  TranslatedFrame* frame = &(translated_state_.frames()[frame_index]);
  CHECK(frame->kind() == TranslatedFrame::kFunction);
  int frame_arg_count = frame->shared_info()->internal_formal_parameter_count();

  // The height is #expressions + 1 for context.
  CHECK_EQ(expression_count + 1, frame->height());
  // If the previous translated frame is an arguments adaptor, the actual
  // arguments live there rather than in this frame.
  TranslatedFrame* argument_frame = frame;
  if (frame_index > 0) {
    TranslatedFrame* previous_frame =
        &(translated_state_.frames()[frame_index - 1]);
    if (previous_frame->kind() == TranslatedFrame::kArgumentsAdaptor) {
      argument_frame = previous_frame;
      // The adaptor frame's height counts one extra slot (see CHECK below).
      CHECK_EQ(parameter_count, argument_frame->height() - 1);
    } else {
      CHECK_EQ(frame_arg_count, parameter_count);
    }
  } else {
    CHECK_EQ(frame_arg_count, parameter_count);
  }

  TranslatedFrame::iterator arg_iter = argument_frame->begin();
  arg_iter++;  // Skip the function.
  arg_iter++;  // Skip the receiver.
  for (int i = 0; i < parameter_count; i++, arg_iter++) {
    // Materialized objects are skipped; only plain values are copied into
    // 'info'.
    if (!arg_iter->IsMaterializedObject()) {
      info->SetParameter(i, *(arg_iter->GetValue()));
    }
  }

  TranslatedFrame::iterator iter = frame->begin();
  // Skip the function, receiver, context and arguments.
  for (int i = 0; i < frame_arg_count + 3; i++, iter++) {
  }

  // The remaining values are the expression stack, in order.
  for (int i = 0; i < expression_count; i++, iter++) {
    if (!iter->IsMaterializedObject()) {
      info->SetExpression(i, *(iter->GetValue()));
    }
  }
}
1925
1926
WriteTranslatedValueToOutput(TranslatedFrame::iterator * iterator,int * input_index,int frame_index,unsigned output_offset,const char * debug_hint_string,Address output_address_for_materialization)1927 void Deoptimizer::WriteTranslatedValueToOutput(
1928 TranslatedFrame::iterator* iterator, int* input_index, int frame_index,
1929 unsigned output_offset, const char* debug_hint_string,
1930 Address output_address_for_materialization) {
1931 Object* value = (*iterator)->GetRawValue();
1932
1933 WriteValueToOutput(value, *input_index, frame_index, output_offset,
1934 debug_hint_string);
1935
1936 if (value == isolate_->heap()->arguments_marker()) {
1937 Address output_address =
1938 reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
1939 output_offset;
1940 if (output_address_for_materialization == nullptr) {
1941 output_address_for_materialization = output_address;
1942 }
1943 values_to_materialize_.push_back(
1944 {output_address_for_materialization, *iterator});
1945 }
1946
1947 (*iterator)++;
1948 (*input_index)++;
1949 }
1950
1951
WriteValueToOutput(Object * value,int input_index,int frame_index,unsigned output_offset,const char * debug_hint_string)1952 void Deoptimizer::WriteValueToOutput(Object* value, int input_index,
1953 int frame_index, unsigned output_offset,
1954 const char* debug_hint_string) {
1955 output_[frame_index]->SetFrameSlot(output_offset,
1956 reinterpret_cast<intptr_t>(value));
1957
1958 if (trace_scope_ != nullptr) {
1959 DebugPrintOutputSlot(reinterpret_cast<intptr_t>(value), frame_index,
1960 output_offset, debug_hint_string);
1961 value->ShortPrint(trace_scope_->file());
1962 PrintF(trace_scope_->file(), " (input #%d)\n", input_index);
1963 }
1964 }
1965
1966
DebugPrintOutputSlot(intptr_t value,int frame_index,unsigned output_offset,const char * debug_hint_string)1967 void Deoptimizer::DebugPrintOutputSlot(intptr_t value, int frame_index,
1968 unsigned output_offset,
1969 const char* debug_hint_string) {
1970 if (trace_scope_ != nullptr) {
1971 Address output_address =
1972 reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
1973 output_offset;
1974 PrintF(trace_scope_->file(),
1975 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s",
1976 reinterpret_cast<intptr_t>(output_address), output_offset, value,
1977 debug_hint_string == nullptr ? "" : debug_hint_string);
1978 }
1979 }
1980
1981
ComputeInputFrameSize() const1982 unsigned Deoptimizer::ComputeInputFrameSize() const {
1983 unsigned fixed_size = ComputeJavascriptFixedSize(function_);
1984 // The fp-to-sp delta already takes the context, constant pool pointer and the
1985 // function into account so we have to avoid double counting them.
1986 unsigned result = fixed_size + fp_to_sp_delta_ -
1987 StandardFrameConstants::kFixedFrameSizeFromFp;
1988 if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
1989 unsigned stack_slots = compiled_code_->stack_slots();
1990 unsigned outgoing_size =
1991 ComputeOutgoingArgumentSize(compiled_code_, bailout_id_);
1992 CHECK(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size);
1993 }
1994 return result;
1995 }
1996
1997
ComputeJavascriptFixedSize(JSFunction * function) const1998 unsigned Deoptimizer::ComputeJavascriptFixedSize(JSFunction* function) const {
1999 // The fixed part of the frame consists of the return address, frame
2000 // pointer, function, context, and all the incoming arguments.
2001 return ComputeIncomingArgumentSize(function) +
2002 StandardFrameConstants::kFixedFrameSize;
2003 }
2004
2005
ComputeInterpretedFixedSize(JSFunction * function) const2006 unsigned Deoptimizer::ComputeInterpretedFixedSize(JSFunction* function) const {
2007 // The fixed part of the frame consists of the return address, frame
2008 // pointer, function, context, new.target, bytecode offset and all the
2009 // incoming arguments.
2010 return ComputeIncomingArgumentSize(function) +
2011 InterpreterFrameConstants::kFixedFrameSize;
2012 }
2013
2014
ComputeIncomingArgumentSize(JSFunction * function) const2015 unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const {
2016 // The incoming arguments is the values for formal parameters and
2017 // the receiver. Every slot contains a pointer.
2018 if (function->IsSmi()) {
2019 CHECK_EQ(Smi::cast(function), Smi::FromInt(StackFrame::STUB));
2020 return 0;
2021 }
2022 unsigned arguments =
2023 function->shared()->internal_formal_parameter_count() + 1;
2024 return arguments * kPointerSize;
2025 }
2026
2027
2028 // static
ComputeOutgoingArgumentSize(Code * code,unsigned bailout_id)2029 unsigned Deoptimizer::ComputeOutgoingArgumentSize(Code* code,
2030 unsigned bailout_id) {
2031 DeoptimizationInputData* data =
2032 DeoptimizationInputData::cast(code->deoptimization_data());
2033 unsigned height = data->ArgumentsStackHeight(bailout_id)->value();
2034 return height * kPointerSize;
2035 }
2036
2037
ComputeLiteral(int index) const2038 Object* Deoptimizer::ComputeLiteral(int index) const {
2039 DeoptimizationInputData* data =
2040 DeoptimizationInputData::cast(compiled_code_->deoptimization_data());
2041 FixedArray* literals = data->LiteralArray();
2042 return literals->get(index);
2043 }
2044
2045
void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                                   BailoutType type,
                                                   int max_entry_id) {
  // Lazily generates (or grows) the table of deoptimization entry stubs for
  // the given bailout 'type' so that it covers at least 'max_entry_id'.
  //
  // We cannot run this if the serializer is enabled because this will
  // cause us to emit relocation information for the external
  // references. This is fine because the deoptimizer's code section
  // isn't meant to be serialized at all.
  CHECK(type == EAGER || type == SOFT || type == LAZY);
  DeoptimizerData* data = isolate->deoptimizer_data();
  int entry_count = data->deopt_entry_code_entries_[type];
  // Fast path: the current table already covers the requested id.
  if (max_entry_id < entry_count) return;
  // Grow by doubling, starting from the configured minimum.
  entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
  while (max_entry_id >= entry_count) entry_count *= 2;
  CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries);

  // Assemble all entries into a scratch buffer...
  MacroAssembler masm(isolate, NULL, 16 * KB, CodeObjectRequired::kYes);
  masm.set_emit_debug_code(false);
  GenerateDeoptimizationEntries(&masm, entry_count, type);
  CodeDesc desc;
  masm.GetCode(&desc);
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  // ...then commit and copy them into the preallocated chunk for this
  // bailout type.
  MemoryChunk* chunk = data->deopt_entry_code_[type];
  CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
        desc.instr_size);
  if (!chunk->CommitArea(desc.instr_size)) {
    V8::FatalProcessOutOfMemory(
        "Deoptimizer::EnsureCodeForDeoptimizationEntry");
  }
  CopyBytes(chunk->area_start(), desc.buffer,
            static_cast<size_t>(desc.instr_size));
  // The chunk holds executable code: flush stale instruction cache lines.
  Assembler::FlushICache(isolate, chunk->area_start(), desc.instr_size);

  data->deopt_entry_code_entries_[type] = entry_count;
}
2081
2082
// Creates a frame description of 'frame_size' bytes with every register and
// slot pre-filled with the zap value so uninitialized reads are recognizable.
FrameDescription::FrameDescription(uint32_t frame_size,
                                   JSFunction* function)
    : frame_size_(frame_size),
      function_(function),
      top_(kZapUint32),
      pc_(kZapUint32),
      fp_(kZapUint32),
      context_(kZapUint32),
      constant_pool_(kZapUint32) {
  // Zap all the registers.
  for (int r = 0; r < Register::kNumRegisters; r++) {
    // TODO(jbramley): It isn't safe to use kZapUint32 here. If the register
    // isn't used before the next safepoint, the GC will try to scan it as a
    // tagged value. kZapUint32 looks like a valid tagged pointer, but it isn't.
    SetRegister(r, kZapUint32);
  }

  // Zap all the slots.
  for (unsigned o = 0; o < frame_size; o += kPointerSize) {
    SetFrameSlot(o, kZapUint32);
  }
}
2105
2106
ComputeFixedSize()2107 int FrameDescription::ComputeFixedSize() {
2108 if (type_ == StackFrame::INTERPRETED) {
2109 return InterpreterFrameConstants::kFixedFrameSize +
2110 (ComputeParametersCount() + 1) * kPointerSize;
2111 } else {
2112 return StandardFrameConstants::kFixedFrameSize +
2113 (ComputeParametersCount() + 1) * kPointerSize;
2114 }
2115 }
2116
2117
GetOffsetFromSlotIndex(int slot_index)2118 unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
2119 if (slot_index >= 0) {
2120 // Local or spill slots. Skip the fixed part of the frame
2121 // including all arguments.
2122 unsigned base = GetFrameSize() - ComputeFixedSize();
2123 return base - ((slot_index + 1) * kPointerSize);
2124 } else {
2125 // Incoming parameter.
2126 int arg_size = (ComputeParametersCount() + 1) * kPointerSize;
2127 unsigned base = GetFrameSize() - arg_size;
2128 return base - ((slot_index + 1) * kPointerSize);
2129 }
2130 }
2131
2132
ComputeParametersCount()2133 int FrameDescription::ComputeParametersCount() {
2134 switch (type_) {
2135 case StackFrame::JAVA_SCRIPT:
2136 return function_->shared()->internal_formal_parameter_count();
2137 case StackFrame::ARGUMENTS_ADAPTOR: {
2138 // Last slot contains number of incomming arguments as a smi.
2139 // Can't use GetExpression(0) because it would cause infinite recursion.
2140 return reinterpret_cast<Smi*>(*GetFrameSlotPointer(0))->value();
2141 }
2142 case StackFrame::STUB:
2143 return -1; // Minus receiver.
2144 default:
2145 FATAL("Unexpected stack frame type");
2146 return 0;
2147 }
2148 }
2149
2150
GetParameter(int index)2151 Object* FrameDescription::GetParameter(int index) {
2152 CHECK_GE(index, 0);
2153 CHECK_LT(index, ComputeParametersCount());
2154 // The slot indexes for incoming arguments are negative.
2155 unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount());
2156 return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
2157 }
2158
2159
GetExpressionCount()2160 unsigned FrameDescription::GetExpressionCount() {
2161 CHECK_EQ(StackFrame::JAVA_SCRIPT, type_);
2162 unsigned size = GetFrameSize() - ComputeFixedSize();
2163 return size / kPointerSize;
2164 }
2165
2166
GetExpression(int index)2167 Object* FrameDescription::GetExpression(int index) {
2168 DCHECK_EQ(StackFrame::JAVA_SCRIPT, type_);
2169 unsigned offset = GetOffsetFromSlotIndex(index);
2170 return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
2171 }
2172
2173
Add(int32_t value,Zone * zone)2174 void TranslationBuffer::Add(int32_t value, Zone* zone) {
2175 // This wouldn't handle kMinInt correctly if it ever encountered it.
2176 DCHECK(value != kMinInt);
2177 // Encode the sign bit in the least significant bit.
2178 bool is_negative = (value < 0);
2179 uint32_t bits = ((is_negative ? -value : value) << 1) |
2180 static_cast<int32_t>(is_negative);
2181 // Encode the individual bytes using the least significant bit of
2182 // each byte to indicate whether or not more bytes follow.
2183 do {
2184 uint32_t next = bits >> 7;
2185 contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone);
2186 bits = next;
2187 } while (bits != 0);
2188 }
2189
2190
Next()2191 int32_t TranslationIterator::Next() {
2192 // Run through the bytes until we reach one with a least significant
2193 // bit of zero (marks the end).
2194 uint32_t bits = 0;
2195 for (int i = 0; true; i += 7) {
2196 DCHECK(HasNext());
2197 uint8_t next = buffer_->get(index_++);
2198 bits |= (next >> 1) << i;
2199 if ((next & 1) == 0) break;
2200 }
2201 // The bits encode the sign in the least significant bit.
2202 bool is_negative = (bits & 1) == 1;
2203 int32_t result = bits >> 1;
2204 return is_negative ? -result : result;
2205 }
2206
2207
CreateByteArray(Factory * factory)2208 Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
2209 int length = contents_.length();
2210 Handle<ByteArray> result = factory->NewByteArray(length, TENURED);
2211 MemCopy(result->GetDataStartAddress(), contents_.ToVector().start(), length);
2212 return result;
2213 }
2214
2215
// Emits a CONSTRUCT_STUB_FRAME marker with the constructor's literal id and
// the frame height.
void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
  buffer_->Add(CONSTRUCT_STUB_FRAME, zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}


// Emits a GETTER_STUB_FRAME marker; the only operand is the literal id.
void Translation::BeginGetterStubFrame(int literal_id) {
  buffer_->Add(GETTER_STUB_FRAME, zone());
  buffer_->Add(literal_id, zone());
}


// Emits a SETTER_STUB_FRAME marker; the only operand is the literal id.
void Translation::BeginSetterStubFrame(int literal_id) {
  buffer_->Add(SETTER_STUB_FRAME, zone());
  buffer_->Add(literal_id, zone());
}


// Emits an ARGUMENTS_ADAPTOR_FRAME marker with the literal id and the
// frame height.
void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
  buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}


// Emits a JS_FRAME marker with the AST node id of the bailout point, the
// function's literal id and the frame height.
void Translation::BeginJSFrame(BailoutId node_id,
                               int literal_id,
                               unsigned height) {
  buffer_->Add(JS_FRAME, zone());
  buffer_->Add(node_id.ToInt(), zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}


// Emits an INTERPRETED_FRAME marker with the bytecode offset of the bailout
// point, the function's literal id and the frame height.
void Translation::BeginInterpretedFrame(BailoutId bytecode_offset,
                                        int literal_id, unsigned height) {
  buffer_->Add(INTERPRETED_FRAME, zone());
  buffer_->Add(bytecode_offset.ToInt(), zone());
  buffer_->Add(literal_id, zone());
  buffer_->Add(height, zone());
}


// Emits a COMPILED_STUB_FRAME marker; the only operand is the frame height.
void Translation::BeginCompiledStubFrame(int height) {
  buffer_->Add(COMPILED_STUB_FRAME, zone());
  buffer_->Add(height, zone());
}
2265
2266
// Emits an ARGUMENTS_OBJECT marker; 'args_length' values follow in the
// translation.
void Translation::BeginArgumentsObject(int args_length) {
  buffer_->Add(ARGUMENTS_OBJECT, zone());
  buffer_->Add(args_length, zone());
}


// Emits a CAPTURED_OBJECT marker; 'length' field values follow in the
// translation.
void Translation::BeginCapturedObject(int length) {
  buffer_->Add(CAPTURED_OBJECT, zone());
  buffer_->Add(length, zone());
}


// Emits a DUPLICATED_OBJECT back-reference to the object already described
// at 'object_index' in this translation.
void Translation::DuplicateObject(int object_index) {
  buffer_->Add(DUPLICATED_OBJECT, zone());
  buffer_->Add(object_index, zone());
}
2283
2284
// Emits a REGISTER entry: the current value lives in register 'reg'.
void Translation::StoreRegister(Register reg) {
  buffer_->Add(REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


// Emits an INT32_REGISTER entry for the value in 'reg'.
void Translation::StoreInt32Register(Register reg) {
  buffer_->Add(INT32_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


// Emits a UINT32_REGISTER entry for the value in 'reg'.
void Translation::StoreUint32Register(Register reg) {
  buffer_->Add(UINT32_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


// Emits a BOOL_REGISTER entry for the value in 'reg'.
void Translation::StoreBoolRegister(Register reg) {
  buffer_->Add(BOOL_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


// Emits a DOUBLE_REGISTER entry for the value in 'reg'.
void Translation::StoreDoubleRegister(DoubleRegister reg) {
  buffer_->Add(DOUBLE_REGISTER, zone());
  buffer_->Add(reg.code(), zone());
}


// Emits a STACK_SLOT entry: the current value lives in stack slot 'index'.
void Translation::StoreStackSlot(int index) {
  buffer_->Add(STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


// Emits an INT32_STACK_SLOT entry for stack slot 'index'.
void Translation::StoreInt32StackSlot(int index) {
  buffer_->Add(INT32_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


// Emits a UINT32_STACK_SLOT entry for stack slot 'index'.
void Translation::StoreUint32StackSlot(int index) {
  buffer_->Add(UINT32_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


// Emits a BOOL_STACK_SLOT entry for stack slot 'index'.
void Translation::StoreBoolStackSlot(int index) {
  buffer_->Add(BOOL_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


// Emits a DOUBLE_STACK_SLOT entry for stack slot 'index'.
void Translation::StoreDoubleStackSlot(int index) {
  buffer_->Add(DOUBLE_STACK_SLOT, zone());
  buffer_->Add(index, zone());
}


// Emits a LITERAL entry referencing 'literal_id' in the deoptimization
// literal array.
void Translation::StoreLiteral(int literal_id) {
  buffer_->Add(LITERAL, zone());
  buffer_->Add(literal_id, zone());
}
2349
2350
// Emits an ARGUMENTS_OBJECT entry with a known/unknown flag and the
// arguments' index and length.
void Translation::StoreArgumentsObject(bool args_known,
                                       int args_index,
                                       int args_length) {
  buffer_->Add(ARGUMENTS_OBJECT, zone());
  buffer_->Add(args_known, zone());
  buffer_->Add(args_index, zone());
  buffer_->Add(args_length, zone());
}


// Emits a JS_FRAME_FUNCTION entry, which takes no operands.
void Translation::StoreJSFrameFunction() {
  buffer_->Add(JS_FRAME_FUNCTION, zone());
}
2364
2365
// Returns how many operand values follow 'opcode' in the translation byte
// stream. Keep in sync with the emitters above.
int Translation::NumberOfOperandsFor(Opcode opcode) {
  switch (opcode) {
    case JS_FRAME_FUNCTION:
      return 0;
    case GETTER_STUB_FRAME:
    case SETTER_STUB_FRAME:
    case DUPLICATED_OBJECT:
    case ARGUMENTS_OBJECT:
    case CAPTURED_OBJECT:
    case REGISTER:
    case INT32_REGISTER:
    case UINT32_REGISTER:
    case BOOL_REGISTER:
    case DOUBLE_REGISTER:
    case STACK_SLOT:
    case INT32_STACK_SLOT:
    case UINT32_STACK_SLOT:
    case BOOL_STACK_SLOT:
    case DOUBLE_STACK_SLOT:
    case LITERAL:
    case COMPILED_STUB_FRAME:
      return 1;
    case BEGIN:
    case ARGUMENTS_ADAPTOR_FRAME:
    case CONSTRUCT_STUB_FRAME:
      return 2;
    case JS_FRAME:
    case INTERPRETED_FRAME:
      return 3;
  }
  FATAL("Unexpected translation type");
  return -1;
}
2399
2400
2401 #if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
2402
// Maps a translation opcode to its name for debug printing. The cases are
// generated from TRANSLATION_OPCODE_LIST so they stay in sync with the enum.
const char* Translation::StringFor(Opcode opcode) {
#define TRANSLATION_OPCODE_CASE(item) case item: return #item;
  switch (opcode) {
    TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE)
  }
#undef TRANSLATION_OPCODE_CASE
  UNREACHABLE();
  return "";
}
2412
2413 #endif
2414
2415
// Returns the materialized-object array recorded for the frame identified
// by |fp|, or a null handle if no entry exists for that frame.
Handle<FixedArray> MaterializedObjectStore::Get(Address fp) {
  int index = StackIdToIndex(fp);
  if (index == -1) {
    return Handle<FixedArray>::null();
  }
  Handle<FixedArray> array = GetStackEntries();
  CHECK_GT(array->length(), index);
  return Handle<FixedArray>::cast(Handle<Object>(array->get(index), isolate()));
}
2425
2426
// Associates |materialized_objects| with the frame identified by |fp|,
// registering the frame pointer first if it has not been seen before.
void MaterializedObjectStore::Set(Address fp,
                                  Handle<FixedArray> materialized_objects) {
  int index = StackIdToIndex(fp);
  if (index == -1) {
    // New frame: append its fp and use the next free slot.
    index = frame_fps_.length();
    frame_fps_.Add(fp);
  }

  Handle<FixedArray> array = EnsureStackEntries(index + 1);
  array->set(index, *materialized_objects);
}
2438
2439
// Removes the entry for |fp|, compacting both the fp list and the heap-side
// array of materialized objects. Returns false if fp was not registered.
bool MaterializedObjectStore::Remove(Address fp) {
  int index = StackIdToIndex(fp);
  if (index == -1) {
    return false;
  }
  CHECK_GE(index, 0);

  frame_fps_.Remove(index);
  FixedArray* array = isolate()->heap()->materialized_objects();
  CHECK_LT(index, array->length());
  // Shift the remaining entries down one slot; frame_fps_ has already been
  // shrunk, so its new length is the count of surviving entries.
  for (int i = index; i < frame_fps_.length(); i++) {
    array->set(i, array->get(i + 1));
  }
  // Clear the now-unused tail slot so it does not keep objects alive.
  array->set(frame_fps_.length(), isolate()->heap()->undefined_value());
  return true;
}
2456
2457
// Linear search for |fp| among the registered frame pointers; returns its
// index, or -1 if the frame is unknown.
int MaterializedObjectStore::StackIdToIndex(Address fp) {
  for (int i = 0; i < frame_fps_.length(); i++) {
    if (frame_fps_[i] == fp) {
      return i;
    }
  }
  return -1;
}
2466
2467
// Returns a handle to the heap root array that holds the per-frame
// materialized-object arrays.
Handle<FixedArray> MaterializedObjectStore::GetStackEntries() {
  return Handle<FixedArray>(isolate()->heap()->materialized_objects());
}
2471
2472
// Grows the materialized-objects root array to at least |length| slots.
// Growth is amortized: at least 10 slots, and at least double the current
// capacity. Newly exposed slots up to |length| are filled with undefined.
Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) {
  Handle<FixedArray> array = GetStackEntries();
  if (array->length() >= length) {
    return array;
  }

  int new_length = length > 10 ? length : 10;
  if (new_length < 2 * array->length()) {
    new_length = 2 * array->length();
  }

  Handle<FixedArray> new_array =
      isolate()->factory()->NewFixedArray(new_length, TENURED);
  for (int i = 0; i < array->length(); i++) {
    new_array->set(i, array->get(i));
  }
  for (int i = array->length(); i < length; i++) {
    new_array->set(i, isolate()->heap()->undefined_value());
  }
  // Install the grown array as the new heap root.
  isolate()->heap()->SetRootMaterializedObjects(*new_array);
  return new_array;
}
2495
2496
// Snapshots the function, context, expression stack and parameters of one
// deoptimizer output frame so the debugger can inspect it after the
// deoptimizer itself is gone.
DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
                                           int frame_index,
                                           bool has_arguments_adaptor,
                                           bool has_construct_stub) {
  FrameDescription* output_frame = deoptimizer->output_[frame_index];
  function_ = output_frame->GetFunction();
  context_ = reinterpret_cast<Object*>(output_frame->GetContext());
  has_construct_stub_ = has_construct_stub;
  expression_count_ = output_frame->GetExpressionCount();
  expression_stack_ = new Object* [expression_count_];
  // Get the source position using the unoptimized code.
  Address pc = reinterpret_cast<Address>(output_frame->GetPc());
  Code* code = Code::cast(deoptimizer->isolate()->FindCodeObject(pc));
  source_position_ = code->SourcePosition(pc);

  for (int i = 0; i < expression_count_; i++) {
    Object* value = output_frame->GetExpression(i);
    // Replace materialization markers with the undefined value.
    if (value == deoptimizer->isolate()->heap()->arguments_marker()) {
      value = deoptimizer->isolate()->heap()->undefined_value();
    }
    SetExpression(i, value);
  }

  // If an arguments adaptor sits below this frame, the actual parameters
  // live in the adaptor frame, so read them from there instead.
  if (has_arguments_adaptor) {
    output_frame = deoptimizer->output_[frame_index - 1];
    CHECK_EQ(output_frame->GetFrameType(), StackFrame::ARGUMENTS_ADAPTOR);
  }

  parameters_count_ = output_frame->ComputeParametersCount();
  parameters_ = new Object* [parameters_count_];
  for (int i = 0; i < parameters_count_; i++) {
    Object* value = output_frame->GetParameter(i);
    // Replace materialization markers with the undefined value.
    if (value == deoptimizer->isolate()->heap()->arguments_marker()) {
      value = deoptimizer->isolate()->heap()->undefined_value();
    }
    SetParameter(i, value);
  }
}
2537
2538
// Releases the snapshot arrays allocated in the constructor.
DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
  delete[] expression_stack_;
  delete[] parameters_;
}
2543
2544
// Visits all heap pointers held by this snapshot so the GC can keep them
// alive and update them when objects move.
void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
  v->VisitPointer(bit_cast<Object**>(&function_));
  v->VisitPointer(&context_);
  v->VisitPointers(parameters_, parameters_ + parameters_count_);
  v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
}
2551
2552
// Returns the human-readable message for a deopt reason. The text table is
// generated from DEOPT_MESSAGES_LIST so it stays in sync with the enum.
const char* Deoptimizer::GetDeoptReason(DeoptReason deopt_reason) {
  DCHECK(deopt_reason < kLastDeoptReason);
#define DEOPT_MESSAGES_TEXTS(C, T) T,
  static const char* deopt_messages_[] = {
      DEOPT_MESSAGES_LIST(DEOPT_MESSAGES_TEXTS)};
#undef DEOPT_MESSAGES_TEXTS
  return deopt_messages_[deopt_reason];
}
2561
2562
// Scans the code object's relocation info up to |pc| and returns the last
// source position and deopt reason recorded before that address.
Deoptimizer::DeoptInfo Deoptimizer::GetDeoptInfo(Code* code, Address pc) {
  SourcePosition last_position = SourcePosition::Unknown();
  Deoptimizer::DeoptReason last_reason = Deoptimizer::kNoReason;
  int mask = RelocInfo::ModeMask(RelocInfo::DEOPT_REASON) |
             RelocInfo::ModeMask(RelocInfo::POSITION);
  for (RelocIterator it(code, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    // Reloc entries are visited in address order, so the first entry at or
    // past |pc| means we have seen everything that applies to it.
    if (info->pc() >= pc) return DeoptInfo(last_position, NULL, last_reason);
    if (info->rmode() == RelocInfo::POSITION) {
      // A raw position of 0 encodes "unknown".
      int raw_position = static_cast<int>(info->data());
      last_position = raw_position ? SourcePosition::FromRaw(raw_position)
                                   : SourcePosition::Unknown();
    } else if (info->rmode() == RelocInfo::DEOPT_REASON) {
      last_reason = static_cast<Deoptimizer::DeoptReason>(info->data());
    }
  }
  // No entry at or past |pc| was found.
  return DeoptInfo(SourcePosition::Unknown(), NULL, Deoptimizer::kNoReason);
}
2581
2582
2583 // static
// Creates a value describing a not-yet-materialized arguments object with
// |length| elements, registered as object #|object_index|.
// static
TranslatedValue TranslatedValue::NewArgumentsObject(TranslatedState* container,
                                                    int length,
                                                    int object_index) {
  TranslatedValue slot(container, kArgumentsObject);
  slot.materialization_info_ = {object_index, length};
  return slot;
}
2591
2592
2593 // static
// Creates a value describing a captured object with |length| fields whose
// materialization is deferred; registered as object #|object_index|.
// static
TranslatedValue TranslatedValue::NewDeferredObject(TranslatedState* container,
                                                   int length,
                                                   int object_index) {
  TranslatedValue slot(container, kCapturedObject);
  slot.materialization_info_ = {object_index, length};
  return slot;
}
2601
2602
2603 // static
// Creates a value referencing a previously described object by id; it has
// no length of its own (-1).
// static
TranslatedValue TranslatedValue::NewDuplicateObject(TranslatedState* container,
                                                    int id) {
  TranslatedValue slot(container, kDuplicatedObject);
  slot.materialization_info_ = {id, -1};
  return slot;
}
2610
2611
2612 // static
// Creates a value holding a raw double.
// static
TranslatedValue TranslatedValue::NewDouble(TranslatedState* container,
                                           double value) {
  TranslatedValue slot(container, kDouble);
  slot.double_value_ = value;
  return slot;
}
2619
2620
2621 // static
// Creates a value holding a raw signed 32-bit integer.
// static
TranslatedValue TranslatedValue::NewInt32(TranslatedState* container,
                                          int32_t value) {
  TranslatedValue slot(container, kInt32);
  slot.int32_value_ = value;
  return slot;
}
2628
2629
2630 // static
// Creates a value holding a raw unsigned 32-bit integer.
// static
TranslatedValue TranslatedValue::NewUInt32(TranslatedState* container,
                                           uint32_t value) {
  TranslatedValue slot(container, kUInt32);
  slot.uint32_value_ = value;
  return slot;
}
2637
2638
2639 // static
// Creates a value holding a boolean encoded as a bit (stored in the uint32
// payload; GetRawValue checks it is 0 or 1).
// static
TranslatedValue TranslatedValue::NewBool(TranslatedState* container,
                                         uint32_t value) {
  TranslatedValue slot(container, kBoolBit);
  slot.uint32_value_ = value;
  return slot;
}
2646
2647
2648 // static
// Creates a value holding a raw tagged pointer. The pointer is unhandlified
// and thus unsafe across GC until Handlify() is called.
// static
TranslatedValue TranslatedValue::NewTagged(TranslatedState* container,
                                           Object* literal) {
  TranslatedValue slot(container, kTagged);
  slot.raw_literal_ = literal;
  return slot;
}
2655
2656
2657 // static
// Creates a placeholder value used when the real value cannot be decoded
// (e.g. register contents are unavailable).
// static
TranslatedValue TranslatedValue::NewInvalid(TranslatedState* container) {
  return TranslatedValue(container, kInvalid);
}
2661
2662
// The isolate is provided by the owning TranslatedState.
Isolate* TranslatedValue::isolate() const { return container_->isolate(); }
2664
2665
// Accessor for the raw tagged pointer; only valid for kTagged values.
Object* TranslatedValue::raw_literal() const {
  DCHECK_EQ(kTagged, kind());
  return raw_literal_;
}
2670
2671
// Accessor for the int32 payload; only valid for kInt32 values.
int32_t TranslatedValue::int32_value() const {
  DCHECK_EQ(kInt32, kind());
  return int32_value_;
}
2676
2677
// Accessor for the uint32 payload; kBoolBit shares this storage.
uint32_t TranslatedValue::uint32_value() const {
  DCHECK(kind() == kUInt32 || kind() == kBoolBit);
  return uint32_value_;
}
2682
2683
// Accessor for the double payload; only valid for kDouble values.
double TranslatedValue::double_value() const {
  DCHECK_EQ(kDouble, kind());
  return double_value_;
}
2688
2689
// Number of fields/arguments of a deferred object; duplicated objects store
// -1 here and must not query it.
int TranslatedValue::object_length() const {
  DCHECK(kind() == kArgumentsObject || kind() == kCapturedObject);
  return materialization_info_.length_;
}
2694
2695
// Id of the materialized object this value describes or refers to.
int TranslatedValue::object_index() const {
  DCHECK(kind() == kArgumentsObject || kind() == kCapturedObject ||
         kind() == kDuplicatedObject);
  return materialization_info_.id_;
}
2701
2702
// Returns the value as a raw Object* without allocating, if possible.
// Values that would require allocation (non-Smi numbers, unmaterialized
// objects) are signaled by returning the arguments-marker sentinel.
Object* TranslatedValue::GetRawValue() const {
  // If we have a value, return it.
  Handle<Object> result_handle;
  if (value_.ToHandle(&result_handle)) {
    return *result_handle;
  }

  // Otherwise, do a best effort to get the value without allocation.
  switch (kind()) {
    case kTagged:
      return raw_literal();

    case kInt32: {
      // Only Smi-representable ints can be returned without allocating.
      bool is_smi = Smi::IsValid(int32_value());
      if (is_smi) {
        return Smi::FromInt(int32_value());
      }
      break;
    }

    case kUInt32: {
      // Unsigned values above Smi::kMaxValue would need a heap number.
      bool is_smi = (uint32_value() <= static_cast<uintptr_t>(Smi::kMaxValue));
      if (is_smi) {
        return Smi::FromInt(static_cast<int32_t>(uint32_value()));
      }
      break;
    }

    case kBoolBit: {
      if (uint32_value() == 0) {
        return isolate()->heap()->false_value();
      } else {
        CHECK_EQ(1U, uint32_value());
        return isolate()->heap()->true_value();
      }
    }

    default:
      break;
  }

  // If we could not get the value without allocation, return the arguments
  // marker.
  return isolate()->heap()->arguments_marker();
}
2748
2749
// Returns the value as a handle, materializing (allocating) it first if
// needed. May therefore trigger GC; safe only after Handlify().
Handle<Object> TranslatedValue::GetValue() {
  Handle<Object> result;
  // If we already have a value, then get it.
  if (value_.ToHandle(&result)) return result;

  // Otherwise we have to materialize.
  switch (kind()) {
    case TranslatedValue::kTagged:
    case TranslatedValue::kInt32:
    case TranslatedValue::kUInt32:
    case TranslatedValue::kBoolBit:
    case TranslatedValue::kDouble: {
      // Simple scalar values are materialized in place.
      MaterializeSimple();
      return value_.ToHandleChecked();
    }

    case TranslatedValue::kArgumentsObject:
    case TranslatedValue::kCapturedObject:
    case TranslatedValue::kDuplicatedObject:
      // Object materialization is coordinated by the owning state, since
      // objects can reference each other.
      return container_->MaterializeObjectAt(object_index());

    case TranslatedValue::kInvalid:
      FATAL("unexpected case");
      return Handle<Object>::null();
  }

  FATAL("internal error: value missing");
  return Handle<Object>::null();
}
2779
2780
// Materializes a scalar value into value_, allocating a heap number only
// when the value cannot be represented without allocation.
void TranslatedValue::MaterializeSimple() {
  // If we already have materialized, return.
  if (!value_.is_null()) return;

  Object* raw_value = GetRawValue();
  if (raw_value != isolate()->heap()->arguments_marker()) {
    // We can get the value without allocation, just return it here.
    value_ = Handle<Object>(raw_value, isolate());
    return;
  }

  switch (kind()) {
    case kInt32: {
      value_ = Handle<Object>(isolate()->factory()->NewNumber(int32_value()));
      return;
    }

    case kUInt32:
      value_ = Handle<Object>(isolate()->factory()->NewNumber(uint32_value()));
      return;

    case kDouble:
      value_ = Handle<Object>(isolate()->factory()->NewNumber(double_value()));
      return;

    // These kinds either never reach this point (kTagged and kBoolBit are
    // always handled by GetRawValue above) or are not simple values.
    case kCapturedObject:
    case kDuplicatedObject:
    case kArgumentsObject:
    case kInvalid:
    case kTagged:
    case kBoolBit:
      FATAL("internal error: unexpected materialization.");
      break;
  }
}
2816
2817
IsMaterializedObject() const2818 bool TranslatedValue::IsMaterializedObject() const {
2819 switch (kind()) {
2820 case kCapturedObject:
2821 case kDuplicatedObject:
2822 case kArgumentsObject:
2823 return true;
2824 default:
2825 return false;
2826 }
2827 }
2828
2829
GetChildrenCount() const2830 int TranslatedValue::GetChildrenCount() const {
2831 if (kind() == kCapturedObject || kind() == kArgumentsObject) {
2832 return object_length();
2833 } else {
2834 return 0;
2835 }
2836 }
2837
2838
// Reads a 32-bit unsigned value from the stack slot at fp + slot_offset.
// On big-endian 64-bit targets the least-significant 32 bits of the word
// sit at the higher address, hence the kIntSize adjustment.
uint32_t TranslatedState::GetUInt32Slot(Address fp, int slot_offset) {
  Address address = fp + slot_offset;
#if V8_TARGET_BIG_ENDIAN && V8_HOST_ARCH_64_BIT
  return Memory::uint32_at(address + kIntSize);
#else
  return Memory::uint32_at(address);
#endif
}
2847
2848
// Converts the raw tagged pointer into a handle so the value survives GC;
// only kTagged values carry a raw literal. The raw pointer is cleared to
// avoid accidental use of a stale pointer after GC.
void TranslatedValue::Handlify() {
  if (kind() == kTagged) {
    value_ = Handle<Object>(raw_literal(), isolate());
    raw_literal_ = nullptr;
  }
}
2855
2856
// Creates a frame descriptor for a full-codegen JS function frame; node_id
// identifies the AST node to resume at.
TranslatedFrame TranslatedFrame::JSFrame(BailoutId node_id,
                                         SharedFunctionInfo* shared_info,
                                         int height) {
  TranslatedFrame frame(kFunction, shared_info->GetIsolate(), shared_info,
                        height);
  frame.node_id_ = node_id;
  return frame;
}
2865
2866
// Creates a frame descriptor for an interpreted (bytecode) frame; the
// bytecode offset is stored in node_id_.
TranslatedFrame TranslatedFrame::InterpretedFrame(
    BailoutId bytecode_offset, SharedFunctionInfo* shared_info, int height) {
  TranslatedFrame frame(kInterpretedFunction, shared_info->GetIsolate(),
                        shared_info, height);
  frame.node_id_ = bytecode_offset;
  return frame;
}
2874
2875
AccessorFrame(Kind kind,SharedFunctionInfo * shared_info)2876 TranslatedFrame TranslatedFrame::AccessorFrame(
2877 Kind kind, SharedFunctionInfo* shared_info) {
2878 DCHECK(kind == kSetter || kind == kGetter);
2879 return TranslatedFrame(kind, shared_info->GetIsolate(), shared_info);
2880 }
2881
2882
// Creates a frame descriptor for an arguments adaptor frame.
TranslatedFrame TranslatedFrame::ArgumentsAdaptorFrame(
    SharedFunctionInfo* shared_info, int height) {
  return TranslatedFrame(kArgumentsAdaptor, shared_info->GetIsolate(),
                         shared_info, height);
}
2888
2889
// Creates a frame descriptor for a construct stub frame.
TranslatedFrame TranslatedFrame::ConstructStubFrame(
    SharedFunctionInfo* shared_info, int height) {
  return TranslatedFrame(kConstructStub, shared_info->GetIsolate(), shared_info,
                         height);
}
2895
2896
// Returns the number of translated values this frame carries in the
// translation stream, which depends on the frame kind and its height.
int TranslatedFrame::GetValueCount() {
  switch (kind()) {
    case kFunction: {
      // internal_formal_parameter_count() excludes the receiver, hence +1.
      int parameter_count =
          raw_shared_info_->internal_formal_parameter_count() + 1;
      // + 1 for function.
      return height_ + parameter_count + 1;
    }

    case kInterpretedFunction: {
      int parameter_count =
          raw_shared_info_->internal_formal_parameter_count() + 1;
      // + 3 for function, context and accumulator.
      return height_ + parameter_count + 3;
    }

    case kGetter:
      return 2;  // Function and receiver.

    case kSetter:
      return 3;  // Function, receiver and the value to set.

    case kArgumentsAdaptor:
    case kConstructStub:
      // + 1 for the function.
      return 1 + height_;

    case kCompiledStub:
      return height_;

    case kInvalid:
      UNREACHABLE();
      break;
  }
  UNREACHABLE();
  return -1;
}
2933
2934
// Converts all raw pointers held by this frame (shared-function-info and
// every value) into handles so the frame survives GC.
void TranslatedFrame::Handlify() {
  if (raw_shared_info_ != nullptr) {
    shared_info_ = Handle<SharedFunctionInfo>(raw_shared_info_);
    raw_shared_info_ = nullptr;
  }
  for (auto& value : values_) {
    value.Handlify();
  }
}
2944
2945
CreateNextTranslatedFrame(TranslationIterator * iterator,FixedArray * literal_array,Address fp,FILE * trace_file)2946 TranslatedFrame TranslatedState::CreateNextTranslatedFrame(
2947 TranslationIterator* iterator, FixedArray* literal_array, Address fp,
2948 FILE* trace_file) {
2949 Translation::Opcode opcode =
2950 static_cast<Translation::Opcode>(iterator->Next());
2951 switch (opcode) {
2952 case Translation::JS_FRAME: {
2953 BailoutId node_id = BailoutId(iterator->Next());
2954 SharedFunctionInfo* shared_info =
2955 SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
2956 int height = iterator->Next();
2957 if (trace_file != nullptr) {
2958 base::SmartArrayPointer<char> name =
2959 shared_info->DebugName()->ToCString();
2960 PrintF(trace_file, " reading input frame %s", name.get());
2961 int arg_count = shared_info->internal_formal_parameter_count() + 1;
2962 PrintF(trace_file, " => node=%d, args=%d, height=%d; inputs:\n",
2963 node_id.ToInt(), arg_count, height);
2964 }
2965 return TranslatedFrame::JSFrame(node_id, shared_info, height);
2966 }
2967
2968 case Translation::INTERPRETED_FRAME: {
2969 BailoutId bytecode_offset = BailoutId(iterator->Next());
2970 SharedFunctionInfo* shared_info =
2971 SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
2972 int height = iterator->Next();
2973 if (trace_file != nullptr) {
2974 base::SmartArrayPointer<char> name =
2975 shared_info->DebugName()->ToCString();
2976 PrintF(trace_file, " reading input frame %s", name.get());
2977 int arg_count = shared_info->internal_formal_parameter_count() + 1;
2978 PrintF(trace_file,
2979 " => bytecode_offset=%d, args=%d, height=%d; inputs:\n",
2980 bytecode_offset.ToInt(), arg_count, height);
2981 }
2982 return TranslatedFrame::InterpretedFrame(bytecode_offset, shared_info,
2983 height);
2984 }
2985
2986 case Translation::ARGUMENTS_ADAPTOR_FRAME: {
2987 SharedFunctionInfo* shared_info =
2988 SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
2989 int height = iterator->Next();
2990 if (trace_file != nullptr) {
2991 base::SmartArrayPointer<char> name =
2992 shared_info->DebugName()->ToCString();
2993 PrintF(trace_file, " reading arguments adaptor frame %s", name.get());
2994 PrintF(trace_file, " => height=%d; inputs:\n", height);
2995 }
2996 return TranslatedFrame::ArgumentsAdaptorFrame(shared_info, height);
2997 }
2998
2999 case Translation::CONSTRUCT_STUB_FRAME: {
3000 SharedFunctionInfo* shared_info =
3001 SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
3002 int height = iterator->Next();
3003 if (trace_file != nullptr) {
3004 base::SmartArrayPointer<char> name =
3005 shared_info->DebugName()->ToCString();
3006 PrintF(trace_file, " reading construct stub frame %s", name.get());
3007 PrintF(trace_file, " => height=%d; inputs:\n", height);
3008 }
3009 return TranslatedFrame::ConstructStubFrame(shared_info, height);
3010 }
3011
3012 case Translation::GETTER_STUB_FRAME: {
3013 SharedFunctionInfo* shared_info =
3014 SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
3015 if (trace_file != nullptr) {
3016 base::SmartArrayPointer<char> name =
3017 shared_info->DebugName()->ToCString();
3018 PrintF(trace_file, " reading getter frame %s; inputs:\n", name.get());
3019 }
3020 return TranslatedFrame::AccessorFrame(TranslatedFrame::kGetter,
3021 shared_info);
3022 }
3023
3024 case Translation::SETTER_STUB_FRAME: {
3025 SharedFunctionInfo* shared_info =
3026 SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
3027 if (trace_file != nullptr) {
3028 base::SmartArrayPointer<char> name =
3029 shared_info->DebugName()->ToCString();
3030 PrintF(trace_file, " reading setter frame %s; inputs:\n", name.get());
3031 }
3032 return TranslatedFrame::AccessorFrame(TranslatedFrame::kSetter,
3033 shared_info);
3034 }
3035
3036 case Translation::COMPILED_STUB_FRAME: {
3037 int height = iterator->Next();
3038 if (trace_file != nullptr) {
3039 PrintF(trace_file,
3040 " reading compiler stub frame => height=%d; inputs:\n", height);
3041 }
3042 return TranslatedFrame::CompiledStubFrame(height,
3043 literal_array->GetIsolate());
3044 }
3045
3046 case Translation::BEGIN:
3047 case Translation::DUPLICATED_OBJECT:
3048 case Translation::ARGUMENTS_OBJECT:
3049 case Translation::CAPTURED_OBJECT:
3050 case Translation::REGISTER:
3051 case Translation::INT32_REGISTER:
3052 case Translation::UINT32_REGISTER:
3053 case Translation::BOOL_REGISTER:
3054 case Translation::DOUBLE_REGISTER:
3055 case Translation::STACK_SLOT:
3056 case Translation::INT32_STACK_SLOT:
3057 case Translation::UINT32_STACK_SLOT:
3058 case Translation::BOOL_STACK_SLOT:
3059 case Translation::DOUBLE_STACK_SLOT:
3060 case Translation::LITERAL:
3061 case Translation::JS_FRAME_FUNCTION:
3062 break;
3063 }
3064 FATAL("We should never get here - unexpected deopt info.");
3065 return TranslatedFrame::InvalidFrame();
3066 }
3067
3068
3069 // static
AdvanceIterator(std::deque<TranslatedValue>::iterator * iter)3070 void TranslatedFrame::AdvanceIterator(
3071 std::deque<TranslatedValue>::iterator* iter) {
3072 int values_to_skip = 1;
3073 while (values_to_skip > 0) {
3074 // Consume the current element.
3075 values_to_skip--;
3076 // Add all the children.
3077 values_to_skip += (*iter)->GetChildrenCount();
3078
3079 (*iter)++;
3080 }
3081 }
3082
3083
// We can't intermix stack decoding and allocations because
// deoptimization infrastructure is not GC safe.
// Thus we build a temporary structure in malloced space.
CreateNextTranslatedValue(int frame_index,int value_index,TranslationIterator * iterator,FixedArray * literal_array,Address fp,RegisterValues * registers,FILE * trace_file)3087 TranslatedValue TranslatedState::CreateNextTranslatedValue(
3088 int frame_index, int value_index, TranslationIterator* iterator,
3089 FixedArray* literal_array, Address fp, RegisterValues* registers,
3090 FILE* trace_file) {
3091 disasm::NameConverter converter;
3092
3093 Translation::Opcode opcode =
3094 static_cast<Translation::Opcode>(iterator->Next());
3095 switch (opcode) {
3096 case Translation::BEGIN:
3097 case Translation::JS_FRAME:
3098 case Translation::INTERPRETED_FRAME:
3099 case Translation::ARGUMENTS_ADAPTOR_FRAME:
3100 case Translation::CONSTRUCT_STUB_FRAME:
3101 case Translation::GETTER_STUB_FRAME:
3102 case Translation::SETTER_STUB_FRAME:
3103 case Translation::COMPILED_STUB_FRAME:
3104 // Peeled off before getting here.
3105 break;
3106
3107 case Translation::DUPLICATED_OBJECT: {
3108 int object_id = iterator->Next();
3109 if (trace_file != nullptr) {
3110 PrintF(trace_file, "duplicated object #%d", object_id);
3111 }
3112 object_positions_.push_back(object_positions_[object_id]);
3113 return TranslatedValue::NewDuplicateObject(this, object_id);
3114 }
3115
3116 case Translation::ARGUMENTS_OBJECT: {
3117 int arg_count = iterator->Next();
3118 int object_index = static_cast<int>(object_positions_.size());
3119 if (trace_file != nullptr) {
3120 PrintF(trace_file, "argumets object #%d (length = %d)", object_index,
3121 arg_count);
3122 }
3123 object_positions_.push_back({frame_index, value_index});
3124 return TranslatedValue::NewArgumentsObject(this, arg_count, object_index);
3125 }
3126
3127 case Translation::CAPTURED_OBJECT: {
3128 int field_count = iterator->Next();
3129 int object_index = static_cast<int>(object_positions_.size());
3130 if (trace_file != nullptr) {
3131 PrintF(trace_file, "captured object #%d (length = %d)", object_index,
3132 field_count);
3133 }
3134 object_positions_.push_back({frame_index, value_index});
3135 return TranslatedValue::NewDeferredObject(this, field_count,
3136 object_index);
3137 }
3138
3139 case Translation::REGISTER: {
3140 int input_reg = iterator->Next();
3141 if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3142 intptr_t value = registers->GetRegister(input_reg);
3143 if (trace_file != nullptr) {
3144 PrintF(trace_file, "0x%08" V8PRIxPTR " ; %s ", value,
3145 converter.NameOfCPURegister(input_reg));
3146 reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
3147 }
3148 return TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
3149 }
3150
3151 case Translation::INT32_REGISTER: {
3152 int input_reg = iterator->Next();
3153 if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3154 intptr_t value = registers->GetRegister(input_reg);
3155 if (trace_file != nullptr) {
3156 PrintF(trace_file, "%" V8PRIdPTR " ; %s ", value,
3157 converter.NameOfCPURegister(input_reg));
3158 }
3159 return TranslatedValue::NewInt32(this, static_cast<int32_t>(value));
3160 }
3161
3162 case Translation::UINT32_REGISTER: {
3163 int input_reg = iterator->Next();
3164 if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3165 intptr_t value = registers->GetRegister(input_reg);
3166 if (trace_file != nullptr) {
3167 PrintF(trace_file, "%" V8PRIuPTR " ; %s (uint)", value,
3168 converter.NameOfCPURegister(input_reg));
3169 reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
3170 }
3171 return TranslatedValue::NewUInt32(this, static_cast<uint32_t>(value));
3172 }
3173
3174 case Translation::BOOL_REGISTER: {
3175 int input_reg = iterator->Next();
3176 if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3177 intptr_t value = registers->GetRegister(input_reg);
3178 if (trace_file != nullptr) {
3179 PrintF(trace_file, "%" V8PRIdPTR " ; %s (bool)", value,
3180 converter.NameOfCPURegister(input_reg));
3181 }
3182 return TranslatedValue::NewBool(this, static_cast<uint32_t>(value));
3183 }
3184
3185 case Translation::DOUBLE_REGISTER: {
3186 int input_reg = iterator->Next();
3187 if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3188 double value = registers->GetDoubleRegister(input_reg);
3189 if (trace_file != nullptr) {
3190 PrintF(trace_file, "%e ; %s (bool)", value,
3191 DoubleRegister::from_code(input_reg).ToString());
3192 }
3193 return TranslatedValue::NewDouble(this, value);
3194 }
3195
3196 case Translation::STACK_SLOT: {
3197 int slot_offset =
3198 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3199 intptr_t value = *(reinterpret_cast<intptr_t*>(fp + slot_offset));
3200 if (trace_file != nullptr) {
3201 PrintF(trace_file, "0x%08" V8PRIxPTR " ; [fp %c %d] ", value,
3202 slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
3203 reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
3204 }
3205 return TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
3206 }
3207
3208 case Translation::INT32_STACK_SLOT: {
3209 int slot_offset =
3210 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3211 uint32_t value = GetUInt32Slot(fp, slot_offset);
3212 if (trace_file != nullptr) {
3213 PrintF(trace_file, "%d ; (int) [fp %c %d] ",
3214 static_cast<int32_t>(value), slot_offset < 0 ? '-' : '+',
3215 std::abs(slot_offset));
3216 }
3217 return TranslatedValue::NewInt32(this, value);
3218 }
3219
3220 case Translation::UINT32_STACK_SLOT: {
3221 int slot_offset =
3222 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3223 uint32_t value = GetUInt32Slot(fp, slot_offset);
3224 if (trace_file != nullptr) {
3225 PrintF(trace_file, "%u ; (uint) [fp %c %d] ", value,
3226 slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
3227 }
3228 return TranslatedValue::NewUInt32(this, value);
3229 }
3230
3231 case Translation::BOOL_STACK_SLOT: {
3232 int slot_offset =
3233 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3234 uint32_t value = GetUInt32Slot(fp, slot_offset);
3235 if (trace_file != nullptr) {
3236 PrintF(trace_file, "%u ; (bool) [fp %c %d] ", value,
3237 slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
3238 }
3239 return TranslatedValue::NewBool(this, value);
3240 }
3241
3242 case Translation::DOUBLE_STACK_SLOT: {
3243 int slot_offset =
3244 OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3245 double value = ReadDoubleValue(fp + slot_offset);
3246 if (trace_file != nullptr) {
3247 PrintF(trace_file, "%e ; (double) [fp %c %d] ", value,
3248 slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
3249 }
3250 return TranslatedValue::NewDouble(this, value);
3251 }
3252
3253 case Translation::LITERAL: {
3254 int literal_index = iterator->Next();
3255 Object* value = literal_array->get(literal_index);
3256 if (trace_file != nullptr) {
3257 PrintF(trace_file, "0x%08" V8PRIxPTR " ; (literal %d) ",
3258 reinterpret_cast<intptr_t>(value), literal_index);
3259 reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
3260 }
3261
3262 return TranslatedValue::NewTagged(this, value);
3263 }
3264
3265 case Translation::JS_FRAME_FUNCTION: {
3266 int slot_offset = JavaScriptFrameConstants::kFunctionOffset;
3267 intptr_t value = *(reinterpret_cast<intptr_t*>(fp + slot_offset));
3268 if (trace_file != nullptr) {
3269 PrintF(trace_file, "0x%08" V8PRIxPTR " ; (frame function) ", value);
3270 reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
3271 }
3272 return TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
3273 }
3274 }
3275
3276 FATAL("We should never get here - unexpected deopt info.");
3277 return TranslatedValue(nullptr, TranslatedValue::kInvalid);
3278 }
3279
3280
// Builds the translated state for an optimized JavaScript frame by locating
// its deoptimization data and decoding the corresponding translation.
TranslatedState::TranslatedState(JavaScriptFrame* frame)
    : isolate_(nullptr),
      stack_frame_pointer_(nullptr),
      has_adapted_arguments_(false) {
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data =
      static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  // No register state is available for a frame walked this way, and no
  // tracing is requested.
  Init(frame->fp(), &it, data->LiteralArray(), nullptr /* registers */,
       nullptr /* trace file */);
}
3293
3294
// Creates an empty state; Init() must be called before use.
TranslatedState::TranslatedState()
    : isolate_(nullptr),
      stack_frame_pointer_(nullptr),
      has_adapted_arguments_(false) {}
3299
3300
// Decodes a complete translation (BEGIN header, then |count| frames, each
// followed by its values) into frames_. Nested object values are tracked
// with an explicit stack so children are attributed to their parent frame's
// value list in stream order.
void TranslatedState::Init(Address input_frame_pointer,
                           TranslationIterator* iterator,
                           FixedArray* literal_array, RegisterValues* registers,
                           FILE* trace_file) {
  DCHECK(frames_.empty());

  isolate_ = literal_array->GetIsolate();
  // Read out the 'header' translation.
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator->Next());
  CHECK(opcode == Translation::BEGIN);

  int count = iterator->Next();
  iterator->Next();  // Drop JS frames count.

  frames_.reserve(count);

  // Tracks, per open nesting level, how many sibling values remain to be
  // read once the current nested object's children are done.
  std::stack<int> nested_counts;

  // Read the frames
  for (int i = 0; i < count; i++) {
    // Read the frame descriptor.
    frames_.push_back(CreateNextTranslatedFrame(
        iterator, literal_array, input_frame_pointer, trace_file));
    TranslatedFrame& frame = frames_.back();

    // Read the values.
    int values_to_process = frame.GetValueCount();
    while (values_to_process > 0 || !nested_counts.empty()) {
      if (trace_file != nullptr) {
        if (nested_counts.empty()) {
          // For top level values, print the value number.
          PrintF(trace_file, "    %3i: ",
                 frame.GetValueCount() - values_to_process);
        } else {
          // Take care of indenting for nested values.
          PrintF(trace_file, "         ");
          for (size_t j = 0; j < nested_counts.size(); j++) {
            PrintF(trace_file, "  ");
          }
        }
      }

      TranslatedValue value = CreateNextTranslatedValue(
          i, static_cast<int>(frame.values_.size()), iterator, literal_array,
          input_frame_pointer, registers, trace_file);
      frame.Add(value);

      if (trace_file != nullptr) {
        PrintF(trace_file, "\n");
      }

      // Update the value count and resolve the nesting.
      values_to_process--;
      int children_count = value.GetChildrenCount();
      if (children_count > 0) {
        // Descend: suspend the current level and process the children next.
        nested_counts.push(values_to_process);
        values_to_process = children_count;
      } else {
        // Pop all levels that have been fully consumed.
        while (values_to_process == 0 && !nested_counts.empty()) {
          values_to_process = nested_counts.top();
          nested_counts.pop();
        }
      }
    }
  }

  // The stream must either be exhausted or start a new translation.
  CHECK(!iterator->HasNext() ||
        static_cast<Translation::Opcode>(iterator->Next()) ==
            Translation::BEGIN);
}
3372
3373
Prepare(bool has_adapted_arguments,Address stack_frame_pointer)3374 void TranslatedState::Prepare(bool has_adapted_arguments,
3375 Address stack_frame_pointer) {
3376 for (auto& frame : frames_) frame.Handlify();
3377
3378 stack_frame_pointer_ = stack_frame_pointer;
3379 has_adapted_arguments_ = has_adapted_arguments;
3380
3381 UpdateFromPreviouslyMaterializedObjects();
3382 }
3383
3384
// Materializes the translated value at |*value_index| within frame
// |frame_index| into an actual heap object. Consumes the slot and, for
// composite values, all of its nested child slots, advancing |*value_index|
// past everything that was read. Materialized objects are cached in their
// slot (slot->value_) so that later duplicates resolve to the same object.
Handle<Object> TranslatedState::MaterializeAt(int frame_index,
                                              int* value_index) {
  TranslatedFrame* frame = &(frames_[frame_index]);
  DCHECK(static_cast<size_t>(*value_index) < frame->values_.size());

  // Claim the slot and advance the cursor before recursing into children.
  TranslatedValue* slot = &(frame->values_[*value_index]);
  (*value_index)++;

  switch (slot->kind()) {
    case TranslatedValue::kTagged:
    case TranslatedValue::kInt32:
    case TranslatedValue::kUInt32:
    case TranslatedValue::kBoolBit:
    case TranslatedValue::kDouble: {
      // Scalar values have no children; materialize in place.
      slot->MaterializeSimple();
      Handle<Object> value = slot->GetValue();
      if (value->IsMutableHeapNumber()) {
        // A mutable heap number that escapes here becomes an ordinary
        // (immutable) heap number.
        HeapNumber::cast(*value)->set_map(isolate()->heap()->heap_number_map());
      }
      return value;
    }

    case TranslatedValue::kArgumentsObject: {
      int length = slot->GetChildrenCount();
      Handle<JSObject> arguments;
      if (GetAdaptedArguments(&arguments, frame_index)) {
        // Store the materialized object and consume the nested values.
        for (int i = 0; i < length; ++i) {
          MaterializeAt(frame_index, value_index);
        }
      } else {
        // No adapted arguments available; build the arguments object from
        // the nested values recorded in the translation.
        Handle<JSFunction> function =
            Handle<JSFunction>::cast(frame->front().GetValue());
        arguments = isolate_->factory()->NewArgumentsObject(function, length);
        Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
        DCHECK_EQ(array->length(), length);
        arguments->set_elements(*array);
        for (int i = 0; i < length; ++i) {
          Handle<Object> value = MaterializeAt(frame_index, value_index);
          array->set(i, *value);
        }
      }
      slot->value_ = arguments;
      return arguments;
    }
    case TranslatedValue::kCapturedObject: {
      int length = slot->GetChildrenCount();

      // The map must be a tagged object.
      CHECK(frame->values_[*value_index].kind() == TranslatedValue::kTagged);

      Handle<Object> result;
      if (slot->value_.ToHandle(&result)) {
        // This has been previously materialized, return the previous value.
        // We still need to skip all the nested objects.
        for (int i = 0; i < length; i++) {
          MaterializeAt(frame_index, value_index);
        }

        return result;
      }

      Handle<Object> map_object = MaterializeAt(frame_index, value_index);
      // Generalize all field representations so the materialized object can
      // hold any value that is subsequently written into it.
      Handle<Map> map =
          Map::GeneralizeAllFieldRepresentations(Handle<Map>::cast(map_object));
      switch (map->instance_type()) {
        case MUTABLE_HEAP_NUMBER_TYPE:
        case HEAP_NUMBER_TYPE: {
          // Reuse the HeapNumber value directly as it is already properly
          // tagged and skip materializing the HeapNumber explicitly.
          Handle<Object> object = MaterializeAt(frame_index, value_index);
          slot->value_ = object;
          // On 32-bit architectures, there is an extra slot there because
          // the escape analysis calculates the number of slots as
          // object-size/pointer-size. To account for this, we read out
          // any extra slots.
          for (int i = 0; i < length - 2; i++) {
            MaterializeAt(frame_index, value_index);
          }
          return object;
        }
        case JS_OBJECT_TYPE: {
          Handle<JSObject> object =
              isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED);
          // Cache the object before materializing its fields so that cyclic
          // references resolve to this very object.
          slot->value_ = object;
          Handle<Object> properties = MaterializeAt(frame_index, value_index);
          Handle<Object> elements = MaterializeAt(frame_index, value_index);
          object->set_properties(FixedArray::cast(*properties));
          object->set_elements(FixedArrayBase::cast(*elements));
          // The remaining children are the in-object fields (the first
          // three slots were map, properties and elements).
          for (int i = 0; i < length - 3; ++i) {
            Handle<Object> value = MaterializeAt(frame_index, value_index);
            FieldIndex index = FieldIndex::ForPropertyIndex(object->map(), i);
            object->FastPropertyAtPut(index, *value);
          }
          return object;
        }
        case JS_ARRAY_TYPE: {
          Handle<JSArray> object =
              isolate_->factory()->NewJSArray(0, map->elements_kind());
          slot->value_ = object;
          // Children are: properties, elements, length (after the map).
          Handle<Object> properties = MaterializeAt(frame_index, value_index);
          Handle<Object> elements = MaterializeAt(frame_index, value_index);
          Handle<Object> length = MaterializeAt(frame_index, value_index);
          object->set_properties(FixedArray::cast(*properties));
          object->set_elements(FixedArrayBase::cast(*elements));
          object->set_length(*length);
          return object;
        }
        case FIXED_ARRAY_TYPE: {
          Handle<Object> lengthObject = MaterializeAt(frame_index, value_index);
          int32_t length = 0;
          CHECK(lengthObject->ToInt32(&length));
          Handle<FixedArray> object =
              isolate_->factory()->NewFixedArray(length);
          // We need to set the map, because the fixed array we are
          // materializing could be a context or an arguments object,
          // in which case we must retain that information.
          object->set_map(*map);
          slot->value_ = object;
          for (int i = 0; i < length; ++i) {
            Handle<Object> value = MaterializeAt(frame_index, value_index);
            object->set(i, *value);
          }
          return object;
        }
        case FIXED_DOUBLE_ARRAY_TYPE: {
          DCHECK_EQ(*map, isolate_->heap()->fixed_double_array_map());
          Handle<Object> lengthObject = MaterializeAt(frame_index, value_index);
          int32_t length = 0;
          CHECK(lengthObject->ToInt32(&length));
          Handle<FixedArrayBase> object =
              isolate_->factory()->NewFixedDoubleArray(length);
          slot->value_ = object;
          if (length > 0) {
            Handle<FixedDoubleArray> double_array =
                Handle<FixedDoubleArray>::cast(object);
            for (int i = 0; i < length; ++i) {
              Handle<Object> value = MaterializeAt(frame_index, value_index);
              CHECK(value->IsNumber());
              double_array->set(i, value->Number());
            }
          }
          return object;
        }
        default:
          PrintF(stderr, "[couldn't handle instance type %d]\n",
                 map->instance_type());
          FATAL("unreachable");
          return Handle<Object>::null();
      }
      UNREACHABLE();
      break;
    }

    case TranslatedValue::kDuplicatedObject: {
      int object_index = slot->object_index();
      TranslatedState::ObjectPosition pos = object_positions_[object_index];

      // Make sure the duplicate is referring to a previous object.
      DCHECK(pos.frame_index_ < frame_index ||
             (pos.frame_index_ == frame_index &&
              pos.value_index_ < *value_index - 1));

      Handle<Object> object =
          frames_[pos.frame_index_].values_[pos.value_index_].GetValue();

      // The object should have a (non-sentinel) value.
      DCHECK(!object.is_null() &&
             !object.is_identical_to(isolate_->factory()->arguments_marker()));

      slot->value_ = object;
      return object;
    }

    case TranslatedValue::kInvalid:
      UNREACHABLE();
      break;
  }

  FATAL("We should never get here - unexpected deopt slot kind.");
  return Handle<Object>::null();
}
3567
3568
MaterializeObjectAt(int object_index)3569 Handle<Object> TranslatedState::MaterializeObjectAt(int object_index) {
3570 TranslatedState::ObjectPosition pos = object_positions_[object_index];
3571 return MaterializeAt(pos.frame_index_, &(pos.value_index_));
3572 }
3573
3574
GetAdaptedArguments(Handle<JSObject> * result,int frame_index)3575 bool TranslatedState::GetAdaptedArguments(Handle<JSObject>* result,
3576 int frame_index) {
3577 if (frame_index == 0) {
3578 // Top level frame -> we need to go to the parent frame on the stack.
3579 if (!has_adapted_arguments_) return false;
3580
3581 // This is top level frame, so we need to go to the stack to get
3582 // this function's argument. (Note that this relies on not inlining
3583 // recursive functions!)
3584 Handle<JSFunction> function =
3585 Handle<JSFunction>::cast(frames_[frame_index].front().GetValue());
3586 *result = Handle<JSObject>::cast(Accessors::FunctionGetArguments(function));
3587 return true;
3588 } else {
3589 TranslatedFrame* previous_frame = &(frames_[frame_index]);
3590 if (previous_frame->kind() != TranslatedFrame::kArgumentsAdaptor) {
3591 return false;
3592 }
3593 // We get the adapted arguments from the parent translation.
3594 int length = previous_frame->height();
3595 Handle<JSFunction> function =
3596 Handle<JSFunction>::cast(previous_frame->front().GetValue());
3597 Handle<JSObject> arguments =
3598 isolate_->factory()->NewArgumentsObject(function, length);
3599 Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
3600 arguments->set_elements(*array);
3601 TranslatedFrame::iterator arg_iterator = previous_frame->begin();
3602 arg_iterator++; // Skip function.
3603 for (int i = 0; i < length; ++i) {
3604 Handle<Object> value = arg_iterator->GetValue();
3605 array->set(i, *value);
3606 arg_iterator++;
3607 }
3608 CHECK(arg_iterator == previous_frame->end());
3609 *result = arguments;
3610 return true;
3611 }
3612 }
3613
3614
GetArgumentsInfoFromJSFrameIndex(int jsframe_index,int * args_count)3615 TranslatedFrame* TranslatedState::GetArgumentsInfoFromJSFrameIndex(
3616 int jsframe_index, int* args_count) {
3617 for (size_t i = 0; i < frames_.size(); i++) {
3618 if (frames_[i].kind() == TranslatedFrame::kFunction) {
3619 if (jsframe_index > 0) {
3620 jsframe_index--;
3621 } else {
3622 // We have the JS function frame, now check if it has arguments adaptor.
3623 if (i > 0 &&
3624 frames_[i - 1].kind() == TranslatedFrame::kArgumentsAdaptor) {
3625 *args_count = frames_[i - 1].height();
3626 return &(frames_[i - 1]);
3627 }
3628 *args_count =
3629 frames_[i].shared_info()->internal_formal_parameter_count() + 1;
3630 return &(frames_[i]);
3631 }
3632 }
3633 }
3634 return nullptr;
3635 }
3636
3637
// Records the objects materialized for this frame into the isolate's
// materialized-object store, keyed by the stack frame pointer, so a later
// deoptimization of the same frame reuses them. When values are stored for
// the first time, the function is deoptimized so that execution observes
// the stored (materialized) state.
void TranslatedState::StoreMaterializedValuesAndDeopt() {
  MaterializedObjectStore* materialized_store =
      isolate_->materialized_object_store();
  Handle<FixedArray> previously_materialized_objects =
      materialized_store->Get(stack_frame_pointer_);

  // The marker denotes "not materialized yet" slots.
  Handle<Object> marker = isolate_->factory()->arguments_marker();

  int length = static_cast<int>(object_positions_.size());
  bool new_store = false;
  if (previously_materialized_objects.is_null()) {
    // No store exists for this frame yet; create one with every slot
    // initialized to the marker.
    previously_materialized_objects =
        isolate_->factory()->NewFixedArray(length);
    for (int i = 0; i < length; i++) {
      previously_materialized_objects->set(i, *marker);
    }
    new_store = true;
  }

  DCHECK_EQ(length, previously_materialized_objects->length());

  bool value_changed = false;
  for (int i = 0; i < length; i++) {
    TranslatedState::ObjectPosition pos = object_positions_[i];
    TranslatedValue* value_info =
        &(frames_[pos.frame_index_].values_[pos.value_index_]);

    DCHECK(value_info->IsMaterializedObject());

    Handle<Object> value(value_info->GetRawValue(), isolate_);

    if (!value.is_identical_to(marker)) {
      if (previously_materialized_objects->get(i) == *marker) {
        // First time this object is stored.
        previously_materialized_objects->set(i, *value);
        value_changed = true;
      } else {
        // Already stored before; it must agree with the current value.
        DCHECK(previously_materialized_objects->get(i) == *value);
      }
    }
  }
  if (new_store && value_changed) {
    // Fresh values were recorded; persist them and deoptimize the function
    // so the stored materialized state is respected.
    materialized_store->Set(stack_frame_pointer_,
                            previously_materialized_objects);
    DCHECK_EQ(TranslatedFrame::kFunction, frames_[0].kind());
    Object* const function = frames_[0].front().GetRawValue();
    Deoptimizer::DeoptimizeFunction(JSFunction::cast(function));
  }
}
3686
3687
UpdateFromPreviouslyMaterializedObjects()3688 void TranslatedState::UpdateFromPreviouslyMaterializedObjects() {
3689 MaterializedObjectStore* materialized_store =
3690 isolate_->materialized_object_store();
3691 Handle<FixedArray> previously_materialized_objects =
3692 materialized_store->Get(stack_frame_pointer_);
3693
3694 // If we have no previously materialized objects, there is nothing to do.
3695 if (previously_materialized_objects.is_null()) return;
3696
3697 Handle<Object> marker = isolate_->factory()->arguments_marker();
3698
3699 int length = static_cast<int>(object_positions_.size());
3700 DCHECK_EQ(length, previously_materialized_objects->length());
3701
3702 for (int i = 0; i < length; i++) {
3703 // For a previously materialized objects, inject their value into the
3704 // translated values.
3705 if (previously_materialized_objects->get(i) != *marker) {
3706 TranslatedState::ObjectPosition pos = object_positions_[i];
3707 TranslatedValue* value_info =
3708 &(frames_[pos.frame_index_].values_[pos.value_index_]);
3709 DCHECK(value_info->IsMaterializedObject());
3710
3711 value_info->value_ =
3712 Handle<Object>(previously_materialized_objects->get(i), isolate_);
3713 }
3714 }
3715 }
3716
3717 } // namespace internal
3718 } // namespace v8
3719