// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/frames.h"

#include <memory>
#include <sstream>

#include "src/base/bits.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/register-configuration.h"
#include "src/safepoint-table.h"
#include "src/string-stream.h"
#include "src/vm-state-inl.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

ReturnAddressLocationResolver
    StackFrame::return_address_location_resolver_ = NULL;


// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
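// Typical usage (see StackFrameIterator::Advance below): construct the
// iterator for a frame, advance it until done(), and the handler that
// remains, if any, belongs to a frame further up the stack.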
class StackHandlerIterator BASE_EMBEDDED {
 public:
  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
      : limit_(frame->fp()), handler_(handler) {
    // Make sure the handler has already been unwound to this frame.
    DCHECK(frame->sp() <= handler->address());
  }

  StackHandler* handler() const { return handler_; }

  bool done() {
    return handler_ == NULL || handler_->address() > limit_;
  }
  void Advance() {
    DCHECK(!done());
    handler_ = handler_->next();
  }

 private:
  const Address limit_;
  StackHandler* handler_;
};


// -------------------------------------------------------------------------


#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
      frame_(NULL), handler_(NULL),
      can_access_heap_objects_(can_access_heap_objects) {
}
#undef INITIALIZE_SINGLETON

StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIterator(isolate, isolate->thread_local_top()) {}

StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}

void StackFrameIterator::Advance() {
  DCHECK(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  DCHECK(!done() || handler_ == NULL);
}


void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type = ExitFrame::GetStateForFramePointer(
      Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  frame_ = SingletonFor(type, &state);
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                 StackFrame::State* state) {
  StackFrame* result = SingletonFor(type);
  DCHECK((!result) == (type == StackFrame::NONE));
  if (result) result->state_ = *state;
  return result;
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type:             \
    return &field##_;

  switch (type) {
    case StackFrame::NONE: return NULL;
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: break;
  }
  return NULL;

#undef FRAME_TYPE_CASE
}

// -------------------------------------------------------------------------

JavaScriptFrameIterator::JavaScriptFrameIterator(Isolate* isolate,
                                                 StackFrame::Id id)
    : iterator_(isolate) {
  while (!done()) {
    Advance();
    if (frame()->id() == id) return;
  }
}


void JavaScriptFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
}


void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
  if (!frame()->has_adapted_arguments()) return;
  iterator_.Advance();
  DCHECK(iterator_.frame()->is_arguments_adaptor());
}


// -------------------------------------------------------------------------

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : iterator_(isolate) {
  if (!done() && !IsValidFrame(iterator_.frame())) Advance();
}

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate,
                                                 StackFrame::Id id)
    : StackTraceFrameIterator(isolate) {
  while (!done() && frame()->id() != id) Advance();
}

void StackTraceFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!done() && !IsValidFrame(iterator_.frame()));
}

bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) const {
  if (frame->is_java_script()) {
    JavaScriptFrame* jsFrame = static_cast<JavaScriptFrame*>(frame);
    if (!jsFrame->function()->IsJSFunction()) return false;
    Object* script = jsFrame->function()->shared()->script();
    // Don't show functions from native scripts to user.
    return (script->IsScript() &&
            Script::TYPE_NATIVE != Script::cast(script)->type());
  }
  // apart from javascript, only wasm is valid
  return frame->is_wasm();
}

void StackTraceFrameIterator::AdvanceToArgumentsFrame() {
  if (!is_javascript() || !javascript_frame()->has_adapted_arguments()) return;
  iterator_.Advance();
  DCHECK(iterator_.frame()->is_arguments_adaptor());
}

// -------------------------------------------------------------------------


SafeStackFrameIterator::SafeStackFrameIterator(
    Isolate* isolate,
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  if (IsValidTop(top)) {
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK(fp != NULL);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
    // StackFrame::ComputeType will read both kContextOffset and
    // kFunctionOffset; we only check that kFunctionOffset is within the stack
    // bounds and statically assert that the kContextOffset slot is pushed on
    // the stack before the kFunctionOffset slot.
    STATIC_ASSERT(StandardFrameConstants::kFunctionOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kFunctionOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
    } else {
      // Mark the frame as JAVA_SCRIPT if we cannot determine its type; the
      // frame will be skipped anyway.
      type = StackFrame::JAVA_SCRIPT;
      // The top frame is incomplete, so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    return;
  }
  frame_ = SingletonFor(type, &state);
  if (frame_) Advance();
}


bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == NULL) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}


void SafeStackFrameIterator::AdvanceOneFrame() {
  DCHECK(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = NULL;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (!frame_) return;

  // Check that we have actually moved to the previous frame in the stack.
  if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
    frame_ = NULL;
  }
}


bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
}


bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_entry_construct()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that caller FP address is valid.
    Address caller_fp = Memory::Address_at(
        frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on the stack as a Smi. We need to
    // check that it really is a Smi.
    Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
        GetExpression(0);
    if (!number_of_args->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
      SingletonFor(frame->GetCallerState(&state)) != NULL;
}


bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  MSAN_MEMORY_IS_INITIALIZED(state.pc_address, sizeof(state.pc_address));
  return *state.pc_address != nullptr;
}


void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) break;
    ExternalCallbackScope* last_callback_scope = NULL;
    while (external_callback_scope_ != NULL &&
           external_callback_scope_->scope_address() < frame_->fp()) {
      // Because frame setup is not atomic, we may find ourselves in an
      // interval where an ExternalCallbackScope has already been created but
      // the frame has not yet been entered, so we are actually observing the
      // previous frame.
      // Skip all ExternalCallbackScopes that are below the current fp.
      last_callback_scope = external_callback_scope_;
      external_callback_scope_ = external_callback_scope_->previous();
    }
    if (frame_->is_java_script()) break;
    if (frame_->is_exit() || frame_->is_builtin_exit()) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope, just skip them as we cannot collect any useful
      // information about them.
      if (last_callback_scope) {
        frame_->state_.pc_address =
            last_callback_scope->callback_entrypoint_address();
      }
      break;
    }
  }
}


// -------------------------------------------------------------------------

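// Looks up (and caches) the safepoint entry for |inner_pointer| via the
// isolate's inner-pointer-to-code cache, fills in |safepoint_entry| and the
// number of stack slots, and returns the Code object containing the pointer.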
Code* StackFrame::GetSafepointData(Isolate* isolate,
                                   Address inner_pointer,
                                   SafepointEntry* safepoint_entry,
                                   unsigned* stack_slots) {
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
      isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
  if (!entry->safepoint_entry.is_valid()) {
    entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
    DCHECK(entry->safepoint_entry.is_valid());
  } else {
    DCHECK(entry->safepoint_entry.Equals(
        entry->code->GetSafepointEntry(inner_pointer)));
  }

  // Fill in the results and return the code.
  Code* code = entry->code;
  *safepoint_entry = entry->safepoint_entry;
  *stack_slots = code->stack_slots();
  return code;
}


#ifdef DEBUG
static bool GcSafeCodeContains(HeapObject* object, Address addr);
#endif


void StackFrame::IteratePc(ObjectVisitor* v, Address* pc_address,
                           Address* constant_pool_address, Code* holder) {
  Address pc = *pc_address;
  DCHECK(GcSafeCodeContains(holder, pc));
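  // Record the pc as an offset into the holder so that, if visiting the code
  // slot below relocates the Code object, the pc (and constant pool pointer)
  // can be rebased onto the moved code.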
  unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
  Object* code = holder;
  v->VisitPointer(&code);
  if (code != holder) {
    holder = reinterpret_cast<Code*>(code);
    pc = holder->instruction_start() + pc_offset;
    *pc_address = pc;
    if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
      *constant_pool_address = holder->constant_pool();
    }
  }
}


void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK(return_address_location_resolver_ == NULL);
  return_address_location_resolver_ = resolver;
}

static bool IsInterpreterFramePc(Isolate* isolate, Address pc) {
  Code* interpreter_entry_trampoline =
      isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
  Code* interpreter_bytecode_advance =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeAdvance);
  Code* interpreter_bytecode_dispatch =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);

  return (pc >= interpreter_entry_trampoline->instruction_start() &&
          pc < interpreter_entry_trampoline->instruction_end()) ||
         (pc >= interpreter_bytecode_advance->instruction_start() &&
          pc < interpreter_bytecode_advance->instruction_end()) ||
         (pc >= interpreter_bytecode_dispatch->instruction_start() &&
          pc < interpreter_bytecode_dispatch->instruction_end());
}

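// Determines the type of a frame from its current state. When heap access is
// not safe (e.g. when called from the profiler) the decision is based on the
// frame marker slot and the pc alone; otherwise the Code object containing
// the pc is consulted as well.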
StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  DCHECK(state->fp != NULL);

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
      kPointerSize);
  Object* marker = Memory::Object_at(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (!iterator->can_access_heap_objects_) {
    // TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
    // means that we are being called from the profiler, which can interrupt
    // the VM with a signal at any arbitrary instruction, with essentially
    // anything on the stack. So basically none of these checks are 100%
    // reliable.
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
    Object* maybe_function =
        Memory::Object_at(state->fp + StandardFrameConstants::kFunctionOffset);
    if (!marker->IsSmi()) {
      if (maybe_function->IsSmi()) {
        return NONE;
      } else if (IsInterpreterFramePc(iterator->isolate(),
                                      *(state->pc_address))) {
        return INTERPRETED;
      } else {
        return JAVA_SCRIPT;
      }
    }
  } else {
    // Look up the code object to figure out the type of the stack frame.
    Code* code_obj =
        GetContainingCode(iterator->isolate(), *(state->pc_address));
    if (code_obj != nullptr) {
      switch (code_obj->kind()) {
        case Code::BUILTIN:
          if (marker->IsSmi()) break;
          if (code_obj->is_interpreter_trampoline_builtin()) {
            return INTERPRETED;
          }
          if (code_obj->is_turbofanned()) {
            // TODO(bmeurer): We treat frames for BUILTIN Code objects as
            // OptimizedFrame for now (all the builtins with JavaScript
            // linkage are actually generated with TurboFan currently, so
            // this is sound).
            return OPTIMIZED;
          }
          return BUILTIN;
        case Code::FUNCTION:
          return JAVA_SCRIPT;
        case Code::OPTIMIZED_FUNCTION:
          return OPTIMIZED;
        case Code::WASM_FUNCTION:
          return WASM;
        case Code::WASM_TO_JS_FUNCTION:
          return WASM_TO_JS;
        case Code::JS_TO_WASM_FUNCTION:
          return JS_TO_WASM;
        default:
          // All other types should have an explicit marker
          break;
      }
    } else {
      return NONE;
    }
  }

  DCHECK(marker->IsSmi());
  StackFrame::Type candidate =
      static_cast<StackFrame::Type>(Smi::cast(marker)->value());
  switch (candidate) {
    case ENTRY:
    case ENTRY_CONSTRUCT:
    case EXIT:
    case BUILTIN_EXIT:
    case STUB:
    case STUB_FAILURE_TRAMPOLINE:
    case INTERNAL:
    case CONSTRUCT:
    case ARGUMENTS_ADAPTOR:
    case WASM_TO_JS:
    case WASM:
      return candidate;
    case JS_TO_WASM:
    case JAVA_SCRIPT:
    case OPTIMIZED:
    case INTERPRETED:
    default:
      // Unoptimized and optimized JavaScript frames, including
      // interpreted frames, should never have a StackFrame::Type
      // marker. If we find one, we're likely being called from the
      // profiler in a bogus stack frame.
      return NONE;
  }
}


#ifdef DEBUG
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif


StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}


Address StackFrame::UnpaddedFP() const {
  return fp();
}


Code* EntryFrame::unchecked_code() const {
  return isolate()->heap()->js_entry_code();
}


void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}


void EntryFrame::SetCallerFp(Address caller_fp) {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Memory::Address_at(this->fp() + offset) = caller_fp;
}


StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Address fp = Memory::Address_at(this->fp() + offset);
  return ExitFrame::GetStateForFramePointer(fp, state);
}


Code* EntryConstructFrame::unchecked_code() const {
  return isolate()->heap()->js_construct_entry_code();
}


Object*& ExitFrame::code_slot() const {
  const int offset = ExitFrameConstants::kCodeOffset;
  return Memory::Object_at(fp() + offset);
}

Code* ExitFrame::unchecked_code() const {
  return reinterpret_cast<Code*>(code_slot());
}


void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  if (FLAG_enable_embedded_constant_pool) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}


void ExitFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
}


void ExitFrame::Iterate(ObjectVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
  v->VisitPointer(&code_slot());
}


Address ExitFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}


StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  Address sp = ComputeStackPointer(fp);
  FillState(fp, sp, state);
  DCHECK(*state->pc_address != NULL);

  return ComputeFrameType(fp);
}

StackFrame::Type ExitFrame::ComputeFrameType(Address fp) {
  // Distinguish between regular exit frames and builtin exit frames.
  // Default to EXIT in all hairy cases (e.g., when called from profiler).
  const int offset = ExitFrameConstants::kFrameTypeOffset;
  Object* marker = Memory::Object_at(fp + offset);

  if (!marker->IsSmi()) {
    return EXIT;
  }

  StackFrame::Type frame_type =
      static_cast<StackFrame::Type>(Smi::cast(marker)->value());
  if (frame_type == EXIT || frame_type == BUILTIN_EXIT) {
    return frame_type;
  }

  return EXIT;
}

Address ExitFrame::ComputeStackPointer(Address fp) {
  MSAN_MEMORY_IS_INITIALIZED(fp + ExitFrameConstants::kSPOffset, kPointerSize);
  return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
}

void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
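  // The return address is taken to be the word one pc-slot below the stack
  // pointer recorded in the exit frame, which is where the computation below
  // looks for it.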
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  // The constant pool recorded in the exit frame is not associated
  // with the pc in this state (the return address into a C entry
  // stub). ComputeCallerState will retrieve the constant pool
  // together with the associated caller pc.
  state->constant_pool_address = NULL;
}

JSFunction* BuiltinExitFrame::function() const {
  return JSFunction::cast(target_slot_object());
}

Object* BuiltinExitFrame::receiver() const { return receiver_slot_object(); }

bool BuiltinExitFrame::IsConstructor() const {
  return !new_target_slot_object()->IsUndefined(isolate());
}

Object* BuiltinExitFrame::GetParameter(int i) const {
  DCHECK(i >= 0 && i < ComputeParametersCount());
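  // Parameters live above the argc slot on the stack; parameter i is
  // (i + 1) words above it, as computed below.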
  int offset = BuiltinExitFrameConstants::kArgcOffset + (i + 1) * kPointerSize;
  return Memory::Object_at(fp() + offset);
}

int BuiltinExitFrame::ComputeParametersCount() const {
  Object* argc_slot = argc_slot_object();
  DCHECK(argc_slot->IsSmi());
  // Argc also counts the receiver, target, new target, and argc itself as
  // args, therefore the real argument count is argc - 4.
  int argc = Smi::cast(argc_slot)->value() - 4;
  DCHECK(argc >= 0);
  return argc;
}

void BuiltinExitFrame::Print(StringStream* accumulator, PrintMode mode,
                             int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  accumulator->Add("builtin exit frame: ");
  Code* code = NULL;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",%o", GetParameter(i));
  }

  accumulator->Add(")\n\n");
}

Address StandardFrame::GetExpressionAddress(int n) const {
  const int offset = StandardFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}

Address InterpretedFrame::GetExpressionAddress(int n) const {
  const int offset = InterpreterFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}

Script* StandardFrame::script() const {
  // This should only be called on frames which override this method.
  DCHECK(false);
  return nullptr;
}

Object* StandardFrame::receiver() const {
  return isolate()->heap()->undefined_value();
}

Object* StandardFrame::context() const {
  return isolate()->heap()->undefined_value();
}

int StandardFrame::position() const {
  AbstractCode* code = AbstractCode::cast(LookupCode());
  int code_offset = static_cast<int>(pc() - code->instruction_start());
  return code->SourcePosition(code_offset);
}

int StandardFrame::ComputeExpressionsCount() const {
  Address base = GetExpressionAddress(0);
  Address limit = sp() - kPointerSize;
  DCHECK(base >= limit);  // stack grows downwards
  // Include register-allocated locals in number of expressions.
  return static_cast<int>((base - limit) / kPointerSize);
}

Object* StandardFrame::GetParameter(int index) const {
  // StandardFrame does not define any parameters.
  UNREACHABLE();
  return nullptr;
}

int StandardFrame::ComputeParametersCount() const { return 0; }

void StandardFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = caller_fp();
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(ComputePCAddress(fp())));
  state->constant_pool_address =
      reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
}


void StandardFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
      caller_fp;
}

bool StandardFrame::IsConstructor() const { return false; }

void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
  // Make sure that we're not doing "safe" stack frame iteration. We cannot
  // possibly find pointers in optimized frames in that state.
  DCHECK(can_access_heap_objects());

  // Compute the safepoint information.
  unsigned stack_slots = 0;
  SafepointEntry safepoint_entry;
  Code* code = StackFrame::GetSafepointData(
      isolate(), pc(), &safepoint_entry, &stack_slots);
  unsigned slot_space = stack_slots * kPointerSize;

  // Determine the fixed header and spill slot area size.
  int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
  Object* marker =
      Memory::Object_at(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (marker->IsSmi()) {
    StackFrame::Type candidate =
        static_cast<StackFrame::Type>(Smi::cast(marker)->value());
    switch (candidate) {
      case ENTRY:
      case ENTRY_CONSTRUCT:
      case EXIT:
      case BUILTIN_EXIT:
      case STUB_FAILURE_TRAMPOLINE:
      case ARGUMENTS_ADAPTOR:
      case STUB:
      case INTERNAL:
      case CONSTRUCT:
      case JS_TO_WASM:
      case WASM_TO_JS:
      case WASM:
        frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
        break;
      case JAVA_SCRIPT:
      case OPTIMIZED:
      case INTERPRETED:
      case BUILTIN:
        // These frame types have a context, and it is stored in the slot on
        // the stack where one would otherwise find the frame-type marker.
        UNREACHABLE();
        break;
      case NONE:
      case NUMBER_OF_TYPES:
      case MANUAL:
        UNREACHABLE();
        break;
    }
  }
  slot_space -=
      (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);

  Object** frame_header_base = &Memory::Object_at(fp() - frame_header_size);
  Object** frame_header_limit =
      &Memory::Object_at(fp() - StandardFrameConstants::kCPSlotSize);
  Object** parameters_base = &Memory::Object_at(sp());
  Object** parameters_limit = frame_header_base - slot_space / kPointerSize;
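  // Regions used below: [parameters_base, parameters_limit) is the area on
  // top of the stack (outgoing arguments and any saved registers), the words
  // between parameters_limit and frame_header_base are the spill slot area,
  // and [frame_header_base, frame_header_limit) is the fixed frame header
  // below fp.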

  // Visit the parameters that may be on top of the saved registers.
  if (safepoint_entry.argument_count() > 0) {
    v->VisitPointers(parameters_base,
                     parameters_base + safepoint_entry.argument_count());
    parameters_base += safepoint_entry.argument_count();
  }

  // Skip saved double registers.
  if (safepoint_entry.has_doubles()) {
    // Number of doubles not known at snapshot time.
    DCHECK(!isolate()->serializer_enabled());
    parameters_base += RegisterConfiguration::Crankshaft()
                           ->num_allocatable_double_registers() *
                       kDoubleSize / kPointerSize;
  }

  // Visit the registers that contain pointers if any.
  if (safepoint_entry.HasRegisters()) {
    for (int i = kNumSafepointRegisters - 1; i >= 0; i--) {
      if (safepoint_entry.HasRegisterAt(i)) {
        int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
        v->VisitPointer(parameters_base + reg_stack_index);
      }
    }
    // Skip the words containing the register values.
    parameters_base += kNumSafepointRegisters;
  }

  // We're done dealing with the register bits.
  uint8_t* safepoint_bits = safepoint_entry.bits();
  safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;

  // Visit the rest of the parameters.
  if (!is_js_to_wasm() && !is_wasm()) {
    // Non-WASM frames have tagged values as parameters.
    v->VisitPointers(parameters_base, parameters_limit);
  }

  // Visit pointer spill slots and locals.
  for (unsigned index = 0; index < stack_slots; index++) {
    int byte_index = index >> kBitsPerByteLog2;
    int bit_index = index & (kBitsPerByte - 1);
    if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
      v->VisitPointer(parameters_limit + index);
    }
  }

  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, pc_address(), constant_pool_address(), code);

  if (!is_wasm() && !is_wasm_to_js()) {
    // Visit the context in stub frame and JavaScript frame.
    // Visit the function in JavaScript frame.
    v->VisitPointers(frame_header_base, frame_header_limit);
  }
}


void StubFrame::Iterate(ObjectVisitor* v) const {
  IterateCompiledFrame(v);
}


Code* StubFrame::unchecked_code() const {
  return static_cast<Code*>(isolate()->FindCodeObject(pc()));
}


Address StubFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}


int StubFrame::GetNumberOfIncomingArguments() const {
  return 0;
}


void OptimizedFrame::Iterate(ObjectVisitor* v) const {
  IterateCompiledFrame(v);
}


void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
  Memory::Object_at(GetParameterSlot(index)) = value;
}


bool JavaScriptFrame::IsConstructor() const {
  Address fp = caller_fp();
  if (has_adapted_arguments()) {
    // Skip the arguments adaptor frame and look at the real caller.
    fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
  }
  return IsConstructFrame(fp);
}


bool JavaScriptFrame::HasInlinedFrames() const {
  List<JSFunction*> functions(1);
  GetFunctions(&functions);
  return functions.length() > 1;
}


int JavaScriptFrame::GetArgumentsLength() const {
  // If there is an arguments adaptor frame get the arguments length from it.
  if (has_adapted_arguments()) {
    return ArgumentsAdaptorFrame::GetLength(caller_fp());
  } else {
    return GetNumberOfIncomingArguments();
  }
}


Code* JavaScriptFrame::unchecked_code() const {
  return function()->code();
}


int JavaScriptFrame::GetNumberOfIncomingArguments() const {
  DCHECK(can_access_heap_objects() &&
         isolate()->heap()->gc_state() == Heap::NOT_IN_GC);

  return function()->shared()->internal_formal_parameter_count();
}


Address JavaScriptFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) const {
  DCHECK(functions->length() == 0);
  functions->Add(function());
}

void JavaScriptFrame::Summarize(List<FrameSummary>* functions,
                                FrameSummary::Mode mode) const {
  DCHECK(functions->length() == 0);
  Code* code = LookupCode();
  int offset = static_cast<int>(pc() - code->instruction_start());
  AbstractCode* abstract_code = AbstractCode::cast(code);
  FrameSummary summary(receiver(), function(), abstract_code, offset,
                       IsConstructor(), mode);
  functions->Add(summary);
}

JSFunction* JavaScriptFrame::function() const {
  return JSFunction::cast(function_slot_object());
}

Object* JavaScriptFrame::receiver() const { return GetParameter(-1); }

Script* JavaScriptFrame::script() const {
  return Script::cast(function()->shared()->script());
}

Object* JavaScriptFrame::context() const {
  const int offset = StandardFrameConstants::kContextOffset;
  Object* maybe_result = Memory::Object_at(fp() + offset);
  DCHECK(!maybe_result->IsSmi());
  return maybe_result;
}

int JavaScriptFrame::LookupExceptionHandlerInTable(
    int* stack_depth, HandlerTable::CatchPrediction* prediction) {
  Code* code = LookupCode();
  DCHECK(!code->is_optimized_code());
  int pc_offset = static_cast<int>(pc() - code->entry());
  return code->LookupRangeInHandlerTable(pc_offset, stack_depth, prediction);
}

void JavaScriptFrame::PrintFunctionAndOffset(JSFunction* function,
                                             AbstractCode* code,
                                             int code_offset, FILE* file,
                                             bool print_line_number) {
  PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
  function->PrintName(file);
  PrintF(file, "+%d", code_offset);
  if (print_line_number) {
    SharedFunctionInfo* shared = function->shared();
    int source_pos = code->SourcePosition(code_offset);
    Object* maybe_script = shared->script();
    if (maybe_script->IsScript()) {
      Script* script = Script::cast(maybe_script);
      int line = script->GetLineNumber(source_pos) + 1;
      Object* script_name_raw = script->name();
      if (script_name_raw->IsString()) {
        String* script_name = String::cast(script->name());
        std::unique_ptr<char[]> c_script_name =
            script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
        PrintF(file, " at %s:%d", c_script_name.get(), line);
      } else {
        PrintF(file, " at <unknown>:%d", line);
      }
    } else {
      PrintF(file, " at <unknown>:<unknown>");
    }
  }
}


void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
                               bool print_line_number) {
  // constructor calls
  DisallowHeapAllocation no_allocation;
  JavaScriptFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->is_java_script()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) PrintF(file, "new ");
      JSFunction* function = frame->function();
      int code_offset = 0;
      if (frame->is_interpreted()) {
        InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);
        code_offset = iframe->GetBytecodeOffset();
      } else {
        Code* code = frame->unchecked_code();
        code_offset = static_cast<int>(frame->pc() - code->instruction_start());
      }
      PrintFunctionAndOffset(function, function->abstract_code(), code_offset,
                             file, print_line_number);
      if (print_args) {
        // function arguments
        // (we are intentionally only printing the actually
        // supplied parameters, not all parameters required)
        PrintF(file, "(this=");
        frame->receiver()->ShortPrint(file);
        const int length = frame->ComputeParametersCount();
        for (int i = 0; i < length; i++) {
          PrintF(file, ", ");
          frame->GetParameter(i)->ShortPrint(file);
        }
        PrintF(file, ")");
      }
      break;
    }
    it.Advance();
  }
}


void JavaScriptFrame::SaveOperandStack(FixedArray* store) const {
  int operands_count = store->length();
  DCHECK_LE(operands_count, ComputeOperandsCount());
  for (int i = 0; i < operands_count; i++) {
    store->set(i, GetOperand(i));
  }
}

Object* JavaScriptFrame::GetParameter(int index) const {
  return Memory::Object_at(GetParameterSlot(index));
}

int JavaScriptFrame::ComputeParametersCount() const {
  return GetNumberOfIncomingArguments();
}

namespace {

bool CannotDeoptFromAsmCode(Code* code, JSFunction* function) {
  return code->is_turbofanned() && function->shared()->asm_function() &&
         !FLAG_turbo_asm_deoptimization;
}

}  // namespace

FrameSummary::FrameSummary(Object* receiver, JSFunction* function,
                           AbstractCode* abstract_code, int code_offset,
                           bool is_constructor, Mode mode)
    : receiver_(receiver, function->GetIsolate()),
      function_(function),
      abstract_code_(abstract_code),
      code_offset_(code_offset),
      is_constructor_(is_constructor) {
  DCHECK(abstract_code->IsBytecodeArray() ||
         Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION ||
         CannotDeoptFromAsmCode(Code::cast(abstract_code), function) ||
         mode == kApproximateSummary);
}

FrameSummary FrameSummary::GetFirst(JavaScriptFrame* frame) {
  List<FrameSummary> frames(FLAG_max_inlining_levels + 1);
  frame->Summarize(&frames);
  return frames.first();
}

void FrameSummary::Print() {
  PrintF("receiver: ");
  receiver_->ShortPrint();
  PrintF("\nfunction: ");
  function_->shared()->DebugName()->ShortPrint();
  PrintF("\ncode: ");
  abstract_code_->ShortPrint();
  if (abstract_code_->IsCode()) {
    Code* code = abstract_code_->GetCode();
    if (code->kind() == Code::FUNCTION) PrintF(" UNOPT ");
    if (code->kind() == Code::OPTIMIZED_FUNCTION) {
      if (function()->shared()->asm_function()) {
        DCHECK(CannotDeoptFromAsmCode(code, *function()));
        PrintF(" ASM ");
      } else {
        PrintF(" OPT (approximate)");
      }
    }
  } else {
    PrintF(" BYTECODE ");
  }
  PrintF("\npc: %d\n", code_offset_);
}

void OptimizedFrame::Summarize(List<FrameSummary>* frames,
                               FrameSummary::Mode mode) const {
  DCHECK(frames->length() == 0);
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN ||
      CannotDeoptFromAsmCode(code, function())) {
    return JavaScriptFrame::Summarize(frames);
  }

  DisallowHeapAllocation no_gc;
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* const data = GetDeoptimizationData(&deopt_index);
  if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
    DCHECK(data == nullptr);
    if (mode == FrameSummary::kApproximateSummary) {
      return JavaScriptFrame::Summarize(frames, mode);
    }
    FATAL("Missing deoptimization information for OptimizedFrame::Summarize.");
  }
  FixedArray* const literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode frame_opcode =
      static_cast<Translation::Opcode>(it.Next());
  DCHECK_EQ(Translation::BEGIN, frame_opcode);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  while (jsframe_count != 0) {
    frame_opcode = static_cast<Translation::Opcode>(it.Next());
    if (frame_opcode == Translation::JS_FRAME ||
        frame_opcode == Translation::INTERPRETED_FRAME) {
      jsframe_count--;
      BailoutId const bailout_id = BailoutId(it.Next());
      SharedFunctionInfo* const shared_info =
          SharedFunctionInfo::cast(literal_array->get(it.Next()));
      it.Next();  // Skip height.

      // The translation commands are ordered and the function is always
      // at the first position, and the receiver is next.
      Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());

      // Get the correct function in the optimized frame.
      JSFunction* function;
      if (opcode == Translation::LITERAL) {
        function = JSFunction::cast(literal_array->get(it.Next()));
      } else {
        CHECK_EQ(opcode, Translation::STACK_SLOT);
        function = JSFunction::cast(StackSlotAt(it.Next()));
      }
      DCHECK_EQ(shared_info, function->shared());

      // If we are at a call, the receiver is always in a stack slot.
      // Otherwise we are not guaranteed to get the receiver value.
      opcode = static_cast<Translation::Opcode>(it.Next());

      // Get the correct receiver in the optimized frame.
      Object* receiver;
      if (opcode == Translation::LITERAL) {
        receiver = literal_array->get(it.Next());
      } else if (opcode == Translation::STACK_SLOT) {
        receiver = StackSlotAt(it.Next());
      } else {
        // The receiver is not in a stack slot nor in a literal. We give up.
        it.Skip(Translation::NumberOfOperandsFor(opcode));
        // TODO(3029): Materializing a captured object (or duplicated
        // object) is hard, we return undefined for now. This breaks the
        // produced stack trace, as constructor frames aren't marked as
        // such anymore.
        receiver = isolate()->heap()->undefined_value();
      }

      AbstractCode* abstract_code;

      unsigned code_offset;
      if (frame_opcode == Translation::JS_FRAME) {
        Code* code = shared_info->code();
        DeoptimizationOutputData* const output_data =
            DeoptimizationOutputData::cast(code->deoptimization_data());
        unsigned const entry =
            Deoptimizer::GetOutputInfo(output_data, bailout_id, shared_info);
        code_offset = FullCodeGenerator::PcField::decode(entry);
        abstract_code = AbstractCode::cast(code);
      } else {
        DCHECK_EQ(frame_opcode, Translation::INTERPRETED_FRAME);
        code_offset = bailout_id.ToInt();  // Points to current bytecode.
        abstract_code = AbstractCode::cast(shared_info->bytecode_array());
      }
      FrameSummary summary(receiver, function, abstract_code, code_offset,
                           is_constructor);
      frames->Add(summary);
      is_constructor = false;
    } else if (frame_opcode == Translation::CONSTRUCT_STUB_FRAME) {
      // The next encountered JS_FRAME will be marked as a constructor call.
      it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
      DCHECK(!is_constructor);
      is_constructor = true;
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
    }
  }
  DCHECK(!is_constructor);
}


int OptimizedFrame::LookupExceptionHandlerInTable(
    int* stack_slots, HandlerTable::CatchPrediction* prediction) {
  // We cannot perform exception prediction on optimized code. Instead, we need
  // to use FrameSummary to find the corresponding code offset in unoptimized
  // code to perform prediction there.
  DCHECK_NULL(prediction);
  Code* code = LookupCode();
  HandlerTable* table = HandlerTable::cast(code->handler_table());
  int pc_offset = static_cast<int>(pc() - code->entry());
  if (stack_slots) *stack_slots = code->stack_slots();
  return table->LookupReturn(pc_offset);
}


DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
    int* deopt_index) const {
  DCHECK(is_optimized());

  JSFunction* opt_function = function();
  Code* code = opt_function->code();

  // The code object may have been replaced by lazy deoptimization. Fall
  // back to a slow search in this case to find the original optimized
  // code object.
  if (!code->contains(pc())) {
    code = isolate()->inner_pointer_to_code_cache()->
        GcSafeFindCodeForInnerPointer(pc());
  }
  DCHECK(code != NULL);
  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);

  SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
  *deopt_index = safepoint_entry.deoptimization_index();
  if (*deopt_index != Safepoint::kNoDeoptimizationIndex) {
    return DeoptimizationInputData::cast(code->deoptimization_data());
  }
  return nullptr;
}

Object* OptimizedFrame::receiver() const {
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN) {
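    // For builtin frames the argument count is stored in the frame itself,
    // which lets us locate the receiver: it sits above the arguments and the
    // fixed slots above fp, as computed below.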
    Address argc_ptr = fp() + OptimizedBuiltinFrameConstants::kArgCOffset;
    intptr_t argc = *reinterpret_cast<intptr_t*>(argc_ptr);
    intptr_t args_size =
        (StandardFrameConstants::kFixedSlotCountAboveFp + argc) * kPointerSize;
    Address receiver_ptr = fp() + args_size;
    return *reinterpret_cast<Object**>(receiver_ptr);
  } else {
    return JavaScriptFrame::receiver();
  }
}

void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) const {
  DCHECK(functions->length() == 0);
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN ||
      CannotDeoptFromAsmCode(code, function())) {
    return JavaScriptFrame::GetFunctions(functions);
  }

  DisallowHeapAllocation no_gc;
  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* const data = GetDeoptimizationData(&deopt_index);
  DCHECK_NOT_NULL(data);
  DCHECK_NE(Safepoint::kNoDeoptimizationIndex, deopt_index);
  FixedArray* const literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK_EQ(Translation::BEGIN, opcode);
  it.Next();  // Skip frame count.
  int jsframe_count = it.Next();

  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count != 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    // Skip over operands to advance to the next opcode.
    it.Skip(Translation::NumberOfOperandsFor(opcode));
    if (opcode == Translation::JS_FRAME ||
        opcode == Translation::INTERPRETED_FRAME) {
      jsframe_count--;

      // The translation commands are ordered and the function is always at the
      // first position.
      opcode = static_cast<Translation::Opcode>(it.Next());

      // Get the correct function in the optimized frame.
      Object* function;
      if (opcode == Translation::LITERAL) {
        function = literal_array->get(it.Next());
      } else {
        CHECK_EQ(Translation::STACK_SLOT, opcode);
        function = StackSlotAt(it.Next());
      }
      functions->Add(JSFunction::cast(function));
    }
  }
}

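// Slot 0 is the word immediately below the caller's stack pointer; larger
// slot indices are further down the stack (at lower addresses).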
int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) {
  return StandardFrameConstants::kCallerSPOffset -
         ((slot_index + 1) * kPointerSize);
}


Object* OptimizedFrame::StackSlotAt(int index) const {
  return Memory::Object_at(fp() + StackSlotOffsetRelativeToFp(index));
}

int InterpretedFrame::position() const {
  AbstractCode* code = AbstractCode::cast(GetBytecodeArray());
  int code_offset = GetBytecodeOffset();
  return code->SourcePosition(code_offset);
}

int InterpretedFrame::LookupExceptionHandlerInTable(
    int* context_register, HandlerTable::CatchPrediction* prediction) {
  BytecodeArray* bytecode = function()->shared()->bytecode_array();
  return bytecode->LookupRangeInHandlerTable(GetBytecodeOffset(),
                                             context_register, prediction);
}

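// The bytecode offset is stored in the frame as a raw offset from the start
// of the BytecodeArray object, so the header size (adjusted for the heap
// object tag) is subtracted to obtain a zero-based bytecode offset;
// PatchBytecodeOffset below applies the inverse adjustment.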
GetBytecodeOffset() const1377 int InterpretedFrame::GetBytecodeOffset() const {
1378 const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1379 DCHECK_EQ(
1380 InterpreterFrameConstants::kBytecodeOffsetFromFp,
1381 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1382 int raw_offset = Smi::cast(GetExpression(index))->value();
1383 return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
1384 }
1385
GetBytecodeOffset(Address fp)1386 int InterpretedFrame::GetBytecodeOffset(Address fp) {
1387 const int offset = InterpreterFrameConstants::kExpressionsOffset;
1388 const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1389 DCHECK_EQ(
1390 InterpreterFrameConstants::kBytecodeOffsetFromFp,
1391 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1392 Address expression_offset = fp + offset - index * kPointerSize;
1393 int raw_offset = Smi::cast(Memory::Object_at(expression_offset))->value();
1394 return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
1395 }
1396
PatchBytecodeOffset(int new_offset)1397 void InterpretedFrame::PatchBytecodeOffset(int new_offset) {
1398 const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1399 DCHECK_EQ(
1400 InterpreterFrameConstants::kBytecodeOffsetFromFp,
1401 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1402 int raw_offset = new_offset + BytecodeArray::kHeaderSize - kHeapObjectTag;
1403 SetExpression(index, Smi::FromInt(raw_offset));
1404 }
1405
GetBytecodeArray() const1406 BytecodeArray* InterpretedFrame::GetBytecodeArray() const {
1407 const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
1408 DCHECK_EQ(
1409 InterpreterFrameConstants::kBytecodeArrayFromFp,
1410 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1411 return BytecodeArray::cast(GetExpression(index));
1412 }
1413
PatchBytecodeArray(BytecodeArray * bytecode_array)1414 void InterpretedFrame::PatchBytecodeArray(BytecodeArray* bytecode_array) {
1415 const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
1416 DCHECK_EQ(
1417 InterpreterFrameConstants::kBytecodeArrayFromFp,
1418 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1419 SetExpression(index, bytecode_array);
1420 }
1421
ReadInterpreterRegister(int register_index) const1422 Object* InterpretedFrame::ReadInterpreterRegister(int register_index) const {
1423 const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
1424 DCHECK_EQ(
1425 InterpreterFrameConstants::kRegisterFileFromFp,
1426 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1427 return GetExpression(index + register_index);
1428 }
1429
WriteInterpreterRegister(int register_index,Object * value)1430 void InterpretedFrame::WriteInterpreterRegister(int register_index,
1431 Object* value) {
1432 const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
1433 DCHECK_EQ(
1434 InterpreterFrameConstants::kRegisterFileFromFp,
1435 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1436 return SetExpression(index + register_index, value);
1437 }
1438
Summarize(List<FrameSummary> * functions,FrameSummary::Mode mode) const1439 void InterpretedFrame::Summarize(List<FrameSummary>* functions,
1440 FrameSummary::Mode mode) const {
1441 DCHECK(functions->length() == 0);
1442 AbstractCode* abstract_code =
1443 AbstractCode::cast(function()->shared()->bytecode_array());
1444 FrameSummary summary(receiver(), function(), abstract_code,
1445 GetBytecodeOffset(), IsConstructor());
1446 functions->Add(summary);
1447 }
1448
int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
  return Smi::cast(GetExpression(0))->value();
}

int ArgumentsAdaptorFrame::GetLength(Address fp) {
  const int offset = ArgumentsAdaptorFrameConstants::kLengthOffset;
  return Smi::cast(Memory::Object_at(fp + offset))->value();
}

Code* ArgumentsAdaptorFrame::unchecked_code() const {
  return isolate()->builtins()->builtin(
      Builtins::kArgumentsAdaptorTrampoline);
}

int BuiltinFrame::GetNumberOfIncomingArguments() const {
  return Smi::cast(GetExpression(0))->value();
}

void BuiltinFrame::PrintFrameKind(StringStream* accumulator) const {
  accumulator->Add("builtin frame: ");
}

Address InternalFrame::GetCallerStackPointer() const {
  // Internal frames have no arguments. The stack pointer of the
  // caller is at a fixed offset from the frame pointer.
  return fp() + StandardFrameConstants::kCallerSPOffset;
}

Code* InternalFrame::unchecked_code() const {
  const int offset = InternalFrameConstants::kCodeOffset;
  Object* code = Memory::Object_at(fp() + offset);
  DCHECK(code != NULL);
  return reinterpret_cast<Code*>(code);
}


void StackFrame::PrintIndex(StringStream* accumulator,
                            PrintMode mode,
                            int index) {
  accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
}

void WasmFrame::Print(StringStream* accumulator, PrintMode mode,
                      int index) const {
  accumulator->Add("wasm frame");
}

Code* WasmFrame::unchecked_code() const {
  return static_cast<Code*>(isolate()->FindCodeObject(pc()));
}

void WasmFrame::Iterate(ObjectVisitor* v) const { IterateCompiledFrame(v); }

Address WasmFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}

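// Returns the owning wasm instance object of the code at the current pc, or
// undefined if the code is not attributed to any instance.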
Object* WasmFrame::wasm_instance() const {
  Object* ret = wasm::GetOwningWasmInstance(LookupCode());
  if (ret == nullptr) ret = isolate()->heap()->undefined_value();
  return ret;
}

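// The function index is recovered from the code's deoptimization data; the
// DCHECK pins the expected two-element layout with the index in slot 1.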
uint32_t WasmFrame::function_index() const {
  FixedArray* deopt_data = LookupCode()->deoptimization_data();
  DCHECK(deopt_data->length() == 2);
  return Smi::cast(deopt_data->get(1))->value();
}

Script* WasmFrame::script() const {
  Handle<JSObject> instance(JSObject::cast(wasm_instance()), isolate());
  return *wasm::GetScript(instance);
}

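// For asm.js modules compiled to wasm, translate the wasm position back into
// a position in the original asm.js source so stack traces point at the
// JavaScript the user wrote.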
int WasmFrame::position() const {
  int position = StandardFrame::position();
  if (wasm::WasmIsAsmJs(wasm_instance(), isolate())) {
    Handle<JSObject> instance(JSObject::cast(wasm_instance()), isolate());
    position =
        wasm::GetAsmWasmSourcePosition(instance, function_index(), position);
  }
  return position;
}

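// Looks up an exception handler for the current return address in the code's
// handler table and additionally reports the code's stack slot count to the
// caller (presumably used by the unwinder to compute the frame height; that
// use is an assumption, not visible here).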
int WasmFrame::LookupExceptionHandlerInTable(int* stack_slots) {
  DCHECK_NOT_NULL(stack_slots);
  Code* code = LookupCode();
  HandlerTable* table = HandlerTable::cast(code->handler_table());
  int pc_offset = static_cast<int>(pc() - code->entry());
  *stack_slots = code->stack_slots();
  return table->LookupReturn(pc_offset);
}

namespace {


void PrintFunctionSource(StringStream* accumulator, SharedFunctionInfo* shared,
                         Code* code) {
  if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
    std::ostringstream os;
    os << "--------- s o u r c e c o d e ---------\n"
       << SourceCodeOf(shared, FLAG_max_stack_trace_source_length)
       << "\n-----------------------------------------\n";
    accumulator->Add(os.str().c_str());
  }
}


}  // namespace

void JavaScriptFrame::Print(StringStream* accumulator,
                            PrintMode mode,
                            int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  PrintFrameKind(accumulator);
  Code* code = NULL;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  // Get scope information for nicer output, if possible. If code is NULL, or
  // doesn't contain scope info, scope_info will return 0 for the number of
  // parameters, stack local variables, context local variables, stack slots,
  // or context slots.
  SharedFunctionInfo* shared = function->shared();
  ScopeInfo* scope_info = shared->scope_info();
  Object* script_obj = shared->script();
  if (script_obj->IsScript()) {
    Script* script = Script::cast(script_obj);
    accumulator->Add(" [");
    accumulator->PrintName(script->name());

    Address pc = this->pc();
    if (code != NULL && code->kind() == Code::FUNCTION &&
        pc >= code->instruction_start() && pc < code->instruction_end()) {
      int offset = static_cast<int>(pc - code->instruction_start());
      int source_pos = AbstractCode::cast(code)->SourcePosition(offset);
      int line = script->GetLineNumber(source_pos) + 1;
      accumulator->Add(":%d] [pc=%p]", line, pc);
    } else if (is_interpreted()) {
      const InterpretedFrame* iframe =
          reinterpret_cast<const InterpretedFrame*>(this);
      BytecodeArray* bytecodes = iframe->GetBytecodeArray();
      int offset = iframe->GetBytecodeOffset();
      int source_pos = AbstractCode::cast(bytecodes)->SourcePosition(offset);
      int line = script->GetLineNumber(source_pos) + 1;
      accumulator->Add(":%d] [bytecode=%p offset=%d]", line, bytecodes, offset);
    } else {
      int function_start_pos = shared->start_position();
      int line = script->GetLineNumber(function_start_pos) + 1;
      accumulator->Add(":~%d] [pc=%p]", line, pc);
    }
  }

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",");
    // If we have a name for the parameter we print it. Nameless
    // parameters are either because we have more actual parameters
    // than formal parameters or because we have no scope information.
    if (i < scope_info->ParameterCount()) {
      accumulator->PrintName(scope_info->ParameterName(i));
      accumulator->Add("=");
    }
    accumulator->Add("%o", GetParameter(i));
  }

  accumulator->Add(")");
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  if (is_optimized()) {
    accumulator->Add(" {\n// optimized frame\n");
    PrintFunctionSource(accumulator, shared, code);
    accumulator->Add("}\n");
    return;
  }
  accumulator->Add(" {\n");

  // Compute the number of locals and expression stack elements.
  int stack_locals_count = scope_info->StackLocalCount();
  int heap_locals_count = scope_info->ContextLocalCount();
  int expressions_count = ComputeExpressionsCount();

  // Print stack-allocated local variables.
  if (stack_locals_count > 0) {
    accumulator->Add(" // stack-allocated locals\n");
  }
  for (int i = 0; i < stack_locals_count; i++) {
    accumulator->Add(" var ");
    accumulator->PrintName(scope_info->StackLocalName(i));
    accumulator->Add(" = ");
    if (i < expressions_count) {
      accumulator->Add("%o", GetExpression(i));
    } else {
      accumulator->Add("// no expression found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Try to get hold of the context of this frame.
  Context* context = NULL;
  if (this->context() != NULL && this->context()->IsContext()) {
    context = Context::cast(this->context());
    // Skip over with-contexts. This must stay inside the check above so that
    // a frame without a valid context does not dereference NULL here.
    while (context->IsWithContext()) {
      context = context->previous();
      DCHECK(context != NULL);
    }
  }

  // Print heap-allocated local variables.
  if (heap_locals_count > 0) {
    accumulator->Add(" // heap-allocated locals\n");
  }
  for (int i = 0; i < heap_locals_count; i++) {
    accumulator->Add(" var ");
    accumulator->PrintName(scope_info->ContextLocalName(i));
    accumulator->Add(" = ");
    if (context != NULL) {
      int index = Context::MIN_CONTEXT_SLOTS + i;
      if (index < context->length()) {
        accumulator->Add("%o", context->get(index));
      } else {
        accumulator->Add(
            "// warning: missing context slot - inconsistent frame?");
      }
    } else {
      accumulator->Add("// warning: no context found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Print the expression stack.
  int expressions_start = stack_locals_count;
  if (expressions_start < expressions_count) {
    accumulator->Add(" // expression stack (top to bottom)\n");
  }
  for (int i = expressions_count - 1; i >= expressions_start; i--) {
    accumulator->Add(" [%02d] : %o\n", i, GetExpression(i));
  }

  PrintFunctionSource(accumulator, shared, code);

  accumulator->Add("}\n\n");
}


void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
                                  PrintMode mode,
                                  int index) const {
  int actual = ComputeParametersCount();
  int expected = -1;
  JSFunction* function = this->function();
  expected = function->shared()->internal_formal_parameter_count();

  PrintIndex(accumulator, mode, index);
  accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  accumulator->Add(" {\n");

  // Print actual arguments.
  if (actual > 0) accumulator->Add(" // actual arguments\n");
  for (int i = 0; i < actual; i++) {
    accumulator->Add(" [%02d] : %o", i, GetParameter(i));
    if (expected != -1 && i >= expected) {
      accumulator->Add(" // not passed to callee");
    }
    accumulator->Add("\n");
  }

  accumulator->Add("}\n\n");
}


void EntryFrame::Iterate(ObjectVisitor* v) const {
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}

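// Visits every tagged slot between the stack pointer and the last fixed
// header object slot (inclusive), which covers the expression stack and the
// stack-allocated locals of a standard frame.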
void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
  const int offset = StandardFrameConstants::kLastObjectOffset;
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitPointers(base, limit);
}


void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}

void InternalFrame::Iterate(ObjectVisitor* v) const {
  // Internal frames only have object pointers on the expression stack
  // as they never have any arguments.
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}

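// The trampoline frame has two disjoint tagged regions that must be visited
// separately: the slots from sp up to the bottom of the fixed header, and the
// fixed header slots from the function slot through the last object slot. The
// gap in between is skipped, presumably because it holds untagged data.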
void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(
      fp() + StubFailureTrampolineFrameConstants::kFixedHeaderBottomOffset);
  v->VisitPointers(base, limit);
  base = &Memory::Object_at(fp() + StandardFrameConstants::kFunctionOffset);
  const int offset = StandardFrameConstants::kLastObjectOffset;
  limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitPointers(base, limit);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}


Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}

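// The trampoline stub is compiled in two variants (JS function stub mode and
// not); probe both cached copies and return whichever contains the current
// pc. One of them must contain it, hence the UNREACHABLE fallthrough.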
Code* StubFailureTrampolineFrame::unchecked_code() const {
  Code* trampoline;
  StubFailureTrampolineStub(isolate(), NOT_JS_FUNCTION_STUB_MODE).
      FindCodeInCache(&trampoline);
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  StubFailureTrampolineStub(isolate(), JS_FUNCTION_STUB_MODE).
      FindCodeInCache(&trampoline);
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
  DCHECK(n >= 0);
  for (int i = 0; i <= n; i++) {
    while (!iterator_.frame()->is_java_script()) iterator_.Advance();
    if (i == n) return JavaScriptFrame::cast(iterator_.frame());
    iterator_.Advance();
  }
  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------

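// During a mark-compact GC the map word of an object may hold a forwarding
// pointer instead of a map; follow it to the relocated copy so the map (and
// hence the object's size) can still be read safely.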
static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
  MapWord map_word = object->map_word();
  return map_word.IsForwardingAddress() ?
      map_word.ToForwardingAddress()->map() : map_word.ToMap();
}


static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
  return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
}


#ifdef DEBUG
static bool GcSafeCodeContains(HeapObject* code, Address addr) {
  Map* map = GcSafeMapOfCodeSpaceObject(code);
  DCHECK(map == code->GetHeap()->code_map());
  Address start = code->address();
  Address end = code->address() + code->SizeFromMap(map);
  return start <= addr && addr < end;
}
#endif


Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
                                                Address inner_pointer) {
  Code* code = reinterpret_cast<Code*>(object);
  DCHECK(code != NULL && GcSafeCodeContains(code, inner_pointer));
  return code;
}

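// Finds the Code object containing inner_pointer without assuming the heap is
// in an iterable state: large objects are resolved through their page, while
// regular code space is walked object by object starting from the skip-list
// entry for the pointer, stepping over the current linear allocation gap.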
Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
    Address inner_pointer) {
  Heap* heap = isolate_->heap();

  // Check if the inner pointer points into a large object chunk.
  LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
  if (large_page != NULL) {
    return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
  }

  if (!heap->code_space()->Contains(inner_pointer)) {
    return nullptr;
  }

  // Iterate through the page until we reach the end or find an object starting
  // after the inner pointer.
  Page* page = Page::FromAddress(inner_pointer);

  DCHECK_EQ(page->owner(), heap->code_space());
  heap->mark_compact_collector()->sweeper().SweepOrWaitUntilSweepingCompleted(
      page);

  Address addr = page->skip_list()->StartFor(inner_pointer);

  Address top = heap->code_space()->top();
  Address limit = heap->code_space()->limit();

  while (true) {
    if (addr == top && addr != limit) {
      addr = limit;
      continue;
    }

    HeapObject* obj = HeapObject::FromAddress(addr);
    int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
    Address next_addr = addr + obj_size;
    if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
    addr = next_addr;
  }
}

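// A direct-mapped cache from inner pointers (pc values) to Code objects: hash
// the pointer, probe a single slot, and recompute on a miss. On a miss the
// code field is written before inner_pointer so that a profiling interrupt
// probing the same slot never sees a key without its matching value.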
InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
    InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
  isolate_->counters()->pc_to_code()->Increment();
  DCHECK(base::bits::IsPowerOfTwo32(kInnerPointerToCodeCacheSize));
  uint32_t hash = ComputeIntegerHash(ObjectAddressForHashing(inner_pointer),
                                     v8::internal::kZeroHashSeed);
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {
    isolate_->counters()->pc_to_code_cached()->Increment();
    DCHECK(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
  } else {
    // Because this code may be interrupted by a profiling signal that
    // also queries the cache, we cannot update inner_pointer before the code
    // has been set. Otherwise, we risk trying to use a cache entry before
    // the code has been computed.
    entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
    entry->safepoint_entry.Reset();
    entry->inner_pointer = inner_pointer;
  }
  return entry;
}


// -------------------------------------------------------------------------


int NumRegs(RegList reglist) { return base::bits::CountPopulation(reglist); }


struct JSCallerSavedCodeData {
  int reg_code[kNumJSCallerSaved];
};

JSCallerSavedCodeData caller_saved_code_data;

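// Builds the mapping from the n-th JS caller-saved register to its
// architecture register code by scanning the kJSCallerSaved bit mask in
// ascending register order.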
void SetUpJSCallerSavedCodeData() {
  int i = 0;
  for (int r = 0; r < kNumRegs; r++)
    if ((kJSCallerSaved & (1 << r)) != 0)
      caller_saved_code_data.reg_code[i++] = r;

  DCHECK(i == kNumJSCallerSaved);
}


int JSCallerSavedCode(int n) {
  DCHECK(0 <= n && n < kNumJSCallerSaved);
  return caller_saved_code_data.reg_code[n];
}


#define DEFINE_WRAPPER(type, field) \
  class field##_Wrapper : public ZoneObject { \
   public: /* NOLINT */ \
    field##_Wrapper(const field& original) : frame_(original) { \
    } \
    field frame_; \
  };
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER

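// Copies a frame into zone memory through the generated per-type wrapper
// classes so that the frames returned by CreateStackMap below remain valid
// after the iterator advances (the iterator reuses one singleton object per
// frame type).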
static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: { \
    field##_Wrapper* wrapper = \
        new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
    return &wrapper->frame_; \
  }

  switch (frame->type()) {
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: UNREACHABLE();
  }
#undef FRAME_TYPE_CASE
  return NULL;
}


Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
  ZoneList<StackFrame*> list(10, zone);
  for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
    StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
    list.Add(frame, zone);
  }
  return list.ToVector();
}


}  // namespace internal
}  // namespace v8