// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/runtime/runtime-utils.h"

#include "src/arguments.h"
#include "src/asmjs/asm-js.h"
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
#include "src/compiler.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/interpreter/bytecode-array-iterator.h"
#include "src/isolate-inl.h"
#include "src/messages.h"
#include "src/v8threads.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {

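// Called on the first invocation of a lazily-compiled function: compiles the
// unoptimized code for {function} and returns it, or bails out with a stack
// overflow / pending exception if compilation is not possible.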
RUNTIME_FUNCTION(Runtime_CompileLazy)23 RUNTIME_FUNCTION(Runtime_CompileLazy) {
24 HandleScope scope(isolate);
25 DCHECK_EQ(1, args.length());
26 CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
27
28 #ifdef DEBUG
29 if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
30 PrintF("[unoptimized: ");
31 function->PrintName();
32 PrintF("]\n");
33 }
34 #endif
35
36 StackLimitCheck check(isolate);
37 if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
38 if (!Compiler::Compile(function, Compiler::KEEP_EXCEPTION)) {
39 return isolate->heap()->exception();
40 }
41 DCHECK(function->is_compiled());
42 return function->code();
43 }
44
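// Tiers the function up to baseline code via Compiler::CompileBaseline and
// returns the resulting code object.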
RUNTIME_FUNCTION(Runtime_CompileBaseline) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
  if (!Compiler::CompileBaseline(function)) {
    return isolate->heap()->exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}

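// Requests optimized code for {function}; the compile job may be queued on
// the concurrent dispatcher, so the code returned here can still be the
// current (unoptimized) code.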
RUNTIME_FUNCTION(Runtime_CompileOptimized_Concurrent) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
  if (!Compiler::CompileOptimized(function, Compiler::CONCURRENT)) {
    return isolate->heap()->exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}


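// Same as Runtime_CompileOptimized_Concurrent, but the optimizing compile is
// performed synchronously on the main thread.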
RUNTIME_FUNCTION(Runtime_CompileOptimized_NotConcurrent) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
  if (!Compiler::CompileOptimized(function, Compiler::NOT_CONCURRENT)) {
    return isolate->heap()->exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}

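// Instantiates the asm.js module previously validated for {function} with the
// given stdlib, foreign and memory arguments. On failure the asm->wasm data is
// dropped and the function falls back to ordinary lazy compilation.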
RUNTIME_FUNCTION(Runtime_InstantiateAsmJs) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 4);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  Handle<JSReceiver> stdlib;
  if (args[1]->IsJSReceiver()) {
    stdlib = args.at<JSReceiver>(1);
  }
  Handle<JSObject> foreign;
  if (args[2]->IsJSObject()) {
    foreign = args.at<i::JSObject>(2);
  }
  Handle<JSArrayBuffer> memory;
  if (args[3]->IsJSArrayBuffer()) {
    memory = args.at<i::JSArrayBuffer>(3);
  }
  if (function->shared()->HasAsmWasmData() &&
      AsmJs::IsStdlibValid(isolate, handle(function->shared()->asm_wasm_data()),
                           stdlib)) {
    MaybeHandle<Object> result;
    result = AsmJs::InstantiateAsmWasm(
        isolate, handle(function->shared()->asm_wasm_data()), memory, foreign);
    if (!result.is_null()) {
      return *result.ToHandleChecked();
    }
  }
  // Remove wasm data, mark as broken for asm->wasm,
  // replace code with CompileLazy, and return a smi 0 to indicate failure.
  if (function->shared()->HasAsmWasmData()) {
    function->shared()->ClearAsmWasmData();
  }
  function->shared()->set_is_asm_wasm_broken(true);
  DCHECK(function->code() ==
         isolate->builtins()->builtin(Builtins::kInstantiateAsmJs));
  function->ReplaceCode(isolate->builtins()->builtin(Builtins::kCompileLazy));
  if (function->shared()->code() ==
      isolate->builtins()->builtin(Builtins::kInstantiateAsmJs)) {
    function->shared()->ReplaceCode(
        isolate->builtins()->builtin(Builtins::kCompileLazy));
  }
  return Smi::kZero;
}

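// Releases the Deoptimizer instance that was set up for the current stub
// failure; no code is replaced here.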
RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 0);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());
  delete deoptimizer;
  return isolate->heap()->undefined_value();
}

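// Visits JavaScript frames on the current stack and on archived thread stacks
// to determine whether a given code object still has live activations.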
class ActivationsFinder : public ThreadVisitor {
 public:
  Code* code_;
  bool has_code_activations_;

  explicit ActivationsFinder(Code* code)
      : code_(code), has_code_activations_(false) {}

  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
    JavaScriptFrameIterator it(isolate, top);
    VisitFrames(&it);
  }

  void VisitFrames(JavaScriptFrameIterator* it) {
    for (; !it->done(); it->Advance()) {
      JavaScriptFrame* frame = it->frame();
      if (code_->contains(frame->pc())) has_code_activations_ = true;
    }
  }
};


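// Finishes a deoptimization: materializes heap objects described by the
// deoptimizer, restores the context, and, for non-lazy bailouts, discards the
// optimized code if no activations of it remain on any stack.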
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());
  DCHECK_NULL(isolate->context());

  // TODO(turbofan): For Crankshaft we restore the context before objects are
  // being materialized, because it never de-materializes the context but it
  // requires a context to materialize arguments objects. This is specific to
  // Crankshaft and can be removed once only TurboFan goes through here.
  if (!optimized_code->is_turbofanned()) {
    JavaScriptFrameIterator top_it(isolate);
    JavaScriptFrame* top_frame = top_it.frame();
    isolate->set_context(Context::cast(top_frame->context()));
  }

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  // Ensure the context register is updated for materialized objects.
  if (optimized_code->is_turbofanned()) {
    JavaScriptFrameIterator top_it(isolate);
    JavaScriptFrame* top_frame = top_it.frame();
    isolate->set_context(Context::cast(top_frame->context()));
  }

  if (type == Deoptimizer::LAZY) {
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same optimized code.
  // At this point {it} is at the topmost frame of all the frames materialized
  // by the deoptimizer. Note that this frame does not necessarily represent
  // an activation of {function} because of potential inlined tail-calls.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      function->ReplaceCode(function->shared()->code());
    }
    // Evict optimized code for this function from the cache so that it
    // doesn't get used for new closures.
    function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                  "notify deoptimized");
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}


static bool IsSuitableForOnStackReplacement(Isolate* isolate,
                                            Handle<JSFunction> function) {
  // Don't attempt OSR if optimization has been disabled for this function.
  if (function->shared()->optimization_disabled()) return false;
  // If we are trying to do OSR when there are already optimized
  // activations of the function, it means (a) the function is directly or
  // indirectly recursive and (b) an optimized invocation has been
  // deoptimized so that we are currently in an unoptimized activation.
  // Check for optimized activations of this function.
  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    if (frame->is_optimized() && frame->function() == *function) return false;
  }

  return true;
}

namespace {

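// Computes the BailoutId of the loop that triggered OSR in a full-codegen
// (baseline) frame and reverts the patched back edges so that no further OSR
// requests are triggered from this code.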
BailoutId DetermineEntryAndDisarmOSRForBaseline(JavaScriptFrame* frame) {
  Handle<Code> caller_code(frame->function()->shared()->code());

  // Passing the PC in the JavaScript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info. It may have been replaced to include deoptimization data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK_EQ(Code::FUNCTION, caller_code->kind());
  DCHECK(caller_code->contains(frame->pc()));

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(frame->isolate(), *caller_code);

  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());

  return caller_code->TranslatePcOffsetToAstId(pc_offset);
}

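// Computes the BailoutId of the OSR entry loop header from an interpreted
// frame and resets the bytecode's OSR loop nesting level to disarm further
// OSR requests.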
BailoutId DetermineEntryAndDisarmOSRForInterpreter(JavaScriptFrame* frame) {
  InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);

  // Note that the bytecode array active on the stack might be different from
  // the one installed on the function (e.g. patched by debugger). This however
  // is fine because we guarantee the layout to be in sync, hence any BailoutId
  // representing the entry point will be valid for any copy of the bytecode.
  Handle<BytecodeArray> bytecode(iframe->GetBytecodeArray());

  DCHECK(frame->LookupCode()->is_interpreter_trampoline_builtin());
  DCHECK(frame->function()->shared()->HasBytecodeArray());
  DCHECK(frame->is_interpreted());
  DCHECK(FLAG_ignition_osr);

  // Reset the OSR loop nesting depth to disarm back edges.
  bytecode->set_osr_loop_nesting_level(0);

  // Translate the offset of the jump instruction to the jump target offset of
  // that instruction so that the derived BailoutId points to the loop header.
  // TODO(mstarzinger): This can be merged with {BytecodeBranchAnalysis} which
  // already performs a pre-pass over the bytecode stream anyways.
  int jump_offset = iframe->GetBytecodeOffset();
  interpreter::BytecodeArrayIterator iterator(bytecode);
  while (iterator.current_offset() + iterator.current_prefix_offset() <
         jump_offset) {
    iterator.Advance();
  }
  DCHECK(interpreter::Bytecodes::IsJump(iterator.current_bytecode()));
  int jump_target_offset = iterator.GetJumpTargetOffset();

  return BailoutId(jump_target_offset);
}

}  // namespace

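// Compiles optimized code that can be entered at the loop which triggered the
// OSR request in the topmost JavaScript frame, or returns NULL if no usable
// OSR code could be produced.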
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  // We're not prepared to handle a function with an arguments object.
  DCHECK(!function->shared()->uses_arguments());

  // Only reachable when OSR is enabled.
  CHECK(FLAG_use_osr);

  // Determine frame triggering OSR request.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  DCHECK_EQ(frame->function(), *function);

  // Determine the entry point for which this OSR request has been fired and
  // also disarm all back edges in the calling code to stop new requests.
  BailoutId ast_id = frame->is_interpreted()
                         ? DetermineEntryAndDisarmOSRForInterpreter(frame)
                         : DetermineEntryAndDisarmOSRForBaseline(frame);
  DCHECK(!ast_id.IsNone());

  MaybeHandle<Code> maybe_result;
  if (IsSuitableForOnStackReplacement(isolate, function)) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    maybe_result = Compiler::GetOptimizedCodeForOSR(function, ast_id, frame);
  }

  // Check whether we ended up with usable optimized code.
  Handle<Code> result;
  if (maybe_result.ToHandle(&result) &&
      result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      if (result->is_turbofanned()) {
        // When we're waiting for concurrent optimization, set to compile on
        // the next call - otherwise we'd run unoptimized once more
        // and potentially compile for OSR another time as well.
        if (function->IsMarkedForConcurrentOptimization()) {
          if (FLAG_trace_osr) {
            PrintF("[OSR - Re-marking ");
            function->PrintName();
            PrintF(" for non-concurrent optimization]\n");
          }
          function->ReplaceCode(
              isolate->builtins()->builtin(Builtins::kCompileOptimized));
        }
      } else {
        // Crankshafted OSR code can be installed into the function.
        function->ReplaceCode(*result);
      }
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  return NULL;
}


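// Installs any optimized code that has finished compiling concurrently, then
// returns the code that should be used for {function} from now on.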
RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  // First check if this is a real stack overflow.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed()) {
    SealHandleScope shs(isolate);
    return isolate->StackOverflow();
  }

  isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
  return (function->IsOptimized()) ? function->code()
                                   : function->shared()->code();
}


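// Consults the embedder's callback (if any) to decide whether code generation
// from strings is allowed in the given context when the context itself
// disallows it.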
bool CodeGenerationFromStringsAllowed(Isolate* isolate,
                                      Handle<Context> context) {
  DCHECK(context->allow_code_gen_from_strings()->IsFalse(isolate));
  // Check with callback if set.
  AllowCodeGenerationFromStringsCallback callback =
      isolate->allow_code_gen_callback();
  if (callback == NULL) {
    // No callback set and code generation disallowed.
    return false;
  } else {
    // Callback set. Let it decide if code generation is allowed.
    VMState<EXTERNAL> state(isolate);
    return callback(v8::Utils::ToLocal(context));
  }
}

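// Compiles the source string of a direct eval call in the current context,
// after enforcing the code-generation-from-strings policy of the native
// context.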
static Object* CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                 Handle<SharedFunctionInfo> outer_info,
                                 LanguageMode language_mode,
                                 int eval_scope_position, int eval_position) {
  Handle<Context> context = Handle<Context>(isolate->context());
  Handle<Context> native_context = Handle<Context>(context->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse(isolate) &&
      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        MessageTemplate::kCodeGenFromStrings, error_message);
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return isolate->heap()->exception();
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled, Compiler::GetFunctionFromEval(
                             source, outer_info, context, language_mode,
                             restriction, eval_scope_position, eval_position),
      isolate->heap()->exception());
  return *compiled;
}


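// Resolves a potentially direct call to eval: only calls to the original
// global eval with a string argument are compiled here; anything else falls
// through to an ordinary (indirect) call of the callee.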
RUNTIME_FUNCTION(Runtime_ResolvePossiblyDirectEval) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 6);

  Handle<Object> callee = args.at<Object>(0);

  // If "eval" didn't refer to the original GlobalEval, it's not a
  // direct call to eval.
  // (And even if it is, but the first argument isn't a string, just let
  // execution default to an indirect call to eval, which will also return
  // the first argument without doing anything).
  if (*callee != isolate->native_context()->global_eval_fun() ||
      !args[1]->IsString()) {
    return *callee;
  }

  DCHECK(args[3]->IsSmi());
  DCHECK(is_valid_language_mode(args.smi_at(3)));
  LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(3));
  DCHECK(args[4]->IsSmi());
  Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
                                        isolate);
  return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
                           language_mode, args.smi_at(4), args.smi_at(5));
}
}  // namespace internal
}  // namespace v8