// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/setup-isolate.h"

#include "src/assembler-inl.h"
#include "src/builtins/builtins.h"
#include "src/code-events.h"
#include "src/compiler/code-assembler.h"
#include "src/handles-inl.h"
#include "src/interface-descriptors.h"
#include "src/interpreter/bytecodes.h"
#include "src/interpreter/interpreter-generator.h"
#include "src/isolate.h"
#include "src/objects-inl.h"
#include "src/objects/shared-function-info.h"

namespace v8 {
namespace internal {

// Forward declarations for C++ builtins.
#define FORWARD_DECLARE(Name) \
  Object* Builtin_##Name(int argc, Object** args, Isolate* isolate);
BUILTIN_LIST_C(FORWARD_DECLARE)
#undef FORWARD_DECLARE

namespace {
void PostBuildProfileAndTracing(Isolate* isolate, Code* code,
                                const char* name) {
  PROFILE(isolate, CodeCreateEvent(CodeEventListener::BUILTIN_TAG,
                                   AbstractCode::cast(code), name));
#ifdef ENABLE_DISASSEMBLER
  if (FLAG_print_builtin_code) {
    code->PrintBuiltinCode(isolate, name);
  }
#endif
}

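// Returns the assembler options to use for the given builtin. For builtins
// that are isolate-independent (and built while constants are loaded from the
// root list), isolate-independent code generation is enabled, and pc-relative
// calls and jumps are used when the whole code range fits within pc-relative
// reach.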
AssemblerOptions BuiltinAssemblerOptions(Isolate* isolate,
                                         int32_t builtin_index) {
  AssemblerOptions options = AssemblerOptions::Default(isolate);
  CHECK(!options.isolate_independent_code);
  CHECK(!options.use_pc_relative_calls_and_jumps);

  if (!isolate->ShouldLoadConstantsFromRootList() ||
      !Builtins::IsIsolateIndependent(builtin_index)) {
    return options;
  }

  CodeRange* code_range = isolate->heap()->memory_allocator()->code_range();
  bool pc_relative_calls_fit_in_code_range =
      code_range->valid() &&
      code_range->size() <= kMaxPCRelativeCodeRangeInMB * MB;

  options.isolate_independent_code = true;
  options.use_pc_relative_calls_and_jumps = pc_relative_calls_fit_in_code_range;

  return options;
}

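// Signatures of the generator functions for builtins implemented in assembly
// (MacroAssembler) and in CSA/TurboFan (CodeAssembler), respectively.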
typedef void (*MacroAssemblerGenerator)(MacroAssembler*);
typedef void (*CodeAssemblerGenerator)(compiler::CodeAssemblerState*);

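// Builds a minimal placeholder Code object for the given builtin index.
// Placeholders temporarily fill the builtins table so that builtins can
// reference each other before all of them have been generated; the references
// are patched to the real builtins later by ReplacePlaceholders.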
Handle<Code> BuildPlaceholder(Isolate* isolate, int32_t builtin_index) {
  HandleScope scope(isolate);
  const size_t buffer_size = 1 * KB;
  byte buffer[buffer_size];  // NOLINT(runtime/arrays)
  MacroAssembler masm(isolate, buffer, buffer_size, CodeObjectRequired::kYes);
  DCHECK(!masm.has_frame());
  {
    FrameScope scope(&masm, StackFrame::NONE);
    // The contents of placeholder don't matter, as long as they don't create
    // embedded constants or external references.
    masm.Move(kJavaScriptCallCodeStartRegister, Smi::kZero);
    masm.Call(kJavaScriptCallCodeStartRegister);
  }
  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::BUILTIN, masm.CodeObject(), builtin_index);
  return scope.CloseAndEscape(code);
}

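// Builder for builtins implemented in assembly, via a MacroAssembler
// generator function (see BUILD_ASM below).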
Code* BuildWithMacroAssembler(Isolate* isolate, int32_t builtin_index,
                              MacroAssemblerGenerator generator,
                              const char* s_name) {
  HandleScope scope(isolate);
  // Canonicalize handles, so that we can share constant pool entries pointing
  // to code targets without dereferencing their handles.
  CanonicalHandleScope canonical(isolate);
  const size_t buffer_size = 32 * KB;
  byte buffer[buffer_size];  // NOLINT(runtime/arrays)

  MacroAssembler masm(isolate, BuiltinAssemblerOptions(isolate, builtin_index),
                      buffer, buffer_size, CodeObjectRequired::kYes);
  masm.set_builtin_index(builtin_index);
  DCHECK(!masm.has_frame());
  generator(&masm);
  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::BUILTIN, masm.CodeObject(), builtin_index);
  PostBuildProfileAndTracing(isolate, *code, s_name);
  return *code;
}

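// Builder for builtins implemented in C++; generates an adaptor
// (Builtins::Generate_Adaptor) that calls the C++ function at builtin_address
// using the given exit frame type (see BUILD_CPP and BUILD_API below).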
Code* BuildAdaptor(Isolate* isolate, int32_t builtin_index,
                   Address builtin_address,
                   Builtins::ExitFrameType exit_frame_type, const char* name) {
  HandleScope scope(isolate);
  // Canonicalize handles, so that we can share constant pool entries pointing
  // to code targets without dereferencing their handles.
  CanonicalHandleScope canonical(isolate);
  const size_t buffer_size = 32 * KB;
  byte buffer[buffer_size];  // NOLINT(runtime/arrays)
  MacroAssembler masm(isolate, BuiltinAssemblerOptions(isolate, builtin_index),
                      buffer, buffer_size, CodeObjectRequired::kYes);
  masm.set_builtin_index(builtin_index);
  DCHECK(!masm.has_frame());
  Builtins::Generate_Adaptor(&masm, builtin_address, exit_frame_type);
  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::BUILTIN, masm.CodeObject(), builtin_index);
  PostBuildProfileAndTracing(isolate, *code, name);
  return *code;
}

// Builder for builtins implemented in TurboFan with JS linkage.
Code* BuildWithCodeStubAssemblerJS(Isolate* isolate, int32_t builtin_index,
                                   CodeAssemblerGenerator generator, int argc,
                                   const char* name) {
  HandleScope scope(isolate);
  // Canonicalize handles, so that we can share constant pool entries pointing
  // to code targets without dereferencing their handles.
  CanonicalHandleScope canonical(isolate);

  SegmentSize segment_size = isolate->serializer_enabled()
                                 ? SegmentSize::kLarge
                                 : SegmentSize::kDefault;
  Zone zone(isolate->allocator(), ZONE_NAME, segment_size);
  const int argc_with_recv =
      (argc == SharedFunctionInfo::kDontAdaptArgumentsSentinel) ? 0 : argc + 1;
  compiler::CodeAssemblerState state(
      isolate, &zone, argc_with_recv, Code::BUILTIN, name,
      PoisoningMitigationLevel::kDontPoison, builtin_index);
  generator(&state);
  Handle<Code> code = compiler::CodeAssembler::GenerateCode(
      &state, BuiltinAssemblerOptions(isolate, builtin_index));
  PostBuildProfileAndTracing(isolate, *code, name);
  return *code;
}

// Builder for builtins implemented in TurboFan with CallStub linkage.
Code* BuildWithCodeStubAssemblerCS(Isolate* isolate, int32_t builtin_index,
                                   CodeAssemblerGenerator generator,
                                   CallDescriptors::Key interface_descriptor,
                                   const char* name, int result_size) {
  HandleScope scope(isolate);
  // Canonicalize handles, so that we can share constant pool entries pointing
  // to code targets without dereferencing their handles.
  CanonicalHandleScope canonical(isolate);
  SegmentSize segment_size = isolate->serializer_enabled()
                                 ? SegmentSize::kLarge
                                 : SegmentSize::kDefault;
  Zone zone(isolate->allocator(), ZONE_NAME, segment_size);
  // The interface descriptor with the given key must be initialized at this
  // point; this construction just queries the details from the descriptors
  // table.
  CallInterfaceDescriptor descriptor(interface_descriptor);
  // Ensure the descriptor is already initialized.
  DCHECK_EQ(result_size, descriptor.GetReturnCount());
  DCHECK_LE(0, descriptor.GetRegisterParameterCount());
  compiler::CodeAssemblerState state(
      isolate, &zone, descriptor, Code::BUILTIN, name,
      PoisoningMitigationLevel::kDontPoison, 0, builtin_index);
  generator(&state);
  Handle<Code> code = compiler::CodeAssembler::GenerateCode(
      &state, BuiltinAssemblerOptions(isolate, builtin_index));
  PostBuildProfileAndTracing(isolate, *code, name);
  return *code;
}

}  // anonymous namespace

// static
void SetupIsolateDelegate::AddBuiltin(Builtins* builtins, int index,
                                      Code* code) {
  DCHECK_EQ(index, code->builtin_index());
  builtins->set_builtin(index, code);
}

// static
void SetupIsolateDelegate::PopulateWithPlaceholders(Isolate* isolate) {
  // Fill the builtins list with placeholders. References to these placeholder
  // builtins are eventually replaced by the actual builtins. This is to
  // support circular references between builtins.
  Builtins* builtins = isolate->builtins();
  HandleScope scope(isolate);
  for (int i = 0; i < Builtins::builtin_count; i++) {
    Handle<Code> placeholder = BuildPlaceholder(isolate, i);
    AddBuiltin(builtins, i, *placeholder);
  }
}

// static
void SetupIsolateDelegate::ReplacePlaceholders(Isolate* isolate) {
  // Replace references from all code objects to placeholders.
  Builtins* builtins = isolate->builtins();
  DisallowHeapAllocation no_gc;
  CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
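  // Builtin references occur in other code objects as code targets, relative
  // code targets, or embedded code objects; only these relocation modes need
  // to be visited when replacing placeholders.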
  static const int kRelocMask =
      RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
      RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
      RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET);
  HeapIterator iterator(isolate->heap());
  while (HeapObject* obj = iterator.next()) {
    if (!obj->IsCode()) continue;
    Code* code = Code::cast(obj);
    bool flush_icache = false;
    for (RelocIterator it(code, kRelocMask); !it.done(); it.next()) {
      RelocInfo* rinfo = it.rinfo();
      if (RelocInfo::IsCodeTargetMode(rinfo->rmode())) {
        Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
        DCHECK_IMPLIES(RelocInfo::IsRelativeCodeTarget(rinfo->rmode()),
                       Builtins::IsIsolateIndependent(target->builtin_index()));
        if (!target->is_builtin()) continue;
        Code* new_target = builtins->builtin(target->builtin_index());
        rinfo->set_target_address(new_target->raw_instruction_start(),
                                  UPDATE_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
      } else {
        DCHECK(RelocInfo::IsEmbeddedObject(rinfo->rmode()));
        Object* object = rinfo->target_object();
        if (!object->IsCode()) continue;
        Code* target = Code::cast(object);
        if (!target->is_builtin()) continue;
        Code* new_target = builtins->builtin(target->builtin_index());
        rinfo->set_target_object(isolate->heap(), new_target,
                                 UPDATE_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
      }
      flush_icache = true;
    }
    if (flush_icache) {
      Assembler::FlushICache(code->raw_instruction_start(),
                             code->raw_instruction_size());
    }
  }
}

#ifdef V8_EMBEDDED_BYTECODE_HANDLERS
namespace {
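// Builder for interpreter bytecode handler builtins (see BUILD_BCH_WITH_SCALE
// below). Returns nullptr for bytecode/operand-scale combinations that have
// no handler.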
Code* GenerateBytecodeHandler(Isolate* isolate, int builtin_index,
                              const char* name, interpreter::Bytecode bytecode,
                              interpreter::OperandScale operand_scale) {
  if (!interpreter::Bytecodes::BytecodeHasHandler(bytecode, operand_scale)) {
    // TODO(v8:8068): Consider returning something else to avoid placeholders
    // being serialized with the snapshot.
    return nullptr;
  }

  Handle<Code> code = interpreter::GenerateBytecodeHandler(
      isolate, bytecode, operand_scale, builtin_index);

  PostBuildProfileAndTracing(isolate, *code, name);

  return *code;
}
}  // namespace
#endif

// static
void SetupIsolateDelegate::SetupBuiltinsInternal(Isolate* isolate) {
  Builtins* builtins = isolate->builtins();
  DCHECK(!builtins->initialized_);

  PopulateWithPlaceholders(isolate);

  // Create a scope for the handles in the builtins.
  HandleScope scope(isolate);

  int index = 0;
  Code* code;
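// One BUILD_* macro per builtin kind. BUILTIN_LIST below expands each builtin
// into the matching macro, assigning consecutive builtin indices.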
#define BUILD_CPP(Name)                                              \
  code = BuildAdaptor(isolate, index, FUNCTION_ADDR(Builtin_##Name), \
                      Builtins::BUILTIN_EXIT, #Name);                \
  AddBuiltin(builtins, index++, code);
#define BUILD_API(Name)                                              \
  code = BuildAdaptor(isolate, index, FUNCTION_ADDR(Builtin_##Name), \
                      Builtins::EXIT, #Name);                        \
  AddBuiltin(builtins, index++, code);
#define BUILD_TFJ(Name, Argc, ...)                              \
  code = BuildWithCodeStubAssemblerJS(                          \
      isolate, index, &Builtins::Generate_##Name, Argc, #Name); \
  AddBuiltin(builtins, index++, code);
#define BUILD_TFC(Name, InterfaceDescriptor, result_size)        \
  code = BuildWithCodeStubAssemblerCS(                           \
      isolate, index, &Builtins::Generate_##Name,                \
      CallDescriptors::InterfaceDescriptor, #Name, result_size); \
  AddBuiltin(builtins, index++, code);
#define BUILD_TFS(Name, ...)                                                   \
  /* Return size for generic TF builtins (stub linkage) is always 1. */       \
  code =                                                                       \
      BuildWithCodeStubAssemblerCS(isolate, index, &Builtins::Generate_##Name, \
                                   CallDescriptors::Name, #Name, 1);           \
  AddBuiltin(builtins, index++, code);
#define BUILD_TFH(Name, InterfaceDescriptor)               \
  /* Return size for IC builtins/handlers is always 1. */  \
  code = BuildWithCodeStubAssemblerCS(                     \
      isolate, index, &Builtins::Generate_##Name,          \
      CallDescriptors::InterfaceDescriptor, #Name, 1);     \
  AddBuiltin(builtins, index++, code);

#define BUILD_BCH_WITH_SCALE(Code, Scale)                               \
  code = GenerateBytecodeHandler(isolate, index, Builtins::name(index), \
                                 interpreter::Bytecode::k##Code,        \
                                 interpreter::OperandScale::k##Scale);  \
  if (code) {                                                           \
    AddBuiltin(builtins, index, code);                                  \
  }                                                                     \
  ++index;

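// Bytecode handler builtins are emitted for all three operand scales. An
// index is reserved for every scale even when no handler exists for it; in
// that case GenerateBytecodeHandler returns nullptr and the placeholder stays
// in the builtins table.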
#define BUILD_BCH(Code, ...)         \
  BUILD_BCH_WITH_SCALE(Code, Single) \
  BUILD_BCH_WITH_SCALE(Code, Double) \
  BUILD_BCH_WITH_SCALE(Code, Quadruple)

#define BUILD_ASM(Name)                                                     \
  code = BuildWithMacroAssembler(isolate, index, Builtins::Generate_##Name, \
                                 #Name);                                    \
  AddBuiltin(builtins, index++, code);

  BUILTIN_LIST(BUILD_CPP, BUILD_API, BUILD_TFJ, BUILD_TFC, BUILD_TFS, BUILD_TFH,
               BUILD_BCH, BUILD_ASM);

#undef BUILD_CPP
#undef BUILD_API
#undef BUILD_TFJ
#undef BUILD_TFC
#undef BUILD_TFS
#undef BUILD_TFH
#undef BUILD_BCH
#undef BUILD_BCH_WITH_SCALE
#undef BUILD_ASM
  CHECK_EQ(Builtins::builtin_count, index);

  ReplacePlaceholders(isolate);

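// Set the is_promise_rejection / is_exception_caught flags on the builtins
// named in the corresponding prediction lists below.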
#define SET_PROMISE_REJECTION_PREDICTION(Name) \
  builtins->builtin(Builtins::k##Name)->set_is_promise_rejection(true);

  BUILTIN_PROMISE_REJECTION_PREDICTION_LIST(SET_PROMISE_REJECTION_PREDICTION)
#undef SET_PROMISE_REJECTION_PREDICTION

#define SET_EXCEPTION_CAUGHT_PREDICTION(Name) \
  builtins->builtin(Builtins::k##Name)->set_is_exception_caught(true);

  BUILTIN_EXCEPTION_CAUGHT_PREDICTION_LIST(SET_EXCEPTION_CAUGHT_PREDICTION)
#undef SET_EXCEPTION_CAUGHT_PREDICTION

  builtins->MarkInitialized();
}

}  // namespace internal
}  // namespace v8