1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #include "src/api.h"
8 #include "src/arguments.h"
9 #include "src/base/once.h"
10 #include "src/bootstrapper.h"
11 #include "src/builtins.h"
12 #include "src/cpu-profiler.h"
13 #include "src/gdb-jit.h"
14 #include "src/heap/mark-compact.h"
15 #include "src/heap-profiler.h"
16 #include "src/ic/handler-compiler.h"
17 #include "src/ic/ic.h"
18 #include "src/prototype.h"
19 #include "src/vm-state-inl.h"
20
21 namespace v8 {
22 namespace internal {
23
24 namespace {
25
// Arguments object passed to C++ builtins.
//
// Wraps the raw (length, arguments) pair handed to a C++ builtin and layers
// bounds-checked accessors on top of Arguments.  The extra_args template
// parameter records whether the stub pushed extra trailing arguments (e.g.
// the called function) that should be hidden from length()/operator[].
template <BuiltinExtraArguments extra_args>
class BuiltinArguments : public Arguments {
 public:
  BuiltinArguments(int length, Object** arguments)
      : Arguments(length, arguments) { }

  // Bounds-checked raw element access (receiver is index 0).
  Object*& operator[] (int index) {
    DCHECK(index < length());
    return Arguments::operator[](index);
  }

  // Bounds-checked, handlified element access with a cast to S.
  template <class S> Handle<S> at(int index) {
    DCHECK(index < length());
    return Arguments::at<S>(index);
  }

  // The receiver is always passed as the first argument.
  Handle<Object> receiver() {
    return Arguments::at<Object>(0);
  }

  // Only valid when the stub pushed the called function as the last
  // (extra) argument; enforced at compile time by the STATIC_ASSERT.
  Handle<JSFunction> called_function() {
    STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
    return Arguments::at<JSFunction>(Arguments::length() - 1);
  }

  // Gets the total number of arguments including the receiver (but
  // excluding extra arguments).
  int length() const {
    STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
    return Arguments::length();
  }

#ifdef DEBUG
  void Verify() {
    // Check we have at least the receiver.
    DCHECK(Arguments::length() >= 1);
  }
#endif
};
66
67
// Specialize BuiltinArguments for the called function extra argument.

// With NEEDS_CALLED_FUNCTION the called function occupies the last slot, so
// it is subtracted from the externally visible argument count.
template <>
int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
  return Arguments::length() - 1;
}
74
#ifdef DEBUG
// Debug-only sanity check for the NEEDS_CALLED_FUNCTION layout.
template <>
void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
  // Check we have at least the receiver and the called function.
  DCHECK(Arguments::length() >= 2);
  // Make sure cast to JSFunction succeeds.
  called_function();
}
#endif
84
85
// Instantiate a BuiltinArguments typedef (<Name>ArgumentsType) for every
// C++ builtin declared in BUILTIN_LIST_C, carrying that builtin's
// BuiltinExtraArguments spec.
#define DEF_ARG_TYPE(name, spec) \
  typedef BuiltinArguments<spec> name##ArgumentsType;
BUILTIN_LIST_C(DEF_ARG_TYPE)
#undef DEF_ARG_TYPE
90
91 } // namespace
92
93 // ----------------------------------------------------------------------------
94 // Support macro for defining builtins in C++.
95 // ----------------------------------------------------------------------------
96 //
97 // A builtin function is defined by writing:
98 //
99 // BUILTIN(name) {
100 // ...
101 // }
102 //
103 // In the body of the builtin function the arguments can be accessed
104 // through the BuiltinArguments object args.
105
#ifdef DEBUG

// Debug variant: Builtin_##name unpacks the raw (length, arguments) pair,
// runs args.Verify() to validate the frame layout, then tail-calls the
// user-written implementation Builtin_Impl_##name.
#define BUILTIN(name)                                            \
  MUST_USE_RESULT static Object* Builtin_Impl_##name(            \
      name##ArgumentsType args, Isolate* isolate);               \
  MUST_USE_RESULT static Object* Builtin_##name(                 \
      int args_length, Object** args_object, Isolate* isolate) { \
    name##ArgumentsType args(args_length, args_object);          \
    args.Verify();                                               \
    return Builtin_Impl_##name(args, isolate);                   \
  }                                                              \
  MUST_USE_RESULT static Object* Builtin_Impl_##name(            \
      name##ArgumentsType args, Isolate* isolate)

#else  // For release mode.

// Release variant: same wrapper without the Verify() call (and without
// MUST_USE_RESULT on the internal symbols).
#define BUILTIN(name)                                            \
  static Object* Builtin_impl##name(                             \
      name##ArgumentsType args, Isolate* isolate);               \
  static Object* Builtin_##name(                                 \
      int args_length, Object** args_object, Isolate* isolate) { \
    name##ArgumentsType args(args_length, args_object);          \
    return Builtin_impl##name(args, isolate);                    \
  }                                                              \
  static Object* Builtin_impl##name(                             \
      name##ArgumentsType args, Isolate* isolate)
#endif
133
134
#ifdef DEBUG
// Debug-only helper: returns true if the builtin's caller frame is a
// construct frame.  Computes the answer twice -- once with a full stack
// frame iterator (the reference) and once with the fast inline frame walk
// -- and asserts they agree.
static inline bool CalledAsConstructor(Isolate* isolate) {
  // Calculate the result using a full stack frame iterator and check
  // that the state of the stack is as we assume it to be in the
  // code below.
  StackFrameIterator it(isolate);
  DCHECK(it.frame()->is_exit());
  it.Advance();
  StackFrame* frame = it.frame();
  bool reference_result = frame->is_construct();
  Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
  // Because we know fp points to an exit frame we can use the relevant
  // part of ExitFrame::ComputeCallerState directly.
  const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
  Address caller_fp = Memory::Address_at(fp + kCallerOffset);
  // This inlines the part of StackFrame::ComputeType that grabs the
  // type of the current frame.  Note that StackFrame::ComputeType
  // has been specialized for each architecture so if any one of them
  // changes this code has to be changed as well.
  const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
  const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
  Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
  bool result = (marker == kConstructMarker);
  DCHECK_EQ(result, reference_result);
  return result;
}
#endif
162
163
164 // ----------------------------------------------------------------------------
165
// Placeholder builtin that must never actually be invoked; crashes in debug
// builds via UNREACHABLE().
BUILTIN(Illegal) {
  UNREACHABLE();
  return isolate->heap()->undefined_value();  // Make compiler happy.
}
170
171
// A no-op builtin; always returns undefined.
BUILTIN(EmptyFunction) {
  return isolate->heap()->undefined_value();
}
175
176
MoveDoubleElements(FixedDoubleArray * dst,int dst_index,FixedDoubleArray * src,int src_index,int len)177 static void MoveDoubleElements(FixedDoubleArray* dst, int dst_index,
178 FixedDoubleArray* src, int src_index, int len) {
179 if (len == 0) return;
180 MemMove(dst->data_start() + dst_index, src->data_start() + src_index,
181 len * kDoubleSize);
182 }
183
184
ArrayPrototypeHasNoElements(Heap * heap,Context * native_context,JSObject * array_proto)185 static bool ArrayPrototypeHasNoElements(Heap* heap,
186 Context* native_context,
187 JSObject* array_proto) {
188 DisallowHeapAllocation no_gc;
189 // This method depends on non writability of Object and Array prototype
190 // fields.
191 if (array_proto->elements() != heap->empty_fixed_array()) return false;
192 // Object.prototype
193 PrototypeIterator iter(heap->isolate(), array_proto);
194 if (iter.IsAtEnd()) {
195 return false;
196 }
197 array_proto = JSObject::cast(iter.GetCurrent());
198 if (array_proto != native_context->initial_object_prototype()) return false;
199 if (array_proto->elements() != heap->empty_fixed_array()) return false;
200 iter.Advance();
201 return iter.IsAtEnd();
202 }
203
204
// Returns empty handle if not applicable.
//
// Verifies that |receiver| is a JSArray whose fast elements may be mutated
// in place (writable, unobserved, extensible, no dictionary elements in the
// prototype chain).  When |args| is non-NULL, additionally ensures the
// elements kind can hold args[first_added_arg..], transitioning the kind if
// needed.  Returns the (possibly new) elements backing store, or an empty
// MaybeHandle to signal "take the JS fallback path".
MUST_USE_RESULT
static inline MaybeHandle<FixedArrayBase> EnsureJSArrayWithWritableFastElements(
    Isolate* isolate,
    Handle<Object> receiver,
    Arguments* args,
    int first_added_arg) {
  if (!receiver->IsJSArray()) return MaybeHandle<FixedArrayBase>();
  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  // If there may be elements accessors in the prototype chain, the fast path
  // cannot be used if there are arguments to add to the array.
  if (args != NULL && array->map()->DictionaryElementsInPrototypeChainOnly()) {
    return MaybeHandle<FixedArrayBase>();
  }
  if (array->map()->is_observed()) return MaybeHandle<FixedArrayBase>();
  if (!array->map()->is_extensible()) return MaybeHandle<FixedArrayBase>();
  Handle<FixedArrayBase> elms(array->elements(), isolate);
  Heap* heap = isolate->heap();
  Map* map = elms->map();
  if (map == heap->fixed_array_map()) {
    if (args == NULL || array->HasFastObjectElements()) return elms;
  } else if (map == heap->fixed_cow_array_map()) {
    // Copy-on-write backing store: make a private writable copy first.
    elms = JSObject::EnsureWritableFastElements(array);
    if (args == NULL || array->HasFastObjectElements()) return elms;
  } else if (map == heap->fixed_double_array_map()) {
    if (args == NULL) return elms;
  } else {
    return MaybeHandle<FixedArrayBase>();
  }

  // Need to ensure that the arguments passed in args can be contained in
  // the array.
  int args_length = args->length();
  if (first_added_arg >= args_length) return handle(array->elements(), isolate);

  ElementsKind origin_kind = array->map()->elements_kind();
  DCHECK(!IsFastObjectElementsKind(origin_kind));
  ElementsKind target_kind = origin_kind;
  {
    DisallowHeapAllocation no_gc;
    // Arguments are laid out in reverse on the stack; compute the base of
    // the slice [first_added_arg, args_length) as a forward C array.
    int arg_count = args->length() - first_added_arg;
    Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
    for (int i = 0; i < arg_count; i++) {
      Object* arg = arguments[i];
      if (arg->IsHeapObject()) {
        if (arg->IsHeapNumber()) {
          target_kind = FAST_DOUBLE_ELEMENTS;
        } else {
          // A non-number heap object forces the most general fast kind.
          target_kind = FAST_ELEMENTS;
          break;
        }
      }
    }
  }
  if (target_kind != origin_kind) {
    JSObject::TransitionElementsKind(array, target_kind);
    return handle(array->elements(), isolate);
  }
  return elms;
}
265
266
IsJSArrayFastElementMovingAllowed(Heap * heap,JSArray * receiver)267 static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
268 JSArray* receiver) {
269 if (!FLAG_clever_optimizations) return false;
270 DisallowHeapAllocation no_gc;
271 Context* native_context = heap->isolate()->context()->native_context();
272 JSObject* array_proto =
273 JSObject::cast(native_context->array_function()->prototype());
274 PrototypeIterator iter(heap->isolate(), receiver);
275 return iter.GetCurrent() == array_proto &&
276 ArrayPrototypeHasNoElements(heap, native_context, array_proto);
277 }
278
279
CallJsBuiltin(Isolate * isolate,const char * name,BuiltinArguments<NO_EXTRA_ARGUMENTS> args)280 MUST_USE_RESULT static Object* CallJsBuiltin(
281 Isolate* isolate,
282 const char* name,
283 BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
284 HandleScope handleScope(isolate);
285
286 Handle<Object> js_builtin = Object::GetProperty(
287 isolate,
288 handle(isolate->native_context()->builtins(), isolate),
289 name).ToHandleChecked();
290 Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
291 int argc = args.length() - 1;
292 ScopedVector<Handle<Object> > argv(argc);
293 for (int i = 0; i < argc; ++i) {
294 argv[i] = args.at<Object>(i + 1);
295 }
296 Handle<Object> result;
297 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
298 isolate, result,
299 Execution::Call(isolate,
300 function,
301 args.receiver(),
302 argc,
303 argv.start()));
304 return *result;
305 }
306
307
// Array.prototype.push fast path.  Appends the arguments to a fast-elements
// JSArray, growing the backing store if needed; bails out to the JS
// implementation for anything it cannot handle in place.
BUILTIN(ArrayPush) {
  HandleScope scope(isolate);
  Handle<Object> receiver = args.receiver();
  MaybeHandle<FixedArrayBase> maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
  Handle<FixedArrayBase> elms_obj;
  if (!maybe_elms_obj.ToHandle(&elms_obj)) {
    return CallJsBuiltin(isolate, "ArrayPush", args);
  }

  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  int len = Smi::cast(array->length())->value();
  int to_add = args.length() - 1;
  // A read-only length property cannot be grown on the fast path.
  if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
    return CallJsBuiltin(isolate, "ArrayPush", args);
  }
  DCHECK(!array->map()->is_observed());

  ElementsKind kind = array->GetElementsKind();

  if (IsFastSmiOrObjectElementsKind(kind)) {
    // Smi/object elements: backing store is a FixedArray.
    Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    if (to_add == 0) {
      return Smi::FromInt(len);
    }
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    DCHECK(to_add <= (Smi::kMaxValue - len));

    int new_length = len + to_add;

    if (new_length > elms->length()) {
      // New backing storage is needed.
      int capacity = new_length + (new_length >> 1) + 16;
      Handle<FixedArray> new_elms =
          isolate->factory()->NewUninitializedFixedArray(capacity);

      // Copy existing elements and hole-initialize the tail.
      ElementsAccessor* accessor = array->GetElementsAccessor();
      accessor->CopyElements(
          elms_obj, 0, kind, new_elms, 0,
          ElementsAccessor::kCopyToEndAndInitializeToHole);

      elms = new_elms;
    }

    // Add the provided values.
    DisallowHeapAllocation no_gc;
    WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
    for (int index = 0; index < to_add; index++) {
      elms->set(index + len, args[index + 1], mode);
    }

    // Install the new backing store if it was replaced.
    if (*elms != array->elements()) {
      array->set_elements(*elms);
    }

    // Set the length.
    array->set_length(Smi::FromInt(new_length));
    return Smi::FromInt(new_length);
  } else {
    // Double elements: backing store is a FixedDoubleArray.
    int elms_len = elms_obj->length();
    if (to_add == 0) {
      return Smi::FromInt(len);
    }
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    DCHECK(to_add <= (Smi::kMaxValue - len));

    int new_length = len + to_add;

    Handle<FixedDoubleArray> new_elms;

    if (new_length > elms_len) {
      // New backing storage is needed.
      int capacity = new_length + (new_length >> 1) + 16;
      // Create new backing store; since capacity > 0, we can
      // safely cast to FixedDoubleArray.
      new_elms = Handle<FixedDoubleArray>::cast(
          isolate->factory()->NewFixedDoubleArray(capacity));

      ElementsAccessor* accessor = array->GetElementsAccessor();
      accessor->CopyElements(
          elms_obj, 0, kind, new_elms, 0,
          ElementsAccessor::kCopyToEndAndInitializeToHole);

    } else {
      // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
      // empty_fixed_array.
      new_elms = Handle<FixedDoubleArray>::cast(elms_obj);
    }

    // Add the provided values.
    DisallowHeapAllocation no_gc;
    int index;
    for (index = 0; index < to_add; index++) {
      Object* arg = args[index + 1];
      new_elms->set(index + len, arg->Number());
    }

    if (*new_elms != array->elements()) {
      array->set_elements(*new_elms);
    }

    // Set the length.
    array->set_length(Smi::FromInt(new_length));
    return Smi::FromInt(new_length);
  }
}
416
417
// Array.prototype.pop fast path.  Removes and returns the last element of a
// fast-elements JSArray; bails out to the JS implementation when the fast
// path does not apply or the last element is a hole (which would require a
// prototype chain lookup).
BUILTIN(ArrayPop) {
  HandleScope scope(isolate);
  Handle<Object> receiver = args.receiver();
  MaybeHandle<FixedArrayBase> maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
  Handle<FixedArrayBase> elms_obj;
  if (!maybe_elms_obj.ToHandle(&elms_obj)) {
    return CallJsBuiltin(isolate, "ArrayPop", args);
  }

  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  DCHECK(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();
  // Popping an empty array yields undefined.
  if (len == 0) return isolate->heap()->undefined_value();

  ElementsAccessor* accessor = array->GetElementsAccessor();
  int new_length = len - 1;
  Handle<Object> element =
      accessor->Get(array, array, new_length, elms_obj).ToHandleChecked();
  if (element->IsTheHole()) {
    return CallJsBuiltin(isolate, "ArrayPop", args);
  }
  RETURN_FAILURE_ON_EXCEPTION(
      isolate,
      accessor->SetLength(array, handle(Smi::FromInt(new_length), isolate)));
  return *element;
}
446
447
// Array.prototype.shift fast path.  Removes and returns the first element,
// either by left-trimming the backing store in place (when the heap allows
// moving the object start) or by shifting all elements down by one.
BUILTIN(ArrayShift) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  Handle<Object> receiver = args.receiver();
  MaybeHandle<FixedArrayBase> maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
  Handle<FixedArrayBase> elms_obj;
  if (!maybe_elms_obj.ToHandle(&elms_obj) ||
      !IsJSArrayFastElementMovingAllowed(heap,
                                         *Handle<JSArray>::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArrayShift", args);
  }
  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  DCHECK(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();
  if (len == 0) return heap->undefined_value();

  // Get first element
  ElementsAccessor* accessor = array->GetElementsAccessor();
  Handle<Object> first =
      accessor->Get(array, array, 0, elms_obj).ToHandleChecked();
  if (first->IsTheHole()) {
    // A hole would require consulting the prototype chain; fall back.
    return CallJsBuiltin(isolate, "ArrayShift", args);
  }

  if (heap->CanMoveObjectStart(*elms_obj)) {
    // Fast path: drop the first slot by moving the object start.
    array->set_elements(heap->LeftTrimFixedArray(*elms_obj, 1));
  } else {
    // Shift the elements.
    if (elms_obj->IsFixedArray()) {
      Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
      DisallowHeapAllocation no_gc;
      heap->MoveElements(*elms, 0, 1, len - 1);
      elms->set(len - 1, heap->the_hole_value());
    } else {
      Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
      MoveDoubleElements(*elms, 0, *elms, 1, len - 1);
      elms->set_the_hole(len - 1);
    }
  }

  // Set the length.
  array->set_length(Smi::FromInt(len - 1));

  return *first;
}
495
496
// Array.prototype.unshift fast path.  Prepends the arguments to a
// fast smi/object-elements JSArray, either by copying into a larger backing
// store (new elements land at offset to_add) or by moving the existing
// elements up in place.
BUILTIN(ArrayUnshift) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  Handle<Object> receiver = args.receiver();
  MaybeHandle<FixedArrayBase> maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
  Handle<FixedArrayBase> elms_obj;
  if (!maybe_elms_obj.ToHandle(&elms_obj) ||
      !IsJSArrayFastElementMovingAllowed(heap,
                                         *Handle<JSArray>::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }
  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  DCHECK(!array->map()->is_observed());
  if (!array->HasFastSmiOrObjectElements()) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }
  int len = Smi::cast(array->length())->value();
  int to_add = args.length() - 1;
  int new_length = len + to_add;
  // Currently fixed arrays cannot grow too big, so
  // we should never hit this case.
  DCHECK(to_add <= (Smi::kMaxValue - len));

  // A read-only length property cannot be grown on the fast path.
  if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }

  Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);

  // May transition the elements kind so the new values fit.
  JSObject::EnsureCanContainElements(array, &args, 1, to_add,
                                     DONT_ALLOW_DOUBLE_ELEMENTS);

  if (new_length > elms->length()) {
    // New backing storage is needed.
    int capacity = new_length + (new_length >> 1) + 16;
    Handle<FixedArray> new_elms =
        isolate->factory()->NewUninitializedFixedArray(capacity);

    // Copy old elements shifted up by to_add; hole-initialize the tail.
    ElementsKind kind = array->GetElementsKind();
    ElementsAccessor* accessor = array->GetElementsAccessor();
    accessor->CopyElements(
        elms, 0, kind, new_elms, to_add,
        ElementsAccessor::kCopyToEndAndInitializeToHole);

    elms = new_elms;
    array->set_elements(*elms);
  } else {
    DisallowHeapAllocation no_gc;
    heap->MoveElements(*elms, to_add, 0, len);
  }

  // Add the provided values.
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < to_add; i++) {
    elms->set(i, args[i + 1], mode);
  }

  // Set the length.
  array->set_length(Smi::FromInt(new_length));
  return Smi::FromInt(new_length);
}
560
561
// Array.prototype.slice fast path.  Handles fast-elements JSArrays and
// sloppy arguments objects with fast elements; everything else (and any
// argument that is not a smi, heap number, or undefined) falls back to the
// JS implementation.
BUILTIN(ArraySlice) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  Handle<Object> receiver = args.receiver();
  int len = -1;
  int relative_start = 0;
  int relative_end = 0;
  {
    // Raw-pointer inspection of the receiver and arguments; allocation is
    // explicitly re-enabled around each bailout.
    DisallowHeapAllocation no_gc;
    if (receiver->IsJSArray()) {
      JSArray* array = JSArray::cast(*receiver);
      if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }

      if (!array->HasFastElements()) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }

      len = Smi::cast(array->length())->value();
    } else {
      // Array.slice(arguments, ...) is quite a common idiom (notably more
      // than 50% of invocations in Web apps).  Treat it in C++ as well.
      Map* arguments_map =
          isolate->context()->native_context()->sloppy_arguments_map();

      bool is_arguments_object_with_fast_elements =
          receiver->IsJSObject() &&
          JSObject::cast(*receiver)->map() == arguments_map;
      if (!is_arguments_object_with_fast_elements) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
      JSObject* object = JSObject::cast(*receiver);

      if (!object->HasFastElements()) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }

      Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
      if (!len_obj->IsSmi()) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
      len = Smi::cast(len_obj)->value();
      // The arguments length may lie; don't read past the backing store.
      if (len > object->elements()->length()) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
    }

    DCHECK(len >= 0);
    int n_arguments = args.length() - 1;

    // Note carefully chosen defaults---if argument is missing,
    // it's undefined which gets converted to 0 for relative_start
    // and to len for relative_end.
    relative_start = 0;
    relative_end = len;
    if (n_arguments > 0) {
      Object* arg1 = args[1];
      if (arg1->IsSmi()) {
        relative_start = Smi::cast(arg1)->value();
      } else if (arg1->IsHeapNumber()) {
        double start = HeapNumber::cast(arg1)->value();
        if (start < kMinInt || start > kMaxInt) {
          AllowHeapAllocation allow_allocation;
          return CallJsBuiltin(isolate, "ArraySlice", args);
        }
        relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
      } else if (!arg1->IsUndefined()) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
      if (n_arguments > 1) {
        Object* arg2 = args[2];
        if (arg2->IsSmi()) {
          relative_end = Smi::cast(arg2)->value();
        } else if (arg2->IsHeapNumber()) {
          double end = HeapNumber::cast(arg2)->value();
          if (end < kMinInt || end > kMaxInt) {
            AllowHeapAllocation allow_allocation;
            return CallJsBuiltin(isolate, "ArraySlice", args);
          }
          relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
        } else if (!arg2->IsUndefined()) {
          AllowHeapAllocation allow_allocation;
          return CallJsBuiltin(isolate, "ArraySlice", args);
        }
      }
    }
  }

  // ECMA-262, 3rd Edition, Section 15.4.4.10, step 6.
  int k = (relative_start < 0) ? Max(len + relative_start, 0)
                               : Min(relative_start, len);

  // ECMA-262, 3rd Edition, Section 15.4.4.10, step 8.
  int final = (relative_end < 0) ? Max(len + relative_end, 0)
                                 : Min(relative_end, len);

  // Calculate the length of result array.
  int result_len = Max(final - k, 0);

  Handle<JSObject> object = Handle<JSObject>::cast(receiver);
  Handle<FixedArrayBase> elms(object->elements(), isolate);

  ElementsKind kind = object->GetElementsKind();
  if (IsHoleyElementsKind(kind)) {
    // If the slice contains no holes the result can use a packed kind.
    DisallowHeapAllocation no_gc;
    bool packed = true;
    ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
    for (int i = k; i < final; i++) {
      if (!accessor->HasElement(object, object, i, elms)) {
        packed = false;
        break;
      }
    }
    if (packed) {
      kind = GetPackedElementsKind(kind);
    } else if (!receiver->IsJSArray()) {
      AllowHeapAllocation allow_allocation;
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
  }

  Handle<JSArray> result_array =
      isolate->factory()->NewJSArray(kind, result_len, result_len);

  DisallowHeapAllocation no_gc;
  if (result_len == 0) return *result_array;

  ElementsAccessor* accessor = object->GetElementsAccessor();
  accessor->CopyElements(
      elms, k, kind, handle(result_array->elements(), isolate), 0, result_len);
  return *result_array;
}
702
703
// Array.prototype.splice fast path.  Computes the deleted-elements result
// array, then mutates the receiver's backing store in place: shrinking
// arrays are trimmed from the front or compacted from the back (whichever
// moves fewer elements), growing arrays get a larger backing store, and
// finally the inserted items are written at actual_start.
BUILTIN(ArraySplice) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  Handle<Object> receiver = args.receiver();
  MaybeHandle<FixedArrayBase> maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 3);
  Handle<FixedArrayBase> elms_obj;
  if (!maybe_elms_obj.ToHandle(&elms_obj) ||
      !IsJSArrayFastElementMovingAllowed(heap,
                                         *Handle<JSArray>::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }
  Handle<JSArray> array = Handle<JSArray>::cast(receiver);
  DCHECK(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();

  int n_arguments = args.length() - 1;

  // ToInteger(start) for smi/heap-number/undefined; anything else bails out.
  int relative_start = 0;
  if (n_arguments > 0) {
    DisallowHeapAllocation no_gc;
    Object* arg1 = args[1];
    if (arg1->IsSmi()) {
      relative_start = Smi::cast(arg1)->value();
    } else if (arg1->IsHeapNumber()) {
      double start = HeapNumber::cast(arg1)->value();
      if (start < kMinInt || start > kMaxInt) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySplice", args);
      }
      relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
    } else if (!arg1->IsUndefined()) {
      AllowHeapAllocation allow_allocation;
      return CallJsBuiltin(isolate, "ArraySplice", args);
    }
  }
  int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
                                          : Min(relative_start, len);

  // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
  // given as a request to delete all the elements from the start.
  // And it differs from the case of undefined delete count.
  // This does not follow ECMA-262, but we do the same for
  // compatibility.
  int actual_delete_count;
  if (n_arguments == 1) {
    DCHECK(len - actual_start >= 0);
    actual_delete_count = len - actual_start;
  } else {
    int value = 0;  // ToInteger(undefined) == 0
    if (n_arguments > 1) {
      DisallowHeapAllocation no_gc;
      Object* arg2 = args[2];
      if (arg2->IsSmi()) {
        value = Smi::cast(arg2)->value();
      } else {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArraySplice", args);
      }
    }
    actual_delete_count = Min(Max(value, 0), len - actual_start);
  }

  ElementsKind elements_kind = array->GetElementsKind();

  int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
  int new_length = len - actual_delete_count + item_count;

  // For double mode we do not support changing the length.
  if (new_length > len && IsFastDoubleElementsKind(elements_kind)) {
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }

  if (new_length == 0) {
    // Everything is removed: hand the old backing store to the result and
    // reset the receiver to the empty array.
    Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(
        elms_obj, elements_kind, actual_delete_count);
    array->set_elements(heap->empty_fixed_array());
    array->set_length(Smi::FromInt(0));
    return *result;
  }

  Handle<JSArray> result_array =
      isolate->factory()->NewJSArray(elements_kind,
                                     actual_delete_count,
                                     actual_delete_count);

  // Copy the deleted elements into the result array.
  if (actual_delete_count > 0) {
    DisallowHeapAllocation no_gc;
    ElementsAccessor* accessor = array->GetElementsAccessor();
    accessor->CopyElements(
        elms_obj, actual_start, elements_kind,
        handle(result_array->elements(), isolate), 0, actual_delete_count);
  }

  bool elms_changed = false;
  if (item_count < actual_delete_count) {
    // Shrink the array.
    // Trim from the front when the prefix is shorter than the suffix (and
    // the store is not in large-object space, which cannot be trimmed).
    const bool trim_array = !heap->lo_space()->Contains(*elms_obj) &&
                            ((actual_start + item_count) <
                             (len - actual_delete_count - actual_start));
    if (trim_array) {
      const int delta = actual_delete_count - item_count;

      // Move the prefix up by delta to open the gap at the front.
      if (elms_obj->IsFixedDoubleArray()) {
        Handle<FixedDoubleArray> elms =
            Handle<FixedDoubleArray>::cast(elms_obj);
        MoveDoubleElements(*elms, delta, *elms, 0, actual_start);
      } else {
        Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
        DisallowHeapAllocation no_gc;
        heap->MoveElements(*elms, delta, 0, actual_start);
      }

      if (heap->CanMoveObjectStart(*elms_obj)) {
        // On the fast path we move the start of the object in memory.
        elms_obj = handle(heap->LeftTrimFixedArray(*elms_obj, delta));
      } else {
        // This is the slow path. We are going to move the elements to the left
        // by copying them. For trimmed values we store the hole.
        if (elms_obj->IsFixedDoubleArray()) {
          Handle<FixedDoubleArray> elms =
              Handle<FixedDoubleArray>::cast(elms_obj);
          MoveDoubleElements(*elms, 0, *elms, delta, len - delta);
          elms->FillWithHoles(len - delta, len);
        } else {
          Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
          DisallowHeapAllocation no_gc;
          heap->MoveElements(*elms, 0, delta, len - delta);
          elms->FillWithHoles(len - delta, len);
        }
      }
      elms_changed = true;
    } else {
      // Compact from the back: pull the suffix down over the gap.
      if (elms_obj->IsFixedDoubleArray()) {
        Handle<FixedDoubleArray> elms =
            Handle<FixedDoubleArray>::cast(elms_obj);
        MoveDoubleElements(*elms, actual_start + item_count,
                           *elms, actual_start + actual_delete_count,
                           (len - actual_delete_count - actual_start));
        elms->FillWithHoles(new_length, len);
      } else {
        Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
        DisallowHeapAllocation no_gc;
        heap->MoveElements(*elms, actual_start + item_count,
                           actual_start + actual_delete_count,
                           (len - actual_delete_count - actual_start));
        elms->FillWithHoles(new_length, len);
      }
    }
  } else if (item_count > actual_delete_count) {
    // Grow the array (double kinds bailed out above, so this is a
    // FixedArray).
    Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    DCHECK((item_count - actual_delete_count) <= (Smi::kMaxValue - len));

    // Check if array need to grow.
    if (new_length > elms->length()) {
      // New backing storage is needed.
      int capacity = new_length + (new_length >> 1) + 16;
      Handle<FixedArray> new_elms =
          isolate->factory()->NewUninitializedFixedArray(capacity);

      DisallowHeapAllocation no_gc;

      ElementsKind kind = array->GetElementsKind();
      ElementsAccessor* accessor = array->GetElementsAccessor();
      if (actual_start > 0) {
        // Copy the part before actual_start as is.
        accessor->CopyElements(
            elms, 0, kind, new_elms, 0, actual_start);
      }
      accessor->CopyElements(
          elms, actual_start + actual_delete_count, kind,
          new_elms, actual_start + item_count,
          ElementsAccessor::kCopyToEndAndInitializeToHole);

      elms_obj = new_elms;
      elms_changed = true;
    } else {
      DisallowHeapAllocation no_gc;
      heap->MoveElements(*elms, actual_start + item_count,
                         actual_start + actual_delete_count,
                         (len - actual_delete_count - actual_start));
    }
  }

  // Write the inserted items into the gap at actual_start.
  if (IsFastDoubleElementsKind(elements_kind)) {
    Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
    for (int k = actual_start; k < actual_start + item_count; k++) {
      Object* arg = args[3 + k - actual_start];
      if (arg->IsSmi()) {
        elms->set(k, Smi::cast(arg)->value());
      } else {
        elms->set(k, HeapNumber::cast(arg)->value());
      }
    }
  } else {
    Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    DisallowHeapAllocation no_gc;
    WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
    for (int k = actual_start; k < actual_start + item_count; k++) {
      elms->set(k, args[3 + k - actual_start], mode);
    }
  }

  // Install the new backing store if it was replaced.
  if (elms_changed) {
    array->set_elements(*elms_obj);
  }
  // Set the length.
  array->set_length(Smi::FromInt(new_length));

  return *result_array;
}
918
919
// Fast path for Array.prototype.concat: concatenates JSArray arguments
// with fast elements into one freshly allocated array. Falls back to the
// JS implementation ("ArrayConcatJS") whenever the fast path is unsafe.
BUILTIN(ArrayConcat) {
  HandleScope scope(isolate);

  int n_arguments = args.length();
  int result_len = 0;
  ElementsKind elements_kind = GetInitialFastElementsKind();
  bool has_double = false;
  {
    // Raw pointers (heap, native_context, array_proto, arg) are live in
    // this scope, so no allocation may happen here.
    DisallowHeapAllocation no_gc;
    Heap* heap = isolate->heap();
    Context* native_context = isolate->context()->native_context();
    JSObject* array_proto =
        JSObject::cast(native_context->array_function()->prototype());
    if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
      AllowHeapAllocation allow_allocation;
      return CallJsBuiltin(isolate, "ArrayConcatJS", args);
    }

    // Iterate through all the arguments performing checks
    // and calculating total length.
    bool is_holey = false;
    for (int i = 0; i < n_arguments; i++) {
      Object* arg = args[i];
      PrototypeIterator iter(isolate, arg);
      // Only fast-elements JSArrays whose prototype is the untouched
      // Array.prototype qualify for the fast path.
      if (!arg->IsJSArray() || !JSArray::cast(arg)->HasFastElements() ||
          iter.GetCurrent() != array_proto) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArrayConcatJS", args);
      }
      int len = Smi::cast(JSArray::cast(arg)->length())->value();

      // We shouldn't overflow when adding another len.
      const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
      STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
      USE(kHalfOfMaxInt);
      result_len += len;
      DCHECK(result_len >= 0);

      if (result_len > FixedDoubleArray::kMaxLength) {
        AllowHeapAllocation allow_allocation;
        return CallJsBuiltin(isolate, "ArrayConcatJS", args);
      }

      // Track the most general elements kind seen, so the result array
      // can hold every argument's elements.
      ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
      has_double = has_double || IsFastDoubleElementsKind(arg_kind);
      is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
      if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
        elements_kind = arg_kind;
      }
    }
    if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);
  }

  // If a double array is concatted into a fast elements array, the fast
  // elements array needs to be initialized to contain proper holes, since
  // boxing doubles may cause incremental marking.
  ArrayStorageAllocationMode mode =
      has_double && IsFastObjectElementsKind(elements_kind)
      ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
  Handle<JSArray> result_array =
      isolate->factory()->NewJSArray(elements_kind,
                                     result_len,
                                     result_len,
                                     mode);
  if (result_len == 0) return *result_array;

  // Copy each argument's elements into the result, advancing |j|.
  int j = 0;
  Handle<FixedArrayBase> storage(result_array->elements(), isolate);
  ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
  for (int i = 0; i < n_arguments; i++) {
    // TODO(ishell): It is crucial to keep |array| as a raw pointer to avoid
    // performance degradation. Revisit this later.
    JSArray* array = JSArray::cast(args[i]);
    int len = Smi::cast(array->length())->value();
    ElementsKind from_kind = array->GetElementsKind();
    if (len > 0) {
      accessor->CopyElements(array, 0, from_kind, storage, j, len);
      j += len;
    }
  }

  DCHECK(j == result_len);

  return *result_array;
}
1005
1006
1007 // -----------------------------------------------------------------------------
1008 // Generator and strict mode poison pills
1009
1010
// Always throws a TypeError ("strict_poison_pill") regardless of the
// arguments it is invoked with.
BUILTIN(StrictModePoisonPill) {
  HandleScope scope(isolate);
  THROW_NEW_ERROR_RETURN_FAILURE(
      isolate,
      NewTypeError("strict_poison_pill", HandleVector<Object>(NULL, 0)));
}
1017
1018
// Always throws a TypeError ("generator_poison_pill") regardless of the
// arguments it is invoked with.
BUILTIN(GeneratorPoisonPill) {
  HandleScope scope(isolate);
  THROW_NEW_ERROR_RETURN_FAILURE(
      isolate,
      NewTypeError("generator_poison_pill", HandleVector<Object>(NULL, 0)));
}
1025
1026
1027 // -----------------------------------------------------------------------------
1028 //
1029
1030
1031 // Searches the hidden prototype chain of the given object for the first
1032 // object that is an instance of the given type. If no such object can
1033 // be found then Heap::null_value() is returned.
FindHidden(Heap * heap,Object * object,FunctionTemplateInfo * type)1034 static inline Object* FindHidden(Heap* heap,
1035 Object* object,
1036 FunctionTemplateInfo* type) {
1037 for (PrototypeIterator iter(heap->isolate(), object,
1038 PrototypeIterator::START_AT_RECEIVER);
1039 !iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN); iter.Advance()) {
1040 if (type->IsTemplateFor(iter.GetCurrent())) {
1041 return iter.GetCurrent();
1042 }
1043 }
1044 return heap->null_value();
1045 }
1046
1047
1048 // Returns the holder JSObject if the function can legally be called
1049 // with this receiver. Returns Heap::null_value() if the call is
1050 // illegal. Any arguments that don't fit the expected type is
1051 // overwritten with undefined. Note that holder and the arguments are
1052 // implicitly rewritten with the first object in the hidden prototype
1053 // chain that actually has the expected type.
// Validates |argv| (receiver at argv[0], arguments below it) against the
// signature attached to |info|. Returns the holder object on success and
// Heap::null_value() on failure; mismatched arguments are overwritten
// with undefined in place.
static inline Object* TypeCheck(Heap* heap,
                                int argc,
                                Object** argv,
                                FunctionTemplateInfo* info) {
  Object* recv = argv[0];
  // API calls are only supported with JSObject receivers.
  if (!recv->IsJSObject()) return heap->null_value();
  Object* sig_obj = info->signature();
  // No signature at all: any JSObject receiver is acceptable.
  if (sig_obj->IsUndefined()) return recv;
  SignatureInfo* sig = SignatureInfo::cast(sig_obj);
  // If necessary, check the receiver, rewriting the holder to the first
  // object in the hidden prototype chain of the expected type.
  Object* recv_type = sig->receiver();
  Object* holder = recv;
  if (!recv_type->IsUndefined()) {
    holder = FindHidden(heap, holder, FunctionTemplateInfo::cast(recv_type));
    if (holder == heap->null_value()) return heap->null_value();
  }
  Object* args_obj = sig->args();
  // If there is no argument signature we're done.
  if (args_obj->IsUndefined()) return holder;
  FixedArray* args = FixedArray::cast(args_obj);
  int length = args->length();
  // Only check as many arguments as were actually passed (excluding the
  // receiver).
  if (argc <= length) length = argc - 1;
  for (int i = 0; i < length; i++) {
    Object* argtype = args->get(i);
    if (argtype->IsUndefined()) continue;
    // Arguments live below the receiver, hence the negative index
    // (assumes the calling convention lays arguments out downward from
    // argv — matches the &args[0] usage in HandleApiCallHelper).
    Object** arg = &argv[-1 - i];
    Object* current = *arg;
    current = FindHidden(heap, current, FunctionTemplateInfo::cast(argtype));
    // Arguments of the wrong type are replaced by undefined.
    if (current == heap->null_value()) current = heap->undefined_value();
    *arg = current;
  }
  return holder;
}
1088
1089
// Invokes an API function created through the FunctionTemplate
// machinery. |args| carries the called JSFunction as an extra trailing
// argument (NEEDS_CALLED_FUNCTION). |is_construct| selects construct
// vs. normal call semantics.
template <bool is_construct>
MUST_USE_RESULT static Object* HandleApiCallHelper(
    BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
  DCHECK(is_construct == CalledAsConstructor(isolate));
  Heap* heap = isolate->heap();

  HandleScope scope(isolate);
  Handle<JSFunction> function = args.called_function();
  DCHECK(function->shared()->IsApiFunction());

  Handle<FunctionTemplateInfo> fun_data(
      function->shared()->get_api_func_data(), isolate);
  if (is_construct) {
    // Finish configuring the instance being constructed; may throw.
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
        isolate, fun_data,
        isolate->factory()->ConfigureInstance(
            fun_data, Handle<JSObject>::cast(args.receiver())));
  }

  // Sloppy-mode, non-native functions get the global proxy substituted
  // for an undefined receiver.
  SharedFunctionInfo* shared = function->shared();
  if (shared->strict_mode() == SLOPPY && !shared->native()) {
    Object* recv = args[0];
    DCHECK(!recv->IsNull());
    if (recv->IsUndefined()) args[0] = function->global_proxy();
  }

  // Check receiver and arguments against the template's signature; this
  // also resolves the holder and may rewrite arguments to undefined.
  Object* raw_holder = TypeCheck(heap, args.length(), &args[0], *fun_data);

  if (raw_holder->IsNull()) {
    // This function cannot be called with the given receiver.  Abort!
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate,
        NewTypeError("illegal_invocation", HandleVector(&function, 1)));
  }

  Object* raw_call_data = fun_data->call_code();
  if (!raw_call_data->IsUndefined()) {
    CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
    Object* callback_obj = call_data->callback();
    v8::FunctionCallback callback =
        v8::ToCData<v8::FunctionCallback>(callback_obj);
    Object* data_obj = call_data->data();
    Object* result;

    LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
    DCHECK(raw_holder->IsJSObject());

    FunctionCallbackArguments custom(isolate,
                                     data_obj,
                                     *function,
                                     raw_holder,
                                     &args[0] - 1,
                                     args.length() - 1,
                                     is_construct);

    // Call out to the embedder-provided callback.
    v8::Handle<v8::Value> value = custom.Call(callback);
    if (value.IsEmpty()) {
      result = heap->undefined_value();
    } else {
      result = *reinterpret_cast<Object**>(*value);
      result->VerifyApiCallResultType();
    }

    RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
    // For construct calls whose callback did not return a JSObject, fall
    // through and yield the receiver instead.
    if (!is_construct || result->IsJSObject()) return result;
  }

  return *args.receiver();
}
1159
1160
// Normal (non-construct) entry point for API function calls.
BUILTIN(HandleApiCall) {
  return HandleApiCallHelper<false>(args, isolate);
}
1164
1165
// Construct-call entry point for API function calls.
BUILTIN(HandleApiCallConstruct) {
  return HandleApiCallHelper<true>(args, isolate);
}
1169
1170
1171 // Helper function to handle calls to non-function objects created through the
1172 // API. The object can be called as either a constructor (using new) or just as
1173 // a function (without new).
// Invokes the instance-call handler of a non-function API object.
// |is_construct_call| only selects how the callback arguments are
// flagged; the builtin itself is never entered as a construct call.
MUST_USE_RESULT static Object* HandleApiCallAsFunctionOrConstructor(
    Isolate* isolate,
    bool is_construct_call,
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
  // Non-functions are never called as constructors. Even if this is an object
  // called as a constructor the delegate call is not a construct call.
  DCHECK(!CalledAsConstructor(isolate));
  Heap* heap = isolate->heap();

  Handle<Object> receiver = args.receiver();

  // Get the object called.
  JSObject* obj = JSObject::cast(*receiver);

  // Get the invocation callback from the function descriptor that was
  // used to create the called object.
  DCHECK(obj->map()->has_instance_call_handler());
  JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
  DCHECK(constructor->shared()->IsApiFunction());
  Object* handler =
      constructor->shared()->get_api_func_data()->instance_call_handler();
  DCHECK(!handler->IsUndefined());
  CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
  Object* callback_obj = call_data->callback();
  v8::FunctionCallback callback =
      v8::ToCData<v8::FunctionCallback>(callback_obj);

  // Get the data for the call and perform the callback.
  Object* result;
  {
    HandleScope scope(isolate);
    LOG(isolate, ApiObjectAccess("call non-function", obj));

    FunctionCallbackArguments custom(isolate,
                                     call_data->data(),
                                     constructor,
                                     obj,
                                     &args[0] - 1,
                                     args.length() - 1,
                                     is_construct_call);
    v8::Handle<v8::Value> value = custom.Call(callback);
    // An empty handle means the callback produced no value.
    if (value.IsEmpty()) {
      result = heap->undefined_value();
    } else {
      result = *reinterpret_cast<Object**>(*value);
      result->VerifyApiCallResultType();
    }
  }
  // Check for exceptions and return result.
  RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
  return result;
}
1226
1227
1228 // Handle calls to non-function objects created through the API. This delegate
1229 // function is used when the call is a normal function call.
// Handle calls to non-function objects created through the API. This delegate
// function is used when the call is a normal function call.
BUILTIN(HandleApiCallAsFunction) {
  return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
}
1233
1234
1235 // Handle calls to non-function objects created through the API. This delegate
1236 // function is used when the call is a construct call.
// Handle calls to non-function objects created through the API. This delegate
// function is used when the call is a construct call.
BUILTIN(HandleApiCallAsConstructor) {
  return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
}
1240
1241
// Thin adaptors wrapping the (keyed) load IC code generators in free
// functions with the uniform MacroAssembler* signature expected by the
// builtin function table.
static void Generate_LoadIC_Miss(MacroAssembler* masm) {
  LoadIC::GenerateMiss(masm);
}


static void Generate_LoadIC_Normal(MacroAssembler* masm) {
  LoadIC::GenerateNormal(masm);
}


static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
  NamedLoadHandlerCompiler::GenerateLoadViaGetterForDeopt(masm);
}


static void Generate_LoadIC_Slow(MacroAssembler* masm) {
  LoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
  KeyedLoadIC::GenerateInitialize(masm);
}


static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
  KeyedLoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm);
}


static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
  KeyedLoadIC::GenerateGeneric(masm);
}


static void Generate_KeyedLoadIC_String(MacroAssembler* masm) {
  KeyedLoadIC::GenerateString(masm);
}


static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedLoadIC::GeneratePreMonomorphic(masm);
}
1290
1291
// Thin adaptors wrapping the (keyed) store IC code generators. Note
// that the _Strict variants of Initialize and PreMonomorphic invoke the
// exact same generator as their sloppy counterparts; only the Generic
// variants differ (SLOPPY vs. STRICT argument).
static void Generate_StoreIC_Miss(MacroAssembler* masm) {
  StoreIC::GenerateMiss(masm);
}


static void Generate_StoreIC_Normal(MacroAssembler* masm) {
  StoreIC::GenerateNormal(masm);
}


static void Generate_StoreIC_Slow(MacroAssembler* masm) {
  NamedStoreHandlerCompiler::GenerateSlow(masm);
}


static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
  ElementHandlerCompiler::GenerateStoreSlow(masm);
}


static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
  NamedStoreHandlerCompiler::GenerateStoreViaSetterForDeopt(masm);
}


static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
  KeyedStoreIC::GenerateGeneric(masm, SLOPPY);
}


static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateGeneric(masm, STRICT);
}


static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMiss(masm);
}


static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedStoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedStoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedStoreIC_SloppyArguments(MacroAssembler* masm) {
  KeyedStoreIC::GenerateSloppyArguments(masm);
}
1355
1356
// Thin adaptors wrapping the debugger break-point and LiveEdit code
// generators in DebugCodegen with the uniform MacroAssembler* signature.
static void Generate_CallICStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallICStubDebugBreak(masm);
}


static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateLoadICDebugBreak(masm);
}


static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateStoreICDebugBreak(masm);
}


static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateKeyedLoadICDebugBreak(masm);
}


static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateKeyedStoreICDebugBreak(masm);
}


static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCompareNilICDebugBreak(masm);
}


static void Generate_Return_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateReturnDebugBreak(masm);
}


static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallFunctionStubDebugBreak(masm);
}


static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallConstructStubDebugBreak(masm);
}


static void Generate_CallConstructStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  DebugCodegen::GenerateCallConstructStubRecordDebugBreak(masm);
}


static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateSlotDebugBreak(masm);
}


static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
  DebugCodegen::GeneratePlainReturnLiveEdit(masm);
}


static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
  DebugCodegen::GenerateFrameDropperLiveEdit(masm);
}
1421
1422
Builtins()1423 Builtins::Builtins() : initialized_(false) {
1424 memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
1425 memset(names_, 0, sizeof(names_[0]) * builtin_count);
1426 }
1427
1428
// Nothing to release: the tables are plain arrays embedded in this
// object and the code objects they point to are heap-managed.
Builtins::~Builtins() {
}
1431
1432
// Table of raw entry addresses of the C++ builtin implementations,
// generated from BUILTIN_LIST_C.
#define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
Address const Builtins::c_functions_[cfunction_count] = {
    BUILTIN_LIST_C(DEF_ENUM_C)
};
#undef DEF_ENUM_C
1438
// Parallel tables (id_count entries each) of the JS builtins' names and
// argument counts, generated from BUILTINS_LIST_JS.
#define DEF_JS_NAME(name, ignore) #name,
#define DEF_JS_ARGC(ignore, argc) argc,
const char* const Builtins::javascript_names_[id_count] = {
    BUILTINS_LIST_JS(DEF_JS_NAME)
};

int const Builtins::javascript_argc_[id_count] = {
    BUILTINS_LIST_JS(DEF_JS_ARGC)
};
#undef DEF_JS_NAME
#undef DEF_JS_ARGC
1450
// Static description of one builtin, used to drive code generation in
// Builtins::SetUp().
struct BuiltinDesc {
  byte* generator;       // address of the masm code generator function
  byte* c_code;          // address of the C++ implementation, NULL if none
  const char* s_name;    // name is only used for generating log information.
  int name;              // enum value (c_* or k*) identifying the builtin
  Code::Flags flags;     // flags for the generated Code object
  BuiltinExtraArguments extra_args;  // extra args pushed for C++ builtins
};
1459
#define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }

// Lazily initialized table of BuiltinDesc entries. functions() fills
// the table exactly once (thread-safely via base::CallOnce) by calling
// Builtins::InitBuiltinFunctionTable().
class BuiltinFunctionTable {
 public:
  BuiltinDesc* functions() {
    base::CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
    return functions_;
  }

  base::OnceType once_;
  // One extra slot for the sentinel entry written at index builtin_count.
  BuiltinDesc functions_[Builtins::builtin_count + 1];

  friend class Builtins;
};

static BuiltinFunctionTable builtin_function_table =
    BUILTIN_FUNCTION_TABLE_INIT;
1477
1478 // Define array of pointers to generators and C builtin functions.
1479 // We do this in a sort of roundabout way so that we can do the initialization
1480 // within the lexical scope of Builtins:: and within a context where
1481 // Code::Flags names a non-abstract type.
InitBuiltinFunctionTable()1482 void Builtins::InitBuiltinFunctionTable() {
1483 BuiltinDesc* functions = builtin_function_table.functions_;
1484 functions[builtin_count].generator = NULL;
1485 functions[builtin_count].c_code = NULL;
1486 functions[builtin_count].s_name = NULL;
1487 functions[builtin_count].name = builtin_count;
1488 functions[builtin_count].flags = static_cast<Code::Flags>(0);
1489 functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;
1490
1491 #define DEF_FUNCTION_PTR_C(aname, aextra_args) \
1492 functions->generator = FUNCTION_ADDR(Generate_Adaptor); \
1493 functions->c_code = FUNCTION_ADDR(Builtin_##aname); \
1494 functions->s_name = #aname; \
1495 functions->name = c_##aname; \
1496 functions->flags = Code::ComputeFlags(Code::BUILTIN); \
1497 functions->extra_args = aextra_args; \
1498 ++functions;
1499
1500 #define DEF_FUNCTION_PTR_A(aname, kind, state, extra) \
1501 functions->generator = FUNCTION_ADDR(Generate_##aname); \
1502 functions->c_code = NULL; \
1503 functions->s_name = #aname; \
1504 functions->name = k##aname; \
1505 functions->flags = Code::ComputeFlags(Code::kind, \
1506 state, \
1507 extra); \
1508 functions->extra_args = NO_EXTRA_ARGUMENTS; \
1509 ++functions;
1510
1511 #define DEF_FUNCTION_PTR_H(aname, kind) \
1512 functions->generator = FUNCTION_ADDR(Generate_##aname); \
1513 functions->c_code = NULL; \
1514 functions->s_name = #aname; \
1515 functions->name = k##aname; \
1516 functions->flags = Code::ComputeHandlerFlags(Code::kind); \
1517 functions->extra_args = NO_EXTRA_ARGUMENTS; \
1518 ++functions;
1519
1520 BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
1521 BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
1522 BUILTIN_LIST_H(DEF_FUNCTION_PTR_H)
1523 BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
1524
1525 #undef DEF_FUNCTION_PTR_C
1526 #undef DEF_FUNCTION_PTR_A
1527 }
1528
1529
// Generates (or, when deserializing, reserves slots for) the code
// objects of all builtins and records their names. Must be called once
// per Builtins instance before use; sets initialized_.
void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
  DCHECK(!initialized_);

  // Create a scope for the handles in the builtins.
  HandleScope scope(isolate);

  const BuiltinDesc* functions = builtin_function_table.functions();

  // For now we generate builtin adaptor code into a stack-allocated
  // buffer, before copying it into individual code objects. Be careful
  // with alignment, some platforms don't like unaligned code.
#ifdef DEBUG
  // We can generate a lot of debug code on Arm64.
  const size_t buffer_size = 32*KB;
#else
  const size_t buffer_size = 8*KB;
#endif
  // The union forces suitable alignment of the code buffer.
  union { int force_alignment; byte buffer[buffer_size]; } u;

  // Traverse the list of builtins and generate an adaptor in a
  // separate code object for each one.
  for (int i = 0; i < builtin_count; i++) {
    if (create_heap_objects) {
      MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
      // Generate the code/adaptor.
      typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
      Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
      // We pass all arguments to the generator, but it may not use all of
      // them.  This works because the first arguments are on top of the
      // stack.
      DCHECK(!masm.has_frame());
      g(&masm, functions[i].name, functions[i].extra_args);
      // Move the code into the object heap.
      CodeDesc desc;
      masm.GetCode(&desc);
      Code::Flags flags = functions[i].flags;
      Handle<Code> code =
          isolate->factory()->NewCode(desc, flags, masm.CodeObject());
      // Log the event and add the code to the builtins array.
      PROFILE(isolate,
              CodeCreateEvent(Logger::BUILTIN_TAG, *code, functions[i].s_name));
      builtins_[i] = *code;
      if (code->kind() == Code::BUILTIN) code->set_builtin_index(i);
#ifdef ENABLE_DISASSEMBLER
      if (FLAG_print_builtin_code) {
        CodeTracer::Scope trace_scope(isolate->GetCodeTracer());
        OFStream os(trace_scope.file());
        os << "Builtin: " << functions[i].s_name << "\n";
        code->Disassemble(functions[i].s_name, os);
        os << "\n";
      }
#endif
    } else {
      // Deserializing. The values will be filled in during IterateBuiltins.
      builtins_[i] = NULL;
    }
    names_[i] = functions[i].s_name;
  }

  // Mark as initialized.
  initialized_ = true;
}
1592
1593
// Only flips the flag; the code objects themselves are heap-managed.
void Builtins::TearDown() {
  initialized_ = false;
}
1597
1598
// Visits the pointers to all builtin code objects in the table.
void Builtins::IterateBuiltins(ObjectVisitor* v) {
  v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
}
1602
1603
Lookup(byte * pc)1604 const char* Builtins::Lookup(byte* pc) {
1605 // may be called during initialization (disassembler!)
1606 if (initialized_) {
1607 for (int i = 0; i < builtin_count; i++) {
1608 Code* entry = Code::cast(builtins_[i]);
1609 if (entry->contains(pc)) {
1610 return names_[i];
1611 }
1612 }
1613 }
1614 return NULL;
1615 }
1616
1617
// Tail-calls into the runtime to service a pending interrupt.
void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
}
1621
1622
// Tail-calls into the runtime to handle a stack guard trigger.
void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
}
1626
1627
// Defines the Handle<Code> accessor Builtins::name() for every builtin.
// Each handle wraps the address of the builtin's slot in the builtins_
// table itself (via builtin_address), rather than a handle-scope slot.
#define DEFINE_BUILTIN_ACCESSOR_C(name, ignore)               \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
#define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra)   \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
#define DEFINE_BUILTIN_ACCESSOR_H(name, kind)                 \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
BUILTIN_LIST_H(DEFINE_BUILTIN_ACCESSOR_H)
BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
#undef DEFINE_BUILTIN_ACCESSOR_C
#undef DEFINE_BUILTIN_ACCESSOR_A
1653
1654 } } // namespace v8::internal
1655