// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/api-arguments-inl.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/heap/heap-inl.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/objects/api-callbacks.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

#include "src/mips/code-stubs-mips.h"  // Cannot be the first include.

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Isolate* isolate = masm->isolate();

  {
    NoRootArrayScope no_root_array(masm);

    // Registers:
    // a0: entry address
    // a1: function
    // a2: receiver
    // a3: argc
    //
    // Stack:
    // 4 args slots
    // args

    ProfileEntryHookStub::MaybeCallEntryHook(masm);

    // Save callee saved registers on the stack.
    __ MultiPush(kCalleeSaved | ra.bit());

    // Save callee-saved FPU registers.
    __ MultiPushFPU(kCalleeSavedFPU);
    // Set up the reserved register for 0.0.
    __ Move(kDoubleRegZero, 0.0);

    __ InitializeRootRegister();
  }

  // Load argv in s0 register.
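  // argv is passed on the stack by the C caller (it is the fifth argument in
  // the o32 calling convention), above the registers saved here and the four
  // reserved argument slots.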
  int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
  offset_to_argv += kNumCalleeSavedFPU * kDoubleSize;

  __ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize));

  // We build an EntryFrame.
  __ li(t3, Operand(-1));  // Push a bad frame pointer to fail if it is used.
  StackFrame::Type marker = type();
  __ li(t2, Operand(StackFrame::TypeToMarker(marker)));
  __ li(t1, Operand(StackFrame::TypeToMarker(marker)));
  __ li(t0,
        ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate));
  __ lw(t0, MemOperand(t0));
  __ Push(t3, t2, t1, t0);
  // Set up frame pointer for the frame to be pushed.
  __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);

  // Registers:
  // a0: entry_address
  // a1: function
  // a2: receiver_pointer
  // a3: argc
  // s0: argv
  //
  // Stack:
  // caller fp          |
  // function slot      | entry frame
  // context slot       |
  // bad fp (0xFF...F)  |
  // callee saved registers + ra
  // 4 args slots
  // args

  // If this is the outermost JS call, set js_entry_sp value.
  Label non_outermost_js;
  ExternalReference js_entry_sp =
      ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress, isolate);
  __ li(t1, js_entry_sp);
  __ lw(t2, MemOperand(t1));
  __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
  __ sw(fp, MemOperand(t1));
  __ li(t0, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
  Label cont;
  __ b(&cont);
  __ nop();  // Branch delay slot nop.
  __ bind(&non_outermost_js);
  __ li(t0, Operand(StackFrame::INNER_JSENTRY_FRAME));
  __ bind(&cont);
  __ push(t0);

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
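  // Record the offset of the faked catch handler within the generated code.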
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel. Coming in here the
  // fp will be invalid because the PushStackHandler below sets it to 0 to
  // signal the existence of the JSEntry frame.
  __ li(t0, ExternalReference::Create(
                IsolateAddressId::kPendingExceptionAddress, isolate));
  __ sw(v0, MemOperand(t0));  // We come back from 'invoke'. result is in v0.
  __ LoadRoot(v0, Heap::kExceptionRootIndex);
  __ b(&exit);  // b exposes branch delay slot.
  __ nop();  // Branch delay slot nop.

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();
  // If an exception not caught by another handler occurs, this handler
  // returns control to the code after the bal(&invoke) above, which
  // restores all kCalleeSaved registers (including cp and fp) to their
  // saved values before returning a failure to C.

  // Invoke the function by calling through JS entry trampoline builtin.
  // Notice that we cannot store a reference to the trampoline code directly in
  // this stub, because runtime stubs are not traversed when doing GC.

  // Registers:
  // a0: entry_address
  // a1: function
  // a2: receiver_pointer
  // a3: argc
  // s0: argv
  //
  // Stack:
  // handler frame
  // entry frame
  // callee saved registers + ra
  // 4 args slots
  // args
  __ Call(EntryTrampoline(), RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);  // v0 holds result
  // Check if the current stack frame is marked as the outermost JS frame.
  Label non_outermost_js_2;
  __ pop(t1);
  __ Branch(&non_outermost_js_2, ne, t1,
            Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
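  // Leaving the outermost JS frame: clear the saved JS entry stack pointer.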
  __ li(t1, ExternalReference(js_entry_sp));
  __ sw(zero_reg, MemOperand(t1));
  __ bind(&non_outermost_js_2);

  // Restore the top frame descriptors from the stack.
  __ pop(t1);
  __ li(t0,
        ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate));
  __ sw(t1, MemOperand(t0));

  // Reset the stack to the callee saved registers.
  __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);

  // Restore callee-saved fpu registers.
  __ MultiPopFPU(kCalleeSavedFPU);

  // Restore callee saved registers from the stack.
  __ MultiPop(kCalleeSaved | ra.bit());
  // Return.
  __ Jump(ra);
}

void DirectCEntryStub::Generate(MacroAssembler* masm) {
  // Make room for the arguments required by the C calling convention. Most
  // callers of DirectCEntryStub::GenerateCall use EnterExitFrame/LeaveExitFrame
  // so they handle restoring the stack and we don't have to do that here.
  // Any caller of DirectCEntryStub::GenerateCall must take care of dropping
  // kCArgsSlotsSize stack space after the call.
  __ Subu(sp, sp, Operand(kCArgsSlotsSize));
  // Place the return address on the stack, making the call
  // GC safe. The RegExp backend also relies on this.
  __ sw(ra, MemOperand(sp, kCArgsSlotsSize));
  __ Call(t9);  // Call the C++ function.
  __ lw(t9, MemOperand(sp, kCArgsSlotsSize));

  if (FLAG_debug_code && FLAG_enable_slow_asserts) {
    // In case of an error the return address may point to a memory area
    // filled with kZapValue by the GC.
    // Dereference the address and check for this.
    __ lw(t0, MemOperand(t9));
    __ Assert(ne, AbortReason::kReceivedInvalidReturnAddress, t0,
              Operand(reinterpret_cast<uint32_t>(kZapValue)));
  }
  __ Jump(t9);
}


void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                    Register target) {
  if (FLAG_embedded_builtins) {
    if (masm->root_array_available() &&
        isolate()->ShouldLoadConstantsFromRootList()) {
      // This is basically an inlined version of Call(Handle<Code>) that loads
      // the code object into kScratchReg instead of t9.
      __ Move(t9, target);
      __ IndirectLoadConstant(kScratchReg, GetCode());
      __ Call(kScratchReg, Code::kHeaderSize - kHeapObjectTag);
      return;
    }
  }
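  // Otherwise, keep the C function address in t9 and call through this stub's
  // code object, loaded as an embedded code target with li using CONSTANT_SIZE
  // so the load is a fixed-size instruction sequence.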
  intptr_t loc =
      reinterpret_cast<intptr_t>(GetCode().location());
  __ Move(t9, target);
  __ li(kScratchReg, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE);
  __ Call(kScratchReg);
}


void ProfileEntryHookStub::MaybeCallEntryHookDelayed(TurboAssembler* tasm,
                                                     Zone* zone) {
  if (tasm->isolate()->function_entry_hook() != nullptr) {
    tasm->push(ra);
    tasm->CallStubDelayed(new (zone) ProfileEntryHookStub(nullptr));
    tasm->pop(ra);
  }
}

void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != nullptr) {
    ProfileEntryHookStub stub(masm->isolate());
    __ push(ra);
    __ CallStub(&stub);
    __ pop(ra);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // The entry hook is a "push ra" instruction, followed by a call.
  // Note: on MIPS "push" takes 2 instructions.
  const int32_t kReturnAddressDistanceFromFunctionStart =
      Assembler::kCallTargetAddressOffset + (2 * kInstrSize);

  // This should contain all kJSCallerSaved registers.
  const RegList kSavedRegs =
      kJSCallerSaved |  // Caller saved registers.
      s5.bit();         // Saved stack pointer.

  // We also save ra, so the count here is one higher than the mask indicates.
  const int32_t kNumSavedRegs = kNumJSCallerSaved + 2;

  // Save all caller-save registers as this may be called from anywhere.
  __ MultiPush(kSavedRegs | ra.bit());

  // Compute the function's address for the first argument.
  __ Subu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart));

  // The caller's return address is above the saved temporaries.
  // Grab that for the second argument to the hook.
  __ Addu(a1, sp, Operand(kNumSavedRegs * kPointerSize));

  // Align the stack if necessary.
  int frame_alignment = masm->ActivationFrameAlignment();
  if (frame_alignment > kPointerSize) {
    __ mov(s5, sp);
    DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
    __ And(sp, sp, Operand(-frame_alignment));
  }
  __ Subu(sp, sp, kCArgsSlotsSize);
#if defined(V8_HOST_ARCH_MIPS)
  int32_t entry_hook =
      reinterpret_cast<int32_t>(isolate()->function_entry_hook());
  __ li(t9, Operand(entry_hook));
#else
  // Under the simulator we need to indirect the entry hook through a
  // trampoline function at a known address.
  // It additionally takes an isolate as a third parameter.
  __ li(a2, ExternalReference::isolate_address(isolate()));

  ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
  __ li(t9, ExternalReference::Create(&dispatcher,
                                      ExternalReference::BUILTIN_CALL));
#endif
  // Call the C function through t9 to conform to the ABI for PIC.
  __ Call(t9);

  // Restore the stack pointer if needed.
  if (frame_alignment > kPointerSize) {
    __ mov(sp, s5);
  } else {
    __ Addu(sp, sp, kCArgsSlotsSize);
  }

  // Also pop ra to get Ret(0).
  __ MultiPop(kSavedRegs | ra.bit());
  __ Ret();
}

static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  return ref0.address() - ref1.address();
}


// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call's JS arguments space and
// the additional space allocated for the fast call).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     int stack_space,
                                     int32_t stack_space_offset,
                                     MemOperand return_value_operand) {
  Isolate* isolate = masm->isolate();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  const int kNextOffset = 0;
  const int kLimitOffset = AddressOffset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = AddressOffset(
      ExternalReference::handle_scope_level_address(isolate), next_address);

  DCHECK(function_address == a1 || function_address == a2);

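  // Decide whether to call the target directly or through the profiling
  // thunk, depending on whether the profiler is currently active.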
  Label profiler_disabled;
  Label end_profiler_check;
  __ li(t9, ExternalReference::is_profiling_address(isolate));
  __ lb(t9, MemOperand(t9, 0));
  __ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));

  // Additional parameter is the address of the actual callback.
  __ li(t9, thunk_ref);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  __ mov(t9, function_address);
  __ bind(&end_profiler_check);

  // Allocate HandleScope in callee-save registers.
  __ li(s5, next_address);
  __ lw(s0, MemOperand(s5, kNextOffset));
  __ lw(s1, MemOperand(s5, kLimitOffset));
  __ lw(s2, MemOperand(s5, kLevelOffset));
  __ Addu(s2, s2, Operand(1));
  __ sw(s2, MemOperand(s5, kLevelOffset));

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, a0);
    __ li(a0, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(), 1);
    __ PopSafepointRegisters();
  }

  // Native call returns to the DirectCEntry stub which redirects to the
  // return address pushed on stack (could have moved after GC).
  // DirectCEntry stub itself is generated early and never moves.
  DirectCEntryStub stub(isolate);
  stub.GenerateCall(masm, t9);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, a0);
    __ li(a0, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(), 1);
    __ PopSafepointRegisters();
  }

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label return_value_loaded;

  // Load value from ReturnValue.
  __ lw(v0, return_value_operand);
  __ bind(&return_value_loaded);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ sw(s0, MemOperand(s5, kNextOffset));
  if (__ emit_debug_code()) {
    __ lw(a1, MemOperand(s5, kLevelOffset));
    __ Check(eq, AbortReason::kUnexpectedLevelAfterReturnFromApiCall, a1,
             Operand(s2));
  }
  __ Subu(s2, s2, Operand(1));
  __ sw(s2, MemOperand(s5, kLevelOffset));
  __ lw(kScratchReg, MemOperand(s5, kLimitOffset));
  __ Branch(&delete_allocated_handles, ne, s1, Operand(kScratchReg));

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);

  if (stack_space_offset != kInvalidStackOffset) {
    // ExitFrame contains four MIPS argument slots after DirectCEntryStub call
    // so this must be accounted for.
    __ lw(s0, MemOperand(sp, stack_space_offset + kCArgsSlotsSize));
  } else {
    __ li(s0, Operand(stack_space));
  }
  __ LeaveExitFrame(false, s0, NO_EMIT_RETURN,
                    stack_space_offset != kInvalidStackOffset);

  // Check if the function scheduled an exception.
  __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
  __ li(kScratchReg, ExternalReference::scheduled_exception_address(isolate));
  __ lw(t1, MemOperand(kScratchReg));
  __ Branch(&promote_scheduled_exception, ne, t0, Operand(t1));

  __ Ret();

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ sw(s1, MemOperand(s5, kLimitOffset));
  __ mov(s0, v0);
  __ mov(a0, v0);
  __ PrepareCallCFunction(1, s1);
  __ li(a0, ExternalReference::isolate_address(isolate));
  __ CallCFunction(ExternalReference::delete_handle_scope_extensions(), 1);
  __ mov(v0, s0);
  __ jmp(&leave_exit_frame);
}

void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- t0                  : call_data
  //  -- a2                  : holder
  //  -- a1                  : api_function_address
  //  -- cp                  : context
  //  --
  //  -- sp[0]               : last argument
  //  -- ...
  //  -- sp[(argc - 1) * 4]  : first argument
  //  -- sp[argc * 4]        : receiver
  // -----------------------------------

  Register call_data = t0;
  Register holder = a2;
  Register api_function_address = a1;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  // call data.
  __ Push(call_data);

  Register scratch = call_data;
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  // Push return value and default return value.
  __ Push(scratch, scratch);
  __ li(scratch, ExternalReference::isolate_address(masm->isolate()));
  // Push isolate and holder.
  __ Push(scratch, holder);

  // Prepare arguments.
  __ mov(scratch, sp);

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 3;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  DCHECK(api_function_address != a0 && scratch != a0);
  // a0 = FunctionCallbackInfo&
  // The arguments struct is located after the return address.
  __ Addu(a0, sp, Operand(1 * kPointerSize));
  // FunctionCallbackInfo::implicit_args_
  __ sw(scratch, MemOperand(a0, 0 * kPointerSize));
  // FunctionCallbackInfo::values_
  __ Addu(kScratchReg, scratch,
          Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize));
  __ sw(kScratchReg, MemOperand(a0, 1 * kPointerSize));
  // FunctionCallbackInfo::length_ = argc
  __ li(kScratchReg, Operand(argc()));
  __ sw(kScratchReg, MemOperand(a0, 2 * kPointerSize));

  ExternalReference thunk_ref = ExternalReference::invoke_function_callback();

  AllowExternalCallThatCantCauseGC scope(masm);
  // Stores return the first js argument.
  int return_value_offset = 2 + FCA::kReturnValueOffset;
  MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
  const int stack_space = argc() + FCA::kArgsLength + 1;
  // TODO(adamk): Why are we clobbering this immediately?
  const int32_t stack_space_offset = kInvalidStackOffset;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
                           stack_space_offset, return_value_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = t0;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  Register api_function_address = a2;

  // Here and below +1 is for name() pushed after the args_ array.
  typedef PropertyCallbackArguments PCA;
  __ Subu(sp, sp, (PCA::kArgsLength + 1) * kPointerSize);
  __ sw(receiver, MemOperand(sp, (PCA::kThisIndex + 1) * kPointerSize));
  __ lw(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
  __ sw(scratch, MemOperand(sp, (PCA::kDataIndex + 1) * kPointerSize));
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ sw(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize));
  __ sw(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) *
                                    kPointerSize));
  __ li(scratch, ExternalReference::isolate_address(isolate()));
  __ sw(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize));
  __ sw(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize));
  // should_throw_on_error -> false
  DCHECK_NULL(Smi::kZero);
  __ sw(zero_reg,
        MemOperand(sp, (PCA::kShouldThrowOnErrorIndex + 1) * kPointerSize));
  __ lw(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
  __ sw(scratch, MemOperand(sp, 0 * kPointerSize));

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Load address of v8::PropertyAccessorInfo::args_ array and name handle.
  __ mov(a0, sp);                              // a0 = Handle<Name>
  __ Addu(a1, a0, Operand(1 * kPointerSize));  // a1 = v8::PCI::args_

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  __ sw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Addu(a1, sp, Operand(1 * kPointerSize));  // a1 = v8::PropertyCallbackInfo&

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback();

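  // The AccessorInfo's js_getter field holds a Foreign wrapping the address
  // of the C++ getter; extract that address for the call below.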
  __ lw(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
  __ lw(api_function_address,
        FieldMemOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip prolog, return address and name handle.
  MemOperand return_value_operand(
      fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           kStackUnwindSpace, kInvalidStackOffset,
                           return_value_operand);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS
