1%def header():
2/*
3 * Copyright (C) 2019 The Android Open Source Project
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 *      http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18/*
19 * This is a #include, not a %include, because we want the C pre-processor
20 * to expand the macros into assembler assignment statements.
21 */
22#include "asm_support.h"
23#include "arch/x86_64/asm_support_x86_64.S"
24
25/**
26 * x86_64 ABI general notes:
27 *
28 * Caller save set:
29 *    rax, rdx, rcx, rsi, rdi, r8-r11, st(0)-st(7)
30 * Callee save set:
31 *    rbx, rbp, r12-r15
32 * Return regs:
33 *    32-bit in eax
34 *    64-bit in rax
35 *    fp on xmm0
36 *
 * First 8 fp parameters come in xmm0-xmm7.
 * First 6 non-fp parameters come in rdi, rsi, rdx, rcx, r8, r9.
 * Other parameters are passed on the stack, pushed right-to-left.  On entry to the
 * target, the first stack param is at 8(%rsp).
41 *
42 * Stack must be 16-byte aligned to support SSE in native code.
43 */
44
45#define IN_ARG3        %rcx
46#define IN_ARG2        %rdx
47#define IN_ARG1        %rsi
48#define IN_ARG0        %rdi
49/* Out Args  */
50#define OUT_ARG3       %rcx
51#define OUT_ARG2       %rdx
52#define OUT_ARG1       %rsi
53#define OUT_ARG0       %rdi
54#define OUT_32_ARG3    %ecx
55#define OUT_32_ARG2    %edx
56#define OUT_32_ARG1    %esi
57#define OUT_32_ARG0    %edi
58#define OUT_FP_ARG1    %xmm1
59#define OUT_FP_ARG0    %xmm0
60
61/*
62 * single-purpose registers, given names for clarity
63 */
64#define rSELF    %gs
65#define rPC      %r12
66#define CFI_DEX  12 // DWARF register number of the register holding dex-pc (rPC).
67#define CFI_TMP  5  // DWARF register number of the first argument register (rdi).
68#define rFP      %r13
69#define rINST    %ebx
70#define rINSTq   %rbx
71#define rINSTw   %bx
72#define rINSTbh  %bh
73#define rINSTbl  %bl
74#define rIBASE   %r14
75#define rREFS    %r15
76#define rREFS32  %r15d
77#define CFI_REFS 15 // DWARF register number of the reference array (r15).
78
79// Temporary registers while setting up a frame.
80#define rNEW_FP   %r8
81#define rNEW_REFS %r9
82#define rNEW_REFS32 %r9d
83#define CFI_NEW_REFS 9
84
85/*
86 * Get/set the 32-bit value from a Dalvik register.
87 */
88#define VREG_ADDRESS(_vreg) (rFP,_vreg,4)
89#define VREG_HIGH_ADDRESS(_vreg) 4(rFP,_vreg,4)
90#define VREG_REF_ADDRESS(_vreg) (rREFS,_vreg,4)
91#define VREG_REF_HIGH_ADDRESS(_vreg) 4(rREFS,_vreg,4)
92
93// Includes the return address implictly pushed on stack by 'call'.
94#define CALLEE_SAVES_SIZE (6 * 8 + 4 * 8 + 1 * 8)
95
96// +8 for the ArtMethod of the caller.
97#define OFFSET_TO_FIRST_ARGUMENT_IN_STACK (CALLEE_SAVES_SIZE + 8)
98
99/*
100 * Refresh rINST.
 * On entry to the handler, rINST does not contain the opcode number.
102 * However some utilities require the full value, so this macro
103 * restores the opcode number.
104 */
105.macro REFRESH_INST _opnum
106    movb    rINSTbl, rINSTbh
107    movb    $$\_opnum, rINSTbl
108.endm
109
110/*
111 * Fetch the next instruction from rPC into rINSTw.  Does not advance rPC.
112 */
113.macro FETCH_INST
114    movzwq  (rPC), rINSTq
115.endm
116
117/*
118 * Remove opcode from rINST, compute the address of handler and jump to it.
119 */
120.macro GOTO_NEXT
121    movzx   rINSTbl,%ecx
122    movzbl  rINSTbh,rINST
123    shll    MACRO_LITERAL(${handler_size_bits}), %ecx
124    addq    rIBASE, %rcx
125    jmp     *%rcx
126.endm
127
128/*
129 * Advance rPC by instruction count.
130 */
131.macro ADVANCE_PC _count
132    leaq    2*\_count(rPC), rPC
133.endm
134
135/*
136 * Advance rPC by instruction count, fetch instruction and jump to handler.
137 */
138.macro ADVANCE_PC_FETCH_AND_GOTO_NEXT _count
139    ADVANCE_PC \_count
140    FETCH_INST
141    GOTO_NEXT
142.endm
143
144.macro GET_VREG _reg _vreg
145    movl    VREG_ADDRESS(\_vreg), \_reg
146.endm
147
148.macro GET_VREG_OBJECT _reg _vreg
149    movl    VREG_REF_ADDRESS(\_vreg), \_reg
150.endm
151
152/* Read wide value. */
153.macro GET_WIDE_VREG _reg _vreg
154    movq    VREG_ADDRESS(\_vreg), \_reg
155.endm
156
157.macro SET_VREG _reg _vreg
158    movl    \_reg, VREG_ADDRESS(\_vreg)
159    movl    MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
160.endm
161
162/* Write wide value. reg is clobbered. */
163.macro SET_WIDE_VREG _reg _vreg
164    movq    \_reg, VREG_ADDRESS(\_vreg)
165    xorq    \_reg, \_reg
166    movq    \_reg, VREG_REF_ADDRESS(\_vreg)
167.endm
168
169.macro SET_VREG_OBJECT _reg _vreg
170    movl    \_reg, VREG_ADDRESS(\_vreg)
171    movl    \_reg, VREG_REF_ADDRESS(\_vreg)
172.endm
173
174.macro GET_VREG_HIGH _reg _vreg
175    movl    VREG_HIGH_ADDRESS(\_vreg), \_reg
176.endm
177
178.macro SET_VREG_HIGH _reg _vreg
179    movl    \_reg, VREG_HIGH_ADDRESS(\_vreg)
180    movl    MACRO_LITERAL(0), VREG_REF_HIGH_ADDRESS(\_vreg)
181.endm
182
183.macro CLEAR_REF _vreg
184    movl    MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
185.endm
186
187.macro CLEAR_WIDE_REF _vreg
188    movl    MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
189    movl    MACRO_LITERAL(0), VREG_REF_HIGH_ADDRESS(\_vreg)
190.endm
191
192.macro GET_VREG_XMMs _xmmreg _vreg
193    movss VREG_ADDRESS(\_vreg), \_xmmreg
194.endm
195.macro GET_VREG_XMMd _xmmreg _vreg
196    movsd VREG_ADDRESS(\_vreg), \_xmmreg
197.endm
198.macro SET_VREG_XMMs _xmmreg _vreg
199    movss \_xmmreg, VREG_ADDRESS(\_vreg)
200.endm
201.macro SET_VREG_XMMd _xmmreg _vreg
202    movsd \_xmmreg, VREG_ADDRESS(\_vreg)
203.endm
204
205// An assembly entry for nterp.
206.macro OAT_ENTRY name
207    FUNCTION_TYPE(\name)
208    ASM_HIDDEN SYMBOL(\name)
209    .global SYMBOL(\name)
210    .balign 16
211SYMBOL(\name):
212.endm
213
214.macro ENTRY name
215    .text
216    ASM_HIDDEN SYMBOL(\name)
217    .global SYMBOL(\name)
218    FUNCTION_TYPE(\name)
219SYMBOL(\name):
220.endm
221
222.macro END name
223    SIZE(\name)
224.endm
225
226// Macro for defining entrypoints into runtime. We don't need to save registers
227// (we're not holding references there), but there is no
228// kDontSave runtime method. So just use the kSaveRefsOnly runtime method.
229.macro NTERP_TRAMPOLINE name, helper
230DEFINE_FUNCTION \name
231  SETUP_SAVE_REFS_ONLY_FRAME
232  call \helper
233  RESTORE_SAVE_REFS_ONLY_FRAME
234  cmpq LITERAL(0), %gs:THREAD_EXCEPTION_OFFSET
235  jne nterp_deliver_pending_exception
236  ret
237END_FUNCTION \name
238.endm
239
240.macro CLEAR_VOLATILE_MARKER reg
241  andq MACRO_LITERAL(-2), \reg
242.endm
243
244.macro EXPORT_PC
245    movq    rPC, -16(rREFS)
246.endm
247
248
249.macro BRANCH
250    leaq    (rPC, rINSTq, 2), rPC
251    // Update method counter and do a suspend check if the branch is negative or zero.
252    testq rINSTq, rINSTq
253    jle 3f
2542:  // We use 2 and not 1 for this local label as the users of the BRANCH macro have a 1 label.
255    FETCH_INST
256    GOTO_NEXT
2573:
258    movq (%rsp), %rdi
259    movzwl ART_METHOD_HOTNESS_COUNT_OFFSET(%rdi), %esi
260#if (NTERP_HOTNESS_VALUE != 0)
261#error Expected 0 for hotness value
262#endif
263    // If the counter is at zero, handle this in the runtime.
264    testw %si, %si
265    je NterpHandleHotnessOverflow
266    // Update counter.
267    addl $$-1, %esi
268    movw %si, ART_METHOD_HOTNESS_COUNT_OFFSET(%rdi)
269    DO_SUSPEND_CHECK continue_label=2b
270    jmp 2b
271.endm
272
273// Expects:
274// - r10, and r11 to be available.
275// Outputs:
276// - \registers contains the dex registers size
277// - \outs contains the outs size
278// - if load_ins is 1, \ins contains the ins
// - \code_item is replaced with a pointer to the instructions
280.macro FETCH_CODE_ITEM_INFO code_item, registers, outs, ins, load_ins
281    testq MACRO_LITERAL(1), \code_item
282    je 5f
283    andq $$-2, \code_item  // Remove the extra bit that marks it's a compact dex file.
284    movzwl COMPACT_CODE_ITEM_FIELDS_OFFSET(\code_item), %r10d
285    movl %r10d, \registers
286    sarl $$COMPACT_CODE_ITEM_REGISTERS_SIZE_SHIFT, \registers
287    andl $$0xf, \registers
288    movl %r10d, \outs
289    sarl $$COMPACT_CODE_ITEM_OUTS_SIZE_SHIFT, \outs
290    andl $$0xf, \outs
291    .if \load_ins
292    movl %r10d, \ins
293    sarl $$COMPACT_CODE_ITEM_INS_SIZE_SHIFT, \ins
294    andl $$0xf, \ins
295    .else
296    movl %r10d, %r11d
297    sarl $$COMPACT_CODE_ITEM_INS_SIZE_SHIFT, %r11d
298    andl $$0xf, %r11d
299    addl %r11d, \registers
300    .endif
301    testw $$COMPACT_CODE_ITEM_REGISTERS_INS_OUTS_FLAGS, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
302    je 4f
303    movq \code_item, %r11
304    testw $$COMPACT_CODE_ITEM_INSNS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
305    je 1f
306    subq $$4, %r11
3071:
308    testw $$COMPACT_CODE_ITEM_REGISTERS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
309    je 2f
310    subq $$2, %r11
311    movzwl (%r11), %r10d
312    addl %r10d, \registers
3132:
314    testw $$COMPACT_CODE_ITEM_INS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
315    je 3f
316    subq $$2, %r11
317    movzwl (%r11), %r10d
318    .if \load_ins
319    addl %r10d, \ins
320    .else
321    addl %r10d, \registers
322    .endif
3233:
324    testw $$COMPACT_CODE_ITEM_OUTS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
325    je 4f
326    subq $$2, %r11
327    movzwl (%r11), %r10d
328    addl %r10d, \outs
3294:
330    .if \load_ins
331    addl \ins, \registers
332    .endif
333    addq $$COMPACT_CODE_ITEM_INSNS_OFFSET, \code_item
334    jmp 6f
3355:
336    // Fetch dex register size.
337    movzwl CODE_ITEM_REGISTERS_SIZE_OFFSET(\code_item), \registers
338    // Fetch outs size.
339    movzwl CODE_ITEM_OUTS_SIZE_OFFSET(\code_item), \outs
340    .if \load_ins
341    movzwl CODE_ITEM_INS_SIZE_OFFSET(\code_item), \ins
342    .endif
343    addq $$CODE_ITEM_INSNS_OFFSET, \code_item
3446:
345.endm
346
347// Setup the stack to start executing the method. Expects:
348// - rdi to contain the ArtMethod
349// - rbx, r10, r11 to be available.
350//
351// Outputs
352// - rbx contains the dex registers size
353// - r11 contains the old stack pointer.
// - \code_item is replaced with a pointer to the instructions
355// - if load_ins is 1, r14 contains the ins
356.macro SETUP_STACK_FRAME code_item, refs, refs32, fp, cfi_refs, load_ins
357    FETCH_CODE_ITEM_INFO \code_item, %ebx, \refs32, %r14d, \load_ins
358
359    // Compute required frame size for dex registers: ((2 * ebx) + refs)
360    leaq (\refs, %rbx, 2), %r11
361    salq $$2, %r11
362
363    // Compute new stack pointer in r10: add 24 for saving the previous frame,
364    // pc, and method being executed.
365    leaq -24(%rsp), %r10
366    subq %r11, %r10
367    // Alignment
368    // Note: There may be two pieces of alignment but there is no need to align
369    // out args to `kPointerSize` separately before aligning to kStackAlignment.
370    andq $$-16, %r10
371
372    // Set reference and dex registers, align to pointer size for previous frame and dex pc.
373    leaq 24 + 4(%r10, \refs, 4), \refs
374    andq LITERAL(-__SIZEOF_POINTER__), \refs
375    leaq (\refs, %rbx, 4), \fp
376
377    // Now setup the stack pointer.
378    movq %rsp, %r11
379    CFI_DEF_CFA_REGISTER(r11)
380    movq %r10, %rsp
381    movq %r11, -8(\refs)
382    CFI_DEF_CFA_BREG_PLUS_UCONST \cfi_refs, -8, ((6 + 4 + 1) * 8)
383
384    // Put nulls in reference frame.
385    testl %ebx, %ebx
386    je 2f
387    movq \refs, %r10
3881:
389    movl $$0, (%r10)
390    addq $$4, %r10
391    cmpq %r10, \fp
392    jne 1b
3932:
394    // Save the ArtMethod.
395    movq %rdi, (%rsp)
396.endm
397
398// Puts the next floating point argument into the expected register,
399// fetching values based on a non-range invoke.
400// Uses rax as temporary.
401//
402// TODO: We could simplify a lot of code by loading the G argument into
403// the "inst" register. Given that we enter the handler with "1(rPC)" in
404// the rINST, we can just add rINST<<16 to the args and we don't even
405// need to pass "arg_index" around.
406.macro LOOP_OVER_SHORTY_LOADING_XMMS xmm_reg, inst, shorty, arg_index, finished
4071: // LOOP
408    movb (REG_VAR(shorty)), %al             // bl := *shorty
409    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
410    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
411    je VAR(finished)
412    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
413    je 2f
414    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
415    je 3f
416    shrq MACRO_LITERAL(4), REG_VAR(inst)
417    addq MACRO_LITERAL(1), REG_VAR(arg_index)
418    //  Handle extra argument in arg array taken by a long.
419    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
420    jne 1b
421    shrq MACRO_LITERAL(4), REG_VAR(inst)
422    addq MACRO_LITERAL(1), REG_VAR(arg_index)
423    jmp 1b                        // goto LOOP
4242:  // FOUND_DOUBLE
425    subq MACRO_LITERAL(8), %rsp
426    movq REG_VAR(inst), %rax
427    andq MACRO_LITERAL(0xf), %rax
428    GET_VREG %eax, %rax
429    movl %eax, (%rsp)
430    shrq MACRO_LITERAL(4), REG_VAR(inst)
431    addq MACRO_LITERAL(1), REG_VAR(arg_index)
432    cmpq MACRO_LITERAL(4), REG_VAR(arg_index)
433    je 5f
434    movq REG_VAR(inst), %rax
435    andq MACRO_LITERAL(0xf), %rax
436    shrq MACRO_LITERAL(4), REG_VAR(inst)
437    addq MACRO_LITERAL(1), REG_VAR(arg_index)
438    jmp 6f
4395:
440    movzbl 1(rPC), %eax
441    andq MACRO_LITERAL(0xf), %rax
4426:
443    GET_VREG %eax, %rax
444    movl %eax, 4(%rsp)
445    movsd (%rsp), REG_VAR(xmm_reg)
446    addq MACRO_LITERAL(8), %rsp
447    jmp 4f
4483:  // FOUND_FLOAT
449    cmpq MACRO_LITERAL(4), REG_VAR(arg_index)
450    je 7f
451    movq REG_VAR(inst), %rax
452    andq MACRO_LITERAL(0xf), %rax
453    shrq MACRO_LITERAL(4), REG_VAR(inst)
454    addq MACRO_LITERAL(1), REG_VAR(arg_index)
455    jmp 8f
4567:
457    movzbl 1(rPC), %eax
458    andq MACRO_LITERAL(0xf), %rax
4598:
460    GET_VREG_XMMs REG_VAR(xmm_reg), %rax
4614:
462.endm
463
464// Puts the next int/long/object argument in the expected register,
465// fetching values based on a non-range invoke.
466// Uses rax as temporary.
467.macro LOOP_OVER_SHORTY_LOADING_GPRS gpr_reg64, gpr_reg32, inst, shorty, arg_index, finished
4681: // LOOP
469    movb (REG_VAR(shorty)), %al   // al := *shorty
470    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
471    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
472    je  VAR(finished)
473    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
474    je 2f
475    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
476    je 3f
477    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
478    je 4f
479    cmpq MACRO_LITERAL(4), REG_VAR(arg_index)
480    je 7f
481    movq REG_VAR(inst), %rax
482    andq MACRO_LITERAL(0xf), %rax
483    shrq MACRO_LITERAL(4), REG_VAR(inst)
484    addq MACRO_LITERAL(1), REG_VAR(arg_index)
485    jmp 8f
4867:
487    movzbl 1(rPC), %eax
488    andq MACRO_LITERAL(0xf), %rax
4898:
490    GET_VREG REG_VAR(gpr_reg32), %rax
491    jmp 5f
4922:  // FOUND_LONG
493    subq MACRO_LITERAL(8), %rsp
494    movq REG_VAR(inst), %rax
495    andq MACRO_LITERAL(0xf), %rax
496    GET_VREG %eax, %rax
497    movl %eax, (%rsp)
498    shrq MACRO_LITERAL(4), REG_VAR(inst)
499    addq MACRO_LITERAL(1), REG_VAR(arg_index)
500    cmpq MACRO_LITERAL(4), REG_VAR(arg_index)
501    je 9f
502    movq REG_VAR(inst), %rax
503    andq MACRO_LITERAL(0xf), %rax
504    shrq MACRO_LITERAL(4), REG_VAR(inst)
505    addq MACRO_LITERAL(1), REG_VAR(arg_index)
506    jmp 10f
5079:
508    movzbl 1(rPC), %eax
509    andq MACRO_LITERAL(0xf), %rax
51010:
511    GET_VREG %eax, %rax
512    movl %eax, 4(%rsp)
513    movq (%rsp), REG_VAR(gpr_reg64)
514    addq MACRO_LITERAL(8), %rsp
515    jmp 5f
5163:  // SKIP_FLOAT
517    shrq MACRO_LITERAL(4), REG_VAR(inst)
518    addq MACRO_LITERAL(1), REG_VAR(arg_index)
519    jmp 1b
5204:  // SKIP_DOUBLE
521    shrq MACRO_LITERAL(4), REG_VAR(inst)
522    addq MACRO_LITERAL(1), REG_VAR(arg_index)
523    cmpq MACRO_LITERAL(4), REG_VAR(arg_index)
524    je 1b
525    shrq MACRO_LITERAL(4), REG_VAR(inst)
526    addq MACRO_LITERAL(1), REG_VAR(arg_index)
527    jmp 1b
5285:
529.endm
530
531// Puts the next floating point argument into the expected register,
532// fetching values based on a range invoke.
533// Uses rax as temporary.
534.macro LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm_reg, shorty, arg_index, stack_index, finished
5351: // LOOP
536    movb (REG_VAR(shorty)), %al             // al := *shorty
537    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
538    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
539    je VAR(finished)
540    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
541    je 2f
542    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
543    je 3f
544    addq MACRO_LITERAL(1), REG_VAR(arg_index)
545    addq MACRO_LITERAL(1), REG_VAR(stack_index)
546    //  Handle extra argument in arg array taken by a long.
547    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
548    jne 1b
549    addq MACRO_LITERAL(1), REG_VAR(arg_index)
550    addq MACRO_LITERAL(1), REG_VAR(stack_index)
551    jmp 1b                        // goto LOOP
5522:  // FOUND_DOUBLE
553    GET_VREG_XMMd REG_VAR(xmm_reg), REG_VAR(arg_index)
554    addq MACRO_LITERAL(2), REG_VAR(arg_index)
555    addq MACRO_LITERAL(2), REG_VAR(stack_index)
556    jmp 4f
5573:  // FOUND_FLOAT
558    GET_VREG_XMMs REG_VAR(xmm_reg), REG_VAR(arg_index)
559    addq MACRO_LITERAL(1), REG_VAR(arg_index)
560    addq MACRO_LITERAL(1), REG_VAR(stack_index)
5614:
562.endm
563
564// Puts the next floating point argument into the expected stack slot,
565// fetching values based on a range invoke.
566// Uses rax as temporary.
567//
568// TODO: We could just copy all the vregs to the stack slots in a simple loop
569// (or REP MOVSD) without looking at the shorty at all. (We could also drop
570// the "stack_index" from the macros for loading registers.) We could also do
571// that conditionally if argument word count > 6; otherwise we know that all
572// args fit into registers.
573.macro LOOP_RANGE_OVER_FPs shorty, arg_index, stack_index, finished
5741: // LOOP
575    movb (REG_VAR(shorty)), %al             // bl := *shorty
576    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
577    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
578    je VAR(finished)
579    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
580    je 2f
581    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
582    je 3f
583    addq MACRO_LITERAL(1), REG_VAR(arg_index)
584    addq MACRO_LITERAL(1), REG_VAR(stack_index)
585    //  Handle extra argument in arg array taken by a long.
586    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
587    jne 1b
588    addq MACRO_LITERAL(1), REG_VAR(arg_index)
589    addq MACRO_LITERAL(1), REG_VAR(stack_index)
590    jmp 1b                        // goto LOOP
5912:  // FOUND_DOUBLE
592    movq (rFP, REG_VAR(arg_index), 4), %rax
593    movq %rax, 8(%rsp, REG_VAR(stack_index), 4)
594    addq MACRO_LITERAL(2), REG_VAR(arg_index)
595    addq MACRO_LITERAL(2), REG_VAR(stack_index)
596    jmp 1b
5973:  // FOUND_FLOAT
598    movl (rFP, REG_VAR(arg_index), 4), %eax
599    movl %eax, 8(%rsp, REG_VAR(stack_index), 4)
600    addq MACRO_LITERAL(1), REG_VAR(arg_index)
601    addq MACRO_LITERAL(1), REG_VAR(stack_index)
602    jmp 1b
603.endm
604
605// Puts the next int/long/object argument in the expected register,
606// fetching values based on a range invoke.
607// Uses rax as temporary.
608.macro LOOP_RANGE_OVER_SHORTY_LOADING_GPRS gpr_reg64, gpr_reg32, shorty, arg_index, stack_index, finished
6091: // LOOP
610    movb (REG_VAR(shorty)), %al             // al := *shorty
611    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
612    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
613    je  VAR(finished)
614    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
615    je 2f
616    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
617    je 3f
618    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
619    je 4f
620    movl       (rFP, REG_VAR(arg_index), 4), REG_VAR(gpr_reg32)
621    addq MACRO_LITERAL(1), REG_VAR(arg_index)
622    addq MACRO_LITERAL(1), REG_VAR(stack_index)
623    jmp 5f
6242:  // FOUND_LONG
625    movq (rFP, REG_VAR(arg_index), 4), REG_VAR(gpr_reg64)
626    addq MACRO_LITERAL(2), REG_VAR(arg_index)
627    addq MACRO_LITERAL(2), REG_VAR(stack_index)
628    jmp 5f
6293:  // SKIP_FLOAT
630    addq MACRO_LITERAL(1), REG_VAR(arg_index)
631    addq MACRO_LITERAL(1), REG_VAR(stack_index)
632    jmp 1b
6334:  // SKIP_DOUBLE
634    addq MACRO_LITERAL(2), REG_VAR(arg_index)
635    addq MACRO_LITERAL(2), REG_VAR(stack_index)
636    jmp 1b
6375:
638.endm
639
640// Puts the next int/long/object argument in the expected stack slot,
641// fetching values based on a range invoke.
642// Uses rax as temporary.
643.macro LOOP_RANGE_OVER_INTs shorty, arg_index, stack_index, finished
6441: // LOOP
645    movb (REG_VAR(shorty)), %al             // al := *shorty
646    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
647    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
648    je  VAR(finished)
649    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
650    je 2f
651    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
652    je 3f
653    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
654    je 4f
655    movl (rFP, REG_VAR(arg_index), 4), %eax
656    movl %eax, 8(%rsp, REG_VAR(stack_index), 4)
657    addq MACRO_LITERAL(1), REG_VAR(arg_index)
658    addq MACRO_LITERAL(1), REG_VAR(stack_index)
659    jmp 1b
6602:  // FOUND_LONG
661    movq (rFP, REG_VAR(arg_index), 4), %rax
662    movq %rax, 8(%rsp, REG_VAR(stack_index), 4)
663    addq MACRO_LITERAL(2), REG_VAR(arg_index)
664    addq MACRO_LITERAL(2), REG_VAR(stack_index)
665    jmp 1b
6663:  // SKIP_FLOAT
667    addq MACRO_LITERAL(1), REG_VAR(arg_index)
668    addq MACRO_LITERAL(1), REG_VAR(stack_index)
669    jmp 1b
6704:  // SKIP_DOUBLE
671    addq MACRO_LITERAL(2), REG_VAR(arg_index)
672    addq MACRO_LITERAL(2), REG_VAR(stack_index)
673    jmp 1b
674.endm
675
676// Puts the next floating point parameter passed in physical register
677// in the expected dex register array entry.
678// Uses rax as temporary.
679.macro LOOP_OVER_SHORTY_STORING_XMMS xmm_reg, shorty, arg_index, fp, finished
6801: // LOOP
681    movb (REG_VAR(shorty)), %al             // al := *shorty
682    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
683    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
684    je VAR(finished)
685    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
686    je 2f
687    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
688    je 3f
689    addq MACRO_LITERAL(1), REG_VAR(arg_index)
690    //  Handle extra argument in arg array taken by a long.
691    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
692    jne 1b
693    addq MACRO_LITERAL(1), REG_VAR(arg_index)
694    jmp 1b                        // goto LOOP
6952:  // FOUND_DOUBLE
696    movsd REG_VAR(xmm_reg),(REG_VAR(fp), REG_VAR(arg_index), 4)
697    addq MACRO_LITERAL(2), REG_VAR(arg_index)
698    jmp 4f
6993:  // FOUND_FLOAT
700    movss REG_VAR(xmm_reg), (REG_VAR(fp), REG_VAR(arg_index), 4)
701    addq MACRO_LITERAL(1), REG_VAR(arg_index)
7024:
703.endm
704
705// Puts the next int/long/object parameter passed in physical register
706// in the expected dex register array entry, and in case of object in the
707// expected reference array entry.
708// Uses rax as temporary.
709.macro LOOP_OVER_SHORTY_STORING_GPRS gpr_reg64, gpr_reg32, shorty, arg_index, regs, refs, finished
7101: // LOOP
711    movb (REG_VAR(shorty)), %al             // al := *shorty
712    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
713    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
714    je  VAR(finished)
715    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
716    je 2f
717    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
718    je 3f
719    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
720    je 4f
721    movl REG_VAR(gpr_reg32), (REG_VAR(regs), REG_VAR(arg_index), 4)
722    cmpb MACRO_LITERAL(76), %al   // if (al != 'L') goto NOT_REFERENCE
723    jne 6f
724    movl REG_VAR(gpr_reg32), (REG_VAR(refs), REG_VAR(arg_index), 4)
7256:  // NOT_REFERENCE
726    addq MACRO_LITERAL(1), REG_VAR(arg_index)
727    jmp 5f
7282:  // FOUND_LONG
729    movq REG_VAR(gpr_reg64), (REG_VAR(regs), REG_VAR(arg_index), 4)
730    addq MACRO_LITERAL(2), REG_VAR(arg_index)
731    jmp 5f
7323:  // SKIP_FLOAT
733    addq MACRO_LITERAL(1), REG_VAR(arg_index)
734    jmp 1b
7354:  // SKIP_DOUBLE
736    addq MACRO_LITERAL(2), REG_VAR(arg_index)
737    jmp 1b
7385:
739.endm
740
741// Puts the next floating point parameter passed in stack
742// in the expected dex register array entry.
743// Uses rax as temporary.
744//
745// TODO: Or we could just spill regs to the reserved slots in the caller's
746// frame and copy all regs in a simple loop. This time, however, we would
747// need to look at the shorty anyway to look for the references.
748// (The trade-off is different for passing arguments and receiving them.)
749.macro LOOP_OVER_FPs shorty, arg_index, regs, stack_ptr, finished
7501: // LOOP
751    movb (REG_VAR(shorty)), %al             // al := *shorty
752    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
753    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
754    je VAR(finished)
755    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
756    je 2f
757    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
758    je 3f
759    addq MACRO_LITERAL(1), REG_VAR(arg_index)
760    //  Handle extra argument in arg array taken by a long.
761    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
762    jne 1b
763    addq MACRO_LITERAL(1), REG_VAR(arg_index)
764    jmp 1b                        // goto LOOP
7652:  // FOUND_DOUBLE
766    movq OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %rax
767    movq %rax, (REG_VAR(regs), REG_VAR(arg_index), 4)
768    addq MACRO_LITERAL(2), REG_VAR(arg_index)
769    jmp 1b
7703:  // FOUND_FLOAT
771    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
772    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
773    addq MACRO_LITERAL(1), REG_VAR(arg_index)
774    jmp 1b
775.endm
776
777// Puts the next int/long/object parameter passed in stack
778// in the expected dex register array entry, and in case of object in the
779// expected reference array entry.
780// Uses rax as temporary.
781.macro LOOP_OVER_INTs shorty, arg_index, regs, refs, stack_ptr, finished
7821: // LOOP
783    movb (REG_VAR(shorty)), %al             // al := *shorty
784    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
785    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
786    je  VAR(finished)
787    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
788    je 2f
789    cmpb MACRO_LITERAL(76), %al   // if (al == 'L') goto FOUND_REFERENCE
790    je 6f
791    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
792    je 3f
793    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
794    je 4f
795    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
796    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
797    addq MACRO_LITERAL(1), REG_VAR(arg_index)
798    jmp 1b
7996:  // FOUND_REFERENCE
800    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
801    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
802    movl %eax, (REG_VAR(refs), REG_VAR(arg_index), 4)
803    addq MACRO_LITERAL(1), REG_VAR(arg_index)
804    jmp 1b
8052:  // FOUND_LONG
806    movq OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %rax
807    movq %rax, (REG_VAR(regs), REG_VAR(arg_index), 4)
808    addq MACRO_LITERAL(2), REG_VAR(arg_index)
809    jmp 1b
8103:  // SKIP_FLOAT
811    addq MACRO_LITERAL(1), REG_VAR(arg_index)
812    jmp 1b
8134:  // SKIP_DOUBLE
814    addq MACRO_LITERAL(2), REG_VAR(arg_index)
815    jmp 1b
816.endm
817
818// Increase method hotness and do suspend check before starting executing the method.
819.macro START_EXECUTING_INSTRUCTIONS
820   movq (%rsp), %rdi
821   movzwl ART_METHOD_HOTNESS_COUNT_OFFSET(%rdi), %esi
822#if (NTERP_HOTNESS_VALUE != 0)
823#error Expected 0 for hotness value
824#endif
825   // If the counter is at zero, handle this in the runtime.
826   testl %esi, %esi
827   je 3f
828   // Update counter.
829   addl $$-1, %esi
830   movw %si, ART_METHOD_HOTNESS_COUNT_OFFSET(%rdi)
8311:
832   DO_SUSPEND_CHECK continue_label=2f
8332:
834   FETCH_INST
835   GOTO_NEXT
8363:
837   CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot=4f, if_not_hot=1b
8384:
839   movq $$0, %rsi
840   movq rFP, %rdx
841   call nterp_hot_method
842   jmp 2b
843.endm
844
845.macro SPILL_ALL_CALLEE_SAVES
846    PUSH r15
847    PUSH r14
848    PUSH r13
849    PUSH r12
850    PUSH rbp
851    PUSH rbx
852    SETUP_FP_CALLEE_SAVE_FRAME
853.endm
854
855.macro RESTORE_ALL_CALLEE_SAVES
856    RESTORE_FP_CALLEE_SAVE_FRAME
857    POP rbx
858    POP rbp
859    POP r12
860    POP r13
861    POP r14
862    POP r15
863.endm
864
865// Helper to setup the stack after doing a nterp to nterp call. This will setup:
866// - rNEW_FP: the new pointer to dex registers
867// - rNEW_REFS: the new pointer to references
868// - rPC: the new PC pointer to execute
869// - edi: number of arguments
870// - ecx: first dex register
871//
872// This helper expects:
873// - rax to contain the code item
874.macro SETUP_STACK_FOR_INVOKE
875   // We do the same stack overflow check as the compiler. See CanMethodUseNterp
876   // in how we limit the maximum nterp frame size.
877   testq %rax, -STACK_OVERFLOW_RESERVED_BYTES(%rsp)
878
879   // Spill all callee saves to have a consistent stack frame whether we
880   // are called by compiled code or nterp.
881   SPILL_ALL_CALLEE_SAVES
882
883   // Setup the frame.
884   SETUP_STACK_FRAME %rax, rNEW_REFS, rNEW_REFS32, rNEW_FP, CFI_NEW_REFS, load_ins=0
885   // Make r11 point to the top of the dex register array.
886   leaq (rNEW_FP, %rbx, 4), %r11
887
888   // Fetch instruction information before replacing rPC.
889   movzbl 1(rPC), %edi
890   movzwl 4(rPC), %ecx
891
892   // Set the dex pc pointer.
893   movq %rax, rPC
894   CFI_DEFINE_DEX_PC_WITH_OFFSET(CFI_TMP, CFI_DEX, 0)
895.endm
896
// Setup arguments based on a non-range nterp to nterp call, and start executing
// the method. We expect:
// - rNEW_FP: the new pointer to dex registers
// - rNEW_REFS: the new pointer to references
// - rPC: the new PC pointer to execute
// - edi: number of arguments
// - ecx: first dex register
// - r11: top of dex register array
// - esi: receiver if non-static.
//
// The labels 5..1 below handle the 5th..1st argument respectively; we jump
// into the chain based on the argument count and fall through, storing each
// argument both into the new reference array (via rNEW_FP) and the new
// register array (via r11), at a decreasing slot index kept in r10.
.macro SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE is_static=0, is_string_init=0
   // Now all temporary registers (except r11 containing top of registers array)
   // are available, copy the parameters.
   // /* op vA, vB, {vC...vG} */
   movl %edi, %eax
   shrl $$4, %eax # Number of arguments
   jz 6f  # shrl sets the Z flag
   movq MACRO_LITERAL(-1), %r10
   cmpl MACRO_LITERAL(2), %eax
   jl 1f
   je 2f
   cmpl MACRO_LITERAL(4), %eax
   jl 3f
   je 4f

  // We use a decrementing r10 to store references relative
  // to rNEW_FP and dex registers relative to r11.
  //
  // TODO: We could set up r10 as the number of registers (this can be an additional output from
  // SETUP_STACK_FOR_INVOKE) and then just decrement it by one before copying each arg to
  // (rNEW_FP, r10, 4) and (rNEW_REFS, r10, 4).
  // Maybe even introduce macros NEW_VREG_ADDRESS/NEW_VREG_REF_ADDRESS.
5:
   // Fifth argument: vG, the low nibble of byte 1 (still in rdi).
   andq        MACRO_LITERAL(15), %rdi
   GET_VREG_OBJECT %edx, %rdi
   movl        %edx, (rNEW_FP, %r10, 4)
   GET_VREG    %edx, %rdi
   movl        %edx, (%r11, %r10, 4)
   subq        MACRO_LITERAL(1), %r10
4:
   // Fourth argument: vF, the top nibble of the vreg word in ecx.
   movl        %ecx, %eax
   shrl        MACRO_LITERAL(12), %eax
   GET_VREG_OBJECT %edx, %rax
   movl        %edx, (rNEW_FP, %r10, 4)
   GET_VREG    %edx, %rax
   movl        %edx, (%r11, %r10, 4)
   subq        MACRO_LITERAL(1), %r10
3:
   // Third argument: vE, the third nibble.
   movl        %ecx, %eax
   shrl        MACRO_LITERAL(8), %eax
   andl        MACRO_LITERAL(0xf), %eax
   GET_VREG_OBJECT %edx, %rax
   movl        %edx, (rNEW_FP, %r10, 4)
   GET_VREG    %edx, %rax
   movl        %edx, (%r11, %r10, 4)
   subq        MACRO_LITERAL(1), %r10
2:
   // Second argument: vD, the second nibble.
   movl        %ecx, %eax
   shrl        MACRO_LITERAL(4), %eax
   andl        MACRO_LITERAL(0xf), %eax
   GET_VREG_OBJECT %edx, %rax
   movl        %edx, (rNEW_FP, %r10, 4)
   GET_VREG    %edx, %rax
   movl        %edx, (%r11, %r10, 4)
   subq        MACRO_LITERAL(1), %r10
1:
   // First argument: vC (low nibble), or the receiver already held in esi.
   .if \is_string_init
   // Ignore the first argument
   .elseif \is_static
   movl        %ecx, %eax
   andq        MACRO_LITERAL(0x000f), %rax
   GET_VREG_OBJECT %edx, %rax
   movl        %edx, (rNEW_FP, %r10, 4)
   GET_VREG    %edx, %rax
   movl        %edx, (%r11, %r10, 4)
   .else
   movl        %esi, (rNEW_FP, %r10, 4)
   movl        %esi, (%r11, %r10, 4)
   .endif

6:
   // Start executing the method.
   movq rNEW_FP, rFP
   movq rNEW_REFS, rREFS
   CFI_DEF_CFA_BREG_PLUS_UCONST CFI_REFS, -8, ((6 + 4 + 1) * 8)
   START_EXECUTING_INSTRUCTIONS
.endm
983
// Setup arguments based on a range nterp to nterp call, and start executing
// the method. Range analogue of SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE:
// arguments live in the contiguous vreg range [ecx, ecx + edi) of the
// caller's frame, and are copied last-to-first into the new frame.
.macro SETUP_RANGE_ARGUMENTS_AND_EXECUTE is_static=0, is_string_init=0
   // edi is number of arguments
   // ecx is first register
   // r10 is a decreasing byte offset (used with scale 1) from the top of
   // both destination arrays.
   movq MACRO_LITERAL(-4), %r10
   .if \is_string_init
   // Ignore the first argument
   subl $$1, %edi
   addl $$1, %ecx
   .elseif !\is_static
   subl $$1, %edi
   addl $$1, %ecx
   .endif

   testl %edi, %edi
   je 2f
   leaq  (rREFS, %rcx, 4), %rax  # pointer to first argument in reference array
   leaq  (%rax, %rdi, 4), %rax   # pointer to last argument in reference array
   leaq  (rFP, %rcx, 4), %rcx    # pointer to first argument in register array
   leaq  (%rcx, %rdi, 4), %rdi   # pointer to last argument in register array
   // TODO: Same comment for copying arguments as in SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE.
1:
   // Copy one argument: reference into the new reference array (rNEW_FP
   // base), register value into the new register array (r11 base).
   movl  -4(%rax), %edx
   movl  %edx, (rNEW_FP, %r10, 1)
   movl  -4(%rdi), %edx
   movl  %edx, (%r11, %r10, 1)
   subq  MACRO_LITERAL(4), %r10
   subq  MACRO_LITERAL(4), %rax
   subq  MACRO_LITERAL(4), %rdi
   cmpq  %rcx, %rdi
   jne 1b

2:
   .if \is_string_init
   // Ignore first argument
   .elseif !\is_static
   // Store the receiver (esi) as the first argument.
   movl        %esi, (rNEW_FP, %r10, 1)
   movl        %esi, (%r11, %r10, 1)
   .endif
   movq rNEW_FP, rFP
   movq rNEW_REFS, rREFS
   CFI_DEF_CFA_BREG_PLUS_UCONST CFI_REFS, -8, ((6 + 4 + 1) * 8)
   START_EXECUTING_INSTRUCTIONS
.endm
1029
// Fetch the shorty of the callee into \dest, dispatching on the invoke kind.
// Preserves rdi and rsi across the runtime call; other caller-save
// registers may be clobbered by the called routine.
// After the two pushes below, the ArtMethod* stored at the bottom of the
// nterp frame (0(%rsp) on entry) is found at 16(%rsp).
.macro GET_SHORTY dest, is_interface, is_polymorphic, is_custom
   push %rdi
   push %rsi
   .if \is_polymorphic
   movq 16(%rsp), %rdi
   movq rPC, %rsi
   call SYMBOL(NterpGetShortyFromInvokePolymorphic)
   .elseif \is_custom
   movq 16(%rsp), %rdi
   movq rPC, %rsi
   call SYMBOL(NterpGetShortyFromInvokeCustom)
   .elseif \is_interface
   // Pass the method index from the instruction stream.
   movq 16(%rsp), %rdi
   movzwl 2(rPC), %esi
   call SYMBOL(NterpGetShortyFromMethodId)
   .else
   call SYMBOL(NterpGetShorty)
   .endif
   pop %rsi
   pop %rdi
   movq %rax, \dest
.endm
1052
// Fetch the shorty of the callee into \dest while preserving the argument
// registers (rdi, rsi, rdx, xmm0) that the few-args fast path has already
// populated.
// After the three GPR pushes plus the 8-byte xmm spill slot, the
// ArtMethod* stored at the bottom of the nterp frame (0(%rsp) on entry)
// is found at 32(%rsp).
// Other caller-save registers may be clobbered by the called routine.
//
// Fix: the xmm0 spill/restore used plain `mov`, which gas cannot encode
// between an XMM register and memory; use `movq` (as the xmm spill code
// elsewhere in this file does).
.macro GET_SHORTY_SLOW_PATH dest, is_interface
   // Save all registers that can hold arguments in the fast path.
   push %rdi
   push %rsi
   push %rdx
   subq MACRO_LITERAL(8), %rsp
   movq %xmm0, (%rsp)
   .if \is_interface
   movq 32(%rsp), %rdi
   movzwl 2(rPC), %esi
   call SYMBOL(NterpGetShortyFromMethodId)
   .else
   call SYMBOL(NterpGetShorty)
   .endif
   movq (%rsp), %xmm0
   addq MACRO_LITERAL(8), %rsp
   pop %rdx
   pop %rsi
   pop %rdi
   movq %rax, \dest
.endm
1074
// Uses r9 as temporary.
// Branch to \call_compiled_code unless the callee's quick entry point is
// nterp itself (ExecuteNterpImpl). On fall-through (nterp-to-nterp call),
// load the callee's data field (the code item) into rax.
.macro DO_ENTRY_POINT_CHECK call_compiled_code
   // On entry, the method is %rdi, the instance is %rsi
   leaq ExecuteNterpImpl(%rip), %r9
   cmpq %r9, ART_METHOD_QUICK_CODE_OFFSET_64(%rdi)
   jne  VAR(call_compiled_code)

   movq ART_METHOD_DATA_OFFSET_64(%rdi), %rax
.endm
1084
// Uses r9 and r10 as temporary
// After a string.init call, replace every occurrence of \old_value (the
// "uninitialized" string reference) in the current frame with \new_value
// (the constructed string). r9 walks the reference array from rREFS and
// r10 walks the register array from rFP in lockstep, 4 bytes at a time.
// NOTE(review): the loop terminates when r9 reaches rFP, i.e. it relies on
// the reference array ending exactly where the register array begins —
// confirm against the SETUP_STACK_FRAME layout.
.macro UPDATE_REGISTERS_FOR_STRING_INIT old_value, new_value
   movq rREFS, %r9
   movq rFP, %r10
1:
   cmpl (%r9), \old_value
   jne 2f
   movl \new_value, (%r9)
   movl \new_value, (%r10)
2:
   addq $$4, %r9
   addq $$4, %r10
   cmpq %r9, rFP
   jne 1b
.endm
1100
// Common body for the non-range invoke opcodes.
// On entry:
// - rdi: target ArtMethod*
// - rsi: receiver, for instance invokes
// - rax: interface method (saved in xmm12 for conflict resolution) when
//        is_interface.
// Dispatch: nterp-to-nterp call when the callee is itself executed by
// nterp; otherwise compiled-code paths, with fast paths that avoid
// fetching the shorty when arguments can be passed in core registers.
.macro COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, suffix="", is_string_init=0, is_polymorphic=0, is_custom=0
   .if \is_polymorphic
   // We always go to compiled code for polymorphic calls.
   .elseif \is_custom
   // We always go to compiled code for custom calls.
   .else
     DO_ENTRY_POINT_CHECK .Lcall_compiled_code_\suffix
     .if \is_string_init
     call nterp_to_nterp_string_init_non_range
     .elseif \is_static
     call nterp_to_nterp_static_non_range
     .else
     call nterp_to_nterp_instance_non_range
     .endif
     jmp .Ldone_return_\suffix
   .endif

.Lcall_compiled_code_\suffix:
   .if \is_polymorphic
   // No fast path for polymorphic calls.
   .elseif \is_custom
   // No fast path for custom calls.
   .elseif \is_string_init
   // No fast path for string.init.
   .else
     // Methods with the fast-path flag take all arguments in core
     // registers; otherwise try the few-args fast path below.
     testl $$ART_METHOD_NTERP_INVOKE_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
     je .Lfast_path_with_few_args_\suffix
     movzbl 1(rPC), %r9d
     movl %r9d, %ebp
     shrl MACRO_LITERAL(4), %ebp # Number of arguments
     .if \is_static
     jz .Linvoke_fast_path_\suffix  # shrl sets the Z flag
     .else
     cmpl MACRO_LITERAL(1), %ebp
     je .Linvoke_fast_path_\suffix
     .endif
     movzwl 4(rPC), %r11d
     cmpl MACRO_LITERAL(2), %ebp
     .if \is_static
     jl .Lone_arg_fast_path_\suffix
     .endif
     je .Ltwo_args_fast_path_\suffix
     cmpl MACRO_LITERAL(4), %ebp
     jl .Lthree_args_fast_path_\suffix
     je .Lfour_args_fast_path_\suffix

     // Five arguments: vG is the low nibble of byte 1 (r9d); then fall
     // through the cases below for the remaining nibbles of r11d.
     andl        MACRO_LITERAL(0xf), %r9d
     GET_VREG    %r9d, %r9
.Lfour_args_fast_path_\suffix:
     movl        %r11d, %r8d
     shrl        MACRO_LITERAL(12), %r8d
     GET_VREG    %r8d, %r8
.Lthree_args_fast_path_\suffix:
     movl        %r11d, %ecx
     shrl        MACRO_LITERAL(8), %ecx
     andl        MACRO_LITERAL(0xf), %ecx
     GET_VREG    %ecx, %rcx
.Ltwo_args_fast_path_\suffix:
     movl        %r11d, %edx
     shrl        MACRO_LITERAL(4), %edx
     andl        MACRO_LITERAL(0xf), %edx
     GET_VREG    %edx, %rdx
.Lone_arg_fast_path_\suffix:
     .if \is_static
     andl        MACRO_LITERAL(0xf), %r11d
     GET_VREG    %esi, %r11
     .else
     // First argument already in %esi.
     .endif
.Linvoke_fast_path_\suffix:
     call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3

.Lfast_path_with_few_args_\suffix:
     // Fast path when we have zero or one argument (modulo 'this'). If there
     // is one argument, we can put it in both floating point and core register.
     movzbl 1(rPC), %r9d
     shrl MACRO_LITERAL(4), %r9d # Number of arguments
     .if \is_static
     cmpl MACRO_LITERAL(1), %r9d
     jl .Linvoke_with_few_args_\suffix
     jne .Lget_shorty_\suffix
     movzwl 4(rPC), %r9d
     andl MACRO_LITERAL(0xf), %r9d  // dex register of first argument
     GET_VREG %esi, %r9
     movd %esi, %xmm0
     .else
     cmpl MACRO_LITERAL(2), %r9d
     jl .Linvoke_with_few_args_\suffix
     jne .Lget_shorty_\suffix
     movzwl 4(rPC), %r9d
     shrl MACRO_LITERAL(4), %r9d
     andl MACRO_LITERAL(0xf), %r9d  // dex register of second argument
     GET_VREG %edx, %r9
     movd %edx, %xmm0
     .endif
.Linvoke_with_few_args_\suffix:
     // Check if the next instruction is move-result or move-result-wide.
     // If it is, we fetch the shorty and jump to the regular invocation.
     movzwq  6(rPC), %r9
     andl MACRO_LITERAL(0xfe), %r9d
     cmpl MACRO_LITERAL(0x0a), %r9d
     je .Lget_shorty_and_invoke_\suffix
     call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
.Lget_shorty_and_invoke_\suffix:
     .if \is_interface
     // Save interface method, used for conflict resolution, in a callee-save register.
     movq %rax, %xmm12
     .endif
     GET_SHORTY_SLOW_PATH rINSTq, \is_interface
     jmp .Lgpr_setup_finished_\suffix
   .endif

.Lget_shorty_\suffix:
   .if \is_interface
   // Save interface method, used for conflict resolution, in a callee-save register.
   movq %rax, %xmm12
   .endif
   GET_SHORTY rINSTq, \is_interface, \is_polymorphic, \is_custom
   // From this point:
   // - rINSTq contains shorty (in callee-save to switch over return value after call).
   // - rdi contains method
   // - rsi contains 'this' pointer for instance method.
   leaq 1(rINSTq), %r9  // shorty + 1  ; ie skip return arg character
   movzwl 4(rPC), %r11d // arguments
   .if \is_string_init
   shrq MACRO_LITERAL(4), %r11
   movq $$1, %r10       // ignore first argument
   .elseif \is_static
   movq $$0, %r10       // arg_index
   .else
   shrq MACRO_LITERAL(4), %r11
   movq $$1, %r10       // arg_index
   .endif
   // Load floating-point arguments into xmm0-xmm4 as directed by the shorty.
   LOOP_OVER_SHORTY_LOADING_XMMS xmm0, r11, r9, r10, .Lxmm_setup_finished_\suffix
   LOOP_OVER_SHORTY_LOADING_XMMS xmm1, r11, r9, r10, .Lxmm_setup_finished_\suffix
   LOOP_OVER_SHORTY_LOADING_XMMS xmm2, r11, r9, r10, .Lxmm_setup_finished_\suffix
   LOOP_OVER_SHORTY_LOADING_XMMS xmm3, r11, r9, r10, .Lxmm_setup_finished_\suffix
   LOOP_OVER_SHORTY_LOADING_XMMS xmm4, r11, r9, r10, .Lxmm_setup_finished_\suffix
.Lxmm_setup_finished_\suffix:
   // Second pass over the shorty for the integer/reference arguments.
   leaq 1(rINSTq), %r9  // shorty + 1  ; ie skip return arg character
   movzwl 4(rPC), %r11d // arguments
   .if \is_string_init
   movq $$1, %r10       // ignore first argument
   shrq MACRO_LITERAL(4), %r11
   LOOP_OVER_SHORTY_LOADING_GPRS rsi, esi, r11, r9, r10, .Lgpr_setup_finished_\suffix
   .elseif \is_static
   movq $$0, %r10       // arg_index
   LOOP_OVER_SHORTY_LOADING_GPRS rsi, esi, r11, r9, r10, .Lgpr_setup_finished_\suffix
   .else
   shrq MACRO_LITERAL(4), %r11
   movq $$1, %r10       // arg_index
   .endif
   LOOP_OVER_SHORTY_LOADING_GPRS rdx, edx, r11, r9, r10, .Lgpr_setup_finished_\suffix
   LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, r11, r9, r10, .Lgpr_setup_finished_\suffix
   LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, r11, r9, r10, .Lgpr_setup_finished_\suffix
   LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, r11, r9, r10, .Lgpr_setup_finished_\suffix
.Lgpr_setup_finished_\suffix:
   .if \is_polymorphic
   call SYMBOL(art_quick_invoke_polymorphic)
   .elseif \is_custom
   call SYMBOL(art_quick_invoke_custom)
   .else
      .if \is_interface
      // Restore the hidden argument (interface method) for conflict resolution.
      movq %xmm12, %rax
      .endif
      call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
   .endif
   // Normalize floating-point results into rax, so the common return path
   // only needs to look at rax.
   cmpb LITERAL(68), (rINSTq)       // Test if result type char == 'D'.
   je .Lreturn_double_\suffix
   cmpb LITERAL(70), (rINSTq)       // Test if result type char == 'F'.
   jne .Ldone_return_\suffix
.Lreturn_float_\suffix:
   movd %xmm0, %eax
   jmp .Ldone_return_\suffix
.Lreturn_double_\suffix:
   movq %xmm0, %rax
.Ldone_return_\suffix:
   /* resume execution of caller */
   .if \is_string_init
   // Propagate the constructed string into all vregs holding the
   // uninitialized reference.
   movzwl 4(rPC), %r11d // arguments
   andq $$0xf, %r11
   GET_VREG %esi, %r11
   UPDATE_REGISTERS_FOR_STRING_INIT %esi, %eax
   .endif

   .if \is_polymorphic
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 4
   .else
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
   .endif
.endm
1294
// Common body for the range invoke opcodes; range analogue of
// COMMON_INVOKE_NON_RANGE. Arguments live in the contiguous vreg range
// starting at the vreg index read from 4(rPC); arguments beyond the fifth
// are copied to the outgoing stack area.
.macro COMMON_INVOKE_RANGE is_static=0, is_interface=0, suffix="", is_string_init=0, is_polymorphic=0, is_custom=0
   .if \is_polymorphic
   // We always go to compiled code for polymorphic calls.
   .elseif \is_custom
   // We always go to compiled code for custom calls.
   .else
     DO_ENTRY_POINT_CHECK .Lcall_compiled_code_range_\suffix
     .if \is_string_init
     call nterp_to_nterp_string_init_range
     .elseif \is_static
     call nterp_to_nterp_static_range
     .else
     call nterp_to_nterp_instance_range
     .endif
     jmp .Ldone_return_range_\suffix
   .endif

.Lcall_compiled_code_range_\suffix:
   .if \is_polymorphic
   // No fast path for polymorphic calls.
   .elseif \is_custom
   // No fast path for custom calls.
   .elseif \is_string_init
   // No fast path for string.init.
   .else
     // Methods with the fast-path flag take all arguments in core
     // registers (plus stack); otherwise try the few-args path below.
     testl $$ART_METHOD_NTERP_INVOKE_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
     je .Lfast_path_with_few_args_range_\suffix
     movzbl 1(rPC), %r9d  // number of arguments
     .if \is_static
     testl %r9d, %r9d
     je .Linvoke_fast_path_range_\suffix
     .else
     cmpl MACRO_LITERAL(1), %r9d
     je .Linvoke_fast_path_range_\suffix
     .endif
     movzwl 4(rPC), %r11d  // dex register of first argument
     leaq (rFP, %r11, 4), %r11  // location of first dex register value
     cmpl MACRO_LITERAL(2), %r9d
     .if \is_static
     jl .Lone_arg_fast_path_range_\suffix
     .endif
     je .Ltwo_args_fast_path_range_\suffix
     cmp MACRO_LITERAL(4), %r9d
     jl .Lthree_args_fast_path_range_\suffix
     je .Lfour_args_fast_path_range_\suffix
     cmp MACRO_LITERAL(5), %r9d
     je .Lfive_args_fast_path_range_\suffix

.Lloop_over_fast_path_range_\suffix:
     // More than five arguments: copy args [5, r9d) onto the stack,
     // counting down until only the five register-passed args remain.
     subl MACRO_LITERAL(1), %r9d
     movl (%r11, %r9, 4), %r8d
     movl %r8d, 8(%rsp, %r9, 4)  // Add 8 for the ArtMethod
     cmpl MACRO_LITERAL(5), %r9d
     jne .Lloop_over_fast_path_range_\suffix

.Lfive_args_fast_path_range_\suffix:
     movl 16(%r11), %r9d
.Lfour_args_fast_path_range_\suffix:
     movl 12(%r11), %r8d
.Lthree_args_fast_path_range_\suffix:
     movl 8(%r11), %ecx
.Ltwo_args_fast_path_range_\suffix:
     movl 4(%r11), %edx
.Lone_arg_fast_path_range_\suffix:
     .if \is_static
     movl 0(%r11), %esi
     .else
     // First argument already in %esi.
     .endif
.Linvoke_fast_path_range_\suffix:
     call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3

.Lfast_path_with_few_args_range_\suffix:
     // Fast path when we have zero or one argument (modulo 'this'). If there
     // is one argument, we can put it in both floating point and core register.
     movzbl 1(rPC), %r9d # Number of arguments
     .if \is_static
     cmpl MACRO_LITERAL(1), %r9d
     jl .Linvoke_with_few_args_range_\suffix
     jne .Lget_shorty_range_\suffix
     movzwl 4(rPC), %r9d  // Dex register of first argument
     GET_VREG %esi, %r9
     movd %esi, %xmm0
     .else
     cmpl MACRO_LITERAL(2), %r9d
     jl .Linvoke_with_few_args_range_\suffix
     jne .Lget_shorty_range_\suffix
     movzwl 4(rPC), %r9d
     addl MACRO_LITERAL(1), %r9d  // dex register of second argument
     GET_VREG %edx, %r9
     movd %edx, %xmm0
     .endif
.Linvoke_with_few_args_range_\suffix:
     // Check if the next instruction is move-result or move-result-wide.
     // If it is, we fetch the shorty and jump to the regular invocation.
     movzwq  6(rPC), %r9
     and MACRO_LITERAL(0xfe), %r9d
     cmpl MACRO_LITERAL(0x0a), %r9d
     je .Lget_shorty_and_invoke_range_\suffix
     call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
.Lget_shorty_and_invoke_range_\suffix:
     .if \is_interface
     // Save interface method, used for conflict resolution, in a callee-save register.
     movq %rax, %xmm12
     .endif
     GET_SHORTY_SLOW_PATH rINSTq, \is_interface
     jmp .Lgpr_setup_finished_range_\suffix
   .endif

.Lget_shorty_range_\suffix:
   .if \is_interface
   // Save interface method, used for conflict resolution, in a callee-saved register.
   movq %rax, %xmm12
   .endif
   GET_SHORTY rINSTq, \is_interface, \is_polymorphic, \is_custom
   // From this point:
   // - rINSTq contains shorty (in callee-save to switch over return value after call).
   // - rdi contains method
   // - rsi contains 'this' pointer for instance method.
   leaq 1(rINSTq), %r9  // shorty + 1  ; ie skip return arg character
   movzwl 4(rPC), %r10d // arg start index
   .if \is_string_init
   addq $$1, %r10       // arg start index
   movq $$1, %rbp       // index in stack
   .elseif \is_static
   movq $$0, %rbp       // index in stack
   .else
   addq $$1, %r10       // arg start index
   movq $$1, %rbp       // index in stack
   .endif
   // Load floating-point arguments (xmm0-xmm7, then stack) per the shorty.
   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm0, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm1, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm2, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm3, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm4, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm5, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm6, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm7, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
   LOOP_RANGE_OVER_FPs r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
.Lxmm_setup_finished_range_\suffix:
   // Second pass for integer/reference arguments (%rbx aliases rINSTq,
   // which holds the shorty fetched above).
   leaq 1(%rbx), %r11  // shorty + 1  ; ie skip return arg character
   movzwl 4(rPC), %r10d // arg start index
   .if \is_string_init
   addq $$1, %r10       // arg start index
   movq $$1, %rbp       // index in stack
   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS rsi, esi, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
   .elseif \is_static
   movq $$0, %rbp // index in stack
   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS rsi, esi, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
   .else
   addq $$1, %r10       // arg start index
   movq $$1, %rbp // index in stack
   .endif
   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS rdx, edx, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS rcx, ecx, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS r8, r8d, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS r9, r9d, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
   LOOP_RANGE_OVER_INTs r11, r10, rbp, .Lgpr_setup_finished_range_\suffix

.Lgpr_setup_finished_range_\suffix:
   .if \is_polymorphic
   call SYMBOL(art_quick_invoke_polymorphic)
   .elseif \is_custom
   call SYMBOL(art_quick_invoke_custom)
   .else
     .if \is_interface
     // Set the hidden argument for conflict resolution.
     movq %xmm12, %rax
     .endif
     call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
   .endif
   // Normalize floating-point results into rax.
   cmpb LITERAL(68), (%rbx)       // Test if result type char == 'D'.
   je .Lreturn_range_double_\suffix
   cmpb LITERAL(70), (%rbx)       // Test if result type char == 'F'.
   je .Lreturn_range_float_\suffix
   /* resume execution of caller */
.Ldone_return_range_\suffix:
   .if \is_string_init
   // Propagate the constructed string into all vregs holding the
   // uninitialized reference.
   movzwl 4(rPC), %r11d // arguments
   GET_VREG %esi, %r11
   UPDATE_REGISTERS_FOR_STRING_INIT %esi, %eax
   .endif

   .if \is_polymorphic
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 4
   .else
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
   .endif
.Lreturn_range_double_\suffix:
    movq %xmm0, %rax
    jmp .Ldone_return_range_\suffix
.Lreturn_range_float_\suffix:
    movd %xmm0, %eax
    jmp .Ldone_return_range_\suffix
.endm
1492
// Helper for static field get.
// Fast path: the ArtField* comes from the thread-local cache; the slow
// path resolves it via nterp_get_static_field. The declaring class is
// loaded into a 32-bit register (heap references are 32-bit here, hence
// the 32-bit base in the addressing below) and routed through a read
// barrier when marking is active.
.macro OP_SGET load="movl", wide="0"
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%rax", miss_label="2f")
1:
   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx
   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax
   // Non-null entry point means the GC is marking: mark the holder first.
   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 3f
4:
   .if \wide
   movq (%eax,%edx,1), %rax
   SET_WIDE_VREG %rax, rINSTq              # fp[A] <- value
   .else
   \load (%eax, %edx, 1), %eax
   SET_VREG %eax, rINSTq            # fp[A] <- value
   .endif
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   // Slow path: resolve the field (args: self, current method from the
   // bottom of the frame, dex pc).
   EXPORT_PC
   movq rSELF:THREAD_SELF_OFFSET, %rdi
   movq 0(%rsp), %rsi
   movq rPC, %rdx
   movq $$0, %rcx
   call nterp_get_static_field
   // Clear the marker that we put for volatile fields. The x86 memory
   // model doesn't require a barrier.
   andq $$-2, %rax
   jmp 1b
3:
   call art_quick_read_barrier_mark_reg00
   jmp 4b
.endm
1526
// Helper for static field put.
// Fast path mirrors OP_SGET. The slow path inspects bit 0 of the returned
// ArtField* (the volatile marker): for volatile fields the store is
// followed by `lock addl $0, (%rsp)`, which serves as a full fence on x86.
// NOTE(review): the trailing ':' after wide="0" looks unusual but is
// shared by the other put macros below — confirm gas accepts it.
.macro OP_SPUT rINST_reg="rINST", store="movl", wide="0":
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%rax", miss_label="2f")
1:
   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx
   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax
   // Non-null entry point means the GC is marking: mark the holder first.
   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 3f
4:
   .if \wide
   GET_WIDE_VREG rINSTq, rINSTq           # rINST <- v[A]
   .else
   GET_VREG rINST, rINSTq                  # rINST <- v[A]
   .endif
   \store    \rINST_reg, (%rax,%rdx,1)
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   EXPORT_PC
   movq rSELF:THREAD_SELF_OFFSET, %rdi
   movq 0(%rsp), %rsi
   movq rPC, %rdx
   movq $$0, %rcx
   call nterp_get_static_field
   // Marker bit clear: non-volatile field, retry the fast-path store.
   testq MACRO_LITERAL(1), %rax
   je 1b
   // Clear the marker that we put for volatile fields. The x86 memory
   // model doesn't require a barrier.
   CLEAR_VOLATILE_MARKER %rax
   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx
   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax
   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 6f
5:
   .if \wide
   GET_WIDE_VREG rINSTq, rINSTq           # rINST <- v[A]
   .else
   GET_VREG rINST, rINSTq                  # rINST <- v[A]
   .endif
   \store    \rINST_reg, (%rax,%rdx,1)
   // Volatile store: full fence.
   lock addl $$0, (%rsp)
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
3:
   call art_quick_read_barrier_mark_reg00
   jmp 4b
6:
   call art_quick_read_barrier_mark_reg00
   jmp 5b
.endm
1576
1577
// Shared store sequence for instance field put.
// Expects the field byte offset in rax. Loads the receiver from vB,
// branching to common_errNullObject on null, then stores v[A] (or the
// wide pair) at (receiver + offset).
.macro OP_IPUT_INTERNAL rINST_reg="rINST", store="movl", wide="0":
   movzbq  rINSTbl, %rcx                   # rcx <- BA
   sarl    $$4, %ecx                       # ecx <- B
   GET_VREG %ecx, %rcx                     # vB (object we're operating on)
   testl   %ecx, %ecx                      # is object null?
   je      common_errNullObject
   andb    $$0xf, rINSTbl                  # rINST <- A
   .if \wide
   GET_WIDE_VREG rINSTq, rINSTq              # rax<- fp[A]/fp[A+1]
   .else
   GET_VREG rINST, rINSTq                  # rINST <- v[A]
   .endif
   \store \rINST_reg, (%rcx,%rax,1)
.endm
1592
// Helper for instance field put.
// nterp_get_instance_field_offset returns the field offset in eax; a
// negative value flags a volatile field, in which case the offset is
// negated back and the store is followed by a full fence
// (`lock addl $0, (%rsp)`).
.macro OP_IPUT rINST_reg="rINST", store="movl", wide="0":
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%rax", miss_label="2f")
1:
   OP_IPUT_INTERNAL \rINST_reg, \store, \wide
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   // Slow path: resolve the field offset (args: self, current method from
   // the bottom of the frame, dex pc).
   EXPORT_PC
   movq rSELF:THREAD_SELF_OFFSET, %rdi
   movq 0(%rsp), %rsi
   movq rPC, %rdx
   movq $$0, %rcx
   call nterp_get_instance_field_offset
   // Negative offset means volatile field.
   testl %eax, %eax
   jns 1b
   negl %eax
   OP_IPUT_INTERNAL \rINST_reg, \store, \wide
   lock addl $$0, (%rsp)
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
.endm
1614
// Helper for instance field get.
// Expects the field byte offset in rax (from the cache or the slow path;
// a negative value flags a volatile field and is negated back — no extra
// barrier is needed for a volatile load on x86).
.macro OP_IGET load="movl", wide="0"
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%rax", miss_label="2f")
1:
   movl    rINST, %ecx                     # rcx <- BA
   sarl    $$4, %ecx                       # ecx <- B
   GET_VREG %ecx, %rcx                     # vB (object we're operating on)
   testl   %ecx, %ecx                      # is object null?
   je      common_errNullObject
   andb    $$0xf,rINSTbl                   # rINST <- A
   .if \wide
   movq (%rcx,%rax,1), %rax
   SET_WIDE_VREG %rax, rINSTq              # fp[A] <- value
   .else
   \load (%rcx,%rax,1), %eax
   SET_VREG %eax, rINSTq                   # fp[A] <- value
   .endif
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   // Slow path: resolve the field offset (args: self, current method from
   // the bottom of the frame, dex pc).
   EXPORT_PC
   movq rSELF:THREAD_SELF_OFFSET, %rdi
   movq 0(%rsp), %rsi
   movq rPC, %rdx
   movq $$0, %rcx
   call nterp_get_instance_field_offset
   // Negative offset means volatile field; undo the sign and load.
   testl %eax, %eax
   jns 1b
   negl %eax
   jmp 1b
.endm
1646
// Store one 32-bit reference parameter from \gpr32 into both the register
// array (\regs) and the reference array (\refs) at byte offset
// \arg_offset, then advance the offset and decrement the remaining-ins
// count, branching to \finished once all ins are consumed.
.macro SETUP_REFERENCE_PARAMETER_IN_GPR gpr32, regs, refs, ins, arg_offset, finished
    movl REG_VAR(gpr32), (REG_VAR(regs), REG_VAR(arg_offset))
    movl REG_VAR(gpr32), (REG_VAR(refs), REG_VAR(arg_offset))
    addq MACRO_LITERAL(4), REG_VAR(arg_offset)
    subl MACRO_LITERAL(1), REG_VAR(ins)
    je \finished
.endm
1654
// Uses eax as temporary
// Copy the \ins remaining 32-bit reference parameters from the caller's
// outgoing stack area (OFFSET_TO_FIRST_ARGUMENT_IN_STACK from \stack_ptr)
// into both the register array and the reference array. The loop body
// runs at least once, so \ins must be non-zero on entry.
.macro SETUP_REFERENCE_PARAMETERS_IN_STACK regs, refs, ins, stack_ptr, arg_offset
1:
    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_offset)), %eax
    movl %eax, (REG_VAR(regs), REG_VAR(arg_offset))
    movl %eax, (REG_VAR(refs), REG_VAR(arg_offset))
    addq MACRO_LITERAL(4), REG_VAR(arg_offset)
    subl MACRO_LITERAL(1), REG_VAR(ins)
    jne 1b
.endm
1665
// Hotness bookkeeping for methods whose ArtMethod lives in shared memory:
// such methods use a per-thread shared hotness counter instead.
// Branches to \if_hot when the method is not memory-shared or the shared
// counter has reached zero; otherwise decrements the counter and branches
// to \if_not_hot. Clobbers esi and flags.
.macro CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot, if_not_hot
    testl $$ART_METHOD_IS_MEMORY_SHARED_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
    jz \if_hot
    movzwl rSELF:THREAD_SHARED_METHOD_HOTNESS_OFFSET, %esi
    testl %esi, %esi
    je \if_hot
    addl $$-1, %esi
    movw %si, rSELF:THREAD_SHARED_METHOD_HOTNESS_OFFSET
    jmp \if_not_hot
.endm
1676
// If a suspend or checkpoint request is pending for this thread, export
// the dex pc and call the runtime's suspend check; otherwise jump to
// \continue_label.
.macro DO_SUSPEND_CHECK continue_label
    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), rSELF:THREAD_FLAGS_OFFSET
    jz      \continue_label
    EXPORT_PC
    call    SYMBOL(art_quick_test_suspend)
.endm
1683
1684%def entry():
1685/*
1686 * ArtMethod entry point.
1687 *
1688 * On entry:
1689 *  rdi   ArtMethod* callee
1690 *  rest  method parameters
1691 */
1692
// Entry point used when the callee's declaring class may not be
// initialized yet. Checks the class status (a 32-bit compressed heap
// reference, hence the 32-bit base register):
// - visibly initialized: execute directly;
// - below "initializing": defer to the runtime to initialize it;
// - initializing: only the thread running <clinit> may proceed.
OAT_ENTRY ExecuteNterpWithClinitImpl
    .cfi_startproc
    // For simplicity, we don't do a read barrier here, but instead rely
    // on art_quick_resolution_trampoline to always have a suspend point before
    // calling back here.
    movl ART_METHOD_DECLARING_CLASS_OFFSET(%rdi), %r10d
    cmpl $$(MIRROR_CLASS_STATUS_VISIBLY_INITIALIZED_SHIFTED), MIRROR_CLASS_STATUS_OFFSET(%r10d)
    jae ExecuteNterpImpl
    cmpl $$(MIRROR_CLASS_STATUS_INITIALIZING_SHIFTED), MIRROR_CLASS_STATUS_OFFSET(%r10d)
    jb art_quick_resolution_trampoline
    // Class is initializing: compare the initializing thread id with ours.
    movl MIRROR_CLASS_CLINIT_THREAD_ID_OFFSET(%r10d), %r10d
    cmpl %r10d, rSELF:THREAD_TID_OFFSET
    je ExecuteNterpImpl
    jmp art_quick_resolution_trampoline
    .cfi_endproc
    // Exported end marker (e.g. for computing the entry point's size).
    .global SYMBOL(EndExecuteNterpWithClinitImpl)
SYMBOL(EndExecuteNterpWithClinitImpl):
1710
1711OAT_ENTRY ExecuteNterpImpl
1712    .cfi_startproc
1713    .cfi_def_cfa rsp, 8
1714    testq %rax, -STACK_OVERFLOW_RESERVED_BYTES(%rsp)
1715    /* Spill callee save regs */
1716    SPILL_ALL_CALLEE_SAVES
1717
1718    movq ART_METHOD_DATA_OFFSET_64(%rdi), rPC
1719
1720    // Setup the stack for executing the method.
1721    SETUP_STACK_FRAME rPC, rREFS, rREFS32, rFP, CFI_REFS, load_ins=1
1722
1723    // Setup the parameters
1724    testl %r14d, %r14d
1725    je .Lxmm_setup_finished
1726
1727    subq %r14, %rbx
1728    salq $$2, %rbx // rbx is now the offset for inputs into the registers array.
1729
1730    testl $$ART_METHOD_NTERP_ENTRY_POINT_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
1731    je .Lsetup_slow_path
1732    leaq (rFP, %rbx, 1), %rdi
1733    leaq (rREFS, %rbx, 1), %rbx
1734    movq $$0, %r10
1735
1736    SETUP_REFERENCE_PARAMETER_IN_GPR esi, rdi, rbx, r14d, r10, .Lxmm_setup_finished
1737    SETUP_REFERENCE_PARAMETER_IN_GPR edx, rdi, rbx, r14d, r10, .Lxmm_setup_finished
1738    SETUP_REFERENCE_PARAMETER_IN_GPR ecx, rdi, rbx, r14d, r10, .Lxmm_setup_finished
1739    SETUP_REFERENCE_PARAMETER_IN_GPR r8d, rdi, rbx, r14d, r10, .Lxmm_setup_finished
1740    SETUP_REFERENCE_PARAMETER_IN_GPR r9d, rdi, rbx, r14d, r10, .Lxmm_setup_finished
1741    SETUP_REFERENCE_PARAMETERS_IN_STACK rdi, rbx, r14d, r11, r10
1742    jmp .Lxmm_setup_finished
1743
1744.Lsetup_slow_path:
1745    // If the method is not static and there is one argument ('this'), we don't need to fetch the
1746    // shorty.
1747    testl $$ART_METHOD_IS_STATIC_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
1748    jne .Lsetup_with_shorty
1749
1750    movl %esi, (rFP, %rbx)
1751    movl %esi, (rREFS, %rbx)
1752
1753    cmpl $$1, %r14d
1754    je .Lxmm_setup_finished
1755
1756.Lsetup_with_shorty:
1757    // TODO: Get shorty in a better way and remove below
1758    push %rdi
1759    push %rsi
1760    push %rdx
1761    push %rcx
1762    push %r8
1763    push %r9
1764
1765    // Save xmm registers + alignment.
1766    subq MACRO_LITERAL(8 * 8 + 8), %rsp
1767    movq %xmm0, 0(%rsp)
1768    movq %xmm1, 8(%rsp)
1769    movq %xmm2, 16(%rsp)
1770    movq %xmm3, 24(%rsp)
1771    movq %xmm4, 32(%rsp)
1772    movq %xmm5, 40(%rsp)
1773    movq %xmm6, 48(%rsp)
1774    movq %xmm7, 56(%rsp)
1775
1776    call SYMBOL(NterpGetShorty)
1777    // Save shorty in callee-save rbp.
1778    movq %rax, %rbp
1779
1780    // Restore xmm registers + alignment.
1781    movq 0(%rsp), %xmm0
1782    movq 8(%rsp), %xmm1
1783    movq 16(%rsp), %xmm2
1784    movq 24(%rsp), %xmm3
1785    movq 32(%rsp), %xmm4
1786    movq 40(%rsp), %xmm5
1787    movq 48(%rsp), %xmm6
1788    movq 56(%rsp), %xmm7
1789    addq MACRO_LITERAL(8 * 8 + 8), %rsp
1790
1791    pop %r9
1792    pop %r8
1793    pop %rcx
1794    pop %rdx
1795    pop %rsi
1796    pop %rdi
1797    // Reload the old stack pointer, which used to be stored in %r11, which is not callee-saved.
1798    movq -8(rREFS), %r11
1799    // TODO: Get shorty in a better way and remove above
1800
1801    movq $$0, %r14
1802    testl $$ART_METHOD_IS_STATIC_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
1803
1804    // Available: rdi, r10
1805    // Note the leaq below don't change the flags.
1806    leaq 1(%rbp), %r10  // shorty + 1  ; ie skip return arg character
1807    leaq (rFP, %rbx, 1), %rdi
1808    leaq (rREFS, %rbx, 1), %rbx
1809    jne .Lhandle_static_method
1810    addq $$4, %rdi
1811    addq $$4, %rbx
1812    addq $$4, %r11
1813    jmp .Lcontinue_setup_gprs
1814.Lhandle_static_method:
1815    LOOP_OVER_SHORTY_STORING_GPRS rsi, esi, r10, r14, rdi, rbx, .Lgpr_setup_finished
1816.Lcontinue_setup_gprs:
1817    LOOP_OVER_SHORTY_STORING_GPRS rdx, edx, r10, r14, rdi, rbx, .Lgpr_setup_finished
1818    LOOP_OVER_SHORTY_STORING_GPRS rcx, ecx, r10, r14, rdi, rbx, .Lgpr_setup_finished
1819    LOOP_OVER_SHORTY_STORING_GPRS r8, r8d, r10, r14, rdi, rbx, .Lgpr_setup_finished
1820    LOOP_OVER_SHORTY_STORING_GPRS r9, r9d, r10, r14, rdi, rbx, .Lgpr_setup_finished
1821    LOOP_OVER_INTs r10, r14, rdi, rbx, r11, .Lgpr_setup_finished
1822.Lgpr_setup_finished:
1823    leaq 1(%rbp), %r10  // shorty + 1  ; ie skip return arg character
1824    movq $$0, %r14 // reset counter
1825    LOOP_OVER_SHORTY_STORING_XMMS xmm0, r10, r14, rdi, .Lxmm_setup_finished
1826    LOOP_OVER_SHORTY_STORING_XMMS xmm1, r10, r14, rdi, .Lxmm_setup_finished
1827    LOOP_OVER_SHORTY_STORING_XMMS xmm2, r10, r14, rdi, .Lxmm_setup_finished
1828    LOOP_OVER_SHORTY_STORING_XMMS xmm3, r10, r14, rdi, .Lxmm_setup_finished
1829    LOOP_OVER_SHORTY_STORING_XMMS xmm4, r10, r14, rdi, .Lxmm_setup_finished
1830    LOOP_OVER_SHORTY_STORING_XMMS xmm5, r10, r14, rdi, .Lxmm_setup_finished
1831    LOOP_OVER_SHORTY_STORING_XMMS xmm6, r10, r14, rdi, .Lxmm_setup_finished
1832    LOOP_OVER_SHORTY_STORING_XMMS xmm7, r10, r14, rdi, .Lxmm_setup_finished
1833    LOOP_OVER_FPs r10, r14, rdi, r11, .Lxmm_setup_finished
1834.Lxmm_setup_finished:
1835    CFI_DEFINE_DEX_PC_WITH_OFFSET(CFI_TMP, CFI_DEX, 0)
1836
1837    // Set rIBASE
1838    leaq artNterpAsmInstructionStart(%rip), rIBASE
1839    /* start executing the instruction at rPC */
1840    START_EXECUTING_INSTRUCTIONS
1841    /* NOTE: no fallthrough */
1842    // cfi info continues, and covers the whole nterp implementation.
1843    END ExecuteNterpImpl
1844
%def opcode_pre():
%  # Intentionally empty: no per-opcode preamble is emitted on x86-64.
%def fetch_from_thread_cache(dest_reg, miss_label):
   // Fetch some information from the thread cache.
   // Look up the entry indexed by the current dex PC (rPC) in the
   // thread-local interpreter cache: on a hit, load the cached value into
   // ${dest_reg}; on a miss (cached key != rPC), branch to ${miss_label}.
   // Uses rax, rdx, rcx as temporaries.
   movq rSELF:THREAD_SELF_OFFSET, %rax
   // Entry index: low bits of rPC, scaled to the byte size of one entry
   // (a pointer pair), selected by the shift+mask below.
   movq rPC, %rdx
   salq MACRO_LITERAL(THREAD_INTERPRETER_CACHE_SIZE_SHIFT), %rdx
   andq MACRO_LITERAL(THREAD_INTERPRETER_CACHE_SIZE_MASK), %rdx
   // Each entry is (key = dex pc, value); the value lives one pointer
   // past the key.
   cmpq THREAD_INTERPRETER_CACHE_OFFSET(%rax, %rdx, 1), rPC
   jne ${miss_label}
   movq __SIZEOF_POINTER__+THREAD_INTERPRETER_CACHE_OFFSET(%rax, %rdx, 1), ${dest_reg}
1857
1858%def footer():
1859/*
1860 * ===========================================================================
1861 *  Common subroutines and data
1862 * ===========================================================================
1863 */
1864
1865    .text
1866    .align  2
1867
1868// Enclose all code below in a symbol (which gets printed in backtraces).
1869ENTRY nterp_helper
1870
1871// Note: mterp also uses the common_* names below for helpers, but that's OK
1872// as the C compiler compiled each interpreter separately.
// Throw a divide-by-zero error via the runtime helper (expected not to
// return here).
common_errDivideByZero:
    EXPORT_PC
    call art_quick_throw_div_zero
1876
// Throw an array-index-out-of-bounds error.
// Expect array in edi, index in esi.
common_errArrayIndex:
    EXPORT_PC
    // Shuffle into the helper's argument registers: index -> edi (arg0),
    // array length -> esi (arg1).
    // NOTE(review): 32-bit base register for the length load — relies on
    // heap references fitting in 32 bits.
    movl MIRROR_ARRAY_LENGTH_OFFSET(%edi), %eax
    movl %esi, %edi
    movl %eax, %esi
    call art_quick_throw_array_bounds
1884
// Throw a null-pointer exception via the runtime helper (expected not to
// return here).
common_errNullObject:
    EXPORT_PC
    call art_quick_throw_null_pointer_exception
1888
// Common invoke entry points. Each expands the shared invoke macro with
// the flag combination for one invoke opcode family; callers jump here
// with the resolved ArtMethod already in rdi (see e.g.
// NterpHandleInvokeInterfaceOnObjectMethod below).
NterpCommonInvokeStatic:
    COMMON_INVOKE_NON_RANGE is_static=1, is_interface=0, suffix="invokeStatic"

NterpCommonInvokeStaticRange:
    COMMON_INVOKE_RANGE is_static=1, is_interface=0, suffix="invokeStatic"

NterpCommonInvokeInstance:
    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, suffix="invokeInstance"

NterpCommonInvokeInstanceRange:
    COMMON_INVOKE_RANGE is_static=0, is_interface=0, suffix="invokeInstance"

NterpCommonInvokeInterface:
    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=1, suffix="invokeInterface"

NterpCommonInvokeInterfaceRange:
    COMMON_INVOKE_RANGE is_static=0, is_interface=1, suffix="invokeInterface"

NterpCommonInvokePolymorphic:
    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, is_string_init=0, is_polymorphic=1, suffix="invokePolymorphic"

NterpCommonInvokePolymorphicRange:
    COMMON_INVOKE_RANGE is_static=0, is_interface=0, is_polymorphic=1, suffix="invokePolymorphic"

NterpCommonInvokeCustom:
    COMMON_INVOKE_NON_RANGE is_static=1, is_interface=0, is_string_init=0, is_polymorphic=0, is_custom=1, suffix="invokeCustom"

NterpCommonInvokeCustomRange:
    COMMON_INVOKE_RANGE is_static=1, is_interface=0, is_polymorphic=0, is_custom=1, suffix="invokeCustom"

NterpHandleStringInit:
   COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, is_string_init=1, suffix="stringInit"

NterpHandleStringInitRange:
   COMMON_INVOKE_RANGE is_static=0, is_interface=0, is_string_init=1, suffix="stringInit"
1924
// new-instance vAA, class@BBBB: allocate an object of the resolved class
// and store the reference into vAA.
NterpNewInstance:
   EXPORT_PC
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("%rdi", miss_label="2f")
   // If the read-barrier mark entrypoint is non-null, the class reference
   // in rdi must go through the read barrier before use.
   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 3f
4:
   // Allocate via the thread-local object-allocation entrypoint (class in rdi).
   callq *rSELF:THREAD_ALLOC_OBJECT_ENTRYPOINT_OFFSET
1:
   SET_VREG_OBJECT %eax, rINSTq            # fp[A] <- value
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   // Slow path (cache miss): resolve the class and allocate through the
   // runtime: nterp_allocate_object(Thread*, <frame slot 0 — presumably
   // the current ArtMethod*, TODO confirm>, dex pc).
   movq rSELF:THREAD_SELF_OFFSET, %rdi
   movq 0(%rsp), %rsi
   movq rPC, %rdx
   call nterp_allocate_object
   jmp 1b
3:
   // 07 is %rdi
   call art_quick_read_barrier_mark_reg07
   jmp 4b
1946
// new-array vA, vB, class@CCCC: allocate an array of the resolved class
// with length vB and store the reference into vA.
NterpNewArray:
   /* new-array vA, vB, class@CCCC */
   EXPORT_PC
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("%rdi", miss_label="2f")
   // If the read-barrier mark entrypoint is non-null, mark the class in
   // rdi before using it.
   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 3f
1:
   // Decode vB (array length) into esi and A into rINST, then allocate
   // via the thread-local array-allocation entrypoint (class in rdi).
   movzbl  rINSTbl,%esi
   sarl    $$4,%esi                          # esi<- B
   GET_VREG %esi %rsi                        # esi<- vB (array length)
   andb    $$0xf,rINSTbl                     # rINST<- A
   callq *rSELF:THREAD_ALLOC_ARRAY_ENTRYPOINT_OFFSET
   SET_VREG_OBJECT %eax, rINSTq            # fp[A] <- value
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   // Slow path (cache miss): resolve the class through the runtime, then
   // retry the fast path with the class in rdi.
   movq rSELF:THREAD_SELF_OFFSET, %rdi
   movq 0(%rsp), %rsi                      # presumably the current ArtMethod* — TODO confirm
   movq rPC, %rdx
   call nterp_get_class
   movq %rax, %rdi
   jmp 1b
3:
   // 07 is %rdi
   call art_quick_read_barrier_mark_reg07
   jmp 1b
1973
// iput-object vA, vB, field@CCCC: store the reference in vA into the
// field at the cached offset inside the object in vB.
NterpPutObjectInstanceField:
   movl    rINST, %ebp                     # rbp <- BA
   andl    $$0xf, %ebp                     # rbp <- A
   GET_VREG %ecx, %rbp                     # ecx <- v[A]
   sarl    $$4, rINST
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%rax", miss_label="2f")
1:
   GET_VREG rINST, rINSTq                  # vB (object we're operating on)
   testl   rINST, rINST                    # is object null?
   je      common_errNullObject
   POISON_HEAP_REF ecx
   movl %ecx, (rINSTq,%rax,1)              # obj.field <- v[A] (rax = field offset)
   // Mark the card for the holder object, unless the stored value is null.
   testl %ecx, %ecx
   je 4f
   movq rSELF:THREAD_CARD_TABLE_OFFSET, %rax
   shrq $$CARD_TABLE_CARD_SHIFT, rINSTq
   movb %al, (%rax, rINSTq, 1)
4:
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   // Slow path (cache miss): resolve the field offset through the runtime.
   EXPORT_PC
   movq rSELF:THREAD_SELF_OFFSET, %rdi
   movq 0(%rsp), %rsi
   movq rPC, %rdx
   // %rcx is already set.
   call nterp_get_instance_field_offset
   // Reload the value as it may have moved.
   GET_VREG %ecx, %rbp                     # ecx <- v[A]
   // Non-negative result is a plain field offset: take the fast path.
   testl %eax, %eax
   jns 1b
   // Negative offset marks a volatile field: negate it back and fence
   // after the store (see the matching get handler below).
   GET_VREG rINST, rINSTq                  # vB (object we're operating on)
   testl   rINST, rINST                    # is object null?
   je      common_errNullObject
   negl %eax
   POISON_HEAP_REF ecx
   movl %ecx, (rINSTq,%rax,1)
   testl %ecx, %ecx
   je 5f
   movq rSELF:THREAD_CARD_TABLE_OFFSET, %rax
   shrq $$CARD_TABLE_CARD_SHIFT, rINSTq
   movb %al, (%rax, rINSTq, 1)
5:
   lock addl $$0, (%rsp)                   # full memory fence for the volatile store
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2019
// iget-object vA, vB, field@CCCC: load the reference field at the cached
// offset from the object in vB into vA.
NterpGetObjectInstanceField:
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%rax", miss_label="2f")
1:
   movl    rINST, %ecx                     # rcx <- BA
   sarl    $$4, %ecx                       # ecx <- B
   GET_VREG %ecx, %rcx                     # vB (object we're operating on)
   testl   %ecx, %ecx                      # is object null?
   je      common_errNullObject
   // Test the object's gray byte; the field load is issued before the
   // branch so both paths have the (still poisoned) reference in eax.
   testb $$READ_BARRIER_TEST_VALUE, GRAY_BYTE_OFFSET(%ecx)
   movl (%rcx,%rax,1), %eax
   jnz 3f
   UNPOISON_HEAP_REF eax  // Affects flags, so we cannot unpoison before the jnz.
4:
   andb    $$0xf,rINSTbl                   # rINST <- A
   SET_VREG_OBJECT %eax, rINSTq            # fp[A] <- value
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   // Slow path (cache miss): resolve the field offset through the runtime.
   EXPORT_PC
   movq rSELF:THREAD_SELF_OFFSET, %rdi
   movq 0(%rsp), %rsi
   movq rPC, %rdx
   movq $$0, %rcx                          # no value to store: this is a get
   call nterp_get_instance_field_offset
   testl %eax, %eax
   jns 1b
   // For volatile fields, we return a negative offset. Remove the sign
   // and no need for any barrier thanks to the memory model.
   negl %eax
   jmp 1b
3:
   // Object was gray: mark the loaded reference through the read barrier.
   UNPOISON_HEAP_REF eax
   // reg00 is eax
   call art_quick_read_barrier_mark_reg00
   jmp 4b
2055
// sput-object vAA, field@BBBB: store the reference in vAA into a static
// field of the cached ArtField's declaring class.
NterpPutObjectStaticField:
   GET_VREG %ebp, rINSTq
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%rax", miss_label="2f")
1:
   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx          // edx <- field offset
   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax // eax <- declaring class (holder)
   // If the read-barrier mark entrypoint is non-null, mark the holder first.
   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 3f
5:
   POISON_HEAP_REF ebp
   movl %ebp, (%eax, %edx, 1)
   // Mark the holder's card unless the stored value is null.
   testl %ebp, %ebp
   je 4f
   movq rSELF:THREAD_CARD_TABLE_OFFSET, %rcx
   shrq $$CARD_TABLE_CARD_SHIFT, %rax
   movb %cl, (%rax, %rcx, 1)
4:
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   // Slow path (cache miss): resolve the ArtField through the runtime.
   EXPORT_PC
   movq rSELF:THREAD_SELF_OFFSET, %rdi
   movq 0(%rsp), %rsi
   movq rPC, %rdx
   movq %rbp, %rcx
   call nterp_get_static_field
   // Reload the value as it may have moved.
   GET_VREG %ebp, rINSTq
   // Bit 0 of the returned pointer marks a volatile field.
   testq MACRO_LITERAL(1), %rax
   je 1b
   CLEAR_VOLATILE_MARKER %rax
   // Volatile variant of the store path, with a trailing fence.
   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx
   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax
   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 7f
6:
   POISON_HEAP_REF ebp
   movl %ebp, (%eax, %edx, 1)
   testl %ebp, %ebp
   je 8f
   movq rSELF:THREAD_CARD_TABLE_OFFSET, %rcx
   shrq $$CARD_TABLE_CARD_SHIFT, %rax
   movb %cl, (%rax, %rcx, 1)
8:
   lock addl $$0, (%rsp)                   # full memory fence for the volatile store
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
3:
   // Mark the holder (in eax) then resume the non-volatile store.
   call art_quick_read_barrier_mark_reg00
   jmp 5b
7:
   // Mark the holder (in eax) then resume the volatile store.
   call art_quick_read_barrier_mark_reg00
   jmp 6b
2108
// sget-object vAA, field@BBBB: load a reference static field of the
// cached ArtField's declaring class into vAA.
NterpGetObjectStaticField:
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("%rax", miss_label="2f")
1:
   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx          // edx <- field offset
   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax // eax <- declaring class (holder)
   // If the read-barrier mark entrypoint is non-null, mark the holder first.
   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
   jne 5f
6:
   // Test the holder's gray byte; the field load is issued before the branch.
   testb $$READ_BARRIER_TEST_VALUE, GRAY_BYTE_OFFSET(%eax)
   movl (%eax, %edx, 1), %eax
   jnz 3f
   UNPOISON_HEAP_REF eax  // Affects flags, so we cannot unpoison before the jnz.
4:
   SET_VREG_OBJECT %eax, rINSTq            # fp[A] <- value
   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
   // Slow path (cache miss): resolve the ArtField through the runtime.
   EXPORT_PC
   movq rSELF:THREAD_SELF_OFFSET, %rdi
   movq 0(%rsp), %rsi
   movq rPC, %rdx
   movq $$0, %rcx                          # no value to store: this is a get
   call nterp_get_static_field
   // Clear the volatile marker (bit 0) if set; no barrier is needed for a
   // volatile load thanks to the memory model (see the iget handler above).
   andq $$-2, %rax
   jmp 1b
3:
   // Holder was gray: mark the loaded reference through the read barrier.
   UNPOISON_HEAP_REF eax
   call art_quick_read_barrier_mark_reg00
   jmp 4b
5:
   call art_quick_read_barrier_mark_reg00
   jmp 6b
2141
NterpGetBooleanStaticField:
  // sget-boolean: booleans are unsigned (0/1), so zero-extend the byte
  // with movzbl, matching NterpGetBooleanInstanceField. movsbl would
  // sign-extend and is the byte-field load, not the boolean one.
  OP_SGET load="movzbl", wide=0
2144
// Typed static/instance field accessors. Each expands the shared
// OP_SGET/OP_SPUT/OP_IGET/OP_IPUT macro with the load/store instruction
// matching the field's width and signedness (sign-extend byte/short,
// zero-extend char, full 32/64-bit moves for int/wide).
NterpGetByteStaticField:
  OP_SGET load="movsbl", wide=0

NterpGetCharStaticField:
  OP_SGET load="movzwl", wide=0

NterpGetShortStaticField:
  OP_SGET load="movswl", wide=0

NterpGetWideStaticField:
  OP_SGET load="movq", wide=1

NterpGetIntStaticField:
  OP_SGET load="movl", wide=0

NterpPutStaticField:
  OP_SPUT rINST_reg=rINST, store="movl", wide=0

// Boolean/byte (and char/short) puts share a handler: the store width is
// the same; only loads differ in extension.
NterpPutBooleanStaticField:
NterpPutByteStaticField:
  OP_SPUT rINST_reg=rINSTbl, store="movb", wide=0

NterpPutCharStaticField:
NterpPutShortStaticField:
  OP_SPUT rINST_reg=rINSTw, store="movw", wide=0

NterpPutWideStaticField:
  OP_SPUT rINST_reg=rINSTq, store="movq", wide=1

NterpPutInstanceField:
  OP_IPUT rINST_reg=rINST, store="movl", wide=0

NterpPutBooleanInstanceField:
NterpPutByteInstanceField:
  OP_IPUT rINST_reg=rINSTbl, store="movb", wide=0

NterpPutCharInstanceField:
NterpPutShortInstanceField:
  OP_IPUT rINST_reg=rINSTw, store="movw", wide=0

NterpPutWideInstanceField:
  OP_IPUT rINST_reg=rINSTq, store="movq", wide=1

NterpGetBooleanInstanceField:
  OP_IGET load="movzbl", wide=0

NterpGetByteInstanceField:
  OP_IGET load="movsbl", wide=0

NterpGetCharInstanceField:
  OP_IGET load="movzwl", wide=0

NterpGetShortInstanceField:
  OP_IGET load="movswl", wide=0

NterpGetWideInstanceField:
  OP_IGET load="movq", wide=1

NterpGetInstanceField:
  OP_IGET load="movl", wide=0
2205
// Called when the method's hotness counter overflows. Reports the hot
// method to the runtime; if the runtime returns OSR data, replaces the
// current nterp frame with the compiled frame and jumps into compiled
// code (on-stack replacement). Otherwise resumes dispatch.
NterpHandleHotnessOverflow:
    CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot=1f, if_not_hot=4f
1:
    // Report the hot method, passing the current dex pc and frame pointer.
    movq rPC, %rsi
    movq rFP, %rdx
    call nterp_hot_method
    // A non-null result is a pointer to OSR data.
    testq %rax, %rax
    jne 3f
2:
    FETCH_INST
    GOTO_NEXT
3:
    // OSR path.
    // Drop the current frame.
    movq -8(rREFS), %rsp
    CFI_DEF_CFA(rsp, CALLEE_SAVES_SIZE)

    // Setup the new frame
    movq OSR_DATA_FRAME_SIZE(%rax), %rcx
    // Given stack size contains all callee saved registers, remove them.
    subq $$CALLEE_SAVES_SIZE, %rcx

    // Remember CFA.
    movq %rsp, %rbp
    CFI_DEF_CFA_REGISTER(rbp)

    // Copy the pre-built frame contents from the OSR data onto the stack.
    subq %rcx, %rsp
    movq %rsp, %rdi               // rdi := beginning of stack
    leaq OSR_DATA_MEMORY(%rax), %rsi  // rsi := memory to copy
    rep movsb                     // while (rcx--) { *rdi++ = *rsi++ }

    // Fetch the native PC to jump to and save it in a callee-save register.
    movq OSR_DATA_NATIVE_PC(%rax), %rbx

    // Free the memory holding OSR Data.
    movq %rax, %rdi
    call free

    // Jump to the compiled code.
    jmp *%rbx
4:
    // Not hot: run a suspend check, then resume dispatch.
    DO_SUSPEND_CHECK continue_label=2b
    jmp 2b
2248
// invoke-interface targeting a java.lang.Object method: the vtable index
// is in the top 16 bits of eax. Fetch the ArtMethod from the vtable of
// the class in edx (presumably the receiver's class — set up by the
// caller) and dispatch as an instance invoke.
NterpHandleInvokeInterfaceOnObjectMethodRange:
   shrl $$16, %eax
   movq MIRROR_CLASS_VTABLE_OFFSET_64(%edx, %eax, 8), %rdi
   jmp NterpCommonInvokeInstanceRange

NterpHandleInvokeInterfaceOnObjectMethod:
   shrl $$16, %eax
   movq MIRROR_CLASS_VTABLE_OFFSET_64(%edx, %eax, 8), %rdi
   jmp NterpCommonInvokeInstance
2258
2259// This is the logical end of ExecuteNterpImpl, where the frame info applies.
2260// EndExecuteNterpImpl includes the methods below as we want the runtime to
2261// see them as part of the Nterp PCs.
2262.cfi_endproc
2263
// nterp-to-nterp transfer stubs, one per invoke flavor. Each sets up the
// callee stack frame, then copies arguments and starts executing the
// callee via the shared macros. CFI is scoped per stub because the stack
// is switched inside.
nterp_to_nterp_static_non_range:
    .cfi_startproc
    .cfi_def_cfa rsp, 8
    SETUP_STACK_FOR_INVOKE
    SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE is_static=1, is_string_init=0
    .cfi_endproc

nterp_to_nterp_string_init_non_range:
    .cfi_startproc
    .cfi_def_cfa rsp, 8
    SETUP_STACK_FOR_INVOKE
    SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE is_static=0, is_string_init=1
    .cfi_endproc

nterp_to_nterp_instance_non_range:
    .cfi_startproc
    .cfi_def_cfa rsp, 8
    SETUP_STACK_FOR_INVOKE
    SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE is_static=0, is_string_init=0
    .cfi_endproc

nterp_to_nterp_static_range:
    .cfi_startproc
    .cfi_def_cfa rsp, 8
    SETUP_STACK_FOR_INVOKE
    SETUP_RANGE_ARGUMENTS_AND_EXECUTE is_static=1
    .cfi_endproc

nterp_to_nterp_instance_range:
    .cfi_startproc
    .cfi_def_cfa rsp, 8
    SETUP_STACK_FOR_INVOKE
    SETUP_RANGE_ARGUMENTS_AND_EXECUTE is_static=0
    .cfi_endproc

nterp_to_nterp_string_init_range:
    .cfi_startproc
    .cfi_def_cfa rsp, 8
    SETUP_STACK_FOR_INVOKE
    SETUP_RANGE_ARGUMENTS_AND_EXECUTE is_static=0, is_string_init=1
    .cfi_endproc
2305
2306END nterp_helper
2307
// This is the end of PCs contained by the OatQuickMethodHeader created for the interpreter
// entry point.
    FUNCTION_TYPE(EndExecuteNterpImpl)
    ASM_HIDDEN SYMBOL(EndExecuteNterpImpl)
    .global SYMBOL(EndExecuteNterpImpl)
SYMBOL(EndExecuteNterpImpl):

// Entrypoints into runtime. Each NTERP_TRAMPOLINE expands to a stub that
// calls the named C++ runtime helper on behalf of the interpreter.
NTERP_TRAMPOLINE nterp_get_static_field, NterpGetStaticField
NTERP_TRAMPOLINE nterp_get_instance_field_offset, NterpGetInstanceFieldOffset
NTERP_TRAMPOLINE nterp_filled_new_array, NterpFilledNewArray
NTERP_TRAMPOLINE nterp_filled_new_array_range, NterpFilledNewArrayRange
NTERP_TRAMPOLINE nterp_get_class, NterpGetClass
NTERP_TRAMPOLINE nterp_allocate_object, NterpAllocateObject
NTERP_TRAMPOLINE nterp_get_method, NterpGetMethod
NTERP_TRAMPOLINE nterp_hot_method, NterpHotMethod
NTERP_TRAMPOLINE nterp_load_object, NterpLoadObject

// Deliver the exception pending on the current thread.
DEFINE_FUNCTION nterp_deliver_pending_exception
    DELIVER_PENDING_EXCEPTION
END_FUNCTION nterp_deliver_pending_exception
2329
2330// gen_mterp.py will inline the following definitions
2331// within [ExecuteNterpImpl, EndExecuteNterpImpl).
%def instruction_end():

    FUNCTION_TYPE(artNterpAsmInstructionEnd)
    ASM_HIDDEN SYMBOL(artNterpAsmInstructionEnd)
    .global SYMBOL(artNterpAsmInstructionEnd)
SYMBOL(artNterpAsmInstructionEnd):
    // artNterpAsmInstructionEnd is used as landing pad for exception handling.
    // FETCH_INST/GOTO_NEXT resume dispatch at the dex pc in rPC.
    FETCH_INST
    GOTO_NEXT
2341
%def instruction_start():

    FUNCTION_TYPE(artNterpAsmInstructionStart)
    ASM_HIDDEN SYMBOL(artNterpAsmInstructionStart)
    .global SYMBOL(artNterpAsmInstructionStart)
    // The handler table starts at the nop handler: the start symbol is
    // defined as an alias of the first opcode's label.
SYMBOL(artNterpAsmInstructionStart) = .L_op_nop
    .text
2349
%def opcode_name_prefix():
%   # Generated opcode handlers are named "nterp_<opcode>".
%   return "nterp_"
%def opcode_start():
    ENTRY nterp_${opcode}
%def opcode_end():
    END nterp_${opcode}
    // Advance to the end of this handler. Causes error if we are past that point.
    .org nterp_${opcode} + NTERP_HANDLER_SIZE  // ${opcode} handler is too big!
%def opcode_slow_path_start(name):
    ENTRY ${name}
%def opcode_slow_path_end(name):
    END ${name}
2362