/art/runtime/interpreter/mterp/mips/ |
D | footer.S |
    18 move a0, rSELF
    27 move a0, rSELF
    36 move a0, rSELF
    45 move a0, rSELF
    54 move a0, rSELF
    63 move a0, rSELF
    72 move a0, rSELF
    74 lw a2, THREAD_FLAGS_OFFSET(rSELF)
    85 lw a0, THREAD_EXCEPTION_OFFSET(rSELF)
    94 move a0, rSELF
    [all …]
|
D | op_return_void_no_barrier.S |
    1 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    2 move a0, rSELF
|
D | op_return_void.S |
    3 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    4 move a0, rSELF
|
D | op_goto_16.S |
    11 move a0, rSELF
    19 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    29 lw ra, THREAD_FLAGS_OFFSET(rSELF)
|
D | op_return.S |
    9 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    10 move a0, rSELF
|
D | alt_stub.S |
    9 lw rIBASE, THREAD_CURRENT_IBASE_OFFSET(rSELF) # refresh IBASE
    10 move a0, rSELF # arg0
|
D | op_goto.S |
    12 move a0, rSELF
    21 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    33 lw ra, THREAD_FLAGS_OFFSET(rSELF)
|
D | op_return_wide.S |
    7 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    8 move a0, rSELF
|
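Across the MIPS handlers above, the recurring pair is a load of the thread's flag word through rSELF followed by "move a0, rSELF": the flags are tested and, when a suspend or checkpoint request is pending, the current Thread* is passed as the first argument to a runtime helper. The following is a minimal C model of that control flow; the struct layout, flag bits, and helper name are invented for illustration and are not ART's actual declarations.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative stand-in for the ART Thread object: only the fields the
     * listed instructions touch (flags word, pending exception) are modeled. */
    struct thread {
        uint32_t flags;          /* read via THREAD_FLAGS_OFFSET(rSELF) */
        void    *exception;      /* read via THREAD_EXCEPTION_OFFSET(rSELF) */
    };

    /* Hypothetical flag bits; the real bit assignments live in ART's headers. */
    enum {
        SUSPEND_REQUEST    = 1u << 0,
        CHECKPOINT_REQUEST = 1u << 1,
    };

    /* Stand-in for the runtime helper that receives rSELF in a0/r0. */
    static void suspend_check(struct thread *self) {
        printf("suspend check for thread %p\n", (void *)self);
    }

    /* What a return/goto handler does before leaving the opcode:
     * load self->flags, and call the helper only if a request bit is set. */
    static void return_opcode_epilogue(struct thread *self) {
        if (self->flags & (SUSPEND_REQUEST | CHECKPOINT_REQUEST)) {
            suspend_check(self);              /* move a0, rSELF ; call helper */
        }
    }

    int main(void) {
        struct thread t = { .flags = SUSPEND_REQUEST, .exception = NULL };
        return_opcode_epilogue(&t);
        return 0;
    }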
/art/runtime/interpreter/mterp/arm/ |
D | footer.S |
    18 mov r0, rSELF
    27 mov r0, rSELF
    36 mov r0, rSELF
    45 mov r0, rSELF
    54 mov r0, rSELF
    63 mov r0, rSELF
    72 mov r0, rSELF
    74 ldr r2, [rSELF, #THREAD_FLAGS_OFFSET]
    85 ldr r0, [rSELF, #THREAD_EXCEPTION_OFFSET]
    95 mov r0, rSELF
    [all …]
|
D | op_return_void_no_barrier.S |
    1 ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
    2 mov r0, rSELF
|
D | op_return_void.S |
    3 ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
    4 mov r0, rSELF
|
D | alt_stub.S |
    8 ldr rIBASE, [rSELF, #THREAD_CURRENT_IBASE_OFFSET] @ refresh IBASE.
    10 mov r0, rSELF
|
D | op_return.S |
    9 ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
    10 mov r0, rSELF
|
D | op_return_wide.S |
    7 ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
    8 mov r0, rSELF
|
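The ARM rows follow the same sequence, only with base-plus-immediate addressing ("[rSELF, #THREAD_FLAGS_OFFSET]"). The THREAD_*_OFFSET constants are byte offsets into the thread object that the assembly shares with the C++ side. Below is a sketch of how such offsets relate to a struct layout, using an invented struct rather than ART's generated constants; it simply derives and prints analogous offsets.

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative thread layout; ART's real layout and offsets differ. */
    struct thread {
        uint32_t flags;
        void    *exception;
        void    *current_ibase;
    };

    int main(void) {
        /* In ART these constants are emitted for the assembler; here we just
         * derive them from the illustrative struct with offsetof. */
        printf("THREAD_FLAGS_OFFSET         = %zu\n", offsetof(struct thread, flags));
        printf("THREAD_EXCEPTION_OFFSET     = %zu\n", offsetof(struct thread, exception));
        printf("THREAD_CURRENT_IBASE_OFFSET = %zu\n", offsetof(struct thread, current_ibase));
        return 0;
    }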
/art/runtime/interpreter/mterp/x86_64/ |
D | footer.S |
    18 movq rSELF, OUT_ARG0
    27 movq rSELF, OUT_ARG0
    36 movq rSELF, OUT_ARG0
    45 movq rSELF, OUT_ARG0
    54 movq rSELF, OUT_ARG0
    63 movq rSELF, OUT_ARG0
    72 movq rSELF, OUT_ARG0
    85 movq rSELF, %rcx
    96 movq rSELF, OUT_ARG0
    230 movq rSELF, OUT_ARG0
    [all …]
|
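Unlike ARM and MIPS, which keep rSELF in a register, x86-64 treats it as a stack slot (the generated file under /out/ below defines rSELF as SELF_SPILL(%rsp)): the incoming Thread* is stored there once ("movq IN_ARG0, rSELF") and reloaded into an argument register ("movq rSELF, OUT_ARG0") before each helper call. A small C model of that save-once, reload-per-call shape, with all names invented:

    #include <stdio.h>

    struct thread { int id; };

    /* Stand-ins for the runtime helpers the footer calls with self as arg0. */
    static void helper_a(struct thread *self) { printf("helper_a(%d)\n", self->id); }
    static void helper_b(struct thread *self) { printf("helper_b(%d)\n", self->id); }

    /* Model of the x86-64 arrangement: the Thread* arrives as an argument,
     * is parked in a local (the stack slot rSELF names), and is read back
     * from that local each time a helper needs it as its first argument. */
    static void footer_path(struct thread *incoming_self) {
        struct thread *self_spill = incoming_self;  /* movq IN_ARG0, rSELF  */

        helper_a(self_spill);                       /* movq rSELF, OUT_ARG0 */
        helper_b(self_spill);                       /* movq rSELF, OUT_ARG0 */
    }

    int main(void) {
        struct thread t = { .id = 1 };
        footer_path(&t);
        return 0;
    }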
/art/runtime/interpreter/mterp/out/ |
D | mterp_arm.S |
    103 #define rSELF r6 macro
    295 ldr rIBASE, [rSELF, #THREAD_CURRENT_IBASE_OFFSET]
    344 mov rSELF, r0
    354 ldr rIBASE, [rSELF, #THREAD_CURRENT_IBASE_OFFSET]
    606 ldr r3, [rSELF, #THREAD_EXCEPTION_OFFSET]
    611 str r1, [rSELF, #THREAD_EXCEPTION_OFFSET] @ clear exception
    620 ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
    621 mov r0, rSELF
    640 ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
    641 mov r0, rSELF
    [all …]
|
D | mterp_x86_64.S |
    164 #define rSELF SELF_SPILL(%rsp) macro
    202 movq rSELF, rIBASE
    366 movq IN_ARG0, rSELF
    577 movq rSELF, %rcx
    589 movq rSELF, OUT_ARG0
    609 movq rSELF, OUT_ARG0
    627 movq rSELF, OUT_ARG0
    648 movq rSELF, OUT_ARG0
    743 movq rSELF, OUT_ARG3
    758 movq rSELF, OUT_ARG3
    [all …]
|
D | mterp_x86.S |
    164 #define rSELF IN_ARG0(%esp) macro
    197 movl rSELF, rIBASE
    210 movl rSELF, rIBASE
    602 movl rSELF, %ecx
    614 movl rSELF, %eax
    636 movl rSELF, %eax
    656 movl rSELF, %eax
    679 movl rSELF, %eax
    791 movl rSELF, %eax
    810 movl rSELF, %eax
    [all …]
|
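The generated files in /out/ carry the per-ISA definitions of rSELF (a dedicated register, r6, on ARM; stack slots on x86 and x86-64) and also show rSELF being used to refresh rIBASE, the base of the current handler table ("ldr rIBASE, [rSELF, #THREAD_CURRENT_IBASE_OFFSET]", the same "refresh IBASE" step the alt_stub.S fragments contain). A rough C model of dispatching through a per-thread handler table that is re-read from the thread before use; the table type and handler names are invented, and the "alternate" table is only meant to suggest the switch the alt_ stubs imply:

    #include <stdio.h>

    typedef void (*handler_fn)(void);

    static void handle_nop(void)  { printf("nop\n"); }
    static void handle_goto(void) { printf("goto\n"); }

    /* Normal and alternate handler tables. */
    static handler_fn normal_table[2] = { handle_nop, handle_goto };
    static handler_fn alt_table[2]    = { handle_nop, handle_goto };

    struct thread {
        handler_fn *current_ibase;   /* what THREAD_CURRENT_IBASE_OFFSET names */
    };

    /* Dispatch one opcode: re-read the table base from the thread (the
     * "refresh IBASE" step) so a table switch takes effect immediately. */
    static void dispatch(struct thread *self, int opcode) {
        handler_fn *ibase = self->current_ibase;   /* ldr rIBASE, [rSELF, #...] */
        ibase[opcode]();
    }

    int main(void) {
        struct thread t = { .current_ibase = normal_table };
        dispatch(&t, 1);
        t.current_ibase = alt_table;   /* e.g. switched for instrumentation */
        dispatch(&t, 0);
        return 0;
    }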
/art/runtime/interpreter/mterp/mips64/ |
D | footer.S |
    11 move a0, rSELF
    21 move a0, rSELF
    31 move a0, rSELF
    43 ld a0, THREAD_EXCEPTION_OFFSET(rSELF)
    54 move a0, rSELF
    86 move a0, rSELF
    97 move a0, rSELF
    112 move a0, rSELF
    137 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    139 move a0, rSELF
|
D | op_return_void_no_barrier.S |
    2 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    3 move a0, rSELF
|
D | op_return_void.S |
    4 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    5 move a0, rSELF
|
D | op_return.S |
    10 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    11 move a0, rSELF
|
D | op_return_wide.S |
    9 lw ra, THREAD_FLAGS_OFFSET(rSELF)
    10 move a0, rSELF
|
D | op_move_exception.S |
    3 ld a0, THREAD_EXCEPTION_OFFSET(rSELF) # load exception obj
    7 sd zero, THREAD_EXCEPTION_OFFSET(rSELF) # clear exception
|
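The mips64 op_move_exception.S fragment spells out its own semantics in its comments: load the pending exception object through rSELF, then clear the field. A corresponding C model, reusing the illustrative thread struct from the earlier sketches (field and function names are invented):

    #include <stdio.h>

    struct thread {
        void *exception;   /* field behind THREAD_EXCEPTION_OFFSET */
    };

    /* move-exception: hand the pending exception to the caller (a dalvik
     * register in the real interpreter) and clear it on the thread. */
    static void *move_exception(struct thread *self) {
        void *obj = self->exception;   /* ld a0, THREAD_EXCEPTION_OFFSET(rSELF) */
        self->exception = NULL;        /* sd zero, THREAD_EXCEPTION_OFFSET(rSELF) */
        return obj;
    }

    int main(void) {
        int dummy_exception = 0;
        struct thread t = { .exception = &dummy_exception };
        void *e = move_exception(&t);
        printf("moved %p, pending now %p\n", e, t.exception);
        return 0;
    }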
/art/runtime/interpreter/mterp/x86/ |
D | footer.S |
    18 movl rSELF, %eax
    29 movl rSELF, %eax
    40 movl rSELF, %eax
    51 movl rSELF, %eax
    62 movl rSELF, %eax
    73 movl rSELF, %eax
    84 movl rSELF, %eax
    100 movl rSELF, %eax
    111 movl rSELF, %eax
    254 movl rSELF, %eax
    [all …]
|
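Finally, every footer.S variant also reads THREAD_EXCEPTION_OFFSET through rSELF (x86 first reloads rSELF into %eax, since it lives in a stack slot there as well), which is consistent with a check for a pending exception before dispatch continues. A small C model of that branch, again with invented names:

    #include <stdio.h>

    struct thread { void *exception; };

    static void handle_exception(struct thread *self) {
        printf("unwinding, exception = %p\n", self->exception);
    }

    /* Footer-style check: if a helper left an exception on the thread,
     * divert to the exception path instead of fetching the next opcode. */
    static int check_and_continue(struct thread *self) {
        if (self->exception != NULL) {   /* load THREAD_EXCEPTION_OFFSET(rSELF) */
            handle_exception(self);
            return 0;                    /* stop the dispatch loop */
        }
        return 1;                        /* fetch and execute the next opcode */
    }

    int main(void) {
        struct thread t = { .exception = NULL };
        printf("continue = %d\n", check_and_continue(&t));
        return 0;
    }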