%def header():
/*
 * Copyright (C) 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * This is a #include, not a %include, because we want the C pre-processor
 * to expand the macros into assembler assignment statements.
 */
#include "asm_support.h"
#include "arch/x86/asm_support_x86.S"

/**
 * x86 ABI general notes:
 *
 * Caller save set:
 *    eax, ebx, edx, ecx, st(0)-st(7)
 * Callee save set:
 *    esi, edi, ebp
 * Return regs:
 *    32-bit in eax
 *    64-bit in edx:eax (low-order 32 in eax)
 *    fp on top of fp stack st(0)
 *
 * Stack must be 16-byte aligned to support SSE in native code.
 */

#define ARG3 %ebx
#define ARG2 %edx
#define ARG1 %ecx
#define ARG0 %eax

/*
 * single-purpose registers, given names for clarity
 */
#define rSELF    %fs
#define rPC      %esi
#define CFI_DEX  6  // DWARF register number of the register holding dex-pc (esi).
#define CFI_TMP  0  // DWARF register number of the first argument register (eax).
#define rFP      %edi
#define rINST    %ebx
#define rINSTw   %bx
#define rINSTbh  %bh
#define rINSTbl  %bl
#define rIBASE   %edx
#define rREFS    %ebp
#define CFI_REFS 5  // DWARF register number of the reference array (ebp).

// Temporary registers while setting up a frame.
#define rNEW_FP   %ecx
#define rNEW_REFS %eax
#define CFI_NEW_REFS 0

#define LOCAL0 4
#define LOCAL1 8
#define LOCAL2 12

/*
 * Get/set the 32-bit value from a Dalvik register.
 */
#define VREG_ADDRESS(_vreg) (rFP,_vreg,4)
#define VREG_HIGH_ADDRESS(_vreg) 4(rFP,_vreg,4)
#define VREG_REF_ADDRESS(_vreg) (rREFS,_vreg,4)
#define VREG_REF_HIGH_ADDRESS(_vreg) 4(rREFS,_vreg,4)

.macro GET_VREG _reg _vreg
    movl VREG_ADDRESS(\_vreg), \_reg
.endm

.macro GET_VREG_OBJECT _reg _vreg
    movl VREG_REF_ADDRESS(\_vreg), \_reg
.endm

/* Read wide value to xmm. */
.macro GET_WIDE_FP_VREG _reg _vreg
    movq VREG_ADDRESS(\_vreg), \_reg
.endm

.macro SET_VREG _reg _vreg
    movl \_reg, VREG_ADDRESS(\_vreg)
    movl MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
.endm

/* Write wide value from xmm. xmm is clobbered.
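 * For example, SET_WIDE_FP_VREG %xmm0, rINST stores the 8-byte value into the
 * dex register pair and then writes eight bytes of zero over the two matching
 * reference slots.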
 */
.macro SET_WIDE_FP_VREG _reg _vreg
    movq \_reg, VREG_ADDRESS(\_vreg)
    pxor \_reg, \_reg
    movq \_reg, VREG_REF_ADDRESS(\_vreg)
.endm

.macro SET_VREG_OBJECT _reg _vreg
    movl \_reg, VREG_ADDRESS(\_vreg)
    movl \_reg, VREG_REF_ADDRESS(\_vreg)
.endm

.macro GET_VREG_HIGH _reg _vreg
    movl VREG_HIGH_ADDRESS(\_vreg), \_reg
.endm

.macro SET_VREG_HIGH _reg _vreg
    movl \_reg, VREG_HIGH_ADDRESS(\_vreg)
    movl MACRO_LITERAL(0), VREG_REF_HIGH_ADDRESS(\_vreg)
.endm

.macro CLEAR_REF _vreg
    movl MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
.endm

.macro CLEAR_WIDE_REF _vreg
    movl MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
    movl MACRO_LITERAL(0), VREG_REF_HIGH_ADDRESS(\_vreg)
.endm

.macro GET_VREG_XMMs _xmmreg _vreg
    movss VREG_ADDRESS(\_vreg), \_xmmreg
.endm
.macro GET_VREG_XMMd _xmmreg _vreg
    movsd VREG_ADDRESS(\_vreg), \_xmmreg
.endm
.macro SET_VREG_XMMs _xmmreg _vreg
    movss \_xmmreg, VREG_ADDRESS(\_vreg)
.endm
.macro SET_VREG_XMMd _xmmreg _vreg
    movsd \_xmmreg, VREG_ADDRESS(\_vreg)
.endm

// Includes the return address implicitly pushed on stack by 'call'.
#define CALLEE_SAVES_SIZE (3 * 4 + 1 * 4)

#define PARAMETERS_SAVES_SIZE (4 * 4)

// +4 for the ArtMethod of the caller.
#define OFFSET_TO_FIRST_ARGUMENT_IN_STACK (CALLEE_SAVES_SIZE + PARAMETERS_SAVES_SIZE + 4)

/*
 * Refresh rINST.
 * On entry to a handler, rINST does not contain the opcode number.
 * However, some utilities require the full value, so this macro
 * restores the opcode number.
 */
.macro REFRESH_INST _opnum
    movb rINSTbl, rINSTbh
    movb $$\_opnum, rINSTbl
.endm

/*
 * Fetch the next instruction from rPC into rINSTw. Does not advance rPC.
 */
.macro FETCH_INST
    movzwl (rPC), rINST
.endm

.macro FETCH_INST_CLEAR_OPCODE
    movzbl 1(rPC), rINST
.endm

/*
 * Remove opcode from rINST, compute the address of handler and jump to it.
 */
.macro GOTO_NEXT
    movzx rINSTbl, %ecx
    movzbl rINSTbh, rINST
    shll MACRO_LITERAL(${handler_size_bits}), %ecx
    addl rIBASE, %ecx
    jmp *%ecx
.endm

/*
 * Advance rPC by instruction count.
 */
.macro ADVANCE_PC _count
    leal 2*\_count(rPC), rPC
.endm

/*
 * Advance rPC by instruction count, fetch instruction and jump to handler.
 */
.macro ADVANCE_PC_FETCH_AND_GOTO_NEXT _count
    ADVANCE_PC \_count
    FETCH_INST
    GOTO_NEXT
.endm

.macro NTERP_DEF_CFA cfi_reg
    CFI_DEF_CFA_BREG_PLUS_UCONST \cfi_reg, -4, CALLEE_SAVES_SIZE + PARAMETERS_SAVES_SIZE
.endm

.macro RESTORE_IBASE
    call 0f
0:
    popl rIBASE
    addl MACRO_LITERAL(SYMBOL(artNterpAsmInstructionStart) - 0b), rIBASE
.endm

.macro RESTORE_IBASE_WITH_CFA
    call 0f
0:
    CFI_ADJUST_CFA_OFFSET(4)
    popl rIBASE
    CFI_ADJUST_CFA_OFFSET(-4)
    addl MACRO_LITERAL(SYMBOL(artNterpAsmInstructionStart) - 0b), rIBASE
.endm

.macro SPILL_ALL_CORE_PARAMETERS
    PUSH_ARG eax
    PUSH_ARG ecx
    PUSH_ARG edx
    PUSH_ARG ebx
.endm

.macro RESTORE_ALL_CORE_PARAMETERS
    POP_ARG ebx
    POP_ARG edx
    POP_ARG ecx
    POP_ARG eax
.endm

.macro DROP_PARAMETERS_SAVES
    addl $$(PARAMETERS_SAVES_SIZE), %esp
.endm

.macro SAVE_WIDE_RETURN
    movl %edx, LOCAL2(%esp)
.endm

.macro LOAD_WIDE_RETURN reg
    movl LOCAL2(%esp), \reg
.endm

// An assembly entry for nterp.
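// OAT_ENTRY below 16-byte aligns the entry point and emits the usual symbol type,
// visibility and global directives; ENTRY/END do the same bookkeeping for regular
// text symbols.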
.macro OAT_ENTRY name
    FUNCTION_TYPE(\name)
    ASM_HIDDEN SYMBOL(\name)
    .global SYMBOL(\name)
    .balign 16
SYMBOL(\name):
.endm

.macro ENTRY name
    .text
    ASM_HIDDEN SYMBOL(\name)
    .global SYMBOL(\name)
    FUNCTION_TYPE(\name)
SYMBOL(\name):
.endm

.macro END name
    SIZE(\name)
.endm

// Macro for defining entrypoints into runtime. We don't need to save registers
// (we're not holding references there), but there is no
// kDontSave runtime method. So just use the kSaveRefsOnly runtime method.
.macro NTERP_TRAMPOLINE name, helper
DEFINE_FUNCTION \name
    movd %ebx, %xmm0
    SETUP_SAVE_REFS_ONLY_FRAME ebx
    movd %xmm0, %ebx
    PUSH_ARG ebx
    PUSH_ARG edx
    PUSH_ARG ecx
    PUSH_ARG eax
    call \helper
    DECREASE_FRAME 16
    RESTORE_IBASE_WITH_CFA
    FETCH_INST_CLEAR_OPCODE
    RESTORE_SAVE_REFS_ONLY_FRAME
    cmpl LITERAL(0), %fs:THREAD_EXCEPTION_OFFSET
    jne nterp_deliver_pending_exception
    ret
END_FUNCTION \name
.endm

.macro CLEAR_VOLATILE_MARKER reg
    andl MACRO_LITERAL(-2), \reg
.endm

.macro EXPORT_PC
    movl rPC, -8(rREFS)
.endm

.macro FETCH_PC
    movl -8(rREFS), rPC
.endm


.macro BRANCH
    leal (rPC, rINST, 2), rPC
    // Update method counter and do a suspend check if the branch is negative or zero.
    testl rINST, rINST
    jle 3f
2:
    FETCH_INST
    GOTO_NEXT
3:
    movl (%esp), %eax
    movzwl ART_METHOD_HOTNESS_COUNT_OFFSET(%eax), %ecx
#if (NTERP_HOTNESS_VALUE != 0)
#error Expected 0 for hotness value
#endif
    // If the counter is at zero, handle this in the runtime.
    testw %cx, %cx
    je NterpHandleHotnessOverflow
    // Update counter.
    addl $$-1, %ecx
    movw %cx, ART_METHOD_HOTNESS_COUNT_OFFSET(%eax)
    DO_SUSPEND_CHECK continue_label=2b
.endm

// Expects:
// - edx and eax to be available.
// Outputs:
// - \registers contains the dex registers size
// - \outs contains the outs size
// - if load_ins is 1, \ins contains the ins
// - \code_item is replaced with a pointer to the instructions
.macro FETCH_CODE_ITEM_INFO code_item, registers, outs, ins, load_ins
    testl MACRO_LITERAL(1), \code_item
    je 5f
    andl $$-2, \code_item  // Remove the extra bit that marks it as a compact dex file.
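    // The compact code item packs the registers/outs/ins sizes as 4-bit fields of a
    // single 16-bit word; each is extracted below with a shift and a 0xf mask, and the
    // optional extension halfwords stored before the header are added in when the
    // corresponding flag is set.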
    movzwl COMPACT_CODE_ITEM_FIELDS_OFFSET(\code_item), %edx
    movl %edx, \registers
    sarl $$COMPACT_CODE_ITEM_REGISTERS_SIZE_SHIFT, \registers
    andl $$0xf, \registers
    movl %edx, \outs
    sarl $$COMPACT_CODE_ITEM_OUTS_SIZE_SHIFT, \outs
    andl $$0xf, \outs
    .if \load_ins
    movl %edx, \ins
    sarl $$COMPACT_CODE_ITEM_INS_SIZE_SHIFT, \ins
    andl $$0xf, \ins
    .else
    movl %edx, %eax
    sarl $$COMPACT_CODE_ITEM_INS_SIZE_SHIFT, %eax
    andl $$0xf, %eax
    addl %eax, \registers
    .endif
    testw $$COMPACT_CODE_ITEM_REGISTERS_INS_OUTS_FLAGS, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
    je 4f
    movl \code_item, %eax
    testw $$COMPACT_CODE_ITEM_INSNS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
    je 1f
    subl $$4, %eax
1:
    testw $$COMPACT_CODE_ITEM_REGISTERS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
    je 2f
    subl $$2, %eax
    movzwl (%eax), %edx
    addl %edx, \registers
2:
    testw $$COMPACT_CODE_ITEM_INS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
    je 3f
    subl $$2, %eax
    movzwl (%eax), %edx
    .if \load_ins
    addl %edx, \ins
    .else
    addl %edx, \registers
    .endif
3:
    testw $$COMPACT_CODE_ITEM_OUTS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
    je 4f
    subl $$2, %eax
    movzwl (%eax), %edx
    addl %edx, \outs
4:
    .if \load_ins
    addl \ins, \registers
    .endif
    addl $$COMPACT_CODE_ITEM_INSNS_OFFSET, \code_item
    jmp 6f
5:
    // Fetch dex register size.
    movzwl CODE_ITEM_REGISTERS_SIZE_OFFSET(\code_item), \registers
    // Fetch outs size.
    movzwl CODE_ITEM_OUTS_SIZE_OFFSET(\code_item), \outs
    .if \load_ins
    movzwl CODE_ITEM_INS_SIZE_OFFSET(\code_item), \ins
    .endif
    addl $$CODE_ITEM_INSNS_OFFSET, \code_item
6:
.endm

// Setup the stack to start executing the method. Expects:
// - eax, edx, and ebx to be available.
//
// Inputs
// - code_item: where the code item is
// - refs: register where the pointer to dex references will be
// - fp: register where the pointer to dex values will be
// - cfi_refs: CFI register number of refs
// - load_ins: whether to store the 'ins' value of the code item in esi
//
// Outputs
// - ebx contains the dex registers size
// - edx contains the old stack pointer.
// - \code_item is replaced with a pointer to the instructions
// - if load_ins is 1, esi contains the ins
.macro SETUP_STACK_FRAME code_item, refs, fp, cfi_refs, load_ins
    FETCH_CODE_ITEM_INFO \code_item, %ebx, \refs, %esi, \load_ins

    movl $$3, %eax
    cmpl $$2, \refs
    cmovle %eax, \refs

    // Compute required frame size for dex registers: ((2 * ebx) + refs)
    leal (\refs, %ebx, 2), %edx
    sall $$2, %edx

    // Compute new stack pointer in fp: add 12 for saving the previous frame,
    // pc, and method being executed.
    leal -12(%esp), \fp
    subl %edx, \fp
    // Alignment
    andl $$-16, \fp

    // Now setup the stack pointer.
    movl %esp, %edx
    CFI_DEF_CFA_REGISTER(edx)
    movl \fp, %esp

    leal 12(%esp, \refs, 4), \refs
    leal (\refs, %ebx, 4), \fp

    // Save old stack pointer.
    movl %edx, -4(\refs)
    NTERP_DEF_CFA \cfi_refs

    // Save ArtMethod.
    movl 12(%edx), %eax
    movl %eax, (%esp)

    // Put nulls in reference frame.
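    // ebx holds the number of dex registers; zero every slot from \refs up to \fp
    // (the reference array immediately precedes the dex register array).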
    testl %ebx, %ebx
    je 2f
    movl \refs, %eax
1:
    movl $$0, (%eax)
    addl $$4, %eax
    cmpl %eax, \fp
    jne 1b
2:
.endm

// Puts the next floating point argument into the expected register,
// fetching values based on a non-range invoke.
// Uses eax as temporary.
//
// TODO: We could simplify a lot of code by loading the G argument into
// the "inst" register. Given that we enter the handler with "1(rPC)" in
// the rINST, we can just add rINST<<16 to the args and we don't even
// need to pass "arg_index" around.
.macro LOOP_OVER_SHORTY_LOADING_XMMS xmm_reg, inst, shorty, arg_index, finished
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto FOUND_DOUBLE
    je 2f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto FOUND_FLOAT
    je 3f
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    // Handle extra argument in arg array taken by a long.
    cmpb MACRO_LITERAL(74), %al  // if (al != 'J') goto LOOP
    jne 1b
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 1b  // goto LOOP
2: // FOUND_DOUBLE
    subl MACRO_LITERAL(8), %esp
    movl REG_VAR(inst), %eax
    andl MACRO_LITERAL(0xf), %eax
    GET_VREG %eax, %eax
    movl %eax, (%esp)
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
    je 5f
    movl REG_VAR(inst), %eax
    andl MACRO_LITERAL(0xf), %eax
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 6f
5:
    movzbl 1(rPC), %eax
    andl MACRO_LITERAL(0xf), %eax
6:
    GET_VREG %eax, %eax
    movl %eax, 4(%esp)
    movq (%esp), REG_VAR(xmm_reg)
    addl MACRO_LITERAL(8), %esp
    jmp 4f
3: // FOUND_FLOAT
    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
    je 7f
    movl REG_VAR(inst), %eax
    andl MACRO_LITERAL(0xf), %eax
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 8f
7:
    movzbl 1(rPC), %eax
    andl MACRO_LITERAL(0xf), %eax
8:
    GET_VREG_XMMs REG_VAR(xmm_reg), %eax
4:
.endm

// Puts the next int/long/object argument in the expected register,
// fetching values based on a non-range invoke.
// Uses eax as temporary.
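// The shorty is scanned one character at a time: 'J' (long) loads a register pair,
// 'F' and 'D' are skipped here (they were already handled by the XMM loop above),
// and any other type is loaded as a single 32-bit value.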
.macro LOOP_OVER_SHORTY_LOADING_GPRS gpr_reg, gpr_long_reg, inst, shorty, arg_index, finished, if_long, is_ebx
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(74), %al  // if (al == 'J') goto FOUND_LONG
    je 2f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto SKIP_FLOAT
    je 3f
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto SKIP_DOUBLE
    je 4f
    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
    je 7f
    movl REG_VAR(inst), %eax
    andl MACRO_LITERAL(0xf), %eax
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 8f
7:
    // Fetch PC
    movl LOCAL1(%esp), %eax
    movl -8(%eax), %eax
    movzbl 1(%eax), %eax
    andl MACRO_LITERAL(0xf), %eax
8:
    GET_VREG REG_VAR(gpr_reg), %eax
    jmp 5f
2: // FOUND_LONG
    .if \is_ebx
    // Put back shorty and exit
    subl MACRO_LITERAL(1), REG_VAR(shorty)
    jmp 5f
    .else
    movl REG_VAR(inst), %eax
    andl MACRO_LITERAL(0xf), %eax
    GET_VREG REG_VAR(gpr_reg), %eax
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
    je 9f
    movl REG_VAR(inst), %eax
    andl MACRO_LITERAL(0xf), %eax
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 10f
9:
    // Fetch PC
    movl LOCAL1(%esp), %eax
    movl -8(%eax), %eax
    movzbl 1(%eax), %eax
    andl MACRO_LITERAL(0xf), %eax
10:
    GET_VREG REG_VAR(gpr_long_reg), %eax
    jmp \if_long
    .endif
3: // SKIP_FLOAT
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 1b
4: // SKIP_DOUBLE
    shrl MACRO_LITERAL(8), REG_VAR(inst)
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    jmp 1b
5:
.endm

// Puts the next int/long/object argument in the expected stack slot,
// fetching values based on a non-range invoke.
// Uses eax as temporary.
.macro LOOP_OVER_SHORTY_LOADING_INTS stack_offset, shorty, inst, arg_index, finished, is_string_init
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(74), %al  // if (al == 'J') goto FOUND_LONG
    je 2f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto SKIP_FLOAT
    je 3f
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto SKIP_DOUBLE
    je 4f
    .if \is_string_init
    cmpl MACRO_LITERAL(3), REG_VAR(arg_index)
    .else
    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
    .endif
    je 7f
    movl REG_VAR(inst), %eax
    andl MACRO_LITERAL(0xf), %eax
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    jmp 8f
7:
    // Fetch PC.
    movl (LOCAL1 + \stack_offset)(%esp), %eax
    movl -8(%eax), %eax
    movzbl 1(%eax), %eax
    andl MACRO_LITERAL(0xf), %eax
8:
    GET_VREG %eax, %eax
    // Add 4 for the ArtMethod.
    movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    .if \is_string_init
    cmpl MACRO_LITERAL(3), REG_VAR(arg_index)
    .else
    cmpl MACRO_LITERAL(4), REG_VAR(arg_index)
    .endif
    je 9f
    movl REG_VAR(inst), %eax
    andl MACRO_LITERAL(0xf), %eax
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    jmp 10f
9:
    // Fetch PC.
    movl (LOCAL1 + \stack_offset)(%esp), %eax
    movl -8(%eax), %eax
    movzbl 1(%eax), %eax
    andl MACRO_LITERAL(0xf), %eax
10:
    GET_VREG %eax, %eax
    // +4 for the ArtMethod.
    movl %eax, (4 + \stack_offset)(%esp, REG_VAR(arg_index), 4)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 1b
3: // SKIP_FLOAT
    shrl MACRO_LITERAL(4), REG_VAR(inst)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 1b
4: // SKIP_DOUBLE
    shrl MACRO_LITERAL(8), REG_VAR(inst)
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    jmp 1b
.endm

// Puts the next floating point argument into the expected register,
// fetching values based on a range invoke.
// Uses eax as temporary.
.macro LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm_reg, shorty, arg_index, stack_index, finished
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto FOUND_DOUBLE
    je 2f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto FOUND_FLOAT
    je 3f
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    addl MACRO_LITERAL(1), REG_VAR(stack_index)
    // Handle extra argument in arg array taken by a long.
    cmpb MACRO_LITERAL(74), %al  // if (al != 'J') goto LOOP
    jne 1b
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    addl MACRO_LITERAL(1), REG_VAR(stack_index)
    jmp 1b  // goto LOOP
2: // FOUND_DOUBLE
    GET_VREG_XMMd REG_VAR(xmm_reg), REG_VAR(arg_index)
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    addl MACRO_LITERAL(2), REG_VAR(stack_index)
    jmp 4f
3: // FOUND_FLOAT
    GET_VREG_XMMs REG_VAR(xmm_reg), REG_VAR(arg_index)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    add MACRO_LITERAL(1), REG_VAR(stack_index)
4:
.endm

// Puts the next floating point argument into the expected stack slot,
// fetching values based on a range invoke.
// Uses eax as temporary.
//
// TODO: We could just copy all the vregs to the stack slots in a simple loop
// (or REP MOVSD) without looking at the shorty at all. (We could also drop
// the "stack_index" from the macros for loading registers.) We could also do
// that conditionally if argument word count > 3; otherwise we know that all
// args fit into registers.
.macro LOOP_RANGE_OVER_FPs shorty, arg_index, stack_index, finished
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto FOUND_DOUBLE
    je 2f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto FOUND_FLOAT
    je 3f
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    addl MACRO_LITERAL(1), REG_VAR(stack_index)
    // Handle extra argument in arg array taken by a long.
    cmpb MACRO_LITERAL(74), %al  // if (al != 'J') goto LOOP
    jne 1b
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    addl MACRO_LITERAL(1), REG_VAR(stack_index)
    jmp 1b  // goto LOOP
2: // FOUND_DOUBLE
    movq (rFP, REG_VAR(arg_index), 4), %xmm4
    movq %xmm4, 4(%esp, REG_VAR(stack_index), 4)
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    addl MACRO_LITERAL(2), REG_VAR(stack_index)
    jmp 1b
3: // FOUND_FLOAT
    movl (rFP, REG_VAR(arg_index), 4), %eax
    movl %eax, 4(%esp, REG_VAR(stack_index), 4)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    addl MACRO_LITERAL(1), REG_VAR(stack_index)
    jmp 1b
.endm

// Puts the next int/long/object argument in the expected register,
// fetching values based on a range invoke.
// Uses eax as temporary.
.macro LOOP_RANGE_OVER_SHORTY_LOADING_GPRS gpr_reg, gpr_long_reg, shorty, arg_index, stack_index, finished, if_long, is_ebx
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(74), %al  // if (al == 'J') goto FOUND_LONG
    je 2f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto SKIP_FLOAT
    je 3f
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto SKIP_DOUBLE
    je 4f
    movl (rFP, REG_VAR(arg_index), 4), REG_VAR(gpr_reg)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    addl MACRO_LITERAL(1), REG_VAR(stack_index)
    jmp 5f
2: // FOUND_LONG
    .if \is_ebx
    // Put back shorty and exit
    subl MACRO_LITERAL(1), REG_VAR(shorty)
    .else
    movl (rFP, REG_VAR(arg_index), 4), REG_VAR(gpr_reg)
    movl 4(rFP, REG_VAR(arg_index), 4), REG_VAR(gpr_long_reg)
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    addl MACRO_LITERAL(2), REG_VAR(stack_index)
    .endif
    jmp \if_long
3: // SKIP_FLOAT
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    addl MACRO_LITERAL(1), REG_VAR(stack_index)
    jmp 1b
4: // SKIP_DOUBLE
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    addl MACRO_LITERAL(2), REG_VAR(stack_index)
    jmp 1b
5:
.endm

// Puts the next int/long/object argument in the expected stack slot,
// fetching values based on a range invoke.
// Uses eax as temporary.
.macro LOOP_RANGE_OVER_INTs offset, shorty, arg_index, stack_index, finished
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(74), %al  // if (al == 'J') goto FOUND_LONG
    je 2f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto SKIP_FLOAT
    je 3f
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto SKIP_DOUBLE
    je 4f
    movl (rFP, REG_VAR(arg_index), 4), %eax
    // Add 4 for the ArtMethod.
    movl %eax, (4 + \offset)(%esp, REG_VAR(stack_index), 4)
3: // SKIP_FLOAT
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    addl MACRO_LITERAL(1), REG_VAR(stack_index)
    jmp 1b
2: // FOUND_LONG
    movl (rFP, REG_VAR(arg_index), 4), %eax
    // Add 4 for the ArtMethod.
    movl %eax, (4 + \offset)(%esp, REG_VAR(stack_index), 4)
    movl 4(rFP, REG_VAR(arg_index), 4), %eax
    // Add 4 for the ArtMethod and 4 for other half.
    movl %eax, (4 + 4 + \offset)(%esp, REG_VAR(stack_index), 4)
4: // SKIP_DOUBLE
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    addl MACRO_LITERAL(2), REG_VAR(stack_index)
    jmp 1b
.endm

// Puts the next floating point parameter passed in physical register
// in the expected dex register array entry.
// Uses eax as temporary.
.macro LOOP_OVER_SHORTY_STORING_XMMS xmm_reg, shorty, arg_index, fp, finished
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto FOUND_DOUBLE
    je 2f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto FOUND_FLOAT
    je 3f
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    // Handle extra argument in arg array taken by a long.
    cmpb MACRO_LITERAL(74), %al  // if (al != 'J') goto LOOP
    jne 1b
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 1b  // goto LOOP
2: // FOUND_DOUBLE
    movq REG_VAR(xmm_reg), (REG_VAR(fp), REG_VAR(arg_index), 4)
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    jmp 4f
3: // FOUND_FLOAT
    movss REG_VAR(xmm_reg), (REG_VAR(fp), REG_VAR(arg_index), 4)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
4:
.endm

// Puts the next int/long/object parameter passed in physical register
// in the expected dex register array entry, and in case of object in the
// expected reference array entry.
// Uses eax as temporary.
.macro LOOP_OVER_SHORTY_STORING_GPRS offset, offset_long, stack_ptr, shorty, arg_index, regs, refs, finished, if_long, is_ebx
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(74), %al  // if (al == 'J') goto FOUND_LONG
    je 2f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto SKIP_FLOAT
    je 3f
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto SKIP_DOUBLE
    je 4f
    cmpb MACRO_LITERAL(76), %al  // if (al != 'L') goto NOT_REFERENCE
    jne 6f
    movl \offset(REG_VAR(stack_ptr)), %eax
    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
    movl %eax, (REG_VAR(refs), REG_VAR(arg_index), 4)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 5f
2: // FOUND_LONG
    .if \is_ebx
    // Put back shorty and jump to \if_long
    subl MACRO_LITERAL(1), REG_VAR(shorty)
    .else
    movl \offset(REG_VAR(stack_ptr)), %eax
    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
    movl \offset_long(REG_VAR(stack_ptr)), %eax
    movl %eax, 4(REG_VAR(regs), REG_VAR(arg_index), 4)
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    .endif
    jmp \if_long
3: // SKIP_FLOAT
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 1b
4: // SKIP_DOUBLE
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    jmp 1b
6: // NOT_REFERENCE
    movl \offset(REG_VAR(stack_ptr)), %eax
    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
5:
.endm

// Puts the next floating point parameter passed in stack
// in the expected dex register array entry.
// Uses eax as temporary.
//
// TODO: Or we could just spill regs to the reserved slots in the caller's
// frame and copy all regs in a simple loop. This time, however, we would
// need to look at the shorty anyway to look for the references.
// (The trade-off is different for passing arguments and receiving them.)
.macro LOOP_OVER_FPs shorty, arg_index, regs, stack_ptr, finished
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto FOUND_DOUBLE
    je 2f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto FOUND_FLOAT
    je 3f
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    // Handle extra argument in arg array taken by a long.
    cmpb MACRO_LITERAL(74), %al  // if (al != 'J') goto LOOP
    jne 1b
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 1b  // goto LOOP
2: // FOUND_DOUBLE
    movq OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %xmm4
    movq %xmm4, (REG_VAR(regs), REG_VAR(arg_index), 4)
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    jmp 1b
3: // FOUND_FLOAT
    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 1b
.endm

// Puts the next int/long/object parameter passed in stack
// in the expected dex register array entry, and in case of object in the
// expected reference array entry.
// Uses eax as temporary.
.macro LOOP_OVER_INTs shorty, arg_index, regs, refs, stack_ptr, finished
1: // LOOP
    movb (REG_VAR(shorty)), %al  // al := *shorty
    addl MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
    cmpb MACRO_LITERAL(0), %al  // if (al == '\0') goto finished
    je VAR(finished)
    cmpb MACRO_LITERAL(74), %al  // if (al == 'J') goto FOUND_LONG
    je 2f
    cmpb MACRO_LITERAL(76), %al  // if (al == 'L') goto FOUND_REFERENCE
    je 6f
    cmpb MACRO_LITERAL(70), %al  // if (al == 'F') goto SKIP_FLOAT
    je 3f
    cmpb MACRO_LITERAL(68), %al  // if (al == 'D') goto SKIP_DOUBLE
    je 4f
    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 1b
6: // FOUND_REFERENCE
    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
    movl %eax, (REG_VAR(refs), REG_VAR(arg_index), 4)
3: // SKIP_FLOAT
    addl MACRO_LITERAL(1), REG_VAR(arg_index)
    jmp 1b
2: // FOUND_LONG
    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
    movl (OFFSET_TO_FIRST_ARGUMENT_IN_STACK+4)(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
    movl %eax, 4(REG_VAR(regs), REG_VAR(arg_index), 4)
4: // SKIP_DOUBLE
    addl MACRO_LITERAL(2), REG_VAR(arg_index)
    jmp 1b
.endm

// Increase method hotness and do a suspend check before starting to execute the method.
.macro START_EXECUTING_INSTRUCTIONS
    movl (%esp), %eax
    movzwl ART_METHOD_HOTNESS_COUNT_OFFSET(%eax), %ecx
#if (NTERP_HOTNESS_VALUE != 0)
#error Expected 0 for hotness value
#endif
    // If the counter is at zero, handle this in the runtime.
    testl %ecx, %ecx
    je 3f
    // Update counter.
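    // Non-zero counter: decrement it and fall through; the zero case was sent to
    // label 3 above, where the method is reported to the runtime.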
    addl $$-1, %ecx
    movw %cx, ART_METHOD_HOTNESS_COUNT_OFFSET(%eax)
1:
    testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), rSELF:THREAD_FLAGS_OFFSET
    jz 2f
    EXPORT_PC
    call SYMBOL(art_quick_test_suspend)
    RESTORE_IBASE
2:
    FETCH_INST
    GOTO_NEXT
3:
    CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot=4f, if_not_hot=1b
4:
    movl $$0, ARG1
    movl rFP, ARG2
    call nterp_hot_method
    jmp 2b
.endm

.macro SPILL_ALL_CALLEE_SAVES
    PUSH edi
    PUSH esi
    PUSH ebp
.endm

.macro RESTORE_ALL_CALLEE_SAVES
    POP ebp
    POP esi
    POP edi
.endm

.macro GET_SHORTY dest, is_interface, is_polymorphic, is_custom
    // Save eax (ArtMethod), ecx (potential this).
    push %eax
    push %ecx
    .if \is_polymorphic
    push rPC
    push 12(%esp)
    call SYMBOL(NterpGetShortyFromInvokePolymorphic)
    addl MACRO_LITERAL(8), %esp
    .elseif \is_custom
    push rPC
    push 12(%esp)
    call SYMBOL(NterpGetShortyFromInvokeCustom)
    addl MACRO_LITERAL(8), %esp
    .elseif \is_interface
    subl MACRO_LITERAL(16), %esp
    // Save interface method.
    movss %xmm7, (%esp)
    movzwl 2(rPC), %eax
    pushl %eax
    // Caller is at 8 (saved ArtMethod + ecx) + 16 + 4 (second argument)
    pushl 28(%esp)
    call SYMBOL(NterpGetShortyFromMethodId)
    // Restore interface method.
    movss 8(%esp), %xmm7
    addl MACRO_LITERAL(24), %esp
    .else
    subl MACRO_LITERAL(4), %esp  // Alignment
    push %eax
    call SYMBOL(NterpGetShorty)
    addl MACRO_LITERAL(8), %esp
    .endif
    movl %eax, \dest
    pop %ecx
    pop %eax
.endm

.macro GET_SHORTY_SLOW_PATH dest, is_interface
    // Save all registers that can hold arguments in the fast path.
    pushl %eax
    pushl %ecx
    pushl %edx
    subl MACRO_LITERAL(4), %esp
    movss %xmm0, (%esp)
    .if \is_interface
    // Alignment.
    subl MACRO_LITERAL(8), %esp
    movzwl 2(rPC), %eax
    pushl %eax
    // Caller is at 16 (parameters) + 8 (alignment) + 4 (second argument).
    pushl 28(%esp)
    call SYMBOL(NterpGetShortyFromMethodId)
    movl %eax, \dest
    movss 16(%esp), %xmm0
    addl MACRO_LITERAL(20), %esp
    .else
    // Alignment.
    subl MACRO_LITERAL(12), %esp
    pushl %eax
    call SYMBOL(NterpGetShorty)
    movl %eax, \dest
    movss 16(%esp), %xmm0
    addl MACRO_LITERAL(20), %esp
    .endif
    popl %edx
    popl %ecx
    popl %eax
.endm

// Uses ecx and edx as temporaries.
.macro UPDATE_REGISTERS_FOR_STRING_INIT old_value, new_value
    movl rREFS, %edx
    movl rFP, %ecx
1:
    cmpl (%edx), \old_value
    jne 2f
    movl \new_value, (%edx)
    movl \new_value, (%ecx)
2:
    addl $$4, %edx
    addl $$4, %ecx
    cmpl %edx, rFP
    jne 1b
.endm

.macro DO_CALL is_polymorphic, is_custom
    .if \is_polymorphic
    call SYMBOL(art_quick_invoke_polymorphic)
    .elseif \is_custom
    call SYMBOL(art_quick_invoke_custom)
    .else
    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)
    .endif
.endm

.macro COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, suffix="", is_string_init=0, is_polymorphic=0, is_custom=0
    .if \is_polymorphic
    // No fast path for polymorphic calls.
    .elseif \is_custom
    // No fast path for custom calls.
    .elseif \is_string_init
    // No fast path for string.init.
    .else
    testl $$ART_METHOD_NTERP_INVOKE_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
    je .Lfast_path_with_few_args_\suffix
    movzbl 1(rPC), %edx
    movl %edx, %ebx
    shrl MACRO_LITERAL(4), %ebx  # Number of arguments
    .if \is_static
    jz .Linvoke_fast_path_\suffix  # shrl sets the Z flag
    .else
    cmpl MACRO_LITERAL(1), %ebx
    je .Linvoke_fast_path_\suffix
    .endif
    movzwl 4(rPC), %esi
    cmpl MACRO_LITERAL(2), %ebx
    .if \is_static
    jl .Lone_arg_fast_path_\suffix
    .endif
    je .Ltwo_args_fast_path_\suffix
    cmpl MACRO_LITERAL(4), %ebx
    jl .Lthree_args_fast_path_\suffix
    je .Lfour_args_fast_path_\suffix

    andl MACRO_LITERAL(0xf), %edx
    GET_VREG %edx, %edx
    movl %edx, (4 + 4 * 4)(%esp)
.Lfour_args_fast_path_\suffix:
    movl %esi, %edx
    shrl MACRO_LITERAL(12), %edx
    GET_VREG %edx, %edx
    movl %edx, (4 + 3 * 4)(%esp)
.Lthree_args_fast_path_\suffix:
    movl %esi, %ebx
    shrl MACRO_LITERAL(8), %ebx
    andl MACRO_LITERAL(0xf), %ebx
    GET_VREG %ebx, %ebx
.Ltwo_args_fast_path_\suffix:
    movl %esi, %edx
    shrl MACRO_LITERAL(4), %edx
    andl MACRO_LITERAL(0xf), %edx
    GET_VREG %edx, %edx
.Lone_arg_fast_path_\suffix:
    .if \is_static
    andl MACRO_LITERAL(0xf), %esi
    GET_VREG %ecx, %esi
    .else
    // First argument already in %ecx.
    .endif
.Linvoke_fast_path_\suffix:
    // Fetch PC before calling for proper stack unwinding.
    FETCH_PC
    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)  // Call the method.
    // In case of a long return, save the high half into LOCAL2.
    SAVE_WIDE_RETURN
    RESTORE_IBASE
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3

.Lfast_path_with_few_args_\suffix:
    // Fast path when we have zero or one argument (modulo 'this'). If there
    // is one argument, we can put it in both floating point and core register.
    movzbl 1(rPC), %edx
    shrl MACRO_LITERAL(4), %edx  # Number of arguments
    .if \is_static
    cmpl MACRO_LITERAL(1), %edx
    jl .Linvoke_with_few_args_\suffix
    jne .Lget_shorty_\suffix
    movzwl 4(rPC), %ecx
    andl MACRO_LITERAL(0xf), %ecx  // dex register of first argument
    GET_VREG %ecx, %ecx
    movd %ecx, %xmm0
    .else
    cmpl MACRO_LITERAL(2), %edx
    jl .Linvoke_with_few_args_\suffix
    jne .Lget_shorty_\suffix
    movzwl 4(rPC), %edx
    shrl MACRO_LITERAL(4), %edx
    andl MACRO_LITERAL(0xf), %edx  // dex register of second argument
    GET_VREG %edx, %edx
    movd %edx, %xmm0
    .endif
.Linvoke_with_few_args_\suffix:
    // Check if the next instruction is move-result or move-result-wide.
    // If it is, we fetch the shorty and jump to the regular invocation.
    movzwl 6(rPC), %ebx
    andl MACRO_LITERAL(0xfe), %ebx
    cmpl MACRO_LITERAL(0x0a), %ebx
    je .Lget_shorty_and_invoke_\suffix
    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)  // Call the method.
    RESTORE_IBASE
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
.Lget_shorty_and_invoke_\suffix:
    GET_SHORTY_SLOW_PATH %esi, \is_interface
    jmp .Lgpr_setup_finished_\suffix
    .endif

.Lget_shorty_\suffix:
    GET_SHORTY %ebx, \is_interface, \is_polymorphic, \is_custom
    movl %eax, LOCAL0(%esp)
    movl %ebp, LOCAL1(%esp)
    movl %ebx, LOCAL2(%esp)
    // From this point:
    // - ebx contains shorty (in callee-save to switch over return value after call).
    // - eax, edx, and ebp are available.
    // - ecx contains 'this' pointer for instance method.
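    // Shorty characters are matched by ASCII value in the loops below:
    // 68 = 'D', 70 = 'F', 74 = 'J', 76 = 'L'.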
    // TODO: ebp/rREFS is used for stack unwinding, can we find a way to preserve it?
    leal 1(%ebx), %edx  // shorty + 1 ; ie skip return arg character
    movzwl 4(rPC), %ebx  // arguments
    .if \is_string_init
    shrl MACRO_LITERAL(4), %ebx
    movl $$1, %ebp  // ignore first argument
    .elseif \is_static
    movl $$0, %ebp  // arg_index
    .else
    shrl MACRO_LITERAL(4), %ebx
    movl $$1, %ebp  // arg_index
    .endif
    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, ebx, edx, ebp, .Lxmm_setup_finished_\suffix
    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, ebx, edx, ebp, .Lxmm_setup_finished_\suffix
    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, ebx, edx, ebp, .Lxmm_setup_finished_\suffix
    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, ebx, edx, ebp, .Lxmm_setup_finished_\suffix
    // We know this can only be a float.
    movb (%edx), %al  // al := *shorty
    cmpb MACRO_LITERAL(70), %al  // if (al != 'F') goto finished
    jne .Lxmm_setup_finished_\suffix
    movzbl 1(rPC), %eax
    andl MACRO_LITERAL(0xf), %eax
    GET_VREG %eax, %eax
    // Add four for the ArtMethod.
    movl %eax, 4(%esp, %ebp, 4)
    // We know there is no more argument, jump to the call.
    jmp .Lrestore_saved_values_\suffix
.Lxmm_setup_finished_\suffix:
    // Reload rREFS for fetching the PC.
    movl LOCAL1(%esp), %ebp
    // Reload shorty
    movl LOCAL2(%esp), %ebx
    FETCH_PC
    leal 1(%ebx), %ebx  // shorty + 1 ; ie skip return arg character
    movzwl 4(rPC), %esi  // arguments
    .if \is_string_init
    movl $$0, %ebp  // arg_index
    shrl MACRO_LITERAL(4), %esi
    LOOP_OVER_SHORTY_LOADING_GPRS ecx, edx, esi, ebx, ebp, .Lrestore_saved_values_\suffix, .Lif_long_ebx_\suffix, is_ebx=0
    .elseif \is_static
    movl $$0, %ebp  // arg_index
    LOOP_OVER_SHORTY_LOADING_GPRS ecx, edx, esi, ebx, ebp, .Lrestore_saved_values_\suffix, .Lif_long_ebx_\suffix, is_ebx=0
    .else
    shrl MACRO_LITERAL(4), %esi
    movl $$1, %ebp  // arg_index
    .endif
    // For long argument, store second half in eax to not overwrite the shorty.
    LOOP_OVER_SHORTY_LOADING_GPRS edx, eax, esi, ebx, ebp, .Lrestore_saved_values_\suffix, .Lif_long_\suffix, is_ebx=0
.Lif_long_ebx_\suffix:
    // Store in eax to not overwrite the shorty.
    LOOP_OVER_SHORTY_LOADING_GPRS eax, eax, esi, ebx, ebp, .Lrestore_saved_values_\suffix, .Lif_long_\suffix, is_ebx=1
.Lif_long_\suffix:
    // Save shorty, as LOOP_OVER_SHORTY_LOADING_INTS might overwrite the LOCAL2 slot for a long argument.
    pushl LOCAL2(%esp)
    pushl %eax
    LOOP_OVER_SHORTY_LOADING_INTS 8, ebx, esi, ebp, .Lrestore_ebx_\suffix, \is_string_init
.Lrestore_ebx_\suffix:
    popl %ebx
    popl %esi
    movl LOCAL0(%esp), %eax
    movl LOCAL1(%esp), %ebp
    jmp .Lgpr_setup_finished_\suffix
.Lrestore_saved_values_\suffix:
    movl LOCAL0(%esp), %eax
    movl LOCAL1(%esp), %ebp
    movl LOCAL2(%esp), %esi
.Lgpr_setup_finished_\suffix:
    // Look at the shorty now, as we'll want %esi to have the PC for proper stack unwinding
    // and we're running out of callee-save registers.
    cmpb LITERAL(68), (%esi)  // Test if result type char == 'D'.
    je .Linvoke_double_\suffix
    cmpb LITERAL(70), (%esi)  // Test if result type char == 'F'.
    je .Linvoke_float_\suffix
    FETCH_PC
    DO_CALL \is_polymorphic, \is_custom
    SAVE_WIDE_RETURN
.Ldone_return_\suffix:
    /* resume execution of caller */
    .if \is_string_init
    movzwl 4(rPC), %ecx  // arguments
    andl $$0xf, %ecx
    GET_VREG rINST, %ecx
    UPDATE_REGISTERS_FOR_STRING_INIT rINST, %eax
    .endif
    RESTORE_IBASE

    .if \is_polymorphic
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 4
    .else
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
    .endif

.Linvoke_double_\suffix:
    FETCH_PC
    DO_CALL \is_polymorphic, \is_custom
    movq %xmm0, LOCAL1(%esp)
    movl LOCAL1(%esp), %eax
    jmp .Ldone_return_\suffix
.Linvoke_float_\suffix:
    FETCH_PC
    DO_CALL \is_polymorphic, \is_custom
    movd %xmm0, %eax
    jmp .Ldone_return_\suffix
.endm

.macro COMMON_INVOKE_RANGE is_static=0, is_interface=0, suffix="", is_string_init=0, is_polymorphic=0, is_custom=0
    .if \is_polymorphic
    // No fast path for polymorphic calls.
    .elseif \is_custom
    // No fast path for custom calls.
    .elseif \is_string_init
    // No fast path for string.init.
    .else
    testl $$ART_METHOD_NTERP_INVOKE_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
    je .Lfast_path_with_few_args_range_\suffix
    movzbl 1(rPC), %edx  // number of arguments
    .if \is_static
    testl %edx, %edx
    je .Linvoke_fast_path_range_\suffix
    .else
    cmpl MACRO_LITERAL(1), %edx
    je .Linvoke_fast_path_range_\suffix
    .endif
    movzwl 4(rPC), %ebx  // dex register of first argument
    leal (rFP, %ebx, 4), %esi  // location of first dex register value
    cmpl MACRO_LITERAL(2), %edx
    .if \is_static
    jl .Lone_arg_fast_path_range_\suffix
    .endif
    je .Ltwo_args_fast_path_range_\suffix
    cmp MACRO_LITERAL(4), %edx
    jl .Lthree_args_fast_path_range_\suffix

.Lloop_over_fast_path_range_\suffix:
    subl MACRO_LITERAL(1), %edx
    movl (%esi, %edx, 4), %ebx
    movl %ebx, 4(%esp, %edx, 4)  // Add 4 for the ArtMethod
    cmpl MACRO_LITERAL(3), %edx
    jne .Lloop_over_fast_path_range_\suffix

.Lthree_args_fast_path_range_\suffix:
    movl 8(%esi), %ebx
.Ltwo_args_fast_path_range_\suffix:
    movl 4(%esi), %edx
.Lone_arg_fast_path_range_\suffix:
    .if \is_static
    movl 0(%esi), %ecx
    .else
    // First argument already in %ecx.
    .endif
.Linvoke_fast_path_range_\suffix:
    FETCH_PC
    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)  // Call the method.
    SAVE_WIDE_RETURN
    RESTORE_IBASE
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3

.Lfast_path_with_few_args_range_\suffix:
    // Fast path when we have zero or one argument (modulo 'this'). If there
    // is one argument, we can put it in both floating point and core register.
    movzbl 1(rPC), %ebx  # Number of arguments
    .if \is_static
    cmpl MACRO_LITERAL(1), %ebx
    jl .Linvoke_with_few_args_range_\suffix
    jne .Lget_shorty_range_\suffix
    movzwl 4(rPC), %ebx  // Dex register of first argument
    GET_VREG %ecx, %ebx
    movd %ecx, %xmm0
    .else
    cmpl MACRO_LITERAL(2), %ebx
    jl .Linvoke_with_few_args_range_\suffix
    jne .Lget_shorty_range_\suffix
    movzwl 4(rPC), %ebx
    addl MACRO_LITERAL(1), %ebx  // dex register of second argument
    GET_VREG %edx, %ebx
    movd %edx, %xmm0
    .endif
.Linvoke_with_few_args_range_\suffix:
    // Check if the next instruction is move-result or move-result-wide.
    // If it is, we fetch the shorty and jump to the regular invocation.
    movzwl 6(rPC), %ebx
    and MACRO_LITERAL(0xfe), %ebx
    cmpl MACRO_LITERAL(0x0a), %ebx
    je .Lget_shorty_and_invoke_range_\suffix
    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)  // Call the method.
    RESTORE_IBASE
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
.Lget_shorty_and_invoke_range_\suffix:
    GET_SHORTY_SLOW_PATH %esi, \is_interface
    jmp .Lgpr_setup_finished_range_\suffix
    .endif

.Lget_shorty_range_\suffix:
    GET_SHORTY %ebx, \is_interface, \is_polymorphic, \is_custom
    movl %eax, LOCAL0(%esp)
    movl %ebp, LOCAL1(%esp)
    movl %ebx, LOCAL2(%esp)
    // From this point:
    // - ebx contains shorty (in callee-save to switch over return value after call).
    // - eax, edx, ebx, and ebp are available.
    // - ecx contains 'this' pointer for instance method.
    // TODO: ebp/rREFS is used for stack unwinding, can we find a way to preserve it?
    leal 1(%ebx), %edx  // shorty + 1 ; ie skip return arg character
    movzwl 4(rPC), %ebx  // arg start index
    .if \is_string_init
    addl $$1, %ebx  // arg start index
    movl $$0, %ebp  // index in stack
    .elseif \is_static
    movl $$0, %ebp  // index in stack
    .else
    addl $$1, %ebx  // arg start index
    movl $$1, %ebp  // index in stack
    .endif
    LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm0, edx, ebx, ebp, .Lxmm_setup_finished_range_\suffix
    LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm1, edx, ebx, ebp, .Lxmm_setup_finished_range_\suffix
    LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm2, edx, ebx, ebp, .Lxmm_setup_finished_range_\suffix
    LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm3, edx, ebx, ebp, .Lxmm_setup_finished_range_\suffix
    LOOP_RANGE_OVER_FPs edx, ebx, ebp, .Lxmm_setup_finished_range_\suffix
.Lxmm_setup_finished_range_\suffix:
    // Reload rREFS for fetching the PC.
    movl LOCAL1(%esp), %ebp
    // Reload shorty
    movl LOCAL2(%esp), %ebx
    FETCH_PC
    leal 1(%ebx), %ebx  // shorty + 1 ; ie skip return arg character
    // From this point:
    // - ebx contains shorty
    // - eax and ebp are available.
    // - ecx contains 'this' pointer for instance method.
    movzwl 4(rPC), %ebp  // arg start index
    // rPC (esi) is now available
    .if \is_string_init
    addl $$1, %ebp  // arg start index
    movl $$0, %esi  // index in stack
    LOOP_RANGE_OVER_SHORTY_LOADING_GPRS ecx, edx, ebx, ebp, esi, .Lrestore_saved_values_range_\suffix, .Lif_long_ebx_range_\suffix, is_ebx=0
    .elseif \is_static
    movl $$0, %esi  // index in stack
    LOOP_RANGE_OVER_SHORTY_LOADING_GPRS ecx, edx, ebx, ebp, esi, .Lrestore_saved_values_range_\suffix, .Lif_long_ebx_range_\suffix, is_ebx=0
    .else
    addl $$1, %ebp  // arg start index
    movl $$1, %esi  // index in stack
    .endif
    // For long argument, store second half in eax to not overwrite the shorty.
    LOOP_RANGE_OVER_SHORTY_LOADING_GPRS edx, eax, ebx, ebp, esi, .Lrestore_saved_values_range_\suffix, .Lif_long_range_\suffix, is_ebx=0
.Lif_long_ebx_range_\suffix:
    // Store in eax to not overwrite the shorty.
    LOOP_RANGE_OVER_SHORTY_LOADING_GPRS eax, eax, ebx, ebp, esi, .Lrestore_saved_values_range_\suffix, .Lif_long_range_\suffix, is_ebx=1
.Lif_long_range_\suffix:
    // Save shorty, as LOOP_RANGE_OVER_SHORTY_LOADING_INTS might overwrite the LOCAL2 slot for a long argument.
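    // The two pushes below move %esp down by 8 bytes, which is why LOOP_RANGE_OVER_INTs
    // is passed a stack offset of 8.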
    pushl LOCAL2(%esp)
    pushl %eax
    LOOP_RANGE_OVER_INTs 8, ebx, ebp, esi, .Lrestore_ebx_range_\suffix
.Lrestore_ebx_range_\suffix:
    popl %ebx
    popl %esi
    movl LOCAL0(%esp), %eax
    movl LOCAL1(%esp), %ebp
    jmp .Lgpr_setup_finished_range_\suffix

.Lrestore_saved_values_range_\suffix:
    movl LOCAL0(%esp), %eax
    movl LOCAL1(%esp), %ebp
    // Save shorty in callee-save register
    movl LOCAL2(%esp), %esi

.Lgpr_setup_finished_range_\suffix:
    cmpb LITERAL(68), (%esi)  // Test if result type char == 'D'.
    je .Lreturn_range_double_\suffix
    cmpb LITERAL(70), (%esi)  // Test if result type char == 'F'.
    je .Lreturn_range_float_\suffix

    FETCH_PC
    DO_CALL \is_polymorphic, \is_custom
    SAVE_WIDE_RETURN
.Ldone_return_range_\suffix:
    /* resume execution of caller */
    .if \is_string_init
    movzwl 4(rPC), %ecx  // arguments
    GET_VREG rINST, %ecx
    UPDATE_REGISTERS_FOR_STRING_INIT rINST, %eax
    .endif
    RESTORE_IBASE
    .if \is_polymorphic
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 4
    .else
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
    .endif
.Lreturn_range_double_\suffix:
    FETCH_PC
    DO_CALL \is_polymorphic, \is_custom
    movq %xmm0, LOCAL1(%esp)
    movl LOCAL1(%esp), %eax
    jmp .Ldone_return_range_\suffix
.Lreturn_range_float_\suffix:
    FETCH_PC
    DO_CALL \is_polymorphic, \is_custom
    movd %xmm0, %eax
    jmp .Ldone_return_range_\suffix
.endm

// Helper for static field get.
.macro OP_SGET load="movl", wide="0"
    // Fast-path which gets the field from thread-local cache.
%   fetch_from_thread_cache("%eax", miss_label="2f")
1:
    movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
    movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
    cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
    jne 3f
4:
    .if \wide
    addl %ecx, %eax
    \load (%eax), %ecx
    SET_VREG %ecx, rINST  # fp[A] <- value
    \load 4(%eax), %ecx
    SET_VREG_HIGH %ecx, rINST
    .else
    \load (%eax, %ecx, 1), %eax
    SET_VREG %eax, rINST  # fp[A] <- value
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
    EXPORT_PC
    movl rSELF:THREAD_SELF_OFFSET, ARG0
    movl 0(%esp), ARG1
    movl rPC, ARG2
    movl $$0, ARG3
    call nterp_get_static_field
    .if !\wide
    CLEAR_VOLATILE_MARKER %eax
    jmp 1b
    .else
    testl MACRO_LITERAL(1), %eax
    je 1b
    CLEAR_VOLATILE_MARKER %eax
    movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
    movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
    cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
    jne 5f
6:
    movsd (%eax, %ecx, 1), %xmm0
    SET_WIDE_FP_VREG %xmm0, rINST
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
5:
    call art_quick_read_barrier_mark_reg00
    jmp 6b
    .endif
3:
    call art_quick_read_barrier_mark_reg00
    jmp 4b
.endm

// Helper for static field put.
.macro OP_SPUT rINST_reg="rINST", store="movl", wide="0":
    // Fast-path which gets the field from thread-local cache.
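    // fetch_from_thread_cache (defined further down) looks up rPC in the per-thread
    // interpreter cache; on a miss, label 2 below calls into the runtime to resolve the field.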
%   fetch_from_thread_cache("%eax", miss_label="2f")
1:
    movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
    movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
    cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
    jne 3f
4:
    .if \wide
    addl %ecx, %eax
    GET_VREG %ecx, rINST  # rINST <- v[A]
    movl %ecx, (%eax)
    GET_VREG_HIGH %ecx, rINST
    movl %ecx, 4(%eax)
    .else
    GET_VREG rINST, rINST  # rINST <- v[A]
    \store \rINST_reg, (%eax,%ecx,1)
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
    EXPORT_PC
    movl rSELF:THREAD_SELF_OFFSET, ARG0
    movl 0(%esp), ARG1
    movl rPC, ARG2
    movl $$0, ARG3
    call nterp_get_static_field
    testl MACRO_LITERAL(1), %eax
    je 1b
    // Clear the marker that we put for volatile fields.
    CLEAR_VOLATILE_MARKER %eax
    movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
    movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
    cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
    jne 6f
5:
    .if \wide
    addl %ecx, %eax
    GET_WIDE_FP_VREG %xmm0, rINST
    movsd %xmm0, (%eax)
    .else
    GET_VREG rINST, rINST  # rINST <- v[A]
    \store \rINST_reg, (%eax,%ecx,1)
    .endif
    lock addl $$0, (%esp)
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
3:
    call art_quick_read_barrier_mark_reg00
    jmp 4b
6:
    call art_quick_read_barrier_mark_reg00
    jmp 5b
.endm


.macro OP_IPUT_INTERNAL rINST_reg="rINST", store="movl", wide="0", volatile="0":
    movzbl rINSTbl, %ecx  # ecx <- BA
    sarl $$4, %ecx  # ecx <- B
    GET_VREG %ecx, %ecx  # vB (object we're operating on)
    testl %ecx, %ecx  # is object null?
    je common_errNullObject
    andb $$0xf, rINSTbl  # rINST <- A
    .if \wide
    addl %ecx, %eax
    GET_WIDE_FP_VREG %xmm0, rINST
    movsd %xmm0, (%eax)
    .else
    GET_VREG rINST, rINST  # rINST <- v[A]
    \store \rINST_reg, (%ecx,%eax,1)
    .endif
.endm

// Helper for instance field put.
.macro OP_IPUT rINST_reg="rINST", store="movl", wide="0":
    // Fast-path which gets the field from thread-local cache.
%   fetch_from_thread_cache("%eax", miss_label="2f")
1:
    OP_IPUT_INTERNAL \rINST_reg, \store, \wide, volatile=0
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
    EXPORT_PC
    movl rSELF:THREAD_SELF_OFFSET, ARG0
    movl 0(%esp), ARG1
    movl rPC, ARG2
    movl $$0, ARG3
    call nterp_get_instance_field_offset
    testl %eax, %eax
    jns 1b
    negl %eax
    OP_IPUT_INTERNAL \rINST_reg, \store, \wide, volatile=1
    lock addl $$0, (%esp)
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
.endm

// Helper for instance field get.
.macro OP_IGET load="movl", wide="0"
    // Fast-path which gets the field from thread-local cache.
%   fetch_from_thread_cache("%eax", miss_label="2f")
1:
    movl rINST, %ecx  # ecx <- BA
    sarl $$4, %ecx  # ecx <- B
    GET_VREG %ecx, %ecx  # vB (object we're operating on)
    testl %ecx, %ecx  # is object null?
    je common_errNullObject
    andb $$0xf, rINSTbl  # rINST <- A
    .if \wide
    addl %ecx, %eax
    \load (%eax), %ecx
    SET_VREG %ecx, rINST
    \load 4(%eax), %ecx
    SET_VREG_HIGH %ecx, rINST
    .else
    \load (%ecx,%eax,1), %eax
    SET_VREG %eax, rINST  # fp[A] <- value
    .endif
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
    EXPORT_PC
    movl rSELF:THREAD_SELF_OFFSET, ARG0
    movl 0(%esp), ARG1
    movl rPC, ARG2
    movl $$0, ARG3
    call nterp_get_instance_field_offset
    testl %eax, %eax
    jns 1b
    negl %eax
    .if !\wide
    jmp 1b
    .else
    movl rINST, %ecx  # ecx <- BA
    sarl $$4, %ecx  # ecx <- B
    GET_VREG %ecx, %ecx  # vB (object we're operating on)
    testl %ecx, %ecx  # is object null?
    je common_errNullObject
    andb $$0xf, rINSTbl  # rINST <- A
    movsd (%eax, %ecx, 1), %xmm0
    SET_WIDE_FP_VREG %xmm0, rINST
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
    .endif
.endm

// Store a reference parameter into our dex register frame.
// Uses xmm4 as temporary.
.macro SETUP_REFERENCE_PARAMETER_IN_GPR offset, stack_ptr, regs, refs, ins, arg_offset, finished
    movss \offset(REG_VAR(stack_ptr)), %xmm4
    movss %xmm4, (REG_VAR(regs), REG_VAR(arg_offset))
    movss %xmm4, (REG_VAR(refs), REG_VAR(arg_offset))
    addl MACRO_LITERAL(4), REG_VAR(arg_offset)
    subl MACRO_LITERAL(1), REG_VAR(ins)
    je \finished
.endm

// Store a reference parameter into our dex register frame.
// Uses xmm4 as temporary.
.macro SETUP_REFERENCE_PARAMETERS_IN_STACK stack_ptr, regs, refs, ins, arg_offset
1:
    movss OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_offset)), %xmm4
    movss %xmm4, (REG_VAR(regs), REG_VAR(arg_offset))
    movss %xmm4, (REG_VAR(refs), REG_VAR(arg_offset))
    addl MACRO_LITERAL(4), REG_VAR(arg_offset)
    subl MACRO_LITERAL(1), REG_VAR(ins)
    jne 1b
.endm

.macro DO_SUSPEND_CHECK continue_label
    testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), rSELF:THREAD_FLAGS_OFFSET
    jz \continue_label
    jmp NterpCallSuspendAndGotoNext
.endm

.macro CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot, if_not_hot
    testl $$ART_METHOD_IS_MEMORY_SHARED_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
    jz \if_hot
    movzwl rSELF:THREAD_SHARED_METHOD_HOTNESS_OFFSET, %ecx
    testl %ecx, %ecx
    je \if_hot
    addl $$-1, %ecx
    movw %cx, rSELF:THREAD_SHARED_METHOD_HOTNESS_OFFSET
    jmp \if_not_hot
.endm


%def entry():
/*
 * ArtMethod entry point.
 *
 * On entry:
 *   eax   ArtMethod* callee
 *   rest  method parameters
 */

OAT_ENTRY ExecuteNterpWithClinitImpl
    .cfi_startproc
    PUSH_ARG esi
    // For simplicity, we don't do a read barrier here, but instead rely
    // on art_quick_resolution_trampoline to always have a suspend point before
    // calling back here.
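    // Check the declaring class status: a visibly initialized class continues straight
    // to nterp, a class currently being initialized by this same thread also continues,
    // and everything else goes through the resolution trampoline.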

%def entry():
/*
 * ArtMethod entry point.
 *
 * On entry:
 *  eax   ArtMethod* callee
 *  rest  method parameters
 */

OAT_ENTRY ExecuteNterpWithClinitImpl
    .cfi_startproc
    PUSH_ARG esi
    // For simplicity, we don't do a read barrier here, but instead rely
    // on art_quick_resolution_trampoline to always have a suspend point before
    // calling back here.
    movl ART_METHOD_DECLARING_CLASS_OFFSET(%eax), %esi
    cmpl $$(MIRROR_CLASS_STATUS_VISIBLY_INITIALIZED_SHIFTED), MIRROR_CLASS_STATUS_OFFSET(%esi)
    jae .Lcontinue_execute_nterp
    cmpl $$(MIRROR_CLASS_STATUS_INITIALIZING_SHIFTED), MIRROR_CLASS_STATUS_OFFSET(%esi)
    jb .Linvoke_trampoline
    movl MIRROR_CLASS_CLINIT_THREAD_ID_OFFSET(%esi), %esi
    cmpl %esi, rSELF:THREAD_TID_OFFSET
    CFI_REMEMBER_STATE
    je .Lcontinue_execute_nterp
.Linvoke_trampoline:
    POP_ARG esi
    jmp art_quick_resolution_trampoline
.Lcontinue_execute_nterp:
    CFI_RESTORE_STATE_AND_DEF_CFA esp, 8
    POP_ARG esi
    jmp ExecuteNterpImpl
    .cfi_endproc
    .global SYMBOL(EndExecuteNterpWithClinitImpl)
SYMBOL(EndExecuteNterpWithClinitImpl):

OAT_ENTRY ExecuteNterpImpl
    .cfi_startproc
    .cfi_def_cfa esp, 4
    // Implicit stack overflow check: touch below the reserved region so a
    // fault is taken here rather than deep inside the frame setup.
    testl %eax, -STACK_OVERFLOW_RESERVED_BYTES(%esp)
    // Spill callee-save regs.
    SPILL_ALL_CALLEE_SAVES

    // Make argument registers available.
    SPILL_ALL_CORE_PARAMETERS

    // Fetch code item.
    movl ART_METHOD_DATA_OFFSET_32(%eax), %ecx

    // Setup the stack for executing the method.
    SETUP_STACK_FRAME %ecx, rREFS, rFP, CFI_REFS, load_ins=1

    // Save the PC.
    movl %ecx, -8(rREFS)

    // Setup the parameters.
    testl %esi, %esi
    je .Lxmm_setup_finished

    subl %esi, %ebx
    sall $$2, %ebx          // ebx is now the offset for inputs into the registers array.

    // Reload ArtMethod.
    movl (%esp), %eax
    testl $$ART_METHOD_NTERP_ENTRY_POINT_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
    je .Lsetup_slow_path
    leal (rREFS, %ebx, 1), %ecx
    leal (rFP, %ebx, 1), %ebx
    movl $$0, %eax

    // edx is the old stack pointer.
    SETUP_REFERENCE_PARAMETER_IN_GPR 8, edx, ebx, ecx, esi, eax, .Lxmm_setup_finished
    SETUP_REFERENCE_PARAMETER_IN_GPR 4, edx, ebx, ecx, esi, eax, .Lxmm_setup_finished
    SETUP_REFERENCE_PARAMETER_IN_GPR 0, edx, ebx, ecx, esi, eax, .Lxmm_setup_finished
    SETUP_REFERENCE_PARAMETERS_IN_STACK edx, ebx, ecx, esi, eax
    jmp .Lxmm_setup_finished
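
    /*
     * Slow-path argument setup: the fast path above can treat every incoming
     * argument as a reference, but in general we need the method's shorty to
     * know, for each argument, whether it arrives in a core register, an xmm
     * register, or on the stack, and whether it is a reference that must also
     * be recorded in the reference array. Roughly (illustrative pseudocode
     * only):
     *
     *   for (char c : shorty + 1) {            // +1 skips the return type
     *     if (c == 'L')                  store arg in fp[] and refs[];
     *     else if (c == 'J' || c == 'D') copy 64 bits into fp[];
     *     else                           copy 32 bits into fp[];
     *   }
     */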
.Lsetup_slow_path:
    // If the method is not static and there is one argument ('this'), we don't need to fetch the
    // shorty.
    testl $$ART_METHOD_IS_STATIC_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
    jne .Lsetup_with_shorty

    // Record 'this'.
    movl 8(%edx), %eax
    movl %eax, (rFP, %ebx)
    movl %eax, (rREFS, %ebx)

    cmpl $$1, %esi
    je .Lxmm_setup_finished

.Lsetup_with_shorty:
    // Save xmm registers. Core registers have already been saved.
    subl MACRO_LITERAL(4 * 8), %esp
    movq %xmm0, 0(%esp)
    movq %xmm1, 8(%esp)
    movq %xmm2, 16(%esp)
    movq %xmm3, 24(%esp)
    subl MACRO_LITERAL(12), %esp
    pushl (4 * 8 + 12)(%esp)
    call SYMBOL(NterpGetShorty)
    addl MACRO_LITERAL(16), %esp

    // Restore xmm registers.
    movq 0(%esp), %xmm0
    movq 8(%esp), %xmm1
    movq 16(%esp), %xmm2
    movq 24(%esp), %xmm3
    addl MACRO_LITERAL(4 * 8), %esp

    // Reload the old stack pointer.
    movl -4(rREFS), %edx
    // TODO: Get shorty in a better way and remove above

    movl $$0, %esi
    movl (%esp), %ecx
    testl $$ART_METHOD_IS_STATIC_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%ecx)

    // Note the leal and movl below don't change the flags.
    leal (rFP, %ebx, 1), %ecx
    leal (rREFS, %ebx, 1), %ebx
    // Save rFP (%edi), we're using it as temporary below.
    movl rFP, LOCAL1(%esp)
    leal 1(%eax), %edi      // shorty + 1: skip the return type character.
    // Save shorty + 1.
    movl %edi, LOCAL2(%esp)
    jne .Lhandle_static_method
    addl $$4, %ecx
    addl $$4, %ebx
    addl $$4, %edx
    LOOP_OVER_SHORTY_STORING_GPRS 0, -4, edx, edi, esi, ecx, ebx, .Lgpr_setup_finished, .Lif_long, is_ebx=0
    LOOP_OVER_SHORTY_STORING_GPRS -4, 0, edx, edi, esi, ecx, ebx, .Lgpr_setup_finished, .Lif_long, is_ebx=1
    jmp .Lif_long
.Lhandle_static_method:
    LOOP_OVER_SHORTY_STORING_GPRS 8, 4, edx, edi, esi, ecx, ebx, .Lgpr_setup_finished, .Lif_long_ebx, is_ebx=0
    LOOP_OVER_SHORTY_STORING_GPRS 4, 0, edx, edi, esi, ecx, ebx, .Lgpr_setup_finished, .Lif_long, is_ebx=0
.Lif_long_ebx:
    LOOP_OVER_SHORTY_STORING_GPRS 0, 0, edx, edi, esi, ecx, ebx, .Lgpr_setup_finished, .Lif_long, is_ebx=1
.Lif_long:
    LOOP_OVER_INTs edi, esi, ecx, ebx, edx, .Lgpr_setup_finished
.Lgpr_setup_finished:
    // Restore shorty + 1.
    movl LOCAL2(%esp), %edi
    movl $$0, %esi          // reset counter
    LOOP_OVER_SHORTY_STORING_XMMS xmm0, edi, esi, ecx, .Lrestore_fp
    LOOP_OVER_SHORTY_STORING_XMMS xmm1, edi, esi, ecx, .Lrestore_fp
    LOOP_OVER_SHORTY_STORING_XMMS xmm2, edi, esi, ecx, .Lrestore_fp
    LOOP_OVER_SHORTY_STORING_XMMS xmm3, edi, esi, ecx, .Lrestore_fp
    LOOP_OVER_FPs edi, esi, ecx, edx, .Lrestore_fp
.Lrestore_fp:
    movl LOCAL1(%esp), rFP
.Lxmm_setup_finished:
    FETCH_PC
    CFI_DEFINE_DEX_PC_WITH_OFFSET(CFI_TMP, CFI_DEX, 0)
    // Set rIBASE.
    RESTORE_IBASE
    /* start executing the instruction at rPC */
    START_EXECUTING_INSTRUCTIONS
    /* NOTE: no fallthrough */
    // cfi info continues, and covers the whole nterp implementation.
    END ExecuteNterpImpl

%def opcode_pre():

%def fetch_from_thread_cache(dest_reg, miss_label):
    // Fetch some information from the thread cache.
    // Uses eax and ecx as temporaries.
    movl rSELF:THREAD_SELF_OFFSET, %eax
    movl rPC, %ecx
    sall MACRO_LITERAL(THREAD_INTERPRETER_CACHE_SIZE_SHIFT), %ecx
    andl MACRO_LITERAL(THREAD_INTERPRETER_CACHE_SIZE_MASK), %ecx
    cmpl THREAD_INTERPRETER_CACHE_OFFSET(%eax, %ecx, 1), rPC
    jne ${miss_label}
    movl __SIZEOF_POINTER__+THREAD_INTERPRETER_CACHE_OFFSET(%eax, %ecx, 1), ${dest_reg}

%def footer():
/*
 * ===========================================================================
 *  Common subroutines and data
 * ===========================================================================
 */

    .text
    .align 2

// Enclose all code below in a symbol (which gets printed in backtraces).
ENTRY nterp_helper

// Note: mterp also uses the common_* names below for helpers, but that's OK
// as the assembler compiles each interpreter separately.
common_errDivideByZero:
    EXPORT_PC
    call art_quick_throw_div_zero
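
// The common_err* helpers export the dex PC so the runtime can attribute the
// exception to the correct bytecode, then call a quick throw entrypoint that
// raises the exception and does not return here. For the array-bounds case,
// the register shuffle below moves the failing index into ARG0 (eax) and the
// array length into ARG1 (ecx) before the call.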
// Expect array in eax, index in ecx.
common_errArrayIndex:
    EXPORT_PC
    movl MIRROR_ARRAY_LENGTH_OFFSET(%eax), %edx
    movl %ecx, %eax
    movl %edx, %ecx
    call art_quick_throw_array_bounds

common_errNullObject:
    EXPORT_PC
    call art_quick_throw_null_pointer_exception

NterpCommonInvokeStatic:
    COMMON_INVOKE_NON_RANGE is_static=1, is_interface=0, suffix="invokeStatic"

NterpCommonInvokeStaticRange:
    COMMON_INVOKE_RANGE is_static=1, is_interface=0, suffix="invokeStatic"

NterpCommonInvokeInstance:
    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, suffix="invokeInstance"

NterpCommonInvokeInstanceRange:
    COMMON_INVOKE_RANGE is_static=0, is_interface=0, suffix="invokeInstance"

NterpCommonInvokeInterface:
    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=1, suffix="invokeInterface"

NterpCommonInvokeInterfaceRange:
    COMMON_INVOKE_RANGE is_static=0, is_interface=1, suffix="invokeInterface"

NterpCommonInvokePolymorphic:
    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, is_polymorphic=1, suffix="invokePolymorphic"

NterpCommonInvokePolymorphicRange:
    COMMON_INVOKE_RANGE is_static=0, is_interface=0, is_polymorphic=1, suffix="invokePolymorphic"

NterpCommonInvokeCustom:
    COMMON_INVOKE_NON_RANGE is_static=1, is_interface=0, is_polymorphic=0, is_custom=1, suffix="invokeCustom"

NterpCommonInvokeCustomRange:
    COMMON_INVOKE_RANGE is_static=1, is_interface=0, is_polymorphic=0, is_custom=1, suffix="invokeCustom"

NterpHandleStringInit:
    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, is_string_init=1, suffix="stringInit"

NterpHandleStringInitRange:
    COMMON_INVOKE_RANGE is_static=0, is_interface=0, is_string_init=1, suffix="stringInit"

NterpNewInstance:
    EXPORT_PC
    // Fast-path which gets the class from thread-local cache.
% fetch_from_thread_cache("%eax", miss_label="2f")
    cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
    jne 3f
4:
    call *rSELF:THREAD_ALLOC_OBJECT_ENTRYPOINT_OFFSET
    RESTORE_IBASE
    FETCH_INST_CLEAR_OPCODE
1:
    SET_VREG_OBJECT %eax, rINST             # fp[A] <- value
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
    movl rSELF:THREAD_SELF_OFFSET, ARG0
    movl 0(%esp), ARG1
    movl rPC, ARG2
    call nterp_allocate_object
    jmp 1b
3:
    // 00 is %eax
    call art_quick_read_barrier_mark_reg00
    jmp 4b

NterpNewArray:
    /* new-array vA, vB, class@CCCC */
    EXPORT_PC
    // Fast-path which gets the class from thread-local cache.
% fetch_from_thread_cache("%eax", miss_label="2f")
    cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
    jne 3f
1:
    movzbl rINSTbl, %ecx
    sarl $$4, %ecx                          # ecx <- B
    GET_VREG %ecx, %ecx                     # ecx <- vB (array length)
    call *rSELF:THREAD_ALLOC_ARRAY_ENTRYPOINT_OFFSET
    RESTORE_IBASE
    FETCH_INST_CLEAR_OPCODE
    andb $$0xf, rINSTbl                     # rINST <- A
    SET_VREG_OBJECT %eax, rINST             # fp[A] <- value
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
    movl rSELF:THREAD_SELF_OFFSET, ARG0
    movl 0(%esp), ARG1
    movl rPC, ARG2
    call nterp_get_class
    jmp 1b
3:
    // 00 is %eax
    call art_quick_read_barrier_mark_reg00
    jmp 1b
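
/*
 * The reference put handlers below use the usual generational write barrier:
 * after storing a non-null reference into an object, mark the card covering
 * that object. As a rough sketch (illustrative only):
 *
 *   obj->field = ref;                                  // POISON_HEAP_REF + movl
 *   if (ref != null) {
 *       card_table = self->card_table;                 // THREAD_CARD_TABLE_OFFSET
 *       card_table[obj >> CARD_TABLE_CARD_SHIFT] = dirty;
 *   }
 *
 * The byte actually stored (movb %al / %cl) is the low byte of the card-table
 * base, which is set up to equal the dirty-card value.
 */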

NterpPutObjectInstanceField:
    // Fast-path which gets the field from thread-local cache.
% fetch_from_thread_cache("%eax", miss_label="2f")
1:
    movl rINST, %ecx                        # ecx <- BA
    andl $$0xf, %ecx                        # ecx <- A
    GET_VREG %ecx, %ecx                     # ecx <- v[A]
    sarl $$4, rINST                         # rINST <- B
    GET_VREG rINST, rINST                   # vB (object we're operating on)
    testl rINST, rINST                      # is object null?
    je common_errNullObject
    POISON_HEAP_REF ecx
    movl %ecx, (rINST, %eax, 1)
    testl %ecx, %ecx
    je 4f
    movl rSELF:THREAD_CARD_TABLE_OFFSET, %eax
    shrl $$CARD_TABLE_CARD_SHIFT, rINST
    movb %al, (%eax, rINST, 1)
4:
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
    EXPORT_PC
    // Fetch the value, needed by nterp_get_instance_field_offset.
    movl rINST, %ecx                        # ecx <- BA
    andl $$0xf, %ecx                        # ecx <- A
    GET_VREG ARG3, %ecx                     # ARG3 <- v[A]
    movl rSELF:THREAD_SELF_OFFSET, ARG0
    movl 0(%esp), ARG1
    movl rPC, ARG2
    call nterp_get_instance_field_offset
    testl %eax, %eax
    jns 1b
    negl %eax
    // Reload the value as it may have moved.
    movl rINST, %ecx                        # ecx <- BA
    andl $$0xf, %ecx                        # ecx <- A
    GET_VREG %ecx, %ecx                     # ecx <- v[A]
    sarl $$4, rINST                         # rINST <- B
    GET_VREG rINST, rINST                   # vB (object we're operating on)
    testl rINST, rINST                      # is object null?
    je common_errNullObject
    POISON_HEAP_REF ecx
    movl %ecx, (rINST, %eax, 1)
    testl %ecx, %ecx
    je 5f
    movl rSELF:THREAD_CARD_TABLE_OFFSET, %eax
    shrl $$CARD_TABLE_CARD_SHIFT, rINST
    movb %al, (%eax, rINST, 1)
5:
    lock addl $$0, (%esp)
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2

NterpGetObjectInstanceField:
    // Fast-path which gets the field from thread-local cache.
% fetch_from_thread_cache("%eax", miss_label="2f")
1:
    movl rINST, %ecx                        # ecx <- BA
    sarl $$4, %ecx                          # ecx <- B
    GET_VREG %ecx, %ecx                     # vB (object we're operating on)
    testl %ecx, %ecx                        # is object null?
    je common_errNullObject
    testb $$READ_BARRIER_TEST_VALUE, GRAY_BYTE_OFFSET(%ecx)
    movl (%ecx,%eax,1), %eax
    jnz 3f
    UNPOISON_HEAP_REF eax                   // Affects flags, so we cannot unpoison before the jnz.
4:
    andb $$0xf, rINSTbl                     # rINST <- A
    SET_VREG_OBJECT %eax, rINST             # fp[A] <- value
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
    EXPORT_PC
    movl rSELF:THREAD_SELF_OFFSET, ARG0
    movl 0(%esp), ARG1
    movl rPC, ARG2
    movl $$0, ARG3
    call nterp_get_instance_field_offset
    testl %eax, %eax
    jns 1b
    // For volatile fields, we return a negative offset. Remove the sign;
    // no barrier is needed thanks to the memory model.
    negl %eax
    jmp 1b
3:
    UNPOISON_HEAP_REF eax
    // reg00 is eax
    call art_quick_read_barrier_mark_reg00
    jmp 4b
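
/*
 * Object loads in the get handlers use a Baker-style read barrier: test the
 * gray bit in the object's lock word (GRAY_BYTE_OFFSET / READ_BARRIER_TEST_VALUE)
 * and, if it is set, hand the just-loaded reference to
 * art_quick_read_barrier_mark_reg00, which takes and returns it in eax (hence
 * "reg00"). The static-field handlers below instead check
 * rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET to decide whether the declaring
 * class loaded into eax needs to be marked.
 */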

NterpPutObjectStaticField:
    GET_VREG rINST, rINST
    // Fast-path which gets the field from thread-local cache.
% fetch_from_thread_cache("%eax", miss_label="2f")
1:
    movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
    movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
    cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
    jne 3f
5:
    POISON_HEAP_REF ebx                     // `rINST` is `%ebx` but we need to pass `ebx`.
    movl rINST, (%eax, %ecx, 1)
    testl rINST, rINST
    je 4f
    movl rSELF:THREAD_CARD_TABLE_OFFSET, %ecx
    shrl $$CARD_TABLE_CARD_SHIFT, %eax
    movb %cl, (%ecx, %eax, 1)
4:
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
    EXPORT_PC
    movl rSELF:THREAD_SELF_OFFSET, ARG0
    movl 0(%esp), ARG1
    movl rPC, ARG2
    movl rINST, ARG3
    call nterp_get_static_field
    // Reload the value as it may have moved.
    GET_VREG rINST, rINST
    testl MACRO_LITERAL(1), %eax
    je 1b
    CLEAR_VOLATILE_MARKER %eax
    movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
    movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
    cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
    jne 7f
6:
    POISON_HEAP_REF ebx                     // `rINST` is `%ebx` but we need to pass `ebx`.
    movl rINST, (%eax, %ecx, 1)
    testl rINST, rINST
    je 8f
    movl rSELF:THREAD_CARD_TABLE_OFFSET, %ecx
    shrl $$CARD_TABLE_CARD_SHIFT, %eax
    movb %cl, (%ecx, %eax, 1)
8:
    lock addl $$0, (%esp)
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
3:
    call art_quick_read_barrier_mark_reg00
    jmp 5b
7:
    call art_quick_read_barrier_mark_reg00
    jmp 6b

NterpGetObjectStaticField:
    // Fast-path which gets the field from thread-local cache.
% fetch_from_thread_cache("%eax", miss_label="2f")
1:
    movl ART_FIELD_OFFSET_OFFSET(%eax), %ecx
    movl ART_FIELD_DECLARING_CLASS_OFFSET(%eax), %eax
    cmpl $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
    jne 5f
6:
    testb $$READ_BARRIER_TEST_VALUE, GRAY_BYTE_OFFSET(%eax)
    movl (%eax, %ecx, 1), %eax
    jnz 3f
    UNPOISON_HEAP_REF eax                   // Affects flags, so we cannot unpoison before the jnz.
4:
    SET_VREG_OBJECT %eax, rINST             # fp[A] <- value
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
2:
    EXPORT_PC
    movl rSELF:THREAD_SELF_OFFSET, ARG0
    movl 0(%esp), ARG1
    movl rPC, ARG2
    movl $$0, ARG3
    call nterp_get_static_field
    CLEAR_VOLATILE_MARKER %eax
    jmp 1b
3:
    UNPOISON_HEAP_REF eax
    call art_quick_read_barrier_mark_reg00
    jmp 4b
5:
    call art_quick_read_barrier_mark_reg00
    jmp 6b
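
// Typed static and instance field accessors. Each handler just instantiates
// the matching OP_SGET/OP_SPUT/OP_IGET/OP_IPUT helper with a load/store of
// the right width and signedness (movzbl/movsbl/movzwl/movswl/movl for loads,
// movb/movw/movl for stores, wide=1 for 64-bit fields); reference fields are
// handled by the dedicated handlers above because they need the read and
// write barriers.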
NterpGetBooleanStaticField:
    OP_SGET load="movzbl", wide=0

NterpGetByteStaticField:
    OP_SGET load="movsbl", wide=0

NterpGetCharStaticField:
    OP_SGET load="movzwl", wide=0

NterpGetShortStaticField:
    OP_SGET load="movswl", wide=0

NterpGetWideStaticField:
    OP_SGET load="movl", wide=1

NterpGetIntStaticField:
    OP_SGET load="movl", wide=0

NterpPutStaticField:
    OP_SPUT rINST_reg=rINST, store="movl", wide=0

NterpPutBooleanStaticField:
NterpPutByteStaticField:
    OP_SPUT rINST_reg=rINSTbl, store="movb", wide=0

NterpPutCharStaticField:
NterpPutShortStaticField:
    OP_SPUT rINST_reg=rINSTw, store="movw", wide=0

NterpPutWideStaticField:
    OP_SPUT rINST_reg=rINST, store="movl", wide=1

NterpPutInstanceField:
    OP_IPUT rINST_reg=rINST, store="movl", wide=0

NterpPutBooleanInstanceField:
NterpPutByteInstanceField:
    OP_IPUT rINST_reg=rINSTbl, store="movb", wide=0

NterpPutCharInstanceField:
NterpPutShortInstanceField:
    OP_IPUT rINST_reg=rINSTw, store="movw", wide=0

NterpPutWideInstanceField:
    OP_IPUT rINST_reg=rINST, store="movl", wide=1

NterpGetBooleanInstanceField:
    OP_IGET load="movzbl", wide=0

NterpGetByteInstanceField:
    OP_IGET load="movsbl", wide=0

NterpGetCharInstanceField:
    OP_IGET load="movzwl", wide=0

NterpGetShortInstanceField:
    OP_IGET load="movswl", wide=0

NterpGetWideInstanceField:
    OP_IGET load="movl", wide=1

NterpGetInstanceField:
    OP_IGET load="movl", wide=0

NterpCallSuspendAndGotoNext:
    EXPORT_PC
    // Save branch offset.
    movl rINST, LOCAL0(%esp)
    call SYMBOL(art_quick_test_suspend)
    RESTORE_IBASE
    movl LOCAL0(%esp), rINST
    FETCH_INST
    GOTO_NEXT
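
/*
 * Hotness overflow: report the method to the runtime and, if it hands back
 * OSR data, replace the current nterp frame with the compiled frame. As a
 * rough sketch (illustrative names only):
 *
 *   if (memory-shared method with budget left) { suspend check; keep interpreting; }
 *   osr = nterp_hot_method(method, dex_pc_ptr, vregs);
 *   if (osr == null) keep interpreting;
 *   else {
 *       pop the nterp frame;
 *       copy osr->memory (osr->frame_size bytes) onto the stack;   // rep movsb
 *       push osr->native_pc; NterpFree(osr); ret;                  // enter compiled code
 *   }
 */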
NterpHandleHotnessOverflow:
    CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot=1f, if_not_hot=4f
1:
    movl rPC, %ecx
    movl rFP, ARG2
    // Save next PC.
    movl %ecx, LOCAL0(%esp)
    call nterp_hot_method
    testl %eax, %eax
    jne 3f
    // Fetch next PC.
    mov LOCAL0(%esp), rPC
2:
    FETCH_INST
    GOTO_NEXT
3:
    // Drop the current frame.
    movl -4(rREFS), %esp
    CFI_DEF_CFA(esp, PARAMETERS_SAVES_SIZE+CALLEE_SAVES_SIZE)
    DROP_PARAMETERS_SAVES
    CFI_DEF_CFA(esp, CALLEE_SAVES_SIZE)

    // Setup the new frame.
    movl OSR_DATA_FRAME_SIZE(%eax), %ecx
    // The given stack size includes all callee-saved registers; remove them.
    subl $$CALLEE_SAVES_SIZE, %ecx

    // Remember CFA.
    movl %esp, %ebp
    CFI_DEF_CFA_REGISTER(ebp)

    subl %ecx, %esp
    movl %esp, %edi                 // edi := beginning of stack
    leal OSR_DATA_MEMORY(%eax), %esi // esi := memory to copy
    rep movsb                       // while (ecx--) { *edi++ = *esi++ }

    // Fetch the native PC to jump to and save it in stack.
    pushl OSR_DATA_NATIVE_PC(%eax)
    CFI_ADJUST_CFA_OFFSET(4)

    subl MACRO_LITERAL(8), %esp
    CFI_ADJUST_CFA_OFFSET(8)
    pushl %eax
    CFI_ADJUST_CFA_OFFSET(4)
    // Free the memory holding OSR Data.
    call SYMBOL(NterpFree)
    addl MACRO_LITERAL(12), %esp
    CFI_ADJUST_CFA_OFFSET(-12)

    // Jump to the compiled code.
    ret
4:
    DO_SUSPEND_CHECK continue_label=2b


NterpHandleInvokeInterfaceOnObjectMethodRange:
    shrl $$16, %eax
    movl MIRROR_CLASS_VTABLE_OFFSET_32(%edx, %eax, 4), %eax
    jmp NterpCommonInvokeInstanceRange

NterpHandleInvokeInterfaceOnObjectMethod:
    shrl $$16, %eax
    movl MIRROR_CLASS_VTABLE_OFFSET_32(%edx, %eax, 4), %eax
    jmp NterpCommonInvokeInstance

// This is the logical end of ExecuteNterpImpl, where the frame info applies.
// EndExecuteNterpImpl includes the methods below as we want the runtime to
// see them as part of the Nterp PCs.
.cfi_endproc

END nterp_helper

// This is the end of PCs contained by the OatQuickMethodHeader created for the interpreter
// entry point.
    FUNCTION_TYPE(EndExecuteNterpImpl)
    ASM_HIDDEN SYMBOL(EndExecuteNterpImpl)
    .global SYMBOL(EndExecuteNterpImpl)
SYMBOL(EndExecuteNterpImpl):

// Entrypoints into runtime.
NTERP_TRAMPOLINE nterp_get_static_field, NterpGetStaticField
NTERP_TRAMPOLINE nterp_get_instance_field_offset, NterpGetInstanceFieldOffset
NTERP_TRAMPOLINE nterp_filled_new_array, NterpFilledNewArray
NTERP_TRAMPOLINE nterp_filled_new_array_range, NterpFilledNewArrayRange
NTERP_TRAMPOLINE nterp_get_class, NterpGetClass
NTERP_TRAMPOLINE nterp_allocate_object, NterpAllocateObject
NTERP_TRAMPOLINE nterp_get_method, NterpGetMethod
NTERP_TRAMPOLINE nterp_hot_method, NterpHotMethod
NTERP_TRAMPOLINE nterp_load_object, NterpLoadObject

DEFINE_FUNCTION nterp_deliver_pending_exception
    DELIVER_PENDING_EXCEPTION
END_FUNCTION nterp_deliver_pending_exception

// gen_mterp.py will inline the following definitions
// within [ExecuteNterpImpl, EndExecuteNterpImpl).
%def instruction_end():

    FUNCTION_TYPE(artNterpAsmInstructionEnd)
    ASM_HIDDEN SYMBOL(artNterpAsmInstructionEnd)
    .global SYMBOL(artNterpAsmInstructionEnd)
SYMBOL(artNterpAsmInstructionEnd):
    // artNterpAsmInstructionEnd is used as landing pad for exception handling.
    RESTORE_IBASE
    FETCH_INST
    GOTO_NEXT

%def instruction_start():

    FUNCTION_TYPE(artNterpAsmInstructionStart)
    ASM_HIDDEN SYMBOL(artNterpAsmInstructionStart)
    .global SYMBOL(artNterpAsmInstructionStart)
SYMBOL(artNterpAsmInstructionStart) = .L_op_nop
    .text

%def opcode_name_prefix():
% return "nterp_"
%def opcode_start():
    ENTRY nterp_${opcode}
%def opcode_end():
    END nterp_${opcode}
    // Advance to the end of this handler. Causes an error if we are past that point.
    .org nterp_${opcode} + NTERP_HANDLER_SIZE // ${opcode} handler is too big!
%def opcode_slow_path_start(name):
    ENTRY ${name}
%def opcode_slow_path_end(name):
    END ${name}