%def bincmp(condition=""):
    /*
     * Generic two-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform, e.g. for
     * "if-le" you would use "le".
     *
     * Taken branches jump to MterpCommonTakenBranchNoFlags with the
     * sign-extended code-unit offset already in rINST; the not-taken path
     * performs a JIT on-stack-replacement (OSR) check before advancing
     * past this 2-code-unit instruction.
     *
     * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
     */
    /* if-cmp vA, vB, +CCCC */
    ext     a2, rINST, 8, 4             # a2 <- A
    ext     a3, rINST, 12, 4            # a3 <- B
    lh      rINST, 2(rPC)               # rINST <- offset (sign-extended CCCC)
    GET_VREG a0, a2                     # a0 <- vA
    GET_VREG a1, a3                     # a1 <- vB
    b${condition}c a0, a1, MterpCommonTakenBranchNoFlags  # MIPS64R6 compact compare-and-branch
    li      v0, JIT_CHECK_OSR           # possible OSR re-entry?
    beqc    rPROFILE, v0, .L_check_not_taken_osr
    FETCH_ADVANCE_INST 2                # advance rPC, load rINST
    GET_INST_OPCODE v0                  # extract opcode from rINST
    GOTO_OPCODE v0                      # jump to next instruction
21
%def zcmp(condition=""):
    /*
     * Generic one-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform, e.g. for
     * "if-lez" you would use "le".
     *
     * Compares vAA against zero using a compact compare-with-zero branch
     * (b<cond>zc).  Taken branches jump to MterpCommonTakenBranchNoFlags
     * with the sign-extended offset in rINST; the not-taken path performs
     * a JIT OSR check before advancing past this 2-code-unit instruction.
     *
     * For: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
     */
    /* if-cmp vAA, +BBBB */
    srl     a2, rINST, 8                # a2 <- AA
    lh      rINST, 2(rPC)               # rINST <- offset (sign-extended BBBB)
    GET_VREG a0, a2                     # a0 <- vAA
    b${condition}zc a0, MterpCommonTakenBranchNoFlags  # compact branch: compare vAA with zero
    li      v0, JIT_CHECK_OSR           # possible OSR re-entry?
    beqc    rPROFILE, v0, .L_check_not_taken_osr
    FETCH_ADVANCE_INST 2                # advance rPC, load rINST
    GET_INST_OPCODE v0                  # extract opcode from rINST
    GOTO_OPCODE v0                      # jump to next instruction
40
%def op_goto():
    /*
     * Unconditional branch, 8-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.  The common branch code expects that
     * offset (in code units) in rINST; doubling happens there.
     */
    /* goto +AA */
    srl     rINST, rINST, 8             # isolate AA from the instruction word
    seb     rINST, rINST                # rINST <- offset (sign-extended AA)
    b       MterpCommonTakenBranchNoFlags
52
%def op_goto_16():
    /*
     * Unconditional branch, 16-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.  The common branch code expects the
     * code-unit offset in rINST.
     */
    /* goto/16 +AAAA */
    lh      rINST, 2(rPC)               # rINST <- offset (sign-extended AAAA)
    b       MterpCommonTakenBranchNoFlags
63
%def op_goto_32():
    /*
     * Unconditional branch, 32-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.
     *
     * Unlike most opcodes, this one is allowed to branch to itself, so
     * our "backward branch" test must be "<=0" instead of "<0".
     */
    /* goto/32 +AAAAAAAA */
    lh      rINST, 2(rPC)               # rINST <- aaaa (low)
    lh      a1, 4(rPC)                  # a1 <- AAAA (high)
    ins     rINST, a1, 16, 16           # rINST <- offset (sign-extended AAAAaaaa)
    b       MterpCommonTakenBranchNoFlags
79
%def op_if_eq():
    /* if-eq vA, vB, +CCCC: branch if vA == vB */
%  bincmp(condition="eq")

%def op_if_eqz():
    /* if-eqz vAA, +BBBB: branch if vAA == 0 */
%  zcmp(condition="eq")

%def op_if_ge():
    /* if-ge vA, vB, +CCCC: branch if vA >= vB (signed) */
%  bincmp(condition="ge")

%def op_if_gez():
    /* if-gez vAA, +BBBB: branch if vAA >= 0 */
%  zcmp(condition="ge")

%def op_if_gt():
    /* if-gt vA, vB, +CCCC: branch if vA > vB (signed) */
%  bincmp(condition="gt")

%def op_if_gtz():
    /* if-gtz vAA, +BBBB: branch if vAA > 0 */
%  zcmp(condition="gt")

%def op_if_le():
    /* if-le vA, vB, +CCCC: branch if vA <= vB (signed) */
%  bincmp(condition="le")

%def op_if_lez():
    /* if-lez vAA, +BBBB: branch if vAA <= 0 */
%  zcmp(condition="le")

%def op_if_lt():
    /* if-lt vA, vB, +CCCC: branch if vA < vB (signed) */
%  bincmp(condition="lt")

%def op_if_ltz():
    /* if-ltz vAA, +BBBB: branch if vAA < 0 */
%  zcmp(condition="lt")

%def op_if_ne():
    /* if-ne vA, vB, +CCCC: branch if vA != vB */
%  bincmp(condition="ne")

%def op_if_nez():
    /* if-nez vAA, +BBBB: branch if vAA != 0 */
%  zcmp(condition="ne")
115
%def op_packed_switch(func="MterpDoPackedSwitch"):
    /*
     * Handle a packed-switch or sparse-switch instruction.  In both cases
     * we decode it and hand it off to a helper function.
     *
     * The helper takes (a0 = pointer to switch data, a1 = test value) and
     * returns the code-unit branch offset in v0, which we move to rINST
     * for the common branch code.
     *
     * We don't really expect backward branches in a switch statement, but
     * they're perfectly legal, so we check for them here.
     *
     * for: packed-switch, sparse-switch
     */
    /* op vAA, +BBBBBBBB */
    .extern $func
    lh      a0, 2(rPC)                  # a0 <- bbbb (lo)
    lh      a1, 4(rPC)                  # a1 <- BBBB (hi)
    srl     a3, rINST, 8                # a3 <- AA
    ins     a0, a1, 16, 16              # a0 <- BBBBbbbb
    GET_VREG a1, a3                     # a1 <- vAA
    dlsa    a0, a0, rPC, 1              # a0 <- PC + BBBBbbbb*2 (switch data addr)
    jal     $func                       # v0 <- code-unit branch offset
    move    rINST, v0                   # common code expects offset in rINST
    b       MterpCommonTakenBranchNoFlags
137
%def op_return(instr="GET_VREG"):
    /*
     * Return a 32-bit value.
     *
     * Issues the constructor memory fence, runs a pending suspend/checkpoint
     * check if the thread flags request one, then loads vAA into a0 and
     * jumps to the common return path.  "instr" selects the load macro:
     * GET_VREG (sign-extend) or GET_VREG_U (zero-extend, for objects).
     *
     * for: return (sign-extend), return-object (zero-extend)
     */
    /* op vAA */
    .extern MterpThreadFenceForConstructor
    .extern MterpSuspendCheck
    jal     MterpThreadFenceForConstructor
    lw      ra, THREAD_FLAGS_OFFSET(rSELF)
    move    a0, rSELF
    and     ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    beqzc   ra, 1f                      # no request pending -> skip suspend check
    jal     MterpSuspendCheck           # (self)
1:
    srl     a2, rINST, 8                # a2 <- AA
    $instr  a0, a2                      # a0 <- vAA
    b       MterpReturn
157
%def op_return_object():
    /* return-object vAA: zero-extended load of the object reference */
%  op_return(instr="GET_VREG_U")
160
%def op_return_void():
    /*
     * return-void: constructor fence, optional suspend check, then return
     * with a0 = 0 via the common return path.
     */
    .extern MterpThreadFenceForConstructor
    .extern MterpSuspendCheck
    jal     MterpThreadFenceForConstructor
    lw      ra, THREAD_FLAGS_OFFSET(rSELF)
    move    a0, rSELF
    and     ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    beqzc   ra, 1f                      # no suspend/checkpoint request -> skip
    jal     MterpSuspendCheck           # (self)
1:
    li      a0, 0                       # return value 0
    b       MterpReturn
173
%def op_return_void_no_barrier():
    /*
     * return-void without the constructor memory fence: optional suspend
     * check, then return with a0 = 0 via the common return path.
     */
    .extern MterpSuspendCheck
    lw      ra, THREAD_FLAGS_OFFSET(rSELF)
    move    a0, rSELF
    and     ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    beqzc   ra, 1f                      # no suspend/checkpoint request -> skip
    jal     MterpSuspendCheck           # (self)
1:
    li      a0, 0                       # return value 0
    b       MterpReturn
184
%def op_return_wide():
    /*
     * Return a 64-bit value.
     *
     * Same shape as op_return: constructor fence, optional suspend check,
     * then load the wide register pair vAA into a0 and take the common
     * return path.
     */
    /* return-wide vAA */
    /* op vAA */
    .extern MterpThreadFenceForConstructor
    .extern MterpSuspendCheck
    jal     MterpThreadFenceForConstructor
    lw      ra, THREAD_FLAGS_OFFSET(rSELF)
    move    a0, rSELF
    and     ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    beqzc   ra, 1f                      # no suspend/checkpoint request -> skip
    jal     MterpSuspendCheck           # (self)
1:
    srl     a2, rINST, 8                # a2 <- AA
    GET_VREG_WIDE a0, a2                # a0 <- vAA (64-bit)
    b       MterpReturn
203
%def op_sparse_switch():
    /* sparse-switch vAA, +BBBBBBBB: same decode as packed-switch, different helper */
%  op_packed_switch(func="MterpDoSparseSwitch")
206
%def op_throw():
    /*
     * Throw an exception object in the current thread.
     *
     * A null reference in vAA is rerouted to the common null-pointer error
     * handler; otherwise the object is stored into the thread's pending
     * exception slot and control transfers to the common exception path.
     */
    /* throw vAA */
    EXPORT_PC                           # exception handling needs the current PC
    srl     a2, rINST, 8                # a2 <- AA
    GET_VREG_U a0, a2                   # a0 <- vAA (exception object)
    beqzc   a0, common_errNullObject    # throwing null -> NullPointerException
    sd      a0, THREAD_EXCEPTION_OFFSET(rSELF)  # thread->exception <- obj
    b       MterpException
218