/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_S_
#define ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_S_

#include "asm_support_x86.h"
#include "interpreter/cfi_asm_support.h"

// Regular gas(1) & current clang/llvm assembler support named macro parameters.
#define MACRO0(macro_name) .macro macro_name
#define MACRO1(macro_name, macro_arg1) .macro macro_name macro_arg1
#define MACRO2(macro_name, macro_arg1, macro_arg2) .macro macro_name macro_arg1, macro_arg2
#define MACRO3(macro_name, macro_arg1, macro_arg2, macro_arg3) .macro macro_name macro_arg1, macro_arg2, macro_arg3
#define MACRO4(macro_name, macro_arg1, macro_arg2, macro_arg3, macro_arg4) .macro macro_name macro_arg1, macro_arg2, macro_arg3, macro_arg4
#define MACRO5(macro_name, macro_arg1, macro_arg2, macro_arg3, macro_arg4, macro_arg5) .macro macro_name macro_arg1, macro_arg2, macro_arg3, macro_arg4, macro_arg5
#define END_MACRO .endm

#if defined(__clang__)
    // Clang/llvm does not support .altmacro. However, the clang/llvm preprocessor doesn't
    // separate the backslash and the parameter by a space. Everything just works.
    #define RAW_VAR(name) \name
    #define VAR(name) \name
    #define CALLVAR(name) SYMBOL(\name)
    #define PLT_VAR(name) \name@PLT
    #define REG_VAR(name) %\name
    #define CALL_MACRO(name) \name
#else
    // Regular gas(1) uses \argument_name for macro arguments.
    // We need to turn on alternate macro syntax so we can use & instead, or the preprocessor
    // will screw us by inserting a space between the backslash and the name. Even in this mode
    // there's no special meaning to $, so literals are still just $x. The use of altmacro means
    // % is a special character, so care needs to be taken when passing registers as macro
    // arguments.
    .altmacro
    #define RAW_VAR(name) name&
    #define VAR(name) name&
    #define CALLVAR(name) SYMBOL(name&)
    #define PLT_VAR(name) name&@PLT
    #define REG_VAR(name) %name
    #define CALL_MACRO(name) name&
#endif
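
// For example, inside a MACROn body a register argument is written as REG_VAR(name)
// (expanding to %\name under clang or %name& under gas) and a C-level symbol argument
// as CALLVAR(name); see the PUSH and DEFINE_FUNCTION_CUSTOM_CFA macros below.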

#define LITERAL(value) $value
#if defined(__APPLE__)
    #define MACRO_LITERAL(value) $(value)
#else
    #define MACRO_LITERAL(value) $value
#endif
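
// For example, `movl LITERAL(0), %eax` assembles as `movl $0, %eax`. The MACRO_LITERAL
// form is the variant used when the value comes from a macro argument, as in
// INCREASE_FRAME below.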

#if defined(__APPLE__)
    #define FUNCTION_TYPE(name)
    #define SIZE(name)
#else
    #define FUNCTION_TYPE(name) .type name, @function
    #define SIZE(name) .size name, .-name
#endif

    // CFI support.
#if !defined(__APPLE__)
    #define CFI_STARTPROC .cfi_startproc
    #define CFI_ENDPROC .cfi_endproc
    #define CFI_ADJUST_CFA_OFFSET(size) .cfi_adjust_cfa_offset size
    #define CFI_DEF_CFA(reg,size) .cfi_def_cfa reg,size
    #define CFI_DEF_CFA_REGISTER(reg) .cfi_def_cfa_register reg
    #define CFI_RESTORE(reg) .cfi_restore reg
    #define CFI_REL_OFFSET(reg,size) .cfi_rel_offset reg,size
    #define CFI_REGISTER(orig_reg, current_reg) .cfi_register orig_reg, current_reg
    #define CFI_REMEMBER_STATE .cfi_remember_state
    // The spec is not clear whether the CFA is part of the saved state and tools
    // differ in the behaviour, so explicitly set the CFA to avoid any ambiguity.
    // The restored CFA state should match the CFA state during CFI_REMEMBER_STATE.
    // `objdump -Wf libart.so | egrep "_cfa|_state"` is useful to audit the opcodes.
    MACRO2(CFI_RESTORE_STATE_AND_DEF_CFA, reg, off)
        .cfi_restore_state
        .cfi_def_cfa \reg,\off
    END_MACRO
    #define CFI_ESCAPE(...) .cfi_escape __VA_ARGS__
    #define CFI_RESTORE_STATE .cfi_restore_state
#else
    // Mac OS doesn't like the cfi_* directives.
    #define CFI_STARTPROC
    #define CFI_ENDPROC
    #define CFI_ADJUST_CFA_OFFSET(size)
    #define CFI_DEF_CFA(reg,size)
    #define CFI_DEF_CFA_REGISTER(reg)
    #define CFI_RESTORE(reg)
    #define CFI_REL_OFFSET(reg,size)
    #define CFI_REGISTER(orig_reg, current_reg)
    #define CFI_REMEMBER_STATE
    MACRO2(CFI_RESTORE_STATE_AND_DEF_CFA, reg, off)
    END_MACRO
    #define CFI_ESCAPE(...)
    #define CFI_RESTORE_STATE
#endif

#define CFI_REG_eax 0
#define CFI_REG_ecx 1
#define CFI_REG_edx 2
#define CFI_REG_ebx 3
#define CFI_REG_esp 4
#define CFI_REG_ebp 5
#define CFI_REG_esi 6
#define CFI_REG_edi 7
#define CFI_REG_eip 8

#define CFI_REG(reg) CFI_REG_##reg
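
// These are the DWARF register numbers for IA-32, so, for example, CFI_REG(ebp)
// expands to 5 and can be passed straight to the .cfi_* wrappers above and to the
// CFI expression macros below.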

MACRO3(CFI_EXPRESSION_BREG, n, b, offset)
    .if (-0x40 <= (\offset)) && ((\offset) < 0x40)
        CFI_EXPRESSION_BREG_1(\n, \b, \offset)
    .elseif (-0x2000 <= (\offset)) && ((\offset) < 0x2000)
        CFI_EXPRESSION_BREG_2(\n, \b, \offset)
    .else
        .error "Unsupported offset"
    .endif
END_MACRO

MACRO3(CFI_DEF_CFA_BREG_PLUS_UCONST, reg, offset, size)
    .if ((\size) < 0)
        .error "Size should be positive"
    .endif
    .if (((\offset) < -0x40) || ((\offset) >= 0x40))
        .error "Unsupported offset"
    .endif
    .if ((\size) < 0x80)
        CFI_DEF_CFA_BREG_PLUS_UCONST_1_1(\reg, \offset, \size)
    .elseif ((\size) < 0x4000)
        CFI_DEF_CFA_BREG_PLUS_UCONST_1_2(\reg, \offset, \size)
    .else
        .error "Unsupported size"
    .endif
END_MACRO
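
// Illustrative (hypothetical) invocations, relying on the helpers from
// interpreter/cfi_asm_support.h:
//     CFI_EXPRESSION_BREG CFI_REG(esi), CFI_REG(ebp), -8   // ESI is saved at [EBP - 8].
//     CFI_DEF_CFA_BREG_PLUS_UCONST CFI_REG(ebp), -4, 16    // CFA described relative to EBP.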

    // Symbols. On a Mac, we need a leading underscore.
#if !defined(__APPLE__)
    #define SYMBOL(name) name
    #define PLT_SYMBOL(name) name ## @PLT
#else
    // Mac OS symbols have an _ prefix.
    #define SYMBOL(name) _ ## name
    #define PLT_SYMBOL(name) _ ## name
#endif

// Directive to hide a function symbol.
#if defined(__APPLE__)
    #define ASM_HIDDEN .private_extern
#else
    #define ASM_HIDDEN .hidden
#endif

    /* Cache alignment for function entry */
MACRO0(ALIGN_FUNCTION_ENTRY)
    .balign 16
END_MACRO

MACRO2(DEFINE_FUNCTION_CUSTOM_CFA, c_name, cfa_offset)
    FUNCTION_TYPE(SYMBOL(\c_name))
    ASM_HIDDEN CALLVAR(c_name)
    .globl CALLVAR(c_name)
    ALIGN_FUNCTION_ENTRY
CALLVAR(c_name):
    CFI_STARTPROC
    // Ensure we get an appropriate starting CFA.
    CFI_DEF_CFA(esp, RAW_VAR(cfa_offset))
END_MACRO

MACRO1(DEFINE_FUNCTION, c_name)
    DEFINE_FUNCTION_CUSTOM_CFA RAW_VAR(c_name), __SIZEOF_POINTER__
END_MACRO

MACRO1(END_FUNCTION, c_name)
    CFI_ENDPROC
    SIZE(SYMBOL(\c_name))
END_MACRO

MACRO1(PUSH, reg)
    pushl REG_VAR(reg)
    CFI_ADJUST_CFA_OFFSET(4)
    CFI_REL_OFFSET(REG_VAR(reg), 0)
END_MACRO

MACRO1(POP, reg)
    popl REG_VAR(reg)
    CFI_ADJUST_CFA_OFFSET(-4)
    CFI_RESTORE(REG_VAR(reg))
END_MACRO

// Arguments do not need .cfi_rel_offset as they are caller-saved and
// therefore cannot hold caller's variables or unwinding data.
MACRO1(PUSH_ARG, reg)
    pushl REG_VAR(reg)
    CFI_ADJUST_CFA_OFFSET(4)
END_MACRO

MACRO1(POP_ARG, reg)
    popl REG_VAR(reg)
    CFI_ADJUST_CFA_OFFSET(-4)
END_MACRO

MACRO1(CFI_RESTORE_REG, reg)
    CFI_RESTORE(REG_VAR(reg))
END_MACRO

MACRO1(INCREASE_FRAME, frame_adjustment)
    subl MACRO_LITERAL(RAW_VAR(frame_adjustment)), %esp
    CFI_ADJUST_CFA_OFFSET((RAW_VAR(frame_adjustment)))
END_MACRO

MACRO1(DECREASE_FRAME, frame_adjustment)
    addl MACRO_LITERAL(RAW_VAR(frame_adjustment)), %esp
    CFI_ADJUST_CFA_OFFSET(-(RAW_VAR(frame_adjustment)))
END_MACRO
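
// Usage sketch (illustrative only; art_quick_example is a hypothetical name):
// the macros above keep the CFI in sync with the stack pointer adjustments.
//
//     DEFINE_FUNCTION art_quick_example
//         PUSH ebx                // CFA offset += 4; record where EBX is saved.
//         INCREASE_FRAME 8        // CFA offset += 8.
//         ...
//         DECREASE_FRAME 8        // CFA offset -= 8.
//         POP ebx                 // CFA offset -= 4; mark EBX as restored.
//         ret
//     END_FUNCTION art_quick_example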

#define UNREACHABLE int3

MACRO1(UNIMPLEMENTED,name)
    FUNCTION_TYPE(\name)
    .globl VAR(name)
    ALIGN_FUNCTION_ENTRY
VAR(name):
    CFI_STARTPROC
    UNREACHABLE
    UNREACHABLE
    CFI_ENDPROC
    SIZE(\name)
END_MACRO

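// On x86 there is no direct way to read EIP, so a PC-relative base is materialized
// with a call to the next instruction: the call pushes the address of `label` as its
// return address, which is then popped into `reg`. Data can afterwards be addressed
// as `symbol - label(reg)`, as LOAD_RUNTIME_INSTANCE does below.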
MACRO3(SETUP_PC_REL_BASE_IMPL, reg, label, call_label)
    call RAW_VAR(call_label)
    CFI_ADJUST_CFA_OFFSET(4)
RAW_VAR(label):
    popl REG_VAR(reg)
    CFI_ADJUST_CFA_OFFSET(-4)
END_MACRO

MACRO1(SETUP_PC_REL_BASE_0, reg)
    SETUP_PC_REL_BASE_IMPL \reg, 0, 0f
END_MACRO

MACRO2(SETUP_PC_REL_BASE, reg, label)
    SETUP_PC_REL_BASE_IMPL \reg, \label, \label
END_MACRO

MACRO1(LOAD_RUNTIME_INSTANCE, reg)
    SETUP_PC_REL_BASE_0 \reg
    // Load Runtime::instance_.
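    // The displacement `SYMBOL(_ZN3art7Runtime9instance_E) - 0b` is a link-time
    // constant; adding the runtime address of label 0 (held in `reg`) yields the
    // address of Runtime::instance_.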
    movl SYMBOL(_ZN3art7Runtime9instance_E) - 0b(REG_VAR(reg)), REG_VAR(reg)
END_MACRO

// Macros to poison (negate) the reference for heap poisoning.
MACRO1(POISON_HEAP_REF, rRef)
#ifdef USE_HEAP_POISONING
    neg REG_VAR(rRef)
#endif  // USE_HEAP_POISONING
END_MACRO

// Macros to unpoison (negate) the reference for heap poisoning.
MACRO1(UNPOISON_HEAP_REF, rRef)
#ifdef USE_HEAP_POISONING
    neg REG_VAR(rRef)
#endif  // USE_HEAP_POISONING
END_MACRO

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly)
     */
MACRO1(SETUP_SAVE_REFS_ONLY_FRAME, temp_reg)
    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
    PUSH esi
    PUSH ebp
    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
    CFI_ADJUST_CFA_OFFSET(12)
    LOAD_RUNTIME_INSTANCE \temp_reg
    // Push the save refs only method.
    pushl RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET

    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_SAVE_REFS_ONLY != 3*4 + 16 + 4)
#error "FRAME_SIZE_SAVE_REFS_ONLY(X86) size not as expected."
#endif
END_MACRO
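
// SETUP_SAVE_REFS_ONLY_FRAME leaves a 32-byte frame (matching the check above):
// return address (4), EDI/ESI/EBP (12), a 3-word gap (12) and the kSaveRefsOnly
// method pointer (4), with ESP pointing at the method slot.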

MACRO0(RESTORE_SAVE_REFS_ONLY_FRAME)
    addl MACRO_LITERAL(16), %esp  // Unwind stack up to saved values
    CFI_ADJUST_CFA_OFFSET(-16)
    POP ebp  // Restore callee saves (ebx is saved/restored by the upcall)
    POP esi
    POP edi
END_MACRO

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAllCalleeSaves)
     */
MACRO1(SETUP_SAVE_ALL_CALLEE_SAVES_FRAME, temp_reg)
    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
    PUSH esi
    PUSH ebp
    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
    CFI_ADJUST_CFA_OFFSET(12)
    LOAD_RUNTIME_INSTANCE \temp_reg
    // Push save all callee-save method.
    pushl RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 3*4 + 16 + 4)
#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(X86) size not as expected."
#endif
END_MACRO

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs), except for pushing the method
     */
MACRO0(SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY)
    PUSH edi      // Save callee saves
    PUSH esi
    PUSH ebp
    PUSH_ARG ebx  // Save args.
    PUSH_ARG edx
    PUSH_ARG ecx
    // Create space for FPR args.
    INCREASE_FRAME 4 * 8
    // Save FPRs.
    movsd %xmm0, 0(%esp)
    movsd %xmm1, 8(%esp)
    movsd %xmm2, 16(%esp)
    movsd %xmm3, 24(%esp)

    // Ugly compile-time check, but we only have the preprocessor.
    // First +4: implicit return address pushed on stack when caller made call.
    // Last +4: we're not pushing the method on the stack here.
#if (FRAME_SIZE_SAVE_REFS_AND_ARGS != 4 + 6*4 + 4*8 + 4)
#error "FRAME_SIZE_SAVE_REFS_AND_ARGS(X86) size not as expected."
#endif
END_MACRO
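
// SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY accounts for a 64-byte frame
// (matching the check above): return address (4), EDI/ESI/EBP (12), EBX/EDX/ECX (12),
// XMM0-XMM3 (32), plus the method pointer slot (4) that the caller pushes separately.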

MACRO0(RESTORE_SAVE_REFS_AND_ARGS_FRAME)
    // Restore FPRs. The method is still on the stack.
    movsd 4(%esp), %xmm0
    movsd 12(%esp), %xmm1
    movsd 20(%esp), %xmm2
    movsd 28(%esp), %xmm3

    DECREASE_FRAME 36             // Remove FPRs and method pointer.

    POP_ARG ecx                   // Restore args
    POP_ARG edx
    POP_ARG ebx
    POP ebp                       // Restore callee saves
    POP esi
    POP edi
END_MACRO

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_ when the runtime method frame is ready.
     */
MACRO0(DELIVER_PENDING_EXCEPTION_FRAME_READY)
    // Outgoing argument set up
    INCREASE_FRAME 12                          // alignment padding
    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*)
    UNREACHABLE
    CFI_ADJUST_CFA_OFFSET(-16)                 // Reset CFA in case there is more code afterwards.
END_MACRO

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
MACRO0(DELIVER_PENDING_EXCEPTION)
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx      // save callee saves for throw
    DELIVER_PENDING_EXCEPTION_FRAME_READY
END_MACRO

MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
    cmpl MACRO_LITERAL(0),%fs:THREAD_EXCEPTION_OFFSET // exception field == 0 ?
    jne 1f                                            // if exception field != 0 goto 1
    ret                                               // return
1:                                                    // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO

// Locking is needed for both managed code and JNI stubs.
MACRO4(LOCK_OBJECT_FAST_PATH, obj, tmp, saved_eax, slow_lock)
1:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj)), %eax  // EAX := lock word
    movl %fs:THREAD_ID_OFFSET, REG_VAR(tmp)  // tmp: thread id.
    xorl %eax, REG_VAR(tmp)               // tmp: thread id with count 0 + read barrier bits.
    testl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %eax  // Test the non-gc bits.
    jnz  2f                               // Check if unlocked.
    // Unlocked case - store tmp: original lock word plus thread id, preserved read barrier bits.
                                          // EAX: old val, tmp: new val.
    lock cmpxchg REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
    jnz  1b                               // cmpxchg failed retry
    .ifnc \saved_eax, none
        movl REG_VAR(saved_eax), %eax     // Restore EAX.
    .endif
    ret
2:  // EAX: original lock word, tmp: thread id ^ EAX
                                          // Check lock word state and thread id together,
    testl LITERAL(LOCK_WORD_STATE_MASK_SHIFTED | LOCK_WORD_THIN_LOCK_OWNER_MASK_SHIFTED), \
          REG_VAR(tmp)
    jne  \slow_lock                       // Slow path if either of the two high bits are set.
                                          // Increment the recursive lock count.
    leal LOCK_WORD_THIN_LOCK_COUNT_ONE(%eax), REG_VAR(tmp)
    testl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_MASK_SHIFTED), REG_VAR(tmp)
    jz   \slow_lock                       // If count overflowed, go to slow lock.
    // Update lockword for recursive lock, cmpxchg necessary for read barrier bits.
                                          // EAX: old val, tmp: new val.
    lock cmpxchg REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
    jnz  1b                               // cmpxchg failed retry
    .ifnc \saved_eax, none
        movl REG_VAR(saved_eax), %eax     // Restore EAX.
    .endif
    ret
END_MACRO

// Unlocking is needed for both managed code and JNI stubs.
MACRO4(UNLOCK_OBJECT_FAST_PATH, obj, tmp, saved_eax, slow_unlock)
1:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj)), %eax  // EAX := lock word
    movl %fs:THREAD_ID_OFFSET, REG_VAR(tmp)  // tmp := thread id
    xorl %eax, REG_VAR(tmp)               // tmp := thread id ^ lock word
    test LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), REG_VAR(tmp)
    jnz  2f                               // Check if simply locked.
    // Transition to unlocked.
#ifndef USE_READ_BARRIER
    movl REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
#else
    lock cmpxchg REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
    jnz  1b                               // cmpxchg failed retry
#endif
    .ifnc \saved_eax, none
        movl REG_VAR(saved_eax), %eax     // Restore EAX.
    .endif
    ret
2:  // EAX: original lock word, tmp: lock_word ^ thread id
                                          // Check lock word state and thread id together.
    testl LITERAL(LOCK_WORD_STATE_MASK_SHIFTED | LOCK_WORD_THIN_LOCK_OWNER_MASK_SHIFTED), \
          REG_VAR(tmp)
    jnz  \slow_unlock
    // Update lockword for recursive unlock, cmpxchg necessary for read barrier bits.
                                          // tmp: new lock word with decremented count.
    leal -LOCK_WORD_THIN_LOCK_COUNT_ONE(%eax), REG_VAR(tmp)
#ifndef USE_READ_BARRIER
    movl REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
#else
    lock cmpxchg REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
    jnz  1b                               // cmpxchg failed retry
#endif
    .ifnc \saved_eax, none
        movl REG_VAR(saved_eax), %eax     // Restore EAX.
    .endif
    ret
END_MACRO

#endif  // ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_S_
