#------------------------------------------------------------------------------ ;
# Copyright (c) 2012 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
#   ExceptionHandlerAsm.S
#
# Abstract:
#
#   x64 CPU Exception Handler
#
# Notes:
#
#------------------------------------------------------------------------------

ASM_GLOBAL ASM_PFX(CommonExceptionHandler)

#EXTRN ASM_PFX(mErrorCodeFlag):DWORD    # Error code flags for exceptions
#EXTRN ASM_PFX(mDoFarReturnFlag):QWORD  # Do far return flag
.text

#ifdef __APPLE__
# Macro syntax differs between GNU as and the Xcode (Apple) assembler:
# Xcode as refers to the first macro argument as $0, while GNU as uses
# a named argument.
.macro IDT_MACRO
  push     $0
#else
.macro IDT_MACRO arg
  push    \arg
#endif
  jmp    ASM_PFX(CommonInterruptEntry)
.endm

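#
# Template of the 32 exception/interrupt entry stubs.  Each stub pushes
# its vector number and jumps to the common handler entry; all stubs are
# the same size, so the C code can locate a vector's stub by indexing
# into the template reported by AsmGetTemplateAddressMap() below.
#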
AsmIdtVectorBegin:
  IDT_MACRO $0
  IDT_MACRO $1
  IDT_MACRO $2
  IDT_MACRO $3
  IDT_MACRO $4
  IDT_MACRO $5
  IDT_MACRO $6
  IDT_MACRO $7
  IDT_MACRO $8
  IDT_MACRO $9
  IDT_MACRO $10
  IDT_MACRO $11
  IDT_MACRO $12
  IDT_MACRO $13
  IDT_MACRO $14
  IDT_MACRO $15
  IDT_MACRO $16
  IDT_MACRO $17
  IDT_MACRO $18
  IDT_MACRO $19
  IDT_MACRO $20
  IDT_MACRO $21
  IDT_MACRO $22
  IDT_MACRO $23
  IDT_MACRO $24
  IDT_MACRO $25
  IDT_MACRO $26
  IDT_MACRO $27
  IDT_MACRO $28
  IDT_MACRO $29
  IDT_MACRO $30
  IDT_MACRO $31
AsmIdtVectorEnd:

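#
# Template for the "hook after" entry stub.  It has the same shape as an
# entry of the table above: a "push imm8" carrying the vector number
# (PatchVectorNum) followed by a rel32 jmp (PatchFuncAddress).  Both
# fields are placeholders; AsmVectorNumFixup() patches them after the
# template has been copied for a specific vector.
#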
HookAfterStubHeaderBegin:
    .byte   0x6a      # opcode of "push imm8"
PatchVectorNum:
    .byte   0         # placeholder vector number, patched by AsmVectorNumFixup
    .byte   0xe9      # jmp     ASM_PFX(HookAfterStubHeaderEnd)
PatchFuncAddress:
    .set    HOOK_ADDRESS, ASM_PFX(HookAfterStubHeaderEnd) - . - 4
    .long   HOOK_ADDRESS  # rel32 jmp offset, patched by AsmVectorNumFixup
ASM_GLOBAL ASM_PFX(HookAfterStubHeaderEnd)
ASM_PFX(HookAfterStubHeaderEnd):
    pushq   %rax
    movq    %rsp, %rax
    andq    $0xfffffffffffffff0, %rsp  # make sure 16-byte aligned for exception context
    subq    $0x18, %rsp         # reserve room for filling exception data later
    pushq   %rcx
    movq    8(%rax), %rcx
    bt      %ecx, ASM_PFX(mErrorCodeFlag)(%rip)
    jnc     NoErrorData
    pushq   (%rsp)            # push rcx once more to maintain 16-byte stack alignment
NoErrorData:
    xchgq   (%rsp), %rcx      # restore rcx, leave the vector number on the stack
    movq    (%rax), %rax      # restore rax

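#
# No jump is needed here: execution falls through into CommonInterruptEntry
# below with the vector number on top of the stack, which is the same state
# the plain entry stubs establish.
#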
#---------------------------------------;
# CommonInterruptEntry                  ;
#---------------------------------------;
# The following algorithm is used for the common interrupt routine.

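#
# In outline (a summary of the code below):
#   1. Normalize the stack so that an error code slot is always present,
#      pushing a dummy error code for vectors that do not supply one.
#   2. Save the full processor context in the layout of
#      EFI_SYSTEM_CONTEXT_X64 (general-purpose, segment, control and debug
#      registers, plus the FX save state).
#   3. Call CommonExceptionHandler() with the vector number in RCX and a
#      pointer to the saved context in RDX.
#   4. Restore the saved context and resume, either through a registered
#      old IDT handler, a far return, or IRETQ, depending on the handler
#      context flags and mDoFarReturnFlag.
#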
ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
    cli
    #
    # All interrupt handlers are invoked through interrupt gates, so
    # the IF flag is automatically cleared at the entry point
    #
    #
    # Calculate vector number
    #
    xchgq   (%rsp), %rcx       # (%rsp) holds the vector number pushed by the entry stub; swap it into rcx and save the original rcx
    andq    $0x0FF, %rcx       # mask to 8 bits, since "push imm8" sign-extends
    cmp     $32, %ecx          # Intel reserved vector for exceptions?
    jae     NoErrorCode
    pushq   %rax
    movl    ASM_PFX(mErrorCodeFlag)(%rip), %eax
    bt      %ecx, %eax
    popq    %rax
    jc      CommonInterruptEntry_al_0000

NoErrorCode:

    #
    # Push a dummy error code on the stack
    # to maintain coherent stack map
    #
    pushq   (%rsp)
    movq    $0, 8(%rsp)
CommonInterruptEntry_al_0000:
    pushq   %rbp
    movq    %rsp, %rbp
    pushq   $0          # initialize EXCEPTION_HANDLER_CONTEXT.OldIdtHandler to 0
    pushq   $0          # initialize EXCEPTION_HANDLER_CONTEXT.ExceptionDataFlag to 0

    #
    # Stack:
    # +---------------------+ <-- 16-byte aligned, ensured by processor
    # +    Old SS           +
    # +---------------------+
    # +    Old RSP          +
    # +---------------------+
    # +    RFlags           +
    # +---------------------+
    # +    CS               +
    # +---------------------+
    # +    RIP              +
    # +---------------------+
    # +    Error Code       +
    # +---------------------+
    # + RCX / Vector Number +
    # +---------------------+
    # +    RBP              +
    # +---------------------+ <-- RBP, 16-byte aligned
    #

    #
    # Since the stack pointer here is 16-byte aligned, the
    # EFI_FX_SAVE_STATE_X64 member of EFI_SYSTEM_CONTEXT_X64
    # is also 16-byte aligned
    #

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    pushq    %r15
    pushq    %r14
    pushq    %r13
    pushq    %r12
    pushq    %r11
    pushq    %r10
    pushq    %r9
    pushq    %r8
    pushq    %rax
    pushq    8(%rbp)   # RCX
    pushq    %rdx
    pushq    %rbx
    pushq    48(%rbp)  # RSP
    pushq    (%rbp)    # RBP
    pushq    %rsi
    pushq    %rdi

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  ensure the high 16 bits of each are zero
    movzwq  56(%rbp), %rax
    pushq   %rax                      # for ss
    movzwq  32(%rbp), %rax
    pushq   %rax                      # for cs
    mov     %ds, %rax
    pushq   %rax
    mov     %es, %rax
    pushq   %rax
    mov     %fs, %rax
    pushq   %rax
    mov     %gs, %rax
    pushq   %rax

    movq    %rcx, 8(%rbp)                # save vector number

#; UINT64  Rip;
    pushq   24(%rbp)

#; UINT64  Gdtr[2], Idtr[2];
    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sidt    (%rsp)
    xchgq   2(%rsp), %rax     # rearrange the 10-byte IDTR image so that
    xchgq   (%rsp), %rax      # Idtr[0] holds the base and
    xchgq   8(%rsp), %rax     # Idtr[1] holds the limit

    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sgdt    (%rsp)
    xchgq   2(%rsp), %rax     # rearrange the 10-byte GDTR image so that
    xchgq   (%rsp), %rax      # Gdtr[0] holds the base and
    xchgq   8(%rsp), %rax     # Gdtr[1] holds the limit

#; UINT64  Ldtr, Tr;
    xorq    %rax, %rax
    str     %ax
    pushq   %rax
    sldt    %ax
    pushq   %rax

#; UINT64  RFlags;
    pushq   40(%rbp)

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    movq    %cr8, %rax
    pushq   %rax
    movq    %cr4, %rax
    orq     $0x208, %rax        # set CR4.OSFXSR and CR4.DE; the updated value is what gets saved
    movq    %rax, %cr4
    pushq   %rax
    mov     %cr3, %rax
    pushq   %rax
    mov     %cr2, %rax
    pushq   %rax
    xorq    %rax, %rax          # CR1 is not implemented; save 0 in its slot
    pushq   %rax
    mov     %cr0, %rax
    pushq   %rax

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    movq    %dr7, %rax
    pushq   %rax
    movq    %dr6, %rax
    pushq   %rax
    movq    %dr3, %rax
    pushq   %rax
    movq    %dr2, %rax
    pushq   %rax
    movq    %dr1, %rax
    pushq   %rax
    movq    %dr0, %rax
    pushq   %rax

#; FX_SAVE_STATE_X64 FxSaveState;
    subq    $512, %rsp
    movq    %rsp, %rdi
    .byte   0x0f, 0x0ae, 0x07   # fxsave (%rdi)

#; UEFI calling convention for x64 requires that the Direction flag in RFLAGS is clear
    cld

#; UINT64  ExceptionData;
    pushq   16(%rbp)

#; Prepare parameters and call
    mov     8(%rbp), %rcx
    mov     %rsp, %rdx
    #
    # Per the X64 calling convention, allocate the maximum parameter stack
    # space (32 bytes of register shadow space plus 8 bytes of padding) and
    # make sure RSP is 16-byte aligned at the call
    #
    subq    $40, %rsp
    call    ASM_PFX(CommonExceptionHandler)
    addq    $40, %rsp

    cli
#; UINT64  ExceptionData;
    addq    $8, %rsp

#; FX_SAVE_STATE_X64 FxSaveState;

    movq    %rsp, %rsi
    .byte   0x0f, 0x0ae, 0x0E   # fxrstor (%rsi)
    addq    $512, %rsp

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
#; Skip restoring the DRx registers so that in-circuit emulators or
#; debuggers can keep breakpoints set in the interrupt/exception context
    addq    $48, %rsp

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    popq    %rax
    movq    %rax, %cr0
    addq    $8, %rsp   # not for Cr1
    popq    %rax
    movq    %rax, %cr2
    popq    %rax
    movq    %rax, %cr3
    popq    %rax
    movq    %rax, %cr4
    popq    %rax
    movq    %rax, %cr8

#; UINT64  RFlags;
    popq    40(%rbp)

#; UINT64  Ldtr, Tr;
#; UINT64  Gdtr[2], Idtr[2];
#; Best not let anyone mess with these particular registers...
    addq    $48, %rsp

#; UINT64  Rip;
    popq    24(%rbp)

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
    popq    %rax
    # mov   %rax, %gs ; not for gs
    popq    %rax
    # mov   %rax, %fs ; not for fs
    # (X64 does not use fs and gs here, so we do not restore them)
    popq    %rax
    mov     %rax, %es
    popq    %rax
    mov     %rax, %ds
    popq    32(%rbp)  # for cs
    popq    56(%rbp)  # for ss

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    popq    %rdi
    popq    %rsi
    addq    $8, %rsp              # not for rbp
    popq    48(%rbp)              # for rsp
    popq    %rbx
    popq    %rdx
    popq    %rcx
    popq    %rax
    popq    %r8
    popq    %r9
    popq    %r10
    popq    %r11
    popq    %r12
    popq    %r13
    popq    %r14
    popq    %r15

    movq    %rbp, %rsp
    popq    %rbp
    addq    $16, %rsp          # RSP now points at the RIP of the interrupt frame;
                               # OldIdtHandler and ExceptionDataFlag remain below it
    cmpq    $0, -32(%rsp)      # check EXCEPTION_HANDLER_CONTEXT.OldIdtHandler
    jz      DoReturn           # no old handler registered, take the default return path
    cmpb    $1, -40(%rsp)      # check EXCEPTION_HANDLER_CONTEXT.ExceptionDataFlag
    jz      ErrorCode
    jmp     *-32(%rsp)
ErrorCode:
    subq    $8, %rsp           # put the error code back on the stack for the old handler
    jmp     *-24(%rsp)

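#
# DoReturn: when mDoFarReturnFlag is non-zero, resume the interrupted code
# with a far return instead of IRETQ.  RIP, CS and RFLAGS are copied from
# the interrupt frame onto the interrupted stack (taken from the saved RSP)
# and consumed there by popfq/lretq; otherwise DoIret simply executes IRETQ.
#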
DoReturn:
    pushq   %rax
    movq    ASM_PFX(mDoFarReturnFlag)(%rip), %rax
    cmpq    $0, %rax          # check whether to do a far return instead of IRET
    popq    %rax
    jz      DoIret
    pushq   %rax
    movq    %rsp, %rax        # save old RSP to rax
    movq    0x20(%rsp), %rsp
    pushq   0x10(%rax)        # save CS in new location
    pushq   0x8(%rax)         # save RIP in new location
    pushq   0x18(%rax)        # save RFLAGS in new location
    movq    (%rax), %rax      # restore rax
    popfq                     # restore RFLAGS
    lretq                     # far return
DoIret:
    iretq


#-------------------------------------------------------------------------------------
#  AsmGetTemplateAddressMap (&AddressMap);
#-------------------------------------------------------------------------------------
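#
# The AddressMap buffer receives three UINTN-sized values describing the
# templates above.  On the C side this is expected to correspond to a
# template-map structure roughly like the sketch below; the struct and
# field names are assumptions based only on how the offsets are used here:
#
#   typedef struct {
#     UINTN  ExceptionStart;            // 0x00: address of AsmIdtVectorBegin
#     UINTN  ExceptionStubHeaderSize;   // 0x08: size of one entry stub
#     UINTN  HookAfterStubHeaderStart;  // 0x10: address of HookAfterStubHeaderBegin
#   } EXCEPTION_HANDLER_TEMPLATE_MAP;   // assumed name
#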
ASM_GLOBAL ASM_PFX(AsmGetTemplateAddressMap)
ASM_PFX(AsmGetTemplateAddressMap):
    pushq     %rbp
    movq      %rsp, %rbp

    leaq         AsmIdtVectorBegin(%rip), %rax
    movq         %rax, (%rcx)
    .set         ENTRY_SIZE, ASM_PFX(HookAfterStubHeaderEnd) - HookAfterStubHeaderBegin
    movq         $(ENTRY_SIZE), 0x08(%rcx)
    leaq         HookAfterStubHeaderBegin(%rip), %rax
    movq         %rax, 0x10(%rcx)

    popq      %rbp
    ret

#-------------------------------------------------------------------------------------
# VOID
# EFIAPI
# AsmVectorNumFixup (
#   IN VOID    *NewVectorAddr,  // RCX
#   IN UINT8   VectorNum,       // RDX
#   IN VOID    *OldVectorAddr   // R8
#  );
#-------------------------------------------------------------------------------------
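#
# NewVectorAddr points to a copy of the HookAfterStubHeaderBegin template.
# This routine writes VectorNum into the copy's PatchVectorNum byte and
# rebases the rel32 operand at PatchFuncAddress by (OldVectorAddr -
# NewVectorAddr), so the relocated stub still jumps to HookAfterStubHeaderEnd
# (assuming the copy was taken from OldVectorAddr, where the rel32 was
# originally correct).
#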
ASM_GLOBAL ASM_PFX(AsmVectorNumFixup)
ASM_PFX(AsmVectorNumFixup):
    pushq     %rbp
    movq      %rsp, %rbp

# Patch vector number
    movb      %dl, (PatchVectorNum - HookAfterStubHeaderBegin)(%rcx)

# Patch function address
    subq      %rcx, %r8     # r8 = OldVectorAddr - NewVectorAddr (relocation delta)
    movl      (PatchFuncAddress - HookAfterStubHeaderBegin)(%rcx), %eax
    addq      %r8, %rax     # adjust the rel32 jmp target by the delta
    movl      %eax, (PatchFuncAddress - HookAfterStubHeaderBegin)(%rcx)

    popq      %rbp
    ret

#END