/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "context_x86.h"

#include "mirror/art_method-inl.h"
#include "mirror/object-inl.h"
#include "quick/quick_method_frame_info.h"
#include "stack.h"

namespace art {
namespace x86 {

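// Shared zero word. SmashCallerSaves() points the return-value registers at this constant so a
// smashed frame reads back a null/zero result; SetGPR() refuses to write through it.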
static constexpr uintptr_t gZero = 0;

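// Reset the context: forget the locations of all registers, keep ESP backed by the context's own
// esp_ field, and seed esp_/eip_ with kBadGprBase-derived values that are easy to spot in a crash
// if a stale context is ever jumped to.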
void X86Context::Reset() {
  for (size_t i = 0; i < kNumberOfCpuRegisters; i++) {
    gprs_[i] = nullptr;
  }
  gprs_[ESP] = &esp_;
  // Initialize registers with easy to spot debug values.
  esp_ = X86Context::kBadGprBase + ESP;
  eip_ = X86Context::kBadGprBase + kNumberOfCpuRegisters;
}

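// Record where the visited frame spilled its callee-save core registers, so that later writes
// through this context (e.g. SetGPR()) update the saved copies in the frame itself. x86 quick
// frames spill no floating point registers, hence the DCHECK on the FP spill mask.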
void X86Context::FillCalleeSaves(const StackVisitor& fr) {
  mirror::ArtMethod* method = fr.GetMethod();
  const QuickMethodFrameInfo frame_info = method->GetQuickFrameInfo();
  size_t spill_count = POPCOUNT(frame_info.CoreSpillMask());
  DCHECK_EQ(frame_info.FpSpillMask(), 0u);
  if (spill_count > 0) {
    // Lowest number spill is farthest away, walk registers and fill into context.
    int j = 2;  // Offset j to skip return address spill.
    for (int i = 0; i < kNumberOfCpuRegisters; i++) {
      if (((frame_info.CoreSpillMask() >> i) & 1) != 0) {
        gprs_[i] = fr.CalleeSaveAddress(spill_count - j, frame_info.FrameSizeInBytes());
        j++;
      }
    }
  }
}

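// Deliver a null/zero return value when this context is resumed: EAX and EDX (the EDX:EAX
// 64-bit return pair) are pointed at gZero, and any recorded locations for ECX and EBX are
// dropped.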
void X86Context::SmashCallerSaves() {
  // This needs to be 0 because we want a null/zero return value.
  gprs_[EAX] = const_cast<uintptr_t*>(&gZero);
  gprs_[EDX] = const_cast<uintptr_t*>(&gZero);
  gprs_[ECX] = nullptr;
  gprs_[EBX] = nullptr;
}

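// Write |value| into the saved location of |reg|, if one was recorded by FillCalleeSaves().
// Returns false when the register's location is unknown; writing through the shared gZero slot
// is a hard error.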
bool X86Context::SetGPR(uint32_t reg, uintptr_t value) {
  CHECK_LT(reg, static_cast<uint32_t>(kNumberOfCpuRegisters));
  CHECK_NE(gprs_[reg], &gZero);
  if (gprs_[reg] != nullptr) {
    *gprs_[reg] = value;
    return true;
  } else {
    return false;
  }
}

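// Reload every general purpose register from this context (substituting kBadGprBase-derived
// debug values for registers with no recorded location) and branch to eip_. Does not return.
// Only implemented when building for x86 itself.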
void X86Context::DoLongJump() {
#if defined(__i386__)
  // Array of GPR values, filled from the context backward for the long jump pop. We add a slot at
  // the top for the stack pointer that doesn't get popped in a pop-all.
  volatile uintptr_t gprs[kNumberOfCpuRegisters + 1];
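  // popal restores EDI first and EAX last, i.e. in descending register-number order, so register
  // i is stored at array index kNumberOfCpuRegisters - i - 1.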
  for (size_t i = 0; i < kNumberOfCpuRegisters; ++i) {
    gprs[kNumberOfCpuRegisters - i - 1] = gprs_[i] != nullptr ? *gprs_[i] : X86Context::kBadGprBase + i;
  }
  // We want to load the stack pointer one slot below so that the ret will pop eip.
  uintptr_t esp = gprs[kNumberOfCpuRegisters - ESP - 1] - kWordSize;
  gprs[kNumberOfCpuRegisters] = esp;
  *(reinterpret_cast<uintptr_t*>(esp)) = eip_;
  __asm__ __volatile__(
      "movl %0, %%esp\n\t"  // ESP points to gprs.
      "popal\n\t"           // Load all registers except ESP and EIP with values in gprs.
      "popl %%esp\n\t"      // Load stack pointer.
      "ret\n\t"             // From higher in the stack pop eip.
      :  // output.
      : "g"(&gprs[0])  // input.
      :);  // clobber.
#else
  UNIMPLEMENTED(FATAL);
#endif
}

}  // namespace x86
}  // namespace art