/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler.h"

#include <algorithm>
#include <vector>

#include "arm/assembler_arm32.h"
#include "arm/assembler_thumb2.h"
#include "arm64/assembler_arm64.h"
#include "mips/assembler_mips.h"
#include "x86/assembler_x86.h"
#include "x86_64/assembler_x86_64.h"
#include "globals.h"
#include "memory_region.h"

namespace art {

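// Backing storage for AssemblerBuffer; the returned array is owned by the
// buffer and released with delete[] in ~AssemblerBuffer().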
static byte* NewContents(size_t capacity) {
  return new byte[capacity];
}


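// Start with a 4 KB code buffer. cursor_ always points at the next free byte,
// and limit_ marks the point at which the buffer must be grown (it is derived
// from the capacity via ComputeLimit(); see assembler.h).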
AssemblerBuffer::AssemblerBuffer() {
  static const size_t kInitialBufferCapacity = 4 * KB;
  contents_ = NewContents(kInitialBufferCapacity);
  cursor_ = contents_;
  limit_ = ComputeLimit(contents_, kInitialBufferCapacity);
  fixup_ = NULL;
  slow_path_ = NULL;
#ifndef NDEBUG
  has_ensured_capacity_ = false;
  fixups_processed_ = false;
#endif

  // Verify internal state.
  CHECK_EQ(Capacity(), kInitialBufferCapacity);
  CHECK_EQ(Size(), 0U);
}


AssemblerBuffer::~AssemblerBuffer() {
  delete[] contents_;
}


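// Walk the singly-linked list of fixups (most recently added first) and let
// each one patch its recorded position within the finalized region.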
void AssemblerBuffer::ProcessFixups(const MemoryRegion& region) {
  AssemblerFixup* fixup = fixup_;
  while (fixup != NULL) {
    fixup->Process(region, fixup->position());
    fixup = fixup->previous();
  }
}


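// Copy the emitted code into the destination region, then apply all recorded
// fixups against that region.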
void AssemblerBuffer::FinalizeInstructions(const MemoryRegion& instructions) {
  // Copy the instructions from the buffer.
  MemoryRegion from(reinterpret_cast<void*>(contents()), Size());
  instructions.CopyFrom(0, from);
  // Process fixups in the instructions.
  ProcessFixups(instructions);
#ifndef NDEBUG
  fixups_processed_ = true;
#endif
}


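// Grow the buffer: the capacity doubles while it is below 1 MB and grows in
// 1 MB increments after that; cursor_ and limit_ are rebased onto the new
// allocation.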
void AssemblerBuffer::ExtendCapacity() {
  size_t old_size = Size();
  size_t old_capacity = Capacity();
  size_t new_capacity = std::min(old_capacity * 2, old_capacity + 1 * MB);

  // Allocate the new data area and copy contents of the old one to it.
  byte* new_contents = NewContents(new_capacity);
  memmove(reinterpret_cast<void*>(new_contents),
          reinterpret_cast<void*>(contents_),
          old_size);

  // Compute the relocation delta, release the old allocation (it is owned via
  // new[]/delete[]) and switch to the new contents area.
  ptrdiff_t delta = new_contents - contents_;
  delete[] contents_;
  contents_ = new_contents;

  // Update the cursor and recompute the limit.
  cursor_ += delta;
  limit_ = ComputeLimit(new_contents, new_capacity);

  // Verify internal state.
  CHECK_EQ(Capacity(), new_capacity);
  CHECK_EQ(Size(), old_size);
}


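// Factory for the architecture-specific assembler backends.
//
// Rough usage sketch (illustrative only; see assembler.h for the exact
// code-size and finalization interface):
//
//   Assembler* assembler = Assembler::Create(kThumb2);
//   // ... emit instructions through the backend-specific interface ...
//   MemoryRegion code(buffer, assembler->CodeSize());
//   assembler->FinalizeInstructions(code);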
Assembler* Assembler::Create(InstructionSet instruction_set) {
  switch (instruction_set) {
    case kArm:
      return new arm::Arm32Assembler();
    case kThumb2:
      return new arm::Thumb2Assembler();
    case kArm64:
      return new arm64::Arm64Assembler();
    case kMips:
      return new mips::MipsAssembler();
    case kX86:
      return new x86::X86Assembler();
    case kX86_64:
      return new x86_64::X86_64Assembler();
    default:
      LOG(FATAL) << "Unknown InstructionSet: " << instruction_set;
      return NULL;
  }
}

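// Default implementations of the Thread-offset helpers. Backends are expected
// to override the variants they support; reaching one of these stubs aborts
// via UNIMPLEMENTED(FATAL).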
void Assembler::StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm,
                                         ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm,
                                         ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::StoreStackOffsetToThread32(ThreadOffset<4> thr_offs,
                                           FrameOffset fr_offs,
                                           ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::StoreStackOffsetToThread64(ThreadOffset<8> thr_offs,
                                           FrameOffset fr_offs,
                                           ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::StoreStackPointerToThread32(ThreadOffset<4> thr_offs) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::StoreStackPointerToThread64(ThreadOffset<8> thr_offs) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::LoadFromThread32(ManagedRegister dest, ThreadOffset<4> src, size_t size) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::LoadRawPtrFromThread32(ManagedRegister dest, ThreadOffset<4> offs) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::LoadRawPtrFromThread64(ManagedRegister dest, ThreadOffset<8> offs) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CopyRawPtrFromThread32(FrameOffset fr_offs, ThreadOffset<4> thr_offs,
                                       ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs, ThreadOffset<8> thr_offs,
                                       ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CopyRawPtrToThread32(ThreadOffset<4> thr_offs, FrameOffset fr_offs,
                                     ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CopyRawPtrToThread64(ThreadOffset<8> thr_offs, FrameOffset fr_offs,
                                     ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CallFromThread32(ThreadOffset<4> offset, ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

void Assembler::CallFromThread64(ThreadOffset<8> offset, ManagedRegister scratch) {
  UNIMPLEMENTED(FATAL);
}

}  // namespace art