1 /*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
#include "linker/arm/relative_patcher_arm_base.h"

#include <utility>

#include "compiled_method.h"
#include "linker/output_stream.h"
#include "oat.h"
#include "oat_quick_method_header.h"
23
24 namespace art {
25 namespace linker {
26
// Reserves space for a call thunk before the given method's code if needed.
// Delegates to ReserveSpaceInternal() with no extra space requested; subclasses
// needing extra room call ReserveSpaceInternal() with a non-zero max_extra_space.
uint32_t ArmBaseRelativePatcher::ReserveSpace(uint32_t offset,
                                              const CompiledMethod* compiled_method,
                                              MethodReference method_ref) {
  return ReserveSpaceInternal(offset, compiled_method, method_ref, 0u);
}
32
// Reserves space for one final thunk after all method code, if any unprocessed
// patches still need one. Returns the (possibly advanced) end offset.
uint32_t ArmBaseRelativePatcher::ReserveSpaceEnd(uint32_t offset) {
  // NOTE: The final thunk can be reserved from InitCodeMethodVisitor::EndClass() while it
  // may be written early by WriteCodeMethodVisitor::VisitMethod() for a deduplicated chunk
  // of code. To avoid any alignment discrepancies for the final chunk, we always align the
  // offset after reserving or writing any chunk.
  uint32_t aligned_offset = CompiledMethod::AlignCode(offset, instruction_set_);
  // MethodReference(nullptr, 0u) matches no patch target; passing the same value
  // for quick_code_offset and next_aligned_offset signals "end of code" to
  // ReserveSpaceProcessPatches().
  bool needs_thunk = ReserveSpaceProcessPatches(aligned_offset,
                                                MethodReference(nullptr, 0u),
                                                aligned_offset);
  if (needs_thunk) {
    // All remaining patches will be handled by this thunk.
    DCHECK(!unprocessed_patches_.empty());
    DCHECK_LE(aligned_offset - unprocessed_patches_.front().second, max_positive_displacement_);
    unprocessed_patches_.clear();

    thunk_locations_.push_back(aligned_offset);
    offset = CompiledMethod::AlignCode(aligned_offset + thunk_code_.size(), instruction_set_);
  }
  return offset;
}
53
// Writes the next reserved thunk if the current (aligned) offset matches its
// reserved location, including alignment padding before and after it.
// Returns the new offset, or 0u if writing to the output stream failed.
uint32_t ArmBaseRelativePatcher::WriteThunks(OutputStream* out, uint32_t offset) {
  if (current_thunk_to_write_ == thunk_locations_.size()) {
    // All reserved thunks have been written.
    return offset;
  }
  uint32_t aligned_offset = CompiledMethod::AlignCode(offset, instruction_set_);
  if (UNLIKELY(aligned_offset == thunk_locations_[current_thunk_to_write_])) {
    ++current_thunk_to_write_;
    // Pad from the current offset up to the thunk's reserved aligned location.
    uint32_t aligned_code_delta = aligned_offset - offset;
    if (aligned_code_delta != 0u && !WriteCodeAlignment(out, aligned_code_delta)) {
      return 0u;
    }
    if (UNLIKELY(!WriteRelCallThunk(out, ArrayRef<const uint8_t>(thunk_code_)))) {
      return 0u;
    }
    uint32_t thunk_end_offset = aligned_offset + thunk_code_.size();
    // Align after writing chunk, see the NOTE in ReserveSpaceEnd().
    offset = CompiledMethod::AlignCode(thunk_end_offset, instruction_set_);
    aligned_code_delta = offset - thunk_end_offset;
    if (aligned_code_delta != 0u && !WriteCodeAlignment(out, aligned_code_delta)) {
      return 0u;
    }
  }
  return offset;
}
78
ArmBaseRelativePatcher(RelativePatcherTargetProvider * provider,InstructionSet instruction_set,std::vector<uint8_t> thunk_code,uint32_t max_positive_displacement,uint32_t max_negative_displacement)79 ArmBaseRelativePatcher::ArmBaseRelativePatcher(RelativePatcherTargetProvider* provider,
80 InstructionSet instruction_set,
81 std::vector<uint8_t> thunk_code,
82 uint32_t max_positive_displacement,
83 uint32_t max_negative_displacement)
84 : provider_(provider), instruction_set_(instruction_set), thunk_code_(thunk_code),
85 max_positive_displacement_(max_positive_displacement),
86 max_negative_displacement_(max_negative_displacement),
87 thunk_locations_(), current_thunk_to_write_(0u), unprocessed_patches_() {
88 }
89
// Reserves space for a thunk before this method's code if laying out the method
// (plus any subclass-requested extra space) would push the oldest unprocessed
// patch out of forward range. Also records this method's relative call patches
// for later processing. Returns the updated offset.
uint32_t ArmBaseRelativePatcher::ReserveSpaceInternal(uint32_t offset,
                                                      const CompiledMethod* compiled_method,
                                                      MethodReference method_ref,
                                                      uint32_t max_extra_space) {
  uint32_t quick_code_size = compiled_method->GetQuickCode().size();
  // Code starts after the OatQuickMethodHeader at the aligned offset.
  uint32_t quick_code_offset = compiled_method->AlignCode(offset) + sizeof(OatQuickMethodHeader);
  uint32_t next_aligned_offset = compiled_method->AlignCode(quick_code_offset + quick_code_size);
  // Adjust for extra space required by the subclass.
  next_aligned_offset = compiled_method->AlignCode(next_aligned_offset + max_extra_space);
  // TODO: ignore unprocessed patches targeting this method if they can reach quick_code_offset.
  // We need the MethodReference for that.
  if (!unprocessed_patches_.empty() &&
      next_aligned_offset - unprocessed_patches_.front().second > max_positive_displacement_) {
    // The oldest patch could go out of range; see which patches can be resolved
    // now and whether the rest force a thunk here.
    bool needs_thunk = ReserveSpaceProcessPatches(quick_code_offset,
                                                  method_ref,
                                                  next_aligned_offset);
    if (needs_thunk) {
      // A single thunk will cover all pending patches.
      unprocessed_patches_.clear();
      uint32_t thunk_location = compiled_method->AlignCode(offset);
      thunk_locations_.push_back(thunk_location);
      offset = CompiledMethod::AlignCode(thunk_location + thunk_code_.size(), instruction_set_);
    }
  }
  // Queue this method's relative call patches (target, patch offset) for later.
  for (const LinkerPatch& patch : compiled_method->GetPatches()) {
    if (patch.GetType() == LinkerPatch::Type::kCallRelative) {
      unprocessed_patches_.emplace_back(patch.TargetMethod(),
                                        quick_code_offset + patch.LiteralOffset());
    }
  }
  return offset;
}
122
// Calculates the branch displacement for a patch. If the real target is out of
// range, redirects the branch to a thunk: the next unwritten thunk if it is
// close enough ahead, otherwise the most recently written thunk behind.
uint32_t ArmBaseRelativePatcher::CalculateDisplacement(uint32_t patch_offset,
                                                       uint32_t target_offset) {
  // Unsigned arithmetic with its well-defined overflow behavior is just fine here.
  uint32_t displacement = target_offset - patch_offset;
  // NOTE: With unsigned arithmetic we do mean to use && rather than || below.
  // The two comparisons together test a single wrapped-around range: the
  // displacement is out of range both as a forward and as a backward branch.
  if (displacement > max_positive_displacement_ && displacement < -max_negative_displacement_) {
    // Unwritten thunks have higher offsets, check if it's within range.
    DCHECK(current_thunk_to_write_ == thunk_locations_.size() ||
           thunk_locations_[current_thunk_to_write_] > patch_offset);
    if (current_thunk_to_write_ != thunk_locations_.size() &&
        thunk_locations_[current_thunk_to_write_] - patch_offset < max_positive_displacement_) {
      // Branch forward to the next thunk to be written.
      displacement = thunk_locations_[current_thunk_to_write_] - patch_offset;
    } else {
      // We must have a previous thunk then.
      DCHECK_NE(current_thunk_to_write_, 0u);
      DCHECK_LT(thunk_locations_[current_thunk_to_write_ - 1], patch_offset);
      // Branch backward to the last written thunk.
      displacement = thunk_locations_[current_thunk_to_write_ - 1] - patch_offset;
      DCHECK(displacement >= -max_negative_displacement_);
    }
  }
  return displacement;
}
145
// Walks the unprocessed patches in order and pops every patch that is proven
// reachable (directly or via an existing thunk). Returns true as soon as some
// patch requires a new thunk to be reserved before next_aligned_offset,
// false if all remaining patches can be deferred.
bool ArmBaseRelativePatcher::ReserveSpaceProcessPatches(uint32_t quick_code_offset,
                                                        MethodReference method_ref,
                                                        uint32_t next_aligned_offset) {
  // Process as many patches as possible, stop only on unresolved targets or calls too far back.
  while (!unprocessed_patches_.empty()) {
    MethodReference patch_ref = unprocessed_patches_.front().first;
    uint32_t patch_offset = unprocessed_patches_.front().second;
    // Patches older than the last reserved thunk were cleared when it was reserved.
    DCHECK(thunk_locations_.empty() || thunk_locations_.back() <= patch_offset);
    if (patch_ref.dex_file == method_ref.dex_file &&
        patch_ref.dex_method_index == method_ref.dex_method_index) {
      // This patch targets the method currently being reserved; its code will
      // start at quick_code_offset, so the reachability check is direct.
      DCHECK_GT(quick_code_offset, patch_offset);
      if (quick_code_offset - patch_offset > max_positive_displacement_) {
        return true;
      }
    } else {
      auto result = provider_->FindMethodOffset(patch_ref);
      if (!result.first) {
        // If still unresolved, check if we have a thunk within range.
        if (thunk_locations_.empty() ||
            patch_offset - thunk_locations_.back() > max_negative_displacement_) {
          // No thunk in range, we need a thunk if the next aligned offset
          // is out of range, or if we're at the end of all code.
          return (next_aligned_offset - patch_offset > max_positive_displacement_) ||
              (quick_code_offset == next_aligned_offset);  // End of code.
        }
      } else {
        uint32_t target_offset = result.second - CompiledCode::CodeDelta(instruction_set_);
        if (target_offset >= patch_offset) {
          // Forward branch to a resolved target: must already be in range.
          DCHECK_LE(target_offset - patch_offset, max_positive_displacement_);
        } else {
          // When calling back, check if we have a thunk that's closer than the actual target.
          if (!thunk_locations_.empty()) {
            target_offset = std::max(target_offset, thunk_locations_.back());
          }
          if (patch_offset - target_offset > max_negative_displacement_) {
            return true;
          }
        }
      }
    }
    // This patch is reachable; drop it and continue with the next one.
    unprocessed_patches_.pop_front();
  }
  return false;
}
190
191 } // namespace linker
192 } // namespace art
193