/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "entrypoints/quick/quick_alloc_entrypoints.h"

#include "art_method-inl.h"
#include "callee_save_frame.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"

namespace art {

static constexpr bool kUseTlabFastPath = true;
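// For a given (suffix, instrumented, allocator) triple, this macro emits the full family of
// runtime allocation entrypoints (object, resolved/initialized object, array, check-and-alloc
// array, and String variants) that the quick assembly stubs call into. For the uninstrumented
// TLAB allocator, the object entrypoints first try an inlined fast path that bumps the thread's
// TLAB directly, and fall back to the generic Alloc*FromCode helpers when the class is not yet
// usable (unresolved, uninitialized, or finalizable) or the TLAB has insufficient space.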
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \
extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \
    uint32_t type_idx, ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx); \
    if (LIKELY(klass != nullptr && klass->IsInitialized() && !klass->IsFinalizable())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerOrBrooksReadBarrier) { \
          if (kUseBrooksReadBarrier) { \
            obj->SetReadBarrierPointer(obj); \
          } \
          obj->AssertReadBarrierPointer(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  return AllocObjectFromCode<false, instrumented_bool>(type_idx, method, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  UNUSED(method); \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    if (LIKELY(klass->IsInitialized())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerOrBrooksReadBarrier) { \
          if (kUseBrooksReadBarrier) { \
            obj->SetReadBarrierPointer(obj); \
          } \
          obj->AssertReadBarrierPointer(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  return AllocObjectFromCodeResolved<instrumented_bool>(klass, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \
    mirror::Class* klass, ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  UNUSED(method); \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    size_t byte_count = klass->GetObjectSize(); \
    byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
    mirror::Object* obj; \
    if (LIKELY(byte_count < self->TlabSize())) { \
      obj = self->AllocTlab(byte_count); \
      DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
      obj->SetClass(klass); \
      if (kUseBakerOrBrooksReadBarrier) { \
        if (kUseBrooksReadBarrier) { \
          obj->SetReadBarrierPointer(obj); \
        } \
        obj->AssertReadBarrierPointer(); \
      } \
      QuasiAtomic::ThreadFenceForConstructor(); \
      return obj; \
    } \
  } \
  return AllocObjectFromCodeInitialized<instrumented_bool>(klass, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocObjectFromCode<true, instrumented_bool>(type_idx, method, self, allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCode<false, instrumented_bool>(type_idx, component_count, method, self, \
                                                      allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, int32_t component_count, ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCodeResolved<false, instrumented_bool>(klass, component_count, method, self, \
                                                              allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCode<true, instrumented_bool>(type_idx, component_count, method, self, \
                                                     allocator_type); \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (!instrumented_bool) { \
    return CheckAndAllocArrayFromCode(type_idx, component_count, method, self, false, allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(type_idx, component_count, method, self, false, allocator_type); \
  } \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (!instrumented_bool) { \
    return CheckAndAllocArrayFromCode(type_idx, component_count, method, self, true, allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(type_idx, component_count, method, self, true, allocator_type); \
  } \
} \
extern "C" mirror::String* artAllocStringFromBytesFromCode##suffix##suffix2( \
    mirror::ByteArray* byte_array, int32_t high, int32_t offset, int32_t byte_count, \
    Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  StackHandleScope<1> hs(self); \
  Handle<mirror::ByteArray> handle_array(hs.NewHandle(byte_array)); \
  return mirror::String::AllocFromByteArray<instrumented_bool>(self, byte_count, handle_array, \
                                                               offset, high, allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromCharsFromCode##suffix##suffix2( \
    int32_t offset, int32_t char_count, mirror::CharArray* char_array, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::CharArray> handle_array(hs.NewHandle(char_array)); \
  return mirror::String::AllocFromCharArray<instrumented_bool>(self, char_count, handle_array, \
                                                               offset, allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromStringFromCode##suffix##suffix2( \
    mirror::String* string, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::String> handle_string(hs.NewHandle(string)); \
  return mirror::String::AllocFromString<instrumented_bool>(self, handle_string->GetLength(), \
                                                            handle_string, 0, allocator_type); \
}
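// Instantiate both the instrumented and the uninstrumented entrypoint variants for one allocator.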
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(suffix, allocator_type) \
    GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, Instrumented, true, allocator_type) \
    GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, , false, allocator_type)

GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(DlMalloc, gc::kAllocatorTypeDlMalloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RosAlloc, gc::kAllocatorTypeRosAlloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(BumpPointer, gc::kAllocatorTypeBumpPointer)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(TLAB, gc::kAllocatorTypeTLAB)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(Region, gc::kAllocatorTypeRegion)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RegionTLAB, gc::kAllocatorTypeRegionTLAB)
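// Forward-declare the per-allocator assembly stubs (plain and _instrumented) and define
// SetQuickAllocEntryPoints##suffix, which points the QuickEntryPoints table at one set or the
// other depending on whether instrumentation (e.g. allocation tracking) is enabled.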
#define GENERATE_ENTRYPOINTS(suffix) \
extern "C" void* art_quick_alloc_array##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_resolved##suffix(mirror::Class* klass, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object##suffix(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_resolved##suffix(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_initialized##suffix(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix(void*); \
extern "C" void* art_quick_alloc_array##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(mirror::Class* klass, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object##suffix##_instrumented(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix##_instrumented(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix##_instrumented(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix##_instrumented(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix##_instrumented(void*); \
void SetQuickAllocEntryPoints##suffix(QuickEntryPoints* qpoints, bool instrumented) { \
  if (instrumented) { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix##_instrumented; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix##_instrumented; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix##_instrumented; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix##_instrumented; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix##_instrumented; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix##_instrumented; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix##_instrumented; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix##_instrumented; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix##_instrumented; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix##_instrumented; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix##_instrumented; \
  } else { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix; \
  } \
}

// Generate the entrypoint functions.
#if !defined(__APPLE__) || !defined(__LP64__)
GENERATE_ENTRYPOINTS(_dlmalloc)
GENERATE_ENTRYPOINTS(_rosalloc)
GENERATE_ENTRYPOINTS(_bump_pointer)
GENERATE_ENTRYPOINTS(_tlab)
GENERATE_ENTRYPOINTS(_region)
GENERATE_ENTRYPOINTS(_region_tlab)
#endif
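// Currently selected allocator and instrumentation state, consumed by ResetQuickAllocEntryPoints()
// below. A typical switch looks roughly like the following (sketch only; the real call sites live
// in the GC and instrumentation code, and qpoints would be a thread's entrypoint table):
//
//   SetQuickAllocEntryPointsAllocator(gc::kAllocatorTypeRegionTLAB);
//   ResetQuickAllocEntryPoints(qpoints);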
static bool entry_points_instrumented = false;
static gc::AllocatorType entry_points_allocator = gc::kAllocatorTypeDlMalloc;

void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) {
  entry_points_allocator = allocator;
}

void SetQuickAllocEntryPointsInstrumented(bool instrumented) {
  entry_points_instrumented = instrumented;
}
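// Install the entrypoints matching the currently selected allocator and instrumentation state.
// On configurations where the stubs are not generated (64-bit Apple hosts), this is unimplemented.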
void ResetQuickAllocEntryPoints(QuickEntryPoints* qpoints) {
#if !defined(__APPLE__) || !defined(__LP64__)
  switch (entry_points_allocator) {
    case gc::kAllocatorTypeDlMalloc: {
      SetQuickAllocEntryPoints_dlmalloc(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRosAlloc: {
      SetQuickAllocEntryPoints_rosalloc(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeBumpPointer: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_bump_pointer(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeTLAB: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRegion: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_region(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRegionTLAB: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_region_tlab(qpoints, entry_points_instrumented);
      return;
    }
    default:
      break;
  }
#else
  UNUSED(qpoints);
#endif
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}

}  // namespace art