1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "entrypoints/quick/quick_alloc_entrypoints.h"
18 
19 #include "art_method-inl.h"
20 #include "base/enums.h"
21 #include "base/quasi_atomic.h"
22 #include "callee_save_frame.h"
23 #include "dex/dex_file_types.h"
24 #include "entrypoints/entrypoint_utils-inl.h"
25 #include "mirror/class-inl.h"
26 #include "mirror/object-inl.h"
27 #include "mirror/object_array-inl.h"
28 #include "mirror/string-alloc-inl.h"
29 
30 namespace art {
31 
32 static constexpr bool kUseTlabFastPath = true;
33 
34 template <bool kInitialized,
35           bool kFinalize,
36           bool kInstrumented,
37           gc::AllocatorType allocator_type>
artAllocObjectFromCode(mirror::Class * klass,Thread * self)38 static ALWAYS_INLINE inline mirror::Object* artAllocObjectFromCode(
39     mirror::Class* klass,
40     Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
41   ScopedQuickEntrypointChecks sqec(self);
42   DCHECK(klass != nullptr);
43   if (kUseTlabFastPath && !kInstrumented && allocator_type == gc::kAllocatorTypeTLAB) {
44     if (kInitialized || klass->IsInitialized()) {
45       if (!kFinalize || !klass->IsFinalizable()) {
46         size_t byte_count = klass->GetObjectSize();
47         byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment);
48         mirror::Object* obj;
49         if (LIKELY(byte_count < self->TlabSize())) {
50           obj = self->AllocTlab(byte_count);
51           DCHECK(obj != nullptr) << "AllocTlab can't fail";
52           obj->SetClass(klass);
53           if (kUseBakerReadBarrier) {
54             obj->AssertReadBarrierState();
55           }
56           QuasiAtomic::ThreadFenceForConstructor();
57           return obj;
58         }
59       }
60     }
61   }
62   if (kInitialized) {
63     return AllocObjectFromCodeInitialized<kInstrumented>(klass, self, allocator_type).Ptr();
64   } else if (!kFinalize) {
65     return AllocObjectFromCodeResolved<kInstrumented>(klass, self, allocator_type).Ptr();
66   } else {
67     return AllocObjectFromCode<kInstrumented>(klass, self, allocator_type).Ptr();
68   }
69 }
70 
// Stamps out one set of `extern "C"` allocation entrypoints (objects, arrays
// and the various String factories) for a given allocator. `suffix2` selects
// the instrumented vs. non-instrumented flavor; `instrumented_bool` and
// `allocator_type` are forwarded as template arguments to the shared helpers.
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \
extern "C" mirror::Object* artAllocObjectFromCodeWithChecks##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  return artAllocObjectFromCode<false, true, instrumented_bool, allocator_type>(klass, self); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  return artAllocObjectFromCode<false, false, instrumented_bool, allocator_type>(klass, self); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  return artAllocObjectFromCode<true, false, instrumented_bool, allocator_type>(klass, self); \
} \
extern "C" mirror::String* artAllocStringObject##suffix##suffix2( \
    mirror::Class* klass, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  /* The klass arg is so it matches the ABI of the other object alloc callbacks. */ \
  DCHECK(klass->IsStringClass()) << klass->PrettyClass(); \
  return mirror::String::AllocEmptyString<instrumented_bool>(self, allocator_type).Ptr(); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, int32_t component_count, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCodeResolved<instrumented_bool>( \
      klass, component_count, self, allocator_type).Ptr(); \
} \
extern "C" mirror::String* artAllocStringFromBytesFromCode##suffix##suffix2( \
    mirror::ByteArray* byte_array, int32_t high, int32_t offset, int32_t byte_count, \
    Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  StackHandleScope<1> hs(self); \
  Handle<mirror::ByteArray> handle_array(hs.NewHandle(byte_array)); \
  return mirror::String::AllocFromByteArray<instrumented_bool>( \
      self, byte_count, handle_array, offset, high, allocator_type).Ptr(); \
} \
extern "C" mirror::String* artAllocStringFromCharsFromCode##suffix##suffix2( \
    int32_t offset, int32_t char_count, mirror::CharArray* char_array, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::CharArray> handle_array(hs.NewHandle(char_array)); \
  return mirror::String::AllocFromCharArray<instrumented_bool>( \
      self, char_count, handle_array, offset, allocator_type).Ptr(); \
} \
extern "C" mirror::String* artAllocStringFromStringFromCode##suffix##suffix2( /* NOLINT */ \
    mirror::String* string, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::String> handle_string(hs.NewHandle(string)); \
  return mirror::String::AllocFromString<instrumented_bool>( \
    self, handle_string->GetLength(), handle_string, 0, allocator_type).Ptr(); \
}
127 
// Stamps out both the instrumented and the non-instrumented entrypoint sets
// for one allocator.
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(suffix, allocator_type) \
    GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, Instrumented, true, allocator_type) \
    GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, , false, allocator_type)

// Instantiate the C++ entrypoint implementations for every supported allocator.
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(DlMalloc, gc::kAllocatorTypeDlMalloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RosAlloc, gc::kAllocatorTypeRosAlloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(BumpPointer, gc::kAllocatorTypeBumpPointer)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(TLAB, gc::kAllocatorTypeTLAB)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(Region, gc::kAllocatorTypeRegion)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RegionTLAB, gc::kAllocatorTypeRegionTLAB)
138 
// Declares the assembly trampolines (art_quick_alloc_*) for one allocator in
// both flavors, and defines SetQuickAllocEntryPoints##suffix() which installs
// the instrumented or non-instrumented set into the QuickEntryPoints table.
// The trampolines themselves are defined in per-architecture assembly.
#define GENERATE_ENTRYPOINTS(suffix) \
extern "C" void* art_quick_alloc_array_resolved##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved8##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved16##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved32##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved64##suffix(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_object_resolved##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_initialized##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_with_checks##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_string_object##suffix(mirror::Class* klass); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix(void*); \
extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved8##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved16##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved32##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_array_resolved64##suffix##_instrumented(mirror::Class* klass, int32_t); \
extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_object_with_checks##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_string_object##suffix##_instrumented(mirror::Class* klass); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix##_instrumented(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix##_instrumented(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix##_instrumented(void*); \
void SetQuickAllocEntryPoints##suffix(QuickEntryPoints* qpoints, bool instrumented) { \
  if (instrumented) { \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix##_instrumented; \
    qpoints->pAllocArrayResolved8 = art_quick_alloc_array_resolved8##suffix##_instrumented; \
    qpoints->pAllocArrayResolved16 = art_quick_alloc_array_resolved16##suffix##_instrumented; \
    qpoints->pAllocArrayResolved32 = art_quick_alloc_array_resolved32##suffix##_instrumented; \
    qpoints->pAllocArrayResolved64 = art_quick_alloc_array_resolved64##suffix##_instrumented; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix##_instrumented; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix##_instrumented; \
    qpoints->pAllocObjectWithChecks = art_quick_alloc_object_with_checks##suffix##_instrumented; \
    qpoints->pAllocStringObject = art_quick_alloc_string_object##suffix##_instrumented; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix##_instrumented; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix##_instrumented; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix##_instrumented; \
  } else { \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix; \
    qpoints->pAllocArrayResolved8 = art_quick_alloc_array_resolved8##suffix; \
    qpoints->pAllocArrayResolved16 = art_quick_alloc_array_resolved16##suffix; \
    qpoints->pAllocArrayResolved32 = art_quick_alloc_array_resolved32##suffix; \
    qpoints->pAllocArrayResolved64 = art_quick_alloc_array_resolved64##suffix; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix; \
    qpoints->pAllocObjectWithChecks = art_quick_alloc_object_with_checks##suffix; \
    qpoints->pAllocStringObject = art_quick_alloc_string_object##suffix; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix; \
  } \
}
193 
// Generate the entrypoint functions.
#if !defined(__APPLE__) || !defined(__LP64__)
GENERATE_ENTRYPOINTS(_dlmalloc)
GENERATE_ENTRYPOINTS(_rosalloc)
GENERATE_ENTRYPOINTS(_bump_pointer)
GENERATE_ENTRYPOINTS(_tlab)
GENERATE_ENTRYPOINTS(_region)
GENERATE_ENTRYPOINTS(_region_tlab)
#endif

// File-local state describing which entrypoints ResetQuickAllocEntryPoints()
// should install next: the active allocator and whether allocation
// instrumentation is enabled.
static bool entry_points_instrumented = false;
static gc::AllocatorType entry_points_allocator = gc::kAllocatorTypeDlMalloc;
206 
SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator)207 void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) {
208   entry_points_allocator = allocator;
209 }
210 
SetQuickAllocEntryPointsInstrumented(bool instrumented)211 void SetQuickAllocEntryPointsInstrumented(bool instrumented) {
212   entry_points_instrumented = instrumented;
213 }
214 
ResetQuickAllocEntryPoints(QuickEntryPoints * qpoints,bool is_marking)215 void ResetQuickAllocEntryPoints(QuickEntryPoints* qpoints, bool is_marking) {
216 #if !defined(__APPLE__) || !defined(__LP64__)
217   switch (entry_points_allocator) {
218     case gc::kAllocatorTypeDlMalloc: {
219       SetQuickAllocEntryPoints_dlmalloc(qpoints, entry_points_instrumented);
220       return;
221     }
222     case gc::kAllocatorTypeRosAlloc: {
223       SetQuickAllocEntryPoints_rosalloc(qpoints, entry_points_instrumented);
224       return;
225     }
226     case gc::kAllocatorTypeBumpPointer: {
227       CHECK(kMovingCollector);
228       SetQuickAllocEntryPoints_bump_pointer(qpoints, entry_points_instrumented);
229       return;
230     }
231     case gc::kAllocatorTypeTLAB: {
232       CHECK(kMovingCollector);
233       SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented);
234       return;
235     }
236     case gc::kAllocatorTypeRegion: {
237       CHECK(kMovingCollector);
238       SetQuickAllocEntryPoints_region(qpoints, entry_points_instrumented);
239       return;
240     }
241     case gc::kAllocatorTypeRegionTLAB: {
242       CHECK(kMovingCollector);
243       if (is_marking) {
244         SetQuickAllocEntryPoints_region_tlab(qpoints, entry_points_instrumented);
245       } else {
246         // Not marking means we need no read barriers and can just use the normal TLAB case.
247         SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented);
248       }
249       return;
250     }
251     default:
252       break;
253   }
254 #else
255   UNUSED(qpoints);
256   UNUSED(is_marking);
257 #endif
258   UNIMPLEMENTED(FATAL);
259   UNREACHABLE();
260 }
261 
262 }  // namespace art
263