/*
 * Copyright (C) 2009 The Android Open Source Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

// Contains a thin layer that calls whatever real native allocator
// has been defined. For the libc shared library, this allows the
// implementation of a debug malloc that can intercept all of the allocation
// calls and add special debugging code to attempt to catch allocation
// errors. All of the debugging code is implemented in a separate shared
// library that is only loaded when the property "libc.debug.malloc.options"
// is set to a non-zero value.
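//
// Illustrative note, not part of the implementation: the debug allocator is
// usually enabled by setting the property before the target process starts,
// along these lines (option strings are documented in malloc_debug/README.md):
//   adb shell setprop libc.debug.malloc.options backtrace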

#include <errno.h>
#include <stdint.h>
#include <stdio.h>

#include <platform/bionic/malloc.h>
#include <private/ScopedPthreadMutexLocker.h>
#include <private/bionic_config.h>

#include "gwp_asan_wrappers.h"
#include "heap_tagging.h"
#include "heap_zero_init.h"
#include "malloc_common.h"
#include "malloc_limit.h"
#include "malloc_tagged_pointers.h"

// =============================================================================
// Global variable instantiations.
// =============================================================================

// Malloc hooks globals.
void* (*volatile __malloc_hook)(size_t, const void*);
void* (*volatile __realloc_hook)(void*, size_t, const void*);
void (*volatile __free_hook)(void*, const void*);
void* (*volatile __memalign_hook)(size_t, size_t, const void*);
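
// Illustrative only: these follow the historical glibc-style hook signatures,
// so a program could interpose its own hook roughly like this (my_malloc_hook
// is a hypothetical user function, not something defined here):
//   void* my_malloc_hook(size_t bytes, const void* caller) { /* ... */ }
//   __malloc_hook = my_malloc_hook;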
// =============================================================================

// =============================================================================
// Allocation functions
// =============================================================================
extern "C" void* calloc(size_t n_elements, size_t elem_size) {
  auto dispatch_table = GetDispatchTable();
  if (__predict_false(dispatch_table != nullptr)) {
    return MaybeTagPointer(dispatch_table->calloc(n_elements, elem_size));
  }
  void* result = Malloc(calloc)(n_elements, elem_size);
  if (__predict_false(result == nullptr)) {
    warning_log("calloc(%zu, %zu) failed: returning null pointer", n_elements, elem_size);
  }
  return MaybeTagPointer(result);
}

extern "C" void free(void* mem) {
  auto dispatch_table = GetDispatchTable();
  mem = MaybeUntagAndCheckPointer(mem);
  if (__predict_false(dispatch_table != nullptr)) {
    dispatch_table->free(mem);
  } else {
    Malloc(free)(mem);
  }
}

extern "C" struct mallinfo mallinfo() {
  auto dispatch_table = GetDispatchTable();
  if (__predict_false(dispatch_table != nullptr)) {
    return dispatch_table->mallinfo();
  }
  return Malloc(mallinfo)();
}

extern "C" int malloc_info(int options, FILE* fp) {
  auto dispatch_table = GetDispatchTable();
  if (__predict_false(dispatch_table != nullptr)) {
    return dispatch_table->malloc_info(options, fp);
  }
  return Malloc(malloc_info)(options, fp);
}

extern "C" int mallopt(int param, int value) {
  // Some are handled by libc directly rather than by the allocator.
  if (param == M_BIONIC_SET_HEAP_TAGGING_LEVEL) {
    ScopedPthreadMutexLocker locker(&g_heap_tagging_lock);
    return SetHeapTaggingLevel(static_cast<HeapTaggingLevel>(value));
  }
  if (param == M_BIONIC_ZERO_INIT) {
    return SetHeapZeroInitialize(value);
  }
  // The rest we pass on...
  auto dispatch_table = GetDispatchTable();
  if (__predict_false(dispatch_table != nullptr)) {
    return dispatch_table->mallopt(param, value);
  }
  return Malloc(mallopt)(param, value);
}
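
// Illustrative only (constants come from bionic's <malloc.h>; check the current
// headers before relying on them): callers can adjust these libc-handled knobs
// at runtime roughly like this:
//   mallopt(M_BIONIC_SET_HEAP_TAGGING_LEVEL, M_HEAP_TAGGING_LEVEL_ASYNC);
//   mallopt(M_BIONIC_ZERO_INIT, 1);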

extern "C" void* malloc(size_t bytes) {
  auto dispatch_table = GetDispatchTable();
  void* result;
  if (__predict_false(dispatch_table != nullptr)) {
    result = dispatch_table->malloc(bytes);
  } else {
    result = Malloc(malloc)(bytes);
  }
  if (__predict_false(result == nullptr)) {
    warning_log("malloc(%zu) failed: returning null pointer", bytes);
    return nullptr;
  }
  return MaybeTagPointer(result);
}

extern "C" size_t malloc_usable_size(const void* mem) {
  auto dispatch_table = GetDispatchTable();
  mem = MaybeUntagAndCheckPointer(mem);
  if (__predict_false(dispatch_table != nullptr)) {
    return dispatch_table->malloc_usable_size(mem);
  }
  return Malloc(malloc_usable_size)(mem);
}

extern "C" void* memalign(size_t alignment, size_t bytes) {
  auto dispatch_table = GetDispatchTable();
  if (__predict_false(dispatch_table != nullptr)) {
    return MaybeTagPointer(dispatch_table->memalign(alignment, bytes));
  }
  void* result = Malloc(memalign)(alignment, bytes);
  if (__predict_false(result == nullptr)) {
    warning_log("memalign(%zu, %zu) failed: returning null pointer", alignment, bytes);
  }
  return MaybeTagPointer(result);
}

extern "C" int posix_memalign(void** memptr, size_t alignment, size_t size) {
  auto dispatch_table = GetDispatchTable();
  int result;
  if (__predict_false(dispatch_table != nullptr)) {
    result = dispatch_table->posix_memalign(memptr, alignment, size);
  } else {
    result = Malloc(posix_memalign)(memptr, alignment, size);
  }
  if (result == 0) {
    *memptr = MaybeTagPointer(*memptr);
  }
  return result;
}

extern "C" void* aligned_alloc(size_t alignment, size_t size) {
  auto dispatch_table = GetDispatchTable();
  if (__predict_false(dispatch_table != nullptr)) {
    return MaybeTagPointer(dispatch_table->aligned_alloc(alignment, size));
  }
  void* result = Malloc(aligned_alloc)(alignment, size);
  if (__predict_false(result == nullptr)) {
    warning_log("aligned_alloc(%zu, %zu) failed: returning null pointer", alignment, size);
  }
  return MaybeTagPointer(result);
}

extern "C" __attribute__((__noinline__)) void* realloc(void* old_mem, size_t bytes) {
  auto dispatch_table = GetDispatchTable();
  old_mem = MaybeUntagAndCheckPointer(old_mem);
  if (__predict_false(dispatch_table != nullptr)) {
    return MaybeTagPointer(dispatch_table->realloc(old_mem, bytes));
  }
  void* result = Malloc(realloc)(old_mem, bytes);
  if (__predict_false(result == nullptr && bytes != 0)) {
    warning_log("realloc(%p, %zu) failed: returning null pointer", old_mem, bytes);
  }
  return MaybeTagPointer(result);
}

extern "C" void* reallocarray(void* old_mem, size_t item_count, size_t item_size) {
  size_t new_size;
  if (__builtin_mul_overflow(item_count, item_size, &new_size)) {
    warning_log("reallocarray(%p, %zu, %zu) failed: returning null pointer",
                old_mem, item_count, item_size);
    errno = ENOMEM;
    return nullptr;
  }
  return realloc(old_mem, new_size);
}

#if defined(HAVE_DEPRECATED_MALLOC_FUNCS)
extern "C" void* pvalloc(size_t bytes) {
  auto dispatch_table = GetDispatchTable();
  if (__predict_false(dispatch_table != nullptr)) {
    return MaybeTagPointer(dispatch_table->pvalloc(bytes));
  }
  void* result = Malloc(pvalloc)(bytes);
  if (__predict_false(result == nullptr)) {
    warning_log("pvalloc(%zu) failed: returning null pointer", bytes);
  }
  return MaybeTagPointer(result);
}

extern "C" void* valloc(size_t bytes) {
  auto dispatch_table = GetDispatchTable();
  if (__predict_false(dispatch_table != nullptr)) {
    return MaybeTagPointer(dispatch_table->valloc(bytes));
  }
  void* result = Malloc(valloc)(bytes);
  if (__predict_false(result == nullptr)) {
    warning_log("valloc(%zu) failed: returning null pointer", bytes);
  }
  return MaybeTagPointer(result);
}
#endif
// =============================================================================

struct CallbackWrapperArg {
  void (*callback)(uintptr_t base, size_t size, void* arg);
  void* arg;
};

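// Re-tags `base` (when tagged pointers are in use) before forwarding it to the
// caller-provided malloc_iterate callback stored in CallbackWrapperArg.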
void CallbackWrapper(uintptr_t base, size_t size, void* arg) {
  CallbackWrapperArg* wrapper_arg = reinterpret_cast<CallbackWrapperArg*>(arg);
  wrapper_arg->callback(
      reinterpret_cast<uintptr_t>(MaybeTagPointer(reinterpret_cast<void*>(base))),
      size, wrapper_arg->arg);
}

// =============================================================================
// Exported for use by libmemunreachable.
// =============================================================================

// Calls callback for every allocation in the anonymous heap mapping
// [base, base+size). Must be called between malloc_disable and malloc_enable.
// `base` here can be either a tagged or an untagged pointer, but we always
// provide a tagged pointer to the `base` argument of `callback` if the kernel
// supports tagged pointers.
extern "C" int malloc_iterate(uintptr_t base, size_t size,
    void (*callback)(uintptr_t base, size_t size, void* arg), void* arg) {
  auto dispatch_table = GetDispatchTable();
  // Wrap the malloc_iterate callback we were provided, in order to provide
  // pointer tagging support.
  CallbackWrapperArg wrapper_arg;
  wrapper_arg.callback = callback;
  wrapper_arg.arg = arg;
  uintptr_t untagged_base =
      reinterpret_cast<uintptr_t>(UntagPointer(reinterpret_cast<void*>(base)));
  if (__predict_false(dispatch_table != nullptr)) {
    return dispatch_table->malloc_iterate(
        untagged_base, size, CallbackWrapper, &wrapper_arg);
  }
  return Malloc(malloc_iterate)(
      untagged_base, size, CallbackWrapper, &wrapper_arg);
}

// Disable calls to malloc so malloc_iterate gets a consistent view of
// allocated memory.
extern "C" void malloc_disable() {
  auto dispatch_table = GetDispatchTable();
  if (__predict_false(dispatch_table != nullptr)) {
    return dispatch_table->malloc_disable();
  }
  return Malloc(malloc_disable)();
}

// Re-enable calls to malloc after a previous call to malloc_disable.
extern "C" void malloc_enable() {
  auto dispatch_table = GetDispatchTable();
  if (__predict_false(dispatch_table != nullptr)) {
    return dispatch_table->malloc_enable();
  }
  return Malloc(malloc_enable)();
}
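
// Illustrative only: a consumer such as libmemunreachable is expected to drive
// these entry points in this order (simplified sketch; heap_base/heap_size and
// my_callback/my_arg are the caller's own values, not defined here):
//   malloc_disable();
//   malloc_iterate(heap_base, heap_size, my_callback, my_arg);
//   malloc_enable();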

#if defined(LIBC_STATIC)
extern "C" ssize_t malloc_backtrace(void*, uintptr_t*, size_t) {
  return 0;
}
#endif

#if __has_feature(hwaddress_sanitizer)
// FIXME: implement these in HWASan allocator.
extern "C" int __sanitizer_malloc_iterate(uintptr_t base __unused, size_t size __unused,
                                          void (*callback)(uintptr_t base, size_t size, void* arg)
                                              __unused,
                                          void* arg __unused) {
  return 0;
}

extern "C" void __sanitizer_malloc_disable() {
}

extern "C" void __sanitizer_malloc_enable() {
}

extern "C" int __sanitizer_malloc_info(int, FILE*) {
  errno = ENOTSUP;
  return -1;
}
#endif
// =============================================================================

// =============================================================================
// Platform-internal mallopt variant.
// =============================================================================
#if defined(LIBC_STATIC)
extern "C" bool android_mallopt(int opcode, void* arg, size_t arg_size) {
  if (opcode == M_SET_ALLOCATION_LIMIT_BYTES) {
    return LimitEnable(arg, arg_size);
  }
  if (opcode == M_INITIALIZE_GWP_ASAN) {
    if (arg == nullptr || arg_size != sizeof(bool)) {
      errno = EINVAL;
      return false;
    }
    __libc_globals.mutate([&](libc_globals* globals) {
      return MaybeInitGwpAsan(globals, *reinterpret_cast<bool*>(arg));
    });
  }
  errno = ENOTSUP;
  return false;
}
#endif
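
// Illustrative only: the allocation-limit opcode is normally driven with a byte
// count, roughly along these lines (see platform/bionic/malloc.h for the
// authoritative argument contract):
//   size_t limit_bytes = 512 * 1024 * 1024;
//   android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, &limit_bytes, sizeof(limit_bytes));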
// =============================================================================

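// Dispatch table wired directly to the native allocator's entry points.
// NativeAllocatorDispatch() below hands it out to callers that need to reach
// the underlying allocator regardless of any installed dispatch table.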
static constexpr MallocDispatch __libc_malloc_default_dispatch __attribute__((unused)) = {
  Malloc(calloc),
  Malloc(free),
  Malloc(mallinfo),
  Malloc(malloc),
  Malloc(malloc_usable_size),
  Malloc(memalign),
  Malloc(posix_memalign),
#if defined(HAVE_DEPRECATED_MALLOC_FUNCS)
  Malloc(pvalloc),
#endif
  Malloc(realloc),
#if defined(HAVE_DEPRECATED_MALLOC_FUNCS)
  Malloc(valloc),
#endif
  Malloc(malloc_iterate),
  Malloc(malloc_disable),
  Malloc(malloc_enable),
  Malloc(mallopt),
  Malloc(aligned_alloc),
  Malloc(malloc_info),
};

const MallocDispatch* NativeAllocatorDispatch() {
  return &__libc_malloc_default_dispatch;
}