/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_VALGRIND_MALLOC_SPACE_INL_H_
#define ART_RUNTIME_GC_SPACE_VALGRIND_MALLOC_SPACE_INL_H_

#include "valgrind_malloc_space.h"

#include <memcheck/memcheck.h>

#include "valgrind_settings.h"

namespace art {
namespace gc {
namespace space {

namespace valgrind_details {

template <size_t kValgrindRedZoneBytes, bool kUseObjSizeForUsable>
inline mirror::Object* AdjustForValgrind(void* obj_with_rdz, size_t num_bytes,
                                         size_t bytes_allocated, size_t usable_size,
                                         size_t bytes_tl_bulk_allocated,
                                         size_t* bytes_allocated_out, size_t* usable_size_out,
                                         size_t* bytes_tl_bulk_allocated_out) {
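  // Raw allocation layout after the adjustments below (offsets from obj_with_rdz,
  // RZ == kValgrindRedZoneBytes):
  //
  //   [0, RZ)                        left redzone              - NOACCESS
  //   [RZ, RZ + num_bytes)           object payload            - DEFINED (pointer returned)
  //   [RZ + num_bytes, usable_size)  right redzone             - NOACCESS
  //   [usable_size, bytes_allocated) allocator management data - left unprotected, if present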
  if (bytes_allocated_out != nullptr) {
    *bytes_allocated_out = bytes_allocated;
  }
  if (bytes_tl_bulk_allocated_out != nullptr) {
    *bytes_tl_bulk_allocated_out = bytes_tl_bulk_allocated;
  }

  // This cuts over-provision and is a trade-off between testing the over-provisioning code paths
  // and checking for overflows in the regular paths.
  if (usable_size_out != nullptr) {
    if (kUseObjSizeForUsable) {
      *usable_size_out = num_bytes;
    } else {
      *usable_size_out = usable_size - 2 * kValgrindRedZoneBytes;
    }
  }

  // Left redzone.
  VALGRIND_MAKE_MEM_NOACCESS(obj_with_rdz, kValgrindRedZoneBytes);

  // Make requested memory readable.
  // (If the allocator assumes memory is zeroed out, we might get UNDEFINED warnings, so make
  //  everything DEFINED initially.)
  mirror::Object* result = reinterpret_cast<mirror::Object*>(
      reinterpret_cast<uint8_t*>(obj_with_rdz) + kValgrindRedZoneBytes);
  VALGRIND_MAKE_MEM_DEFINED(result, num_bytes);

  // Right redzone. Assumes that if bytes_allocated > usable_size, then the difference is
  // management data at the upper end, and for simplicity we will not protect that.
  // At the moment, this fits RosAlloc (no management data in a slot, usable_size == alloc_size)
  // and DlMalloc (allocation_size = (usable_size == num_bytes) + 4, 4 is management).
  VALGRIND_MAKE_MEM_NOACCESS(reinterpret_cast<uint8_t*>(result) + num_bytes,
                             usable_size - (num_bytes + kValgrindRedZoneBytes));

  return result;
}

inline size_t GetObjSizeNoThreadSafety(mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS {
  return obj->SizeOf<kVerifyNone>();
}

}  // namespace valgrind_details

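// The member definitions below are parameterized as follows:
//   S                            - the underlying malloc-space type whose allocation entry points
//                                  are wrapped (e.g. a DlMalloc- or RosAlloc-backed space).
//   kValgrindRedZoneBytes        - the size of the redzone placed on each side of an allocation.
//   kAdjustForRedzoneInAllocSize - whether AllocationSize() must step back over the left redzone
//                                  before querying the underlying space.
//   kUseObjSizeForUsable         - whether the usable size reported to callers is the object's own
//                                  SizeOf() rather than the allocator's usable size minus redzones.
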
template <typename S,
          size_t kValgrindRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
mirror::Object*
ValgrindMallocSpace<S,
                    kValgrindRedZoneBytes,
                    kAdjustForRedzoneInAllocSize,
                    kUseObjSizeForUsable>::AllocWithGrowth(
    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
    size_t* bytes_tl_bulk_allocated_out) {
  size_t bytes_allocated;
  size_t usable_size;
  size_t bytes_tl_bulk_allocated;
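  // Ask the underlying space for the requested size plus room for a redzone on each side;
  // AdjustForValgrind() then protects the redzones and returns the interior object pointer.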
  void* obj_with_rdz = S::AllocWithGrowth(self, num_bytes + 2 * kValgrindRedZoneBytes,
                                          &bytes_allocated, &usable_size,
                                          &bytes_tl_bulk_allocated);
  if (obj_with_rdz == nullptr) {
    return nullptr;
  }

  return valgrind_details::AdjustForValgrind<kValgrindRedZoneBytes, kUseObjSizeForUsable>(
      obj_with_rdz, num_bytes,
      bytes_allocated, usable_size,
      bytes_tl_bulk_allocated,
      bytes_allocated_out,
      usable_size_out,
      bytes_tl_bulk_allocated_out);
}

template <typename S,
          size_t kValgrindRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
mirror::Object* ValgrindMallocSpace<S,
                                    kValgrindRedZoneBytes,
                                    kAdjustForRedzoneInAllocSize,
                                    kUseObjSizeForUsable>::Alloc(
    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
    size_t* bytes_tl_bulk_allocated_out) {
  size_t bytes_allocated;
  size_t usable_size;
  size_t bytes_tl_bulk_allocated;
  void* obj_with_rdz = S::Alloc(self, num_bytes + 2 * kValgrindRedZoneBytes,
                                &bytes_allocated, &usable_size, &bytes_tl_bulk_allocated);
  if (obj_with_rdz == nullptr) {
    return nullptr;
  }

  return valgrind_details::AdjustForValgrind<kValgrindRedZoneBytes, kUseObjSizeForUsable>(
      obj_with_rdz, num_bytes,
      bytes_allocated, usable_size,
      bytes_tl_bulk_allocated,
      bytes_allocated_out,
      usable_size_out,
      bytes_tl_bulk_allocated_out);
}

template <typename S,
          size_t kValgrindRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
mirror::Object* ValgrindMallocSpace<S,
                                    kValgrindRedZoneBytes,
                                    kAdjustForRedzoneInAllocSize,
                                    kUseObjSizeForUsable>::AllocThreadUnsafe(
    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
    size_t* bytes_tl_bulk_allocated_out) {
  size_t bytes_allocated;
  size_t usable_size;
  size_t bytes_tl_bulk_allocated;
  void* obj_with_rdz = S::AllocThreadUnsafe(self, num_bytes + 2 * kValgrindRedZoneBytes,
                                            &bytes_allocated, &usable_size,
                                            &bytes_tl_bulk_allocated);
  if (obj_with_rdz == nullptr) {
    return nullptr;
  }

  return valgrind_details::AdjustForValgrind<kValgrindRedZoneBytes, kUseObjSizeForUsable>(
      obj_with_rdz, num_bytes,
      bytes_allocated, usable_size,
      bytes_tl_bulk_allocated,
      bytes_allocated_out,
      usable_size_out,
      bytes_tl_bulk_allocated_out);
}

template <typename S,
          size_t kValgrindRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t ValgrindMallocSpace<S,
                           kValgrindRedZoneBytes,
                           kAdjustForRedzoneInAllocSize,
                           kUseObjSizeForUsable>::AllocationSize(
    mirror::Object* obj, size_t* usable_size) {
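  // Callers pass the interior object pointer. When kAdjustForRedzoneInAllocSize is set, step back
  // over the left redzone so the underlying space is queried with the start of the raw allocation.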
  size_t result = S::AllocationSize(reinterpret_cast<mirror::Object*>(
      reinterpret_cast<uint8_t*>(obj) - (kAdjustForRedzoneInAllocSize ? kValgrindRedZoneBytes : 0)),
      usable_size);
  if (usable_size != nullptr) {
    if (kUseObjSizeForUsable) {
      *usable_size = valgrind_details::GetObjSizeNoThreadSafety(obj);
    } else {
      *usable_size = *usable_size - 2 * kValgrindRedZoneBytes;
    }
  }
  return result;
}

template <typename S,
          size_t kValgrindRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t ValgrindMallocSpace<S,
                           kValgrindRedZoneBytes,
                           kAdjustForRedzoneInAllocSize,
                           kUseObjSizeForUsable>::Free(
    Thread* self, mirror::Object* ptr) {
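  // 'ptr' is the interior object pointer handed out by the Alloc() variants; the raw allocation
  // that must be released starts one redzone earlier.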
  void* obj_after_rdz = reinterpret_cast<void*>(ptr);
  uint8_t* obj_with_rdz = reinterpret_cast<uint8_t*>(obj_after_rdz) - kValgrindRedZoneBytes;
  // Make redzones undefined.
  size_t usable_size;
  size_t allocation_size = AllocationSize(ptr, &usable_size);

  // Unprotect the allocation.
  // Use the obj-size-for-usable flag to determine whether usable_size is the more important one,
  // e.g., whether there's data in the allocation_size (and usable_size can't be trusted).
  if (kUseObjSizeForUsable) {
    VALGRIND_MAKE_MEM_UNDEFINED(obj_with_rdz, allocation_size);
  } else {
    VALGRIND_MAKE_MEM_UNDEFINED(obj_with_rdz, usable_size + 2 * kValgrindRedZoneBytes);
  }

  return S::Free(self, reinterpret_cast<mirror::Object*>(obj_with_rdz));
}

template <typename S,
          size_t kValgrindRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t ValgrindMallocSpace<S,
                           kValgrindRedZoneBytes,
                           kAdjustForRedzoneInAllocSize,
                           kUseObjSizeForUsable>::FreeList(
    Thread* self, size_t num_ptrs, mirror::Object** ptrs) {
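  // Free each pointer individually so every allocation gets its redzones unprotected and its
  // freed bytes accounted through Free() above.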
  size_t freed = 0;
  for (size_t i = 0; i < num_ptrs; i++) {
    freed += Free(self, ptrs[i]);
    ptrs[i] = nullptr;
  }
  return freed;
}

template <typename S,
          size_t kValgrindRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
template <typename... Params>
ValgrindMallocSpace<S,
                    kValgrindRedZoneBytes,
                    kAdjustForRedzoneInAllocSize,
                    kUseObjSizeForUsable>::ValgrindMallocSpace(
    MemMap* mem_map, size_t initial_size, Params... params) : S(mem_map, initial_size, params...) {
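  // Only the first initial_size bytes of the mapping are in use at this point. Mark the rest as
  // undefined so memcheck flags uses of values read from memory the space has not handed out yet
  // (allocations are made DEFINED in AdjustForValgrind()).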
  VALGRIND_MAKE_MEM_UNDEFINED(mem_map->Begin() + initial_size,
                              mem_map->Size() - initial_size);
}

template <typename S,
          size_t kValgrindRedZoneBytes,
          bool kAdjustForRedzoneInAllocSize,
          bool kUseObjSizeForUsable>
size_t ValgrindMallocSpace<S,
                           kValgrindRedZoneBytes,
                           kAdjustForRedzoneInAllocSize,
                           kUseObjSizeForUsable>::MaxBytesBulkAllocatedFor(size_t num_bytes) {
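  // Every allocation carries two redzones, so the underlying space sees a correspondingly larger
  // request.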
  return S::MaxBytesBulkAllocatedFor(num_bytes + 2 * kValgrindRedZoneBytes);
}

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_VALGRIND_MALLOC_SPACE_INL_H_