1 /*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "space_test.h"
18
19 #include "dlmalloc_space.h"
20 #include "rosalloc_space.h"
21 #include "scoped_thread_state_change-inl.h"
22
23 namespace art {
24 namespace gc {
25 namespace space {
26
// Which MallocSpace implementation the parameterized tests below exercise.
enum MallocSpaceType {
  kMallocSpaceDlMalloc,  // DlMallocSpace::Create (dlmalloc-backed space).
  kMallocSpaceRosAlloc,  // RosAllocSpace::Create (rosalloc-backed space).
};
31
32 class SpaceCreateTest : public SpaceTest<CommonRuntimeTestWithParam<MallocSpaceType>> {
33 public:
CreateSpace(const std::string & name,size_t initial_size,size_t growth_limit,size_t capacity)34 MallocSpace* CreateSpace(const std::string& name,
35 size_t initial_size,
36 size_t growth_limit,
37 size_t capacity) {
38 const MallocSpaceType type = GetParam();
39 if (type == kMallocSpaceDlMalloc) {
40 return DlMallocSpace::Create(name,
41 initial_size,
42 growth_limit,
43 capacity,
44 /*can_move_objects=*/ false);
45 }
46 DCHECK_EQ(static_cast<uint32_t>(type), static_cast<uint32_t>(kMallocSpaceRosAlloc));
47 return RosAllocSpace::Create(name,
48 initial_size,
49 growth_limit,
50 capacity,
51 Runtime::Current()->GetHeap()->IsLowMemoryMode(),
52 /*can_move_objects=*/ false);
53 }
54 };
55
TEST_P(SpaceCreateTest, InitTestBody) {
  // Creation with inconsistent sizes logs errors; silence everything below FATAL.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // Creation must succeed exactly when init <= growth <= capacity.
  struct SizeSpec {
    size_t initial;
    size_t growth;
    size_t capacity;
    bool expect_success;
  };
  static const SizeSpec kSpecs[] = {
      {16 * MB, 32 * MB, 32 * MB, true},   // Init < max == growth
      {16 * MB, 16 * MB, 16 * MB, true},   // Init == max == growth
      {32 * MB, 16 * MB, 16 * MB, false},  // Init > max == growth
      {16 * MB, 16 * MB, 32 * MB, true},   // Growth == init < max
      {16 * MB, 8 * MB, 32 * MB, false},   // Growth < init < max
      {8 * MB, 16 * MB, 32 * MB, true},    // Init < growth < max
      {8 * MB, 32 * MB, 16 * MB, false},   // Init < max < growth
  };
  for (const SizeSpec& spec : kSpecs) {
    std::unique_ptr<Space> space(CreateSpace("test", spec.initial, spec.growth, spec.capacity));
    EXPECT_EQ(space != nullptr, spec.expect_success)
        << "init=" << spec.initial << " growth=" << spec.growth
        << " capacity=" << spec.capacity;
  }
}
84
// TODO: This test is not very good, we should improve it.
// The test should do more allocations before the creation of the ZygoteSpace, and then do
// allocations after the ZygoteSpace is created. The test should also do some GCs to ensure that
// the GC works with the ZygoteSpace.
//
// Exercises allocation/free bookkeeping before a zygote split, then performs
// CreateZygoteSpace() and verifies the replacement alloc space still serves
// allocations with correct footprint behavior.
TEST_P(SpaceCreateTest, ZygoteSpaceTestBody) {
  size_t unused;  // Scratch out-param for allocations whose sizes we don't check.
  MallocSpace* space(CreateSpace("test", 4 * MB, 16 * MB, 16 * MB));
  ASSERT_TRUE(space != nullptr);

  // Make space findable to the heap, will also delete space when runtime is cleaned up
  AddSpace(space);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Succeeds, fits without adjusting the footprint limit.
  size_t ptr1_bytes_allocated, ptr1_usable_size, ptr1_bytes_tl_bulk_allocated;
  StackHandleScope<3> hs(soa.Self());
  MutableHandle<mirror::Object> ptr1(hs.NewHandle(Alloc(space,
                                                        self,
                                                        1 * MB,
                                                        &ptr1_bytes_allocated,
                                                        &ptr1_usable_size,
                                                        &ptr1_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr1 != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
  EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &unused, nullptr, &unused);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  size_t ptr3_bytes_allocated, ptr3_usable_size, ptr3_bytes_tl_bulk_allocated;
  MutableHandle<mirror::Object> ptr3(hs.NewHandle(AllocWithGrowth(space,
                                                                  self,
                                                                  8 * MB,
                                                                  &ptr3_bytes_allocated,
                                                                  &ptr3_usable_size,
                                                                  &ptr3_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr3 != nullptr);
  EXPECT_LE(8U * MB, ptr3_bytes_allocated);
  EXPECT_LE(8U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
  EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr4 = space->Alloc(self, 8 * MB, &unused, nullptr, &unused);
  EXPECT_TRUE(ptr4 == nullptr);

  // Also fails, requires a higher allowed footprint.
  mirror::Object* ptr5 = space->AllocWithGrowth(self, 8 * MB, &unused, nullptr, &unused);
  EXPECT_TRUE(ptr5 == nullptr);

  // Release some memory.
  size_t free3 = space->AllocationSize(ptr3.Get(), nullptr);
  EXPECT_EQ(free3, ptr3_bytes_allocated);
  // Free() should report exactly the bytes AllocationSize() attributed to ptr3.
  EXPECT_EQ(free3, space->Free(self, ptr3.Assign(nullptr)));
  EXPECT_LE(8U * MB, free3);

  // Succeeds, now that memory has been freed.
  size_t ptr6_bytes_allocated, ptr6_usable_size, ptr6_bytes_tl_bulk_allocated;
  Handle<mirror::Object> ptr6(hs.NewHandle(AllocWithGrowth(space,
                                                           self,
                                                           9 * MB,
                                                           &ptr6_bytes_allocated,
                                                           &ptr6_usable_size,
                                                           &ptr6_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr6 != nullptr);
  EXPECT_LE(9U * MB, ptr6_bytes_allocated);
  EXPECT_LE(9U * MB, ptr6_usable_size);
  EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);
  EXPECT_EQ(ptr6_bytes_tl_bulk_allocated, ptr6_bytes_allocated);

  // Final clean up.
  size_t free1 = space->AllocationSize(ptr1.Get(), nullptr);
  space->Free(self, ptr1.Assign(nullptr));
  EXPECT_LE(1U * MB, free1);

  // Make sure that the zygote space isn't directly at the start of the space.
  EXPECT_TRUE(space->Alloc(self, 1U * MB, &unused, nullptr, &unused) != nullptr);

  gc::Heap* heap = Runtime::Current()->GetHeap();
  space::Space* old_space = space;
  {
    // RemoveSpace must run with all threads suspended; drop the mutator lock
    // first (ScopedThreadSuspension) so ScopedSuspendAll can take over.
    ScopedThreadSuspension sts(self, kSuspended);
    ScopedSuspendAll ssa("Add image space");
    heap->RemoveSpace(old_space);
  }
  heap->RevokeAllThreadLocalBuffers();
  // CreateZygoteSpace splits the space: the already-allocated portion becomes
  // the ZygoteSpace and `space` is repointed at a fresh alloc space.
  space::ZygoteSpace* zygote_space = space->CreateZygoteSpace("alloc space",
                                                              heap->IsLowMemoryMode(),
                                                              &space);
  delete old_space;
  // Add the zygote space.
  AddSpace(zygote_space, false);

  // Make space findable to the heap, will also delete space when runtime is cleaned up
  AddSpace(space, false);

  // Succeeds, fits without adjusting the footprint limit.
  ptr1.Assign(Alloc(space,
                    self,
                    1 * MB,
                    &ptr1_bytes_allocated,
                    &ptr1_usable_size,
                    &ptr1_bytes_tl_bulk_allocated));
  EXPECT_TRUE(ptr1 != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
  EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  ptr2 = Alloc(space, self, 8 * MB, &unused, nullptr, &unused);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  ptr3.Assign(AllocWithGrowth(space,
                              self,
                              2 * MB,
                              &ptr3_bytes_allocated,
                              &ptr3_usable_size,
                              &ptr3_bytes_tl_bulk_allocated));
  EXPECT_TRUE(ptr3 != nullptr);
  EXPECT_LE(2U * MB, ptr3_bytes_allocated);
  EXPECT_LE(2U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
  EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);
  space->Free(self, ptr3.Assign(nullptr));

  // Final clean up.
  free1 = space->AllocationSize(ptr1.Get(), nullptr);
  space->Free(self, ptr1.Assign(nullptr));
  EXPECT_LE(1U * MB, free1);
}
222
// Verifies single-object Alloc/AllocWithGrowth/Free bookkeeping: allocations
// within the current footprint succeed, oversized Alloc fails until
// AllocWithGrowth raises the footprint, and freeing makes room again.
TEST_P(SpaceCreateTest, AllocAndFreeTestBody) {
  size_t unused = 0;  // Scratch out-param for allocations whose sizes we don't check.
  MallocSpace* space(CreateSpace("test", 4 * MB, 16 * MB, 16 * MB));
  ASSERT_TRUE(space != nullptr);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Make space findable to the heap, will also delete space when runtime is cleaned up
  AddSpace(space);

  // Succeeds, fits without adjusting the footprint limit.
  size_t ptr1_bytes_allocated, ptr1_usable_size, ptr1_bytes_tl_bulk_allocated;
  StackHandleScope<3> hs(soa.Self());
  MutableHandle<mirror::Object> ptr1(hs.NewHandle(Alloc(space,
                                                        self,
                                                        1 * MB,
                                                        &ptr1_bytes_allocated,
                                                        &ptr1_usable_size,
                                                        &ptr1_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr1 != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
  EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &unused, nullptr, &unused);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  size_t ptr3_bytes_allocated, ptr3_usable_size, ptr3_bytes_tl_bulk_allocated;
  MutableHandle<mirror::Object> ptr3(hs.NewHandle(AllocWithGrowth(space,
                                                                  self,
                                                                  8 * MB,
                                                                  &ptr3_bytes_allocated,
                                                                  &ptr3_usable_size,
                                                                  &ptr3_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr3 != nullptr);
  EXPECT_LE(8U * MB, ptr3_bytes_allocated);
  EXPECT_LE(8U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
  EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr4 = Alloc(space, self, 8 * MB, &unused, nullptr, &unused);
  EXPECT_TRUE(ptr4 == nullptr);

  // Also fails, requires a higher allowed footprint.
  mirror::Object* ptr5 = AllocWithGrowth(space, self, 8 * MB, &unused, nullptr, &unused);
  EXPECT_TRUE(ptr5 == nullptr);

  // Release some memory.
  size_t free3 = space->AllocationSize(ptr3.Get(), nullptr);
  EXPECT_EQ(free3, ptr3_bytes_allocated);
  space->Free(self, ptr3.Assign(nullptr));
  EXPECT_LE(8U * MB, free3);

  // Succeeds, now that memory has been freed.
  size_t ptr6_bytes_allocated, ptr6_usable_size, ptr6_bytes_tl_bulk_allocated;
  Handle<mirror::Object> ptr6(hs.NewHandle(AllocWithGrowth(space,
                                                           self,
                                                           9 * MB,
                                                           &ptr6_bytes_allocated,
                                                           &ptr6_usable_size,
                                                           &ptr6_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr6 != nullptr);
  EXPECT_LE(9U * MB, ptr6_bytes_allocated);
  EXPECT_LE(9U * MB, ptr6_usable_size);
  EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);
  EXPECT_EQ(ptr6_bytes_tl_bulk_allocated, ptr6_bytes_allocated);

  // Final clean up.
  size_t free1 = space->AllocationSize(ptr1.Get(), nullptr);
  space->Free(self, ptr1.Assign(nullptr));
  EXPECT_LE(1U * MB, free1);
}
299
// Verifies bulk allocation bookkeeping and FreeList(): many small allocations
// (within the footprint, then with growth), each cross-checked against
// AllocationSize(), followed by a bulk free.
TEST_P(SpaceCreateTest, AllocAndFreeListTestBody) {
  MallocSpace* space(CreateSpace("test", 4 * MB, 16 * MB, 16 * MB));
  ASSERT_TRUE(space != nullptr);

  // Make space findable to the heap, will also delete space when runtime is cleaned up
  AddSpace(space);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Phase 1: allocations that fit without adjusting the max allowed footprint.
  mirror::Object* objects[1024];
  for (mirror::Object*& obj : objects) {
    size_t reported_size, reported_usable, tl_bulk_allocated;
    size_t zero_length_array_size = SizeOfZeroLengthByteArray();
    obj = Alloc(space,
                self,
                zero_length_array_size,
                &reported_size,
                &reported_usable,
                &tl_bulk_allocated);
    EXPECT_TRUE(obj != nullptr);
    // The sizes reported at allocation time must match what the space reports
    // when queried afterwards.
    size_t queried_usable;
    EXPECT_EQ(reported_size, space->AllocationSize(obj, &queried_usable));
    EXPECT_EQ(reported_usable, queried_usable);
    EXPECT_TRUE(tl_bulk_allocated == 0 ||
                tl_bulk_allocated >= reported_size);
  }

  // Give everything back in one bulk free.
  space->FreeList(self, arraysize(objects), objects);

  // Phase 2: allocations that fit by adjusting the max allowed footprint.
  for (mirror::Object*& obj : objects) {
    size_t reported_size, reported_usable, tl_bulk_allocated;
    obj = AllocWithGrowth(space,
                          self,
                          1024,
                          &reported_size,
                          &reported_usable,
                          &tl_bulk_allocated);
    EXPECT_TRUE(obj != nullptr);
    size_t queried_usable;
    EXPECT_EQ(reported_size, space->AllocationSize(obj, &queried_usable));
    EXPECT_EQ(reported_usable, queried_usable);
    EXPECT_TRUE(tl_bulk_allocated == 0 ||
                tl_bulk_allocated >= reported_size);
  }

  // Release memory.
  space->FreeList(self, arraysize(objects), objects);
}
351
// Instantiate every SpaceCreateTest above once per MallocSpace implementation.
INSTANTIATE_TEST_CASE_P(CreateRosAllocSpace,
                        SpaceCreateTest,
                        testing::Values(kMallocSpaceRosAlloc));
INSTANTIATE_TEST_CASE_P(CreateDlMallocSpace,
                        SpaceCreateTest,
                        testing::Values(kMallocSpaceDlMalloc));
358
359 } // namespace space
360 } // namespace gc
361 } // namespace art
362