/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "space_test.h"

#include "dlmalloc_space.h"
#include "rosalloc_space.h"
#include "scoped_thread_state_change-inl.h"

namespace art {
namespace gc {
namespace space {

enum MallocSpaceType {
  kMallocSpaceDlMalloc,
  kMallocSpaceRosAlloc,
};

class SpaceCreateTest : public SpaceTest<CommonRuntimeTestWithParam<MallocSpaceType>> {
 public:
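  // Creates either a DlMallocSpace or a RosAllocSpace, depending on the test parameter.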
  MallocSpace* CreateSpace(const std::string& name,
                           size_t initial_size,
                           size_t growth_limit,
                           size_t capacity,
                           uint8_t* requested_begin) {
    const MallocSpaceType type = GetParam();
    if (type == kMallocSpaceDlMalloc) {
      return DlMallocSpace::Create(name,
                                   initial_size,
                                   growth_limit,
                                   capacity,
                                   requested_begin,
                                   false);
    }
    DCHECK_EQ(static_cast<uint32_t>(type), static_cast<uint32_t>(kMallocSpaceRosAlloc));
    return RosAllocSpace::Create(name,
                                 initial_size,
                                 growth_limit,
                                 capacity,
                                 requested_begin,
                                 Runtime::Current()->GetHeap()->IsLowMemoryMode(),
                                 false);
  }
};

TEST_P(SpaceCreateTest, InitTestBody) {
  // The invalid creation attempts below would otherwise log error messages; suppress them.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  {
    // Init < max == growth
    std::unique_ptr<Space> space(CreateSpace("test", 16 * MB, 32 * MB, 32 * MB, nullptr));
    EXPECT_TRUE(space != nullptr);
    // Init == max == growth
    space.reset(CreateSpace("test", 16 * MB, 16 * MB, 16 * MB, nullptr));
    EXPECT_TRUE(space != nullptr);
    // Init > max == growth
    space.reset(CreateSpace("test", 32 * MB, 16 * MB, 16 * MB, nullptr));
    EXPECT_TRUE(space == nullptr);
    // Growth == init < max
    space.reset(CreateSpace("test", 16 * MB, 16 * MB, 32 * MB, nullptr));
    EXPECT_TRUE(space != nullptr);
    // Growth < init < max
    space.reset(CreateSpace("test", 16 * MB, 8 * MB, 32 * MB, nullptr));
    EXPECT_TRUE(space == nullptr);
    // Init < growth < max
    space.reset(CreateSpace("test", 8 * MB, 16 * MB, 32 * MB, nullptr));
    EXPECT_TRUE(space != nullptr);
    // Init < max < growth
    space.reset(CreateSpace("test", 8 * MB, 32 * MB, 16 * MB, nullptr));
    EXPECT_TRUE(space == nullptr);
  }
}

// TODO: This test is not very good, we should improve it.
// The test should do more allocations before the creation of the ZygoteSpace, and then do
// allocations after the ZygoteSpace is created. The test should also do some GCs to ensure that
// the GC works with the ZygoteSpace.
TEST_P(SpaceCreateTest, ZygoteSpaceTestBody) {
  size_t dummy;
  MallocSpace* space(CreateSpace("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
  ASSERT_TRUE(space != nullptr);

  // Make the space findable to the heap; it will also be deleted when the runtime is cleaned up.
  AddSpace(space);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Succeeds, fits without adjusting the footprint limit.
  size_t ptr1_bytes_allocated, ptr1_usable_size, ptr1_bytes_tl_bulk_allocated;
  StackHandleScope<3> hs(soa.Self());
  MutableHandle<mirror::Object> ptr1(hs.NewHandle(Alloc(space,
                                                        self,
                                                        1 * MB,
                                                        &ptr1_bytes_allocated,
                                                        &ptr1_usable_size,
                                                        &ptr1_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr1 != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
  EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  size_t ptr3_bytes_allocated, ptr3_usable_size, ptr3_bytes_tl_bulk_allocated;
  MutableHandle<mirror::Object> ptr3(hs.NewHandle(AllocWithGrowth(space,
                                                                  self,
                                                                  8 * MB,
                                                                  &ptr3_bytes_allocated,
                                                                  &ptr3_usable_size,
                                                                  &ptr3_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr3 != nullptr);
  EXPECT_LE(8U * MB, ptr3_bytes_allocated);
  EXPECT_LE(8U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
  EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr4 = space->Alloc(self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr4 == nullptr);

  // Also fails, requires a higher allowed footprint.
  mirror::Object* ptr5 = space->AllocWithGrowth(self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr5 == nullptr);

  // Release some memory.
  size_t free3 = space->AllocationSize(ptr3.Get(), nullptr);
  EXPECT_EQ(free3, ptr3_bytes_allocated);
  EXPECT_EQ(free3, space->Free(self, ptr3.Assign(nullptr)));
  EXPECT_LE(8U * MB, free3);

  // Succeeds, now that memory has been freed.
  size_t ptr6_bytes_allocated, ptr6_usable_size, ptr6_bytes_tl_bulk_allocated;
  Handle<mirror::Object> ptr6(hs.NewHandle(AllocWithGrowth(space,
                                                           self,
                                                           9 * MB,
                                                           &ptr6_bytes_allocated,
                                                           &ptr6_usable_size,
                                                           &ptr6_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr6 != nullptr);
  EXPECT_LE(9U * MB, ptr6_bytes_allocated);
  EXPECT_LE(9U * MB, ptr6_usable_size);
  EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);
  EXPECT_EQ(ptr6_bytes_tl_bulk_allocated, ptr6_bytes_allocated);

  // Final clean up.
  size_t free1 = space->AllocationSize(ptr1.Get(), nullptr);
  space->Free(self, ptr1.Assign(nullptr));
  EXPECT_LE(1U * MB, free1);

  // Make sure that the zygote space isn't directly at the start of the space.
  EXPECT_TRUE(space->Alloc(self, 1U * MB, &dummy, nullptr, &dummy) != nullptr);

  gc::Heap* heap = Runtime::Current()->GetHeap();
  space::Space* old_space = space;
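  // Transition out of the runnable state (dropping the shared mutator lock taken by the
  // ScopedObjectAccess above) so that all other threads can be suspended while the old space
  // is removed from the heap.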
  {
    ScopedThreadSuspension sts(self, kSuspended);
    ScopedSuspendAll ssa("Add image space");
    heap->RemoveSpace(old_space);
  }
  heap->RevokeAllThreadLocalBuffers();
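  // CreateZygoteSpace splits the space: the existing allocations become the returned zygote
  // space, and `space` is updated to point at a new allocation space for later allocations.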
  space::ZygoteSpace* zygote_space = space->CreateZygoteSpace("alloc space",
                                                              heap->IsLowMemoryMode(),
                                                              &space);
  delete old_space;
  // Add the zygote space.
  AddSpace(zygote_space, false);

  // Make the space findable to the heap; it will also be deleted when the runtime is cleaned up.
  AddSpace(space, false);

  // Succeeds, fits without adjusting the footprint limit.
  ptr1.Assign(Alloc(space,
                    self,
                    1 * MB,
                    &ptr1_bytes_allocated,
                    &ptr1_usable_size,
                    &ptr1_bytes_tl_bulk_allocated));
  EXPECT_TRUE(ptr1 != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
  EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  ptr3.Assign(AllocWithGrowth(space,
                              self,
                              2 * MB,
                              &ptr3_bytes_allocated,
                              &ptr3_usable_size,
                              &ptr3_bytes_tl_bulk_allocated));
  EXPECT_TRUE(ptr3 != nullptr);
  EXPECT_LE(2U * MB, ptr3_bytes_allocated);
  EXPECT_LE(2U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
  EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);
  space->Free(self, ptr3.Assign(nullptr));

  // Final clean up.
  free1 = space->AllocationSize(ptr1.Get(), nullptr);
  space->Free(self, ptr1.Assign(nullptr));
  EXPECT_LE(1U * MB, free1);
}

TEST_P(SpaceCreateTest, AllocAndFreeTestBody) {
  size_t dummy = 0;
  MallocSpace* space(CreateSpace("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
  ASSERT_TRUE(space != nullptr);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Make the space findable to the heap; it will also be deleted when the runtime is cleaned up.
  AddSpace(space);

  // Succeeds, fits without adjusting the footprint limit.
  size_t ptr1_bytes_allocated, ptr1_usable_size, ptr1_bytes_tl_bulk_allocated;
  StackHandleScope<3> hs(soa.Self());
  MutableHandle<mirror::Object> ptr1(hs.NewHandle(Alloc(space,
                                                        self,
                                                        1 * MB,
                                                        &ptr1_bytes_allocated,
                                                        &ptr1_usable_size,
                                                        &ptr1_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr1 != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
  EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  size_t ptr3_bytes_allocated, ptr3_usable_size, ptr3_bytes_tl_bulk_allocated;
  MutableHandle<mirror::Object> ptr3(hs.NewHandle(AllocWithGrowth(space,
                                                                  self,
                                                                  8 * MB,
                                                                  &ptr3_bytes_allocated,
                                                                  &ptr3_usable_size,
                                                                  &ptr3_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr3 != nullptr);
  EXPECT_LE(8U * MB, ptr3_bytes_allocated);
  EXPECT_LE(8U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
  EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr4 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr4 == nullptr);

  // Also fails, requires a higher allowed footprint.
  mirror::Object* ptr5 = AllocWithGrowth(space, self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr5 == nullptr);

  // Release some memory.
  size_t free3 = space->AllocationSize(ptr3.Get(), nullptr);
  EXPECT_EQ(free3, ptr3_bytes_allocated);
  space->Free(self, ptr3.Assign(nullptr));
  EXPECT_LE(8U * MB, free3);

  // Succeeds, now that memory has been freed.
  size_t ptr6_bytes_allocated, ptr6_usable_size, ptr6_bytes_tl_bulk_allocated;
  Handle<mirror::Object> ptr6(hs.NewHandle(AllocWithGrowth(space,
                                                           self,
                                                           9 * MB,
                                                           &ptr6_bytes_allocated,
                                                           &ptr6_usable_size,
                                                           &ptr6_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr6 != nullptr);
  EXPECT_LE(9U * MB, ptr6_bytes_allocated);
  EXPECT_LE(9U * MB, ptr6_usable_size);
  EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);
  EXPECT_EQ(ptr6_bytes_tl_bulk_allocated, ptr6_bytes_allocated);

  // Final clean up.
  size_t free1 = space->AllocationSize(ptr1.Get(), nullptr);
  space->Free(self, ptr1.Assign(nullptr));
  EXPECT_LE(1U * MB, free1);
}

TEST_P(SpaceCreateTest, AllocAndFreeListTestBody) {
  MallocSpace* space(CreateSpace("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
  ASSERT_TRUE(space != nullptr);

  // Make the space findable to the heap; it will also be deleted when the runtime is cleaned up.
  AddSpace(space);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Succeeds, fits without adjusting the max allowed footprint.
  mirror::Object* lots_of_objects[1024];
  for (size_t i = 0; i < arraysize(lots_of_objects); i++) {
    size_t allocation_size, usable_size, bytes_tl_bulk_allocated;
    size_t size_of_zero_length_byte_array = SizeOfZeroLengthByteArray();
    lots_of_objects[i] = Alloc(space,
                               self,
                               size_of_zero_length_byte_array,
                               &allocation_size,
                               &usable_size,
                               &bytes_tl_bulk_allocated);
    EXPECT_TRUE(lots_of_objects[i] != nullptr);
    size_t computed_usable_size;
    EXPECT_EQ(allocation_size, space->AllocationSize(lots_of_objects[i], &computed_usable_size));
    EXPECT_EQ(usable_size, computed_usable_size);
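    // bytes_tl_bulk_allocated is either 0 (the allocation was served out of an existing
    // thread-local buffer) or covers at least this allocation.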
    EXPECT_TRUE(bytes_tl_bulk_allocated == 0 ||
                bytes_tl_bulk_allocated >= allocation_size);
  }

  // Release memory.
  space->FreeList(self, arraysize(lots_of_objects), lots_of_objects);

  // Succeeds, fits by adjusting the max allowed footprint.
  for (size_t i = 0; i < arraysize(lots_of_objects); i++) {
    size_t allocation_size, usable_size, bytes_tl_bulk_allocated;
    lots_of_objects[i] = AllocWithGrowth(space,
                                         self,
                                         1024,
                                         &allocation_size,
                                         &usable_size,
                                         &bytes_tl_bulk_allocated);
    EXPECT_TRUE(lots_of_objects[i] != nullptr);
    size_t computed_usable_size;
    EXPECT_EQ(allocation_size, space->AllocationSize(lots_of_objects[i], &computed_usable_size));
    EXPECT_EQ(usable_size, computed_usable_size);
    EXPECT_TRUE(bytes_tl_bulk_allocated == 0 ||
                bytes_tl_bulk_allocated >= allocation_size);
  }

  // Release memory.
  space->FreeList(self, arraysize(lots_of_objects), lots_of_objects);
}

INSTANTIATE_TEST_CASE_P(CreateRosAllocSpace,
                        SpaceCreateTest,
                        testing::Values(kMallocSpaceRosAlloc));
INSTANTIATE_TEST_CASE_P(CreateDlMallocSpace,
                        SpaceCreateTest,
                        testing::Values(kMallocSpaceDlMalloc));

}  // namespace space
}  // namespace gc
}  // namespace art