/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_
#define ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_

#include "bump_pointer_space.h"

namespace art {
namespace gc {
namespace space {

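// Thread-safe allocation: rounds the requested size up to kAlignment, bump-allocates via
// AllocNonvirtual, and on success reports the rounded size through the out-parameters.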
inline mirror::Object* BumpPointerSpace::Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated,
                                               size_t* usable_size) {
  num_bytes = RoundUp(num_bytes, kAlignment);
  mirror::Object* ret = AllocNonvirtual(num_bytes);
  if (LIKELY(ret != nullptr)) {
    *bytes_allocated = num_bytes;
    if (usable_size != nullptr) {
      *usable_size = num_bytes;
    }
  }
  return ret;
}

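// Allocation path that may only be used while the mutator lock is exclusively held (e.g. during
// a GC pause). With no concurrent allocators, the bump pointer and the accounting counters can
// be updated with plain relaxed loads and stores instead of atomic read-modify-write operations.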
inline mirror::Object* BumpPointerSpace::AllocThreadUnsafe(Thread* self, size_t num_bytes,
                                                           size_t* bytes_allocated,
                                                           size_t* usable_size) {
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  num_bytes = RoundUp(num_bytes, kAlignment);
  byte* end = end_.LoadRelaxed();
  if (end + num_bytes > growth_end_) {
    return nullptr;
  }
  mirror::Object* obj = reinterpret_cast<mirror::Object*>(end);
  end_.StoreRelaxed(end + num_bytes);
  *bytes_allocated = num_bytes;
  // Use the CAS-free versions as an optimization.
  objects_allocated_.StoreRelaxed(objects_allocated_.LoadRelaxed() + 1);
  bytes_allocated_.StoreRelaxed(bytes_allocated_.LoadRelaxed() + num_bytes);
  if (UNLIKELY(usable_size != nullptr)) {
    *usable_size = num_bytes;
  }
  return obj;
}

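// Core bump-pointer allocation: atomically advances end_ by num_bytes using a weak CAS loop.
// A failed CAS means another thread raced ahead, so we retry with the freshly loaded end; if the
// advanced pointer would pass growth_end_, the space is exhausted and nullptr is returned.
// Does not update the allocation counters.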
inline mirror::Object* BumpPointerSpace::AllocNonvirtualWithoutAccounting(size_t num_bytes) {
  DCHECK(IsAligned<kAlignment>(num_bytes));
  byte* old_end;
  byte* new_end;
  do {
    old_end = end_.LoadRelaxed();
    new_end = old_end + num_bytes;
    // If there is no more room in the region, we are out of memory.
    if (UNLIKELY(new_end > growth_end_)) {
      return nullptr;
    }
  } while (!end_.CompareExchangeWeakSequentiallyConsistent(old_end, new_end));
  return reinterpret_cast<mirror::Object*>(old_end);
}

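// Bump-pointer allocation that also updates objects_allocated_ and bytes_allocated_ on success.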
inline mirror::Object* BumpPointerSpace::AllocNonvirtual(size_t num_bytes) {
  mirror::Object* ret = AllocNonvirtualWithoutAccounting(num_bytes);
  if (ret != nullptr) {
    objects_allocated_.FetchAndAddSequentiallyConsistent(1);
    bytes_allocated_.FetchAndAddSequentiallyConsistent(num_bytes);
  }
  return ret;
}

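// Returns the object's exact size; the usable size is that value rounded up to kAlignment,
// matching the amount Alloc actually handed out.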
inline size_t BumpPointerSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t num_bytes = obj->SizeOf();
  if (usable_size != nullptr) {
    *usable_size = RoundUp(num_bytes, kAlignment);
  }
  return num_bytes;
}

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_