/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_
#define ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_

#include "base/bit_utils.h"
#include "bump_pointer_space.h"

namespace art {
namespace gc {
namespace space {

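// Thread-safe allocation: rounds the request up to kAlignment and delegates to
// the lock-free AllocNonvirtual fast path below.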
inline mirror::Object* BumpPointerSpace::Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated,
                                               size_t* usable_size,
                                               size_t* bytes_tl_bulk_allocated) {
  num_bytes = RoundUp(num_bytes, kAlignment);
  mirror::Object* ret = AllocNonvirtual(num_bytes);
  if (LIKELY(ret != nullptr)) {
    *bytes_allocated = num_bytes;
    if (usable_size != nullptr) {
      *usable_size = num_bytes;
    }
    *bytes_tl_bulk_allocated = num_bytes;
  }
  return ret;
}

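// Variant for callers that hold the mutator lock exclusively; with no concurrent
// mutators, the bump pointer and the allocation counters can be updated with
// plain relaxed loads/stores instead of CAS operations.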
inline mirror::Object* BumpPointerSpace::AllocThreadUnsafe(Thread* self, size_t num_bytes,
                                                           size_t* bytes_allocated,
                                                           size_t* usable_size,
                                                           size_t* bytes_tl_bulk_allocated) {
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  num_bytes = RoundUp(num_bytes, kAlignment);
  uint8_t* end = end_.LoadRelaxed();
  // If there is no more room in the region, we are out of memory.
  if (end + num_bytes > growth_end_) {
    return nullptr;
  }
  mirror::Object* obj = reinterpret_cast<mirror::Object*>(end);
  end_.StoreRelaxed(end + num_bytes);
  *bytes_allocated = num_bytes;
  // Use the CAS-free versions as an optimization.
  objects_allocated_.StoreRelaxed(objects_allocated_.LoadRelaxed() + 1);
  bytes_allocated_.StoreRelaxed(bytes_allocated_.LoadRelaxed() + num_bytes);
  if (UNLIKELY(usable_size != nullptr)) {
    *usable_size = num_bytes;
  }
  *bytes_tl_bulk_allocated = num_bytes;
  return obj;
}

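// Core bump-pointer allocation: atomically advances end_ by num_bytes using a
// weak compare-and-swap loop. Does not update the allocation counters.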
inline mirror::Object* BumpPointerSpace::AllocNonvirtualWithoutAccounting(size_t num_bytes) {
  DCHECK_ALIGNED(num_bytes, kAlignment);
  uint8_t* old_end;
  uint8_t* new_end;
  do {
    old_end = end_.LoadRelaxed();
    new_end = old_end + num_bytes;
    // If there is no more room in the region, we are out of memory.
    if (UNLIKELY(new_end > growth_end_)) {
      return nullptr;
    }
  } while (!end_.CompareExchangeWeakSequentiallyConsistent(old_end, new_end));
  return reinterpret_cast<mirror::Object*>(old_end);
}

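// Same as AllocNonvirtualWithoutAccounting, but also updates objects_allocated_
// and bytes_allocated_ on success.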
inline mirror::Object* BumpPointerSpace::AllocNonvirtual(size_t num_bytes) {
  mirror::Object* ret = AllocNonvirtualWithoutAccounting(num_bytes);
  if (ret != nullptr) {
    objects_allocated_.FetchAndAddSequentiallyConsistent(1);
    bytes_allocated_.FetchAndAddSequentiallyConsistent(num_bytes);
  }
  return ret;
}

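// Returns the size reported by the object's SizeOf(); the usable size is that
// value rounded up to the space's alignment.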
inline size_t BumpPointerSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  size_t num_bytes = obj->SizeOf();
  if (usable_size != nullptr) {
    *usable_size = RoundUp(num_bytes, kAlignment);
  }
  return num_bytes;
}

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_INL_H_