1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_RUNTIME_GC_ALLOCATOR_ROSALLOC_INL_H_
18 #define ART_RUNTIME_GC_ALLOCATOR_ROSALLOC_INL_H_
19 
20 #include "rosalloc.h"
21 
22 namespace art {
23 namespace gc {
24 namespace allocator {
25 
ShouldCheckZeroMemory()26 inline ALWAYS_INLINE bool RosAlloc::ShouldCheckZeroMemory() {
27   return kCheckZeroMemory && !running_on_valgrind_;
28 }
29 
30 template<bool kThreadSafe>
Alloc(Thread * self,size_t size,size_t * bytes_allocated,size_t * usable_size,size_t * bytes_tl_bulk_allocated)31 inline ALWAYS_INLINE void* RosAlloc::Alloc(Thread* self, size_t size, size_t* bytes_allocated,
32                                            size_t* usable_size,
33                                            size_t* bytes_tl_bulk_allocated) {
34   if (UNLIKELY(size > kLargeSizeThreshold)) {
35     return AllocLargeObject(self, size, bytes_allocated, usable_size,
36                             bytes_tl_bulk_allocated);
37   }
38   void* m;
39   if (kThreadSafe) {
40     m = AllocFromRun(self, size, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
41   } else {
42     m = AllocFromRunThreadUnsafe(self, size, bytes_allocated, usable_size,
43                                  bytes_tl_bulk_allocated);
44   }
45   // Check if the returned memory is really all zero.
46   if (ShouldCheckZeroMemory() && m != nullptr) {
47     uint8_t* bytes = reinterpret_cast<uint8_t*>(m);
48     for (size_t i = 0; i < size; ++i) {
49       DCHECK_EQ(bytes[i], 0);
50     }
51   }
52   return m;
53 }
54 
IsFull()55 inline bool RosAlloc::Run::IsFull() {
56   const size_t num_vec = NumberOfBitmapVectors();
57   for (size_t v = 0; v < num_vec; ++v) {
58     if (~alloc_bit_map_[v] != 0) {
59       return false;
60     }
61   }
62   return true;
63 }
64 
// Returns true if a request of `size` bytes can be satisfied from `self`'s
// thread-local run for the matching size bracket, i.e. the size is in the
// thread-local range and the cached run still has a free slot.
inline bool RosAlloc::CanAllocFromThreadLocalRun(Thread* self, size_t size) {
  if (UNLIKELY(!IsSizeForThreadLocal(size))) {
    return false;
  }
  size_t bracket_size;
  size_t idx = SizeToIndexAndBracketSize(size, &bracket_size);
  // Cross-check the combined index/bracket lookup against the individual ones.
  DCHECK_EQ(idx, SizeToIndex(size));
  DCHECK_EQ(bracket_size, IndexToBracketSize(idx));
  DCHECK_EQ(bracket_size, bracketSizes[idx]);
  DCHECK_LE(size, bracket_size);
  // Small sizes must round up to a nearby bracket (waste bounded by 16 bytes).
  DCHECK(size > 512 || bracket_size - size < 16);
  DCHECK_LT(idx, kNumThreadLocalSizeBrackets);
  Run* thread_local_run = reinterpret_cast<Run*>(self->GetRosAllocRun(idx));
  if (kIsDebugBuild) {
    // Need the lock to prevent race conditions.
    MutexLock mu(self, *size_bracket_locks_[idx]);
    // A thread-local run must not also appear on the shared run lists.
    CHECK(non_full_runs_[idx].find(thread_local_run) == non_full_runs_[idx].end());
    CHECK(full_runs_[idx].find(thread_local_run) == full_runs_[idx].end());
  }
  DCHECK(thread_local_run != nullptr);
  // The cached run is either genuinely thread-local or the shared sentinel
  // "dedicated full run" used when no thread-local run is installed.
  DCHECK(thread_local_run->IsThreadLocal() || thread_local_run == dedicated_full_run_);
  return !thread_local_run->IsFull();
}
88 
AllocFromThreadLocalRun(Thread * self,size_t size,size_t * bytes_allocated)89 inline void* RosAlloc::AllocFromThreadLocalRun(Thread* self, size_t size,
90                                                size_t* bytes_allocated) {
91   DCHECK(bytes_allocated != nullptr);
92   if (UNLIKELY(!IsSizeForThreadLocal(size))) {
93     return nullptr;
94   }
95   size_t bracket_size;
96   size_t idx = SizeToIndexAndBracketSize(size, &bracket_size);
97   Run* thread_local_run = reinterpret_cast<Run*>(self->GetRosAllocRun(idx));
98   if (kIsDebugBuild) {
99     // Need the lock to prevent race conditions.
100     MutexLock mu(self, *size_bracket_locks_[idx]);
101     CHECK(non_full_runs_[idx].find(thread_local_run) == non_full_runs_[idx].end());
102     CHECK(full_runs_[idx].find(thread_local_run) == full_runs_[idx].end());
103   }
104   DCHECK(thread_local_run != nullptr);
105   DCHECK(thread_local_run->IsThreadLocal() || thread_local_run == dedicated_full_run_);
106   void* slot_addr = thread_local_run->AllocSlot();
107   if (LIKELY(slot_addr != nullptr)) {
108     *bytes_allocated = bracket_size;
109   }
110   return slot_addr;
111 }
112 
MaxBytesBulkAllocatedFor(size_t size)113 inline size_t RosAlloc::MaxBytesBulkAllocatedFor(size_t size) {
114   if (UNLIKELY(!IsSizeForThreadLocal(size))) {
115     return size;
116   }
117   size_t bracket_size;
118   size_t idx = SizeToIndexAndBracketSize(size, &bracket_size);
119   return numOfSlots[idx] * bracket_size;
120 }
121 
// Allocates one slot from this run by scanning the allocation bitmap for the
// first clear bit, starting at the cached word index first_search_vec_idx_.
// Returns the slot's address, or nullptr if the run is full. The search index
// only moves forward; it is reset elsewhere when slots are freed.
inline void* RosAlloc::Run::AllocSlot() {
  const size_t idx = size_bracket_idx_;
  while (true) {
    if (kIsDebugBuild) {
      // Make sure that no slots leaked, the bitmap should be full for all previous vectors.
      for (size_t i = 0; i < first_search_vec_idx_; ++i) {
        CHECK_EQ(~alloc_bit_map_[i], 0U);
      }
    }
    uint32_t* const alloc_bitmap_ptr = &alloc_bit_map_[first_search_vec_idx_];
    // ffs on the inverted word: position (1-based) of the first zero bit,
    // or 0 if the word is fully allocated.
    uint32_t ffz1 = __builtin_ffs(~*alloc_bitmap_ptr);
    if (LIKELY(ffz1 != 0)) {
      const uint32_t ffz = ffz1 - 1;
      // Global slot index = bit offset + 32 slots per preceding bitmap word.
      const uint32_t slot_idx = ffz +
          first_search_vec_idx_ * sizeof(*alloc_bitmap_ptr) * kBitsPerByte;
      const uint32_t mask = 1U << ffz;
      DCHECK_LT(slot_idx, numOfSlots[idx]) << "out of range";
      // Found an empty slot. Set the bit.
      DCHECK_EQ(*alloc_bitmap_ptr & mask, 0U);
      *alloc_bitmap_ptr |= mask;
      DCHECK_NE(*alloc_bitmap_ptr & mask, 0U);
      // Slot address = run base + per-bracket header + slot offset.
      uint8_t* slot_addr = reinterpret_cast<uint8_t*>(this) +
          headerSizes[idx] + slot_idx * bracketSizes[idx];
      if (kTraceRosAlloc) {
        LOG(INFO) << "RosAlloc::Run::AllocSlot() : 0x" << std::hex
                  << reinterpret_cast<intptr_t>(slot_addr)
                  << ", bracket_size=" << std::dec << bracketSizes[idx]
                  << ", slot_idx=" << slot_idx;
      }
      return slot_addr;
    }
    // Current word is full; compute the number of bitmap words for this run.
    const size_t num_words = RoundUp(numOfSlots[idx], 32) / 32;
    if (first_search_vec_idx_ + 1 >= num_words) {
      DCHECK(IsFull());
      // Already at the last word, return null.
      return nullptr;
    }
    // Increase the index to the next word and try again.
    ++first_search_vec_idx_;
  }
}
163 
164 }  // namespace allocator
165 }  // namespace gc
166 }  // namespace art
167 
168 #endif  // ART_RUNTIME_GC_ALLOCATOR_ROSALLOC_INL_H_
169