1 /*
2  * Copyright (C) 2015 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_RUNTIME_LINEAR_ALLOC_H_
18 #define ART_RUNTIME_LINEAR_ALLOC_H_
19 
20 #include "base/arena_allocator.h"
21 #include "base/casts.h"
22 #include "base/macros.h"
23 #include "base/mutex.h"
24 
25 namespace art HIDDEN {
26 
27 class ArenaPool;
28 
// Kind tag recorded in each allocation's TrackingHeader. It tells the GC
// which visitor to invoke for the allocation's payload (see TrackingHeader
// below). Underlying type is fixed to uint32_t so the header layout is stable.
enum class LinearAllocKind : uint32_t {
  kNoGCRoots = 0,  // No GC-root kind should always be 0.
  kGCRootArray,
  kArtMethodArray,
  kArtFieldArray,
  kDexCacheArray,
  kArtMethod
};
37 
38 // Header for every allocation in LinearAlloc. The header provides the type
39 // and size information to the GC for invoking the right visitor.
40 class TrackingHeader final {
41  public:
42   static constexpr uint32_t kIs16Aligned = 1;
43   TrackingHeader(size_t size, LinearAllocKind kind, bool is_16_aligned = false)
kind_(kind)44       : kind_(kind), size_(dchecked_integral_cast<uint32_t>(size)) {
45     // We need the last bit to store 16-byte alignment flag.
46     CHECK_EQ(size_ & kIs16Aligned, 0u);
47     if (is_16_aligned) {
48       size_ |= kIs16Aligned;
49     }
50   }
51 
GetKind()52   LinearAllocKind GetKind() const { return kind_; }
53   // Since we are linearly allocating and hop from one object to the next during
54   // visits, reading 'size_ == 0' indicates that there are no more objects to
55   // visit in the given page. But ASAN detects it as use-after-poison access.
GetSize()56   ATTRIBUTE_NO_SANITIZE_ADDRESS size_t GetSize() const { return size_ & ~kIs16Aligned; }
Is16Aligned()57   bool Is16Aligned() const { return size_ & kIs16Aligned; }
58 
59  private:
SetKind(LinearAllocKind kind)60   void SetKind(LinearAllocKind kind) { kind_ = kind; }
61 
62   LinearAllocKind kind_;
63   uint32_t size_;
64 
65   friend class LinearAlloc;  // For SetKind()
66 
67   DISALLOW_IMPLICIT_CONSTRUCTORS(TrackingHeader);
68 };
69 
// Allows streaming a LinearAllocKind into an ostream (e.g. for logging).
std::ostream& operator<<(std::ostream& os, LinearAllocKind value);
71 
72 class LinearAlloc {
73  public:
74   static constexpr size_t kAlignment = 8u;
75   static_assert(kAlignment >= ArenaAllocator::kAlignment);
76   static_assert(sizeof(TrackingHeader) == ArenaAllocator::kAlignment);
77 
LinearAlloc(ArenaPool * pool,bool track_allocs)78   explicit LinearAlloc(ArenaPool* pool, bool track_allocs)
79       : lock_("linear alloc"), allocator_(pool), track_allocations_(track_allocs) {}
80 
81   void* Alloc(Thread* self, size_t size, LinearAllocKind kind) REQUIRES(!lock_);
82   void* AllocAlign16(Thread* self, size_t size, LinearAllocKind kind) REQUIRES(!lock_);
83 
84   // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
85   void* Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size, LinearAllocKind kind)
86       REQUIRES(!lock_);
87 
88   // Allocate an array of structs of type T.
89   template<class T>
AllocArray(Thread * self,size_t elements,LinearAllocKind kind)90   T* AllocArray(Thread* self, size_t elements, LinearAllocKind kind) REQUIRES(!lock_) {
91     return reinterpret_cast<T*>(Alloc(self, elements * sizeof(T), kind));
92   }
93 
94   // Return the number of bytes used in the allocator.
95   size_t GetUsedMemory() const REQUIRES(!lock_);
96 
97   ArenaPool* GetArenaPool() REQUIRES(!lock_);
98   // Force arena allocator to ask for a new arena on next allocation. This
99   // is to preserve private/shared clean pages across zygote fork.
100   void SetupForPostZygoteFork(Thread* self) REQUIRES(!lock_);
101   // Convert the given allocated object into a `no GC-root` so that compaction
102   // skips it. Currently only used during class linking for ArtMethod array.
103   void ConvertToNoGcRoots(void* ptr, LinearAllocKind orig_kind);
104 
105   // Return true if the linear alloc contains an address.
106   bool Contains(void* ptr) const REQUIRES(!lock_);
107 
108   // Unsafe version of 'Contains' only to be used when the allocator is going
109   // to be deleted.
ContainsUnsafe(void * ptr)110   bool ContainsUnsafe(void* ptr) const NO_THREAD_SAFETY_ANALYSIS {
111     return allocator_.Contains(ptr);
112   }
113 
114   // Set the given object as the first object for all the pages where the
115   // page-beginning overlaps with the object.
116   void SetFirstObject(void* begin, size_t bytes) const REQUIRES(lock_);
117 
118  private:
119   mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
120   ArenaAllocator allocator_ GUARDED_BY(lock_);
121   const bool track_allocations_;
122 
123   DISALLOW_IMPLICIT_CONSTRUCTORS(LinearAlloc);
124 };
125 
126 }  // namespace art
127 
128 #endif  // ART_RUNTIME_LINEAR_ALLOC_H_
129