/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_

#include "arena_allocator.h"
#include "debug_stack.h"
#include "globals.h"
#include "logging.h"
#include "macros.h"

namespace art {

class ArenaStack;
class ScopedArenaAllocator;

template <typename T>
class ScopedArenaAllocatorAdapter;

// Holds a list of Arenas for use by ScopedArenaAllocator stack.
class ArenaStack : private DebugStackRefCounter {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  void Reset();

  size_t PeakBytesAllocated() {
    return PeakStats()->BytesAllocated();
  }

  MemStats GetPeakStats() const;

 private:
  struct Peak;
  struct Current;
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(running_on_valgrind_)) {
      return AllocValgrind(bytes, kind);
    }
    size_t rounded_bytes = RoundUp(bytes, 8);
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
    return ptr;
  }

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);

  StatsAndPool stats_and_pool_;
  Arena* bottom_arena_;
  Arena* top_arena_;
  uint8_t* top_ptr_;
  uint8_t* top_end_;

  const bool running_on_valgrind_;

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};

class ScopedArenaAllocator
    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
  // the allocator is not exactly a C++ block scope. For example, an optimization
  // pass can create the scoped allocator in Start() and destroy it in End().
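  //
  // A minimal usage sketch for that pattern (the pass class, its fields, and the
  // Start()/End() hooks below are hypothetical illustrations, not part of this header):
  //
  //   void MyPass::Start() {
  //     // Marks the current top of the ArenaStack; subsequent Alloc() calls are scoped to it.
  //     allocator_ = ScopedArenaAllocator::Create(&arena_stack_);
  //   }
  //
  //   void MyPass::End() {
  //     delete allocator_;     // Runs the destructor and releases the mark; the no-op
  //     allocator_ = nullptr;  // operator delete below means no memory is freed here.
  //   }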
  static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
    void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
    ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
    allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
    return allocator;
  }

  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
  ~ScopedArenaAllocator();

  void Reset();

  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DebugStackReference::CheckTop();
    return arena_stack_->Alloc(bytes, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  // Get adapter for use in STL containers. See scoped_arena_containers.h .
  ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
  static void operator delete(void* ptr) { UNUSED(ptr); }

 private:
  ArenaStack* const arena_stack_;
  Arena* mark_arena_;
  uint8_t* mark_ptr_;
  uint8_t* mark_end_;

  void DoReset();

  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
};

}  // namespace art

#endif  // ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_