/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_DLMALLOC_SPACE_H_
#define ART_RUNTIME_GC_SPACE_DLMALLOC_SPACE_H_

#include "malloc_space.h"
#include "space.h"

namespace art {
namespace gc {

namespace collector {
class MarkSweep;
}  // namespace collector

namespace space {

// An alloc space is a space where objects may be allocated and garbage collected. Not final, as
// it may be overridden by a MemoryToolMallocSpace.
class DlMallocSpace : public MallocSpace {
 public:
  // Create a DlMallocSpace from an existing mem_map.
  static DlMallocSpace* CreateFromMemMap(MemMap&& mem_map,
                                         const std::string& name,
                                         size_t starting_size,
                                         size_t initial_size,
                                         size_t growth_limit,
                                         size_t capacity,
                                         bool can_move_objects);

  // Create a DlMallocSpace with the requested sizes. The requested base address is not
  // guaranteed to be granted; if it is required, the caller should call Begin on the returned
  // space to confirm that the request was granted.
  static DlMallocSpace* Create(const std::string& name,
                               size_t initial_size,
                               size_t growth_limit,
                               size_t capacity,
                               bool can_move_objects);

  // Virtual to allow MemoryToolMallocSpace to intercept.
  mirror::Object* AllocWithGrowth(Thread* self,
                                  size_t num_bytes,
                                  size_t* bytes_allocated,
                                  size_t* usable_size,
                                  size_t* bytes_tl_bulk_allocated) override REQUIRES(!lock_);
  // Virtual to allow MemoryToolMallocSpace to intercept.
  mirror::Object* Alloc(Thread* self,
                        size_t num_bytes,
                        size_t* bytes_allocated,
                        size_t* usable_size,
                        size_t* bytes_tl_bulk_allocated) override REQUIRES(!lock_) {
    return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size,
                           bytes_tl_bulk_allocated);
  }
  // Virtual to allow MemoryToolMallocSpace to intercept.
  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) override {
    return AllocationSizeNonvirtual(obj, usable_size);
  }
  // Virtual to allow MemoryToolMallocSpace to intercept.
  size_t Free(Thread* self, mirror::Object* ptr) override
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Virtual to allow MemoryToolMallocSpace to intercept.
  size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) override
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  size_t MaxBytesBulkAllocatedFor(size_t num_bytes) override {
    return num_bytes;
  }
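  // A minimal usage sketch of the allocation interface (illustrative only, not part of this
  // header's API surface). It assumes a DlMallocSpace* named `space`, the current Thread* in
  // `self`, and a request size in `num_bytes`; the out-parameters report the bytes charged to
  // the allocation, the usable payload size, and the bytes attributed to bulk allocation.
  //
  //   size_t bytes_allocated = 0;
  //   size_t usable_size = 0;
  //   size_t bytes_tl_bulk_allocated = 0;
  //   mirror::Object* obj = space->Alloc(self, num_bytes, &bytes_allocated, &usable_size,
  //                                      &bytes_tl_bulk_allocated);
  //   if (obj == nullptr) {
  //     // Allocation failed; a caller would typically GC and/or retry via AllocWithGrowth,
  //     // which is allowed to raise the footprint limit.
  //   }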
  // DlMallocSpaces don't have thread local state.
  size_t RevokeThreadLocalBuffers(art::Thread*) override {
    return 0U;
  }
  size_t RevokeAllThreadLocalBuffers() override {
    return 0U;
  }

  // Faster non-virtual allocation path.
  mirror::Object* AllocNonvirtual(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                                  size_t* usable_size, size_t* bytes_tl_bulk_allocated)
      REQUIRES(!lock_);

  // Faster non-virtual allocation size path.
  size_t AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size);

#ifndef NDEBUG
  // Override only in the debug build.
  void CheckMoreCoreForPrecondition() override;
#endif

  void* GetMspace() const {
    return mspace_;
  }

  size_t Trim() override;

  // Perform an mspace_inspect_all which calls back for each allocation chunk. The chunk may not
  // be in use, indicated by num_bytes equaling zero.
  void Walk(WalkCallback callback, void* arg) override REQUIRES(!lock_);

  // Returns the number of bytes that the space has currently obtained from the system. This is
  // greater than or equal to the amount of live data in the space.
  size_t GetFootprint() override;

  // Returns the number of bytes that the heap is allowed to obtain from the system via MoreCore.
  size_t GetFootprintLimit() override;

  // Set the maximum number of bytes that the heap is allowed to obtain from the system via
  // MoreCore. Note this is used to stop the mspace from growing beyond Capacity. When
  // allocations fail, we GC before increasing the footprint limit and allowing the mspace to
  // grow.
  void SetFootprintLimit(size_t limit) override;

  MallocSpace* CreateInstance(MemMap&& mem_map,
                              const std::string& name,
                              void* allocator,
                              uint8_t* begin,
                              uint8_t* end,
                              uint8_t* limit,
                              size_t growth_limit,
                              bool can_move_objects) override;

  uint64_t GetBytesAllocated() override;
  uint64_t GetObjectsAllocated() override;

  void Clear() override;

  bool IsDlMallocSpace() const override {
    return true;
  }

  DlMallocSpace* AsDlMallocSpace() override {
    return this;
  }

  bool LogFragmentationAllocFailure(std::ostream& os, size_t failed_alloc_bytes) override
      REQUIRES_SHARED(Locks::mutator_lock_);

 protected:
  DlMallocSpace(MemMap&& mem_map,
                size_t initial_size,
                const std::string& name,
                void* mspace,
                uint8_t* begin,
                uint8_t* end,
                uint8_t* limit,
                size_t growth_limit,
                bool can_move_objects,
                size_t starting_size);

 private:
  mirror::Object* AllocWithoutGrowthLocked(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                                           size_t* usable_size,
                                           size_t* bytes_tl_bulk_allocated)
      REQUIRES(lock_);

  void* CreateAllocator(void* base, size_t morecore_start, size_t initial_size,
                        size_t /*maximum_size*/, bool /*low_memory_mode*/) override {
    return CreateMspace(base, morecore_start, initial_size);
  }
  static void* CreateMspace(void* base, size_t morecore_start, size_t initial_size);

  // The boundary tag overhead.
  static const size_t kChunkOverhead = sizeof(intptr_t);
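  // Illustrative sketch (assumed shape, not verbatim ART code) of using Walk to tally the bytes
  // currently in use, on the assumption that WalkCallback follows the mspace_inspect_all
  // convention of passing used_bytes == 0 for chunks that are not in use:
  //
  //   static void CountUsedBytes(void* /*start*/, void* /*end*/, size_t used_bytes, void* arg) {
  //     if (used_bytes > 0) {
  //       *reinterpret_cast<size_t*>(arg) += used_bytes;
  //     }
  //   }
  //
  //   size_t total_used = 0;
  //   space->Walk(CountUsedBytes, &total_used);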
  // Underlying malloc space.
  void* mspace_;

  friend class collector::MarkSweep;

  DISALLOW_COPY_AND_ASSIGN(DlMallocSpace);
};

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_DLMALLOC_SPACE_H_