/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_
#define ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_

#include "base/allocator.h"
#include "dlmalloc_space.h"
#include "safe_map.h"
#include "space.h"

#include <set>
#include <vector>

namespace art {
namespace gc {
namespace space {

class AllocationInfo;
34 // Abstraction implemented by all large object spaces.
35 class LargeObjectSpace : public DiscontinuousSpace, public AllocSpace {
36  public:
GetType()37   SpaceType GetType() const OVERRIDE {
38     return kSpaceTypeLargeObjectSpace;
39   }
40   void SwapBitmaps();
41   void CopyLiveToMarked();
42   virtual void Walk(DlMallocSpace::WalkCallback, void* arg) = 0;
~LargeObjectSpace()43   virtual ~LargeObjectSpace() {}
44 
GetBytesAllocated()45   uint64_t GetBytesAllocated() OVERRIDE {
46     return num_bytes_allocated_;
47   }
GetObjectsAllocated()48   uint64_t GetObjectsAllocated() OVERRIDE {
49     return num_objects_allocated_;
50   }
GetTotalBytesAllocated()51   uint64_t GetTotalBytesAllocated() const {
52     return total_bytes_allocated_;
53   }
GetTotalObjectsAllocated()54   uint64_t GetTotalObjectsAllocated() const {
55     return total_objects_allocated_;
56   }
57   size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) OVERRIDE;
58   // LargeObjectSpaces don't have thread local state.
RevokeThreadLocalBuffers(art::Thread *)59   void RevokeThreadLocalBuffers(art::Thread*) OVERRIDE {
60   }
RevokeAllThreadLocalBuffers()61   void RevokeAllThreadLocalBuffers() OVERRIDE {
62   }
IsAllocSpace()63   bool IsAllocSpace() const OVERRIDE {
64     return true;
65   }
AsAllocSpace()66   AllocSpace* AsAllocSpace() OVERRIDE {
67     return this;
68   }
69   collector::ObjectBytePair Sweep(bool swap_bitmaps);
CanMoveObjects()70   virtual bool CanMoveObjects() const OVERRIDE {
71     return false;
72   }
73   // Current address at which the space begins, which may vary as the space is filled.
Begin()74   byte* Begin() const {
75     return begin_;
76   }
77   // Current address at which the space ends, which may vary as the space is filled.
End()78   byte* End() const {
79     return end_;
80   }
81   // Current size of space
Size()82   size_t Size() const {
83     return End() - Begin();
84   }
85   // Return true if we contain the specified address.
Contains(const mirror::Object * obj)86   bool Contains(const mirror::Object* obj) const {
87     const byte* byte_obj = reinterpret_cast<const byte*>(obj);
88     return Begin() <= byte_obj && byte_obj < End();
89   }
90   void LogFragmentationAllocFailure(std::ostream& os, size_t failed_alloc_bytes) OVERRIDE
91       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
92 
93  protected:
94   explicit LargeObjectSpace(const std::string& name, byte* begin, byte* end);
95   static void SweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg);
96 
97   // Approximate number of bytes which have been allocated into the space.
98   uint64_t num_bytes_allocated_;
99   uint64_t num_objects_allocated_;
100   uint64_t total_bytes_allocated_;
101   uint64_t total_objects_allocated_;
102   // Begin and end, may change as more large objects are allocated.
103   byte* begin_;
104   byte* end_;
105 
106   friend class Space;
107 
108  private:
109   DISALLOW_COPY_AND_ASSIGN(LargeObjectSpace);
110 };
112 // A discontinuous large object space implemented by individual mmap/munmap calls.
113 class LargeObjectMapSpace : public LargeObjectSpace {
114  public:
115   // Creates a large object space. Allocations into the large object space use memory maps instead
116   // of malloc.
117   static LargeObjectMapSpace* Create(const std::string& name);
118   // Return the storage space required by obj.
119   size_t AllocationSize(mirror::Object* obj, size_t* usable_size);
120   mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
121                         size_t* usable_size);
122   size_t Free(Thread* self, mirror::Object* ptr);
123   void Walk(DlMallocSpace::WalkCallback, void* arg) OVERRIDE LOCKS_EXCLUDED(lock_);
124   // TODO: disabling thread safety analysis as this may be called when we already hold lock_.
125   bool Contains(const mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS;
126 
127  protected:
128   explicit LargeObjectMapSpace(const std::string& name);
~LargeObjectMapSpace()129   virtual ~LargeObjectMapSpace() {}
130 
131   // Used to ensure mutual exclusion when the allocation spaces data structures are being modified.
132   mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
133   std::vector<mirror::Object*, TrackingAllocator<mirror::Object*, kAllocatorTagLOS>> large_objects_
134       GUARDED_BY(lock_);
135   typedef SafeMap<mirror::Object*, MemMap*, std::less<mirror::Object*>,
136       TrackingAllocator<std::pair<mirror::Object*, MemMap*>, kAllocatorTagLOSMaps>> MemMaps;
137   MemMaps mem_maps_ GUARDED_BY(lock_);
138 };
140 // A continuous large object space with a free-list to handle holes.
141 class FreeListSpace FINAL : public LargeObjectSpace {
142  public:
143   static constexpr size_t kAlignment = kPageSize;
144 
145   virtual ~FreeListSpace();
146   static FreeListSpace* Create(const std::string& name, byte* requested_begin, size_t capacity);
147   size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE
148       EXCLUSIVE_LOCKS_REQUIRED(lock_);
149   mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
150                         size_t* usable_size) OVERRIDE;
151   size_t Free(Thread* self, mirror::Object* obj) OVERRIDE;
152   void Walk(DlMallocSpace::WalkCallback callback, void* arg) OVERRIDE LOCKS_EXCLUDED(lock_);
153   void Dump(std::ostream& os) const;
154 
155  protected:
156   FreeListSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end);
GetSlotIndexForAddress(uintptr_t address)157   size_t GetSlotIndexForAddress(uintptr_t address) const {
158     DCHECK(Contains(reinterpret_cast<mirror::Object*>(address)));
159     return (address - reinterpret_cast<uintptr_t>(Begin())) / kAlignment;
160   }
161   size_t GetSlotIndexForAllocationInfo(const AllocationInfo* info) const;
162   AllocationInfo* GetAllocationInfoForAddress(uintptr_t address);
163   const AllocationInfo* GetAllocationInfoForAddress(uintptr_t address) const;
GetAllocationAddressForSlot(size_t slot)164   uintptr_t GetAllocationAddressForSlot(size_t slot) const {
165     return reinterpret_cast<uintptr_t>(Begin()) + slot * kAlignment;
166   }
GetAddressForAllocationInfo(const AllocationInfo * info)167   uintptr_t GetAddressForAllocationInfo(const AllocationInfo* info) const {
168     return GetAllocationAddressForSlot(GetSlotIndexForAllocationInfo(info));
169   }
170   // Removes header from the free blocks set by finding the corresponding iterator and erasing it.
171   void RemoveFreePrev(AllocationInfo* info) EXCLUSIVE_LOCKS_REQUIRED(lock_);
172 
173   class SortByPrevFree {
174    public:
175     bool operator()(const AllocationInfo* a, const AllocationInfo* b) const;
176   };
177   typedef std::set<AllocationInfo*, SortByPrevFree,
178                    TrackingAllocator<AllocationInfo*, kAllocatorTagLOSFreeList>> FreeBlocks;
179 
180   // There is not footer for any allocations at the end of the space, so we keep track of how much
181   // free space there is at the end manually.
182   std::unique_ptr<MemMap> mem_map_;
183   // Side table for allocation info, one per page.
184   std::unique_ptr<MemMap> allocation_info_map_;
185   AllocationInfo* allocation_info_;
186 
187   Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
188   // Free bytes at the end of the space.
189   size_t free_end_ GUARDED_BY(lock_);
190   FreeBlocks free_blocks_ GUARDED_BY(lock_);
191 };

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_