/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_CLASS_TABLE_H_
#define ART_RUNTIME_CLASS_TABLE_H_

#include <string>
#include <utility>
#include <vector>

#include "base/allocator.h"
#include "base/hash_set.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "obj_ptr.h"

namespace art {

class OatFile;
namespace linker {
class ImageWriter;
class OatWriter;
}  // namespace linker

namespace mirror {
class Class;
class ClassLoader;
class Object;
}  // namespace mirror
// Each class loader has a ClassTable.
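// Illustrative use from a ClassLinker-style caller (a sketch, not code from this
// file; ComputeModifiedUtf8Hash and the way the table is obtained are assumptions):
//
//   ClassTable* table = class_linker->ClassTableForClassLoader(loader);
//   const uint32_t hash = ComputeModifiedUtf8Hash(descriptor);
//   if (table->Lookup(descriptor, hash) == nullptr) {
//     table->InsertWithHash(klass, hash);
//   }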
class ClassTable {
 public:
  class TableSlot {
   public:
    TableSlot() : data_(0u) {}

    TableSlot(const TableSlot& copy) : data_(copy.data_.load(std::memory_order_relaxed)) {}

    explicit TableSlot(ObjPtr<mirror::Class> klass);

    TableSlot(ObjPtr<mirror::Class> klass, uint32_t descriptor_hash);

    TableSlot& operator=(const TableSlot& copy) {
      data_.store(copy.data_.load(std::memory_order_relaxed), std::memory_order_relaxed);
      return *this;
    }

    bool IsNull() const REQUIRES_SHARED(Locks::mutator_lock_);

    uint32_t Hash() const {
      return MaskHash(data_.load(std::memory_order_relaxed));
    }

    static uint32_t MaskHash(uint32_t hash) {
      return hash & kHashMask;
    }

    bool MaskedHashEquals(uint32_t other) const {
      return MaskHash(other) == Hash();
    }

    static uint32_t HashDescriptor(ObjPtr<mirror::Class> klass)
        REQUIRES_SHARED(Locks::mutator_lock_);

    template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
    ObjPtr<mirror::Class> Read() const REQUIRES_SHARED(Locks::mutator_lock_);

    // NO_THREAD_SAFETY_ANALYSIS since the visitor may require heap bitmap lock.
    template<typename Visitor>
    void VisitRoot(const Visitor& visitor) const NO_THREAD_SAFETY_ANALYSIS;

   private:
    // Extract a raw pointer from an address.
    static ObjPtr<mirror::Class> ExtractPtr(uint32_t data)
        REQUIRES_SHARED(Locks::mutator_lock_);

    static uint32_t Encode(ObjPtr<mirror::Class> klass, uint32_t hash_bits)
        REQUIRES_SHARED(Locks::mutator_lock_);

    // Data contains the class pointer GcRoot as well as the low bits of the descriptor hash.
    mutable Atomic<uint32_t> data_;
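    // Because classes are kObjectAlignment-aligned, the low bits of the encoded
    // class reference are zero and are reused to store MaskHash() of the
    // descriptor hash (see kHashMask below).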
    static const uint32_t kHashMask = kObjectAlignment - 1;
  };

  using DescriptorHashPair = std::pair<const char*, uint32_t>;

  class ClassDescriptorHash {
   public:
    // uint32_t for cross compilation.
    uint32_t operator()(const TableSlot& slot) const NO_THREAD_SAFETY_ANALYSIS;
    // uint32_t for cross compilation.
    uint32_t operator()(const DescriptorHashPair& pair) const NO_THREAD_SAFETY_ANALYSIS;
  };

  class ClassDescriptorEquals {
   public:
    // Same class loader and descriptor.
    bool operator()(const TableSlot& a, const TableSlot& b) const
        NO_THREAD_SAFETY_ANALYSIS;
    // Same descriptor.
    bool operator()(const TableSlot& a, const DescriptorHashPair& b) const
        NO_THREAD_SAFETY_ANALYSIS;
  };

  class TableSlotEmptyFn {
   public:
    void MakeEmpty(TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
      item = TableSlot();
      DCHECK(IsEmpty(item));
    }
    bool IsEmpty(const TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
      return item.IsNull();
    }
  };

  // Hash set that hashes the class descriptor and compares descriptors and class loaders.
  // Lookup results should still be checked for a matching class descriptor and class loader.
  typedef HashSet<TableSlot,
                  TableSlotEmptyFn,
                  ClassDescriptorHash,
                  ClassDescriptorEquals,
                  TrackingAllocator<TableSlot, kAllocatorTagClassTable>> ClassSet;
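  // Lookups by name go through the DescriptorHashPair overloads, roughly (illustrative,
  // assuming HashSet's heterogeneous FindWithHash interface):
  //
  //   DescriptorHashPair pair(descriptor, hash);
  //   auto it = class_set.FindWithHash(pair, hash);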

  ClassTable();

  // Freeze the current class tables by allocating a new table and never updating or modifying the
  // existing table. This helps prevent dirty pages caused by inserting after the zygote fork.
  void FreezeSnapshot()
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in previous snapshots defined by `defining_loader`.
  size_t NumZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in the latest snapshot defined by `defining_loader`.
  size_t NumNonZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in previous snapshots regardless of the defining loader.
  size_t NumReferencedZygoteClasses() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in the latest snapshot regardless of the defining loader.
  size_t NumReferencedNonZygoteClasses() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update a class in the table with the new class. Returns the existing class which was replaced.
  ObjPtr<mirror::Class> UpdateClass(const char* descriptor,
                                    ObjPtr<mirror::Class> new_klass,
                                    size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS for object marking requiring heap bitmap lock.
  template<class Visitor>
  void VisitRoots(Visitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class Visitor>
  void VisitRoots(const Visitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Stops visiting if the visitor returns false.
  template <typename Visitor, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool Visit(Visitor& visitor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template <typename Visitor, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool Visit(const Visitor& visitor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the first class that matches the descriptor. Returns null if there are none.
  ObjPtr<mirror::Class> Lookup(const char* descriptor, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the first class that matches the descriptor of klass. Returns null if there are none.
  // Used for tests and debug-build checks.
  ObjPtr<mirror::Class> LookupByDescriptor(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Insert(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InsertWithHash(ObjPtr<mirror::Class> klass, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the class was found and removed, false otherwise.
  bool Remove(const char* descriptor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the strong root, false if it already exists.
  bool InsertStrongRoot(ObjPtr<mirror::Object> obj)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the oat file, false if it already exists.
  bool InsertOatFile(const OatFile* oat_file)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read a table from ptr and put it at the front of the class set.
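  // The returned size_t is assumed here to be the number of bytes consumed from
  // `ptr`, so callers can advance past the serialized table (see the definition
  // in the corresponding .cc file).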
  size_t ReadFromMemory(uint8_t* ptr)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Add a class set to the front of classes.
  void AddClassSet(ClassSet&& set)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear strong roots (other than classes themselves).
  void ClearStrongRoots()
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Filter strong roots (other than classes themselves).
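  // A sketch of a caller-supplied filter (illustrative; it assumes the inline
  // definition applies the predicate to the stored GcRoot<mirror::Object> entries
  // and removes the ones for which it returns true):
  //
  //   table->RemoveStrongRoots([&](const GcRoot<mirror::Object>& root) {
  //     return ShouldRemove(root);  // hypothetical helper
  //   });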
  template <typename Filter>
  void RemoveStrongRoots(const Filter& filter)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ReaderWriterMutex& GetLock() {
    return lock_;
  }

 private:
  size_t CountDefiningLoaderClasses(ObjPtr<mirror::ClassLoader> defining_loader,
                                    const ClassSet& set) const
      REQUIRES(lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the oat file, false if it already exists.
  bool InsertOatFileLocked(const OatFile* oat_file)
      REQUIRES(lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lock to guard inserting and removing.
  mutable ReaderWriterMutex lock_;
  // We use a vector of class sets to help prevent dirty pages after the zygote forks:
  // FreezeSnapshot adds a new set instead of modifying the existing ones.
  std::vector<ClassSet> classes_ GUARDED_BY(lock_);
  // Extra strong roots that can be either dex files or dex caches. Dex files used by the class
  // loader which may not be owned by the class loader must be held strongly live. Also dex caches
  // are held live to prevent them from being unloaded once they have classes in them.
  std::vector<GcRoot<mirror::Object>> strong_roots_ GUARDED_BY(lock_);
  // Keep track of oat files with GC roots associated with dex caches in `strong_roots_`.
  std::vector<const OatFile*> oat_files_ GUARDED_BY(lock_);

  friend class linker::ImageWriter;  // for InsertWithoutLocks.
};

}  // namespace art

#endif  // ART_RUNTIME_CLASS_TABLE_H_