/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_CLASS_TABLE_H_
#define ART_RUNTIME_CLASS_TABLE_H_

#include <string>
#include <utility>
#include <vector>

#include "base/allocator.h"
#include "base/hash_set.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "dex_file.h"
#include "gc_root.h"
#include "obj_ptr.h"
#include "object_callbacks.h"
#include "runtime.h"

namespace art {

class OatFile;

namespace mirror {
  class ClassLoader;
}  // namespace mirror

// Each class loader has a ClassTable.
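//
// The table is internally a vector of hash sets: FreezeSnapshot() seals the current set and
// allocates a new one for later insertions, so writes only touch the newest set and the older,
// shared snapshots stay clean after the zygote fork.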
class ClassTable {
 public:
  class TableSlot {
   public:
    TableSlot() : data_(0u) {}

    TableSlot(const TableSlot& copy) : data_(copy.data_.LoadRelaxed()) {}

    explicit TableSlot(ObjPtr<mirror::Class> klass);

    TableSlot(ObjPtr<mirror::Class> klass, uint32_t descriptor_hash);

    TableSlot& operator=(const TableSlot& copy) {
      data_.StoreRelaxed(copy.data_.LoadRelaxed());
      return *this;
    }

    bool IsNull() const REQUIRES_SHARED(Locks::mutator_lock_) {
      return Read<kWithoutReadBarrier>() == nullptr;
    }

    uint32_t Hash() const {
      return MaskHash(data_.LoadRelaxed());
    }

    static uint32_t MaskHash(uint32_t hash) {
      return hash & kHashMask;
    }

    bool MaskedHashEquals(uint32_t other) const {
      return MaskHash(other) == Hash();
    }

    static uint32_t HashDescriptor(ObjPtr<mirror::Class> klass)
        REQUIRES_SHARED(Locks::mutator_lock_);

    template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
    mirror::Class* Read() const REQUIRES_SHARED(Locks::mutator_lock_);

    // NO_THREAD_SAFETY_ANALYSIS since the visitor may require heap bitmap lock.
    template<typename Visitor>
    void VisitRoot(const Visitor& visitor) const NO_THREAD_SAFETY_ANALYSIS;

   private:
    // Extract a raw pointer from an address.
    static ObjPtr<mirror::Class> ExtractPtr(uint32_t data)
        REQUIRES_SHARED(Locks::mutator_lock_);

    static uint32_t Encode(ObjPtr<mirror::Class> klass, uint32_t hash_bits)
        REQUIRES_SHARED(Locks::mutator_lock_);

    // Data contains the class pointer GcRoot as well as the low bits of the descriptor hash.
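    // Because classes are at least kObjectAlignment-aligned, the low bits of the encoded pointer
    // are otherwise zero and can carry MaskHash(descriptor_hash), so MaskedHashEquals() can
    // cheaply reject non-matching slots.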
    mutable Atomic<uint32_t> data_;
    static const uint32_t kHashMask = kObjectAlignment - 1;
  };

  using DescriptorHashPair = std::pair<const char*, uint32_t>;

  class ClassDescriptorHashEquals {
   public:
    // uint32_t for cross compilation.
    uint32_t operator()(const TableSlot& slot) const NO_THREAD_SAFETY_ANALYSIS;
    // Same class loader and descriptor.
    bool operator()(const TableSlot& a, const TableSlot& b) const
        NO_THREAD_SAFETY_ANALYSIS;
    // Same descriptor.
    bool operator()(const TableSlot& a, const DescriptorHashPair& b) const
        NO_THREAD_SAFETY_ANALYSIS;
    // uint32_t for cross compilation.
    uint32_t operator()(const DescriptorHashPair& pair) const NO_THREAD_SAFETY_ANALYSIS;
  };

  class TableSlotEmptyFn {
   public:
    void MakeEmpty(TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
      item = TableSlot();
      DCHECK(IsEmpty(item));
    }
    bool IsEmpty(const TableSlot& item) const NO_THREAD_SAFETY_ANALYSIS {
      return item.IsNull();
    }
  };

  // Hash set that hashes class descriptor, and compares descriptors and class loaders. Results
  // should be compared for a matching class descriptor and class loader.
  typedef HashSet<TableSlot,
                  TableSlotEmptyFn,
                  ClassDescriptorHashEquals,
                  ClassDescriptorHashEquals,
                  TrackingAllocator<TableSlot, kAllocatorTagClassTable>> ClassSet;

  ClassTable();

  // Used by image writer for checking.
  bool Contains(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Freeze the current class tables by allocating a new table and never updating or modifying the
  // existing table. This helps prevent dirty pages caused by inserting after the zygote fork.
  void FreezeSnapshot()
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in previous snapshots defined by `defining_loader`.
  size_t NumZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in the latest snapshot defined by `defining_loader`.
  size_t NumNonZygoteClasses(ObjPtr<mirror::ClassLoader> defining_loader) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in previous snapshots no matter the defining loader.
  size_t NumReferencedZygoteClasses() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the number of classes in the latest snapshot no matter the defining loader.
  size_t NumReferencedNonZygoteClasses() const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update a class in the table with the new class. Returns the existing class which was replaced.
  mirror::Class* UpdateClass(const char* descriptor, mirror::Class* new_klass, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS for object marking requiring heap bitmap lock.
  template<class Visitor>
  void VisitRoots(Visitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class Visitor>
  void VisitRoots(const Visitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Stops visiting if the visitor returns false.
  template <typename Visitor>
  bool Visit(Visitor& visitor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template <typename Visitor>
  bool Visit(const Visitor& visitor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
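  // A hedged sketch of a stopping visitor (the exact visitor argument type is defined by the
  // implementation; it is assumed here to be convertible from the stored class pointer):
  //
  //   table->Visit([](ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
  //     return !klass->IsProxyClass();  // returning false stops the visit early
  //   });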

  // Return the first class that matches the descriptor. Returns null if there are none.
  mirror::Class* Lookup(const char* descriptor, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the first class that matches the descriptor of klass. Returns null if there are none.
  mirror::Class* LookupByDescriptor(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Try to insert a class and return the inserted class if successful. If another class
  // with the same descriptor is already in the table, return the existing entry.
  ObjPtr<mirror::Class> TryInsert(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
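  // A hedged usage sketch (names are illustrative, not part of this API): TryInsert either
  // publishes `new_klass` or returns the entry already present for the same descriptor.
  //
  //   ObjPtr<mirror::Class> winner = table->TryInsert(new_klass);
  //   if (winner != new_klass) {
  //     // A class with the same descriptor was already in the table; use `winner` instead.
  //   }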

  void Insert(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InsertWithHash(ObjPtr<mirror::Class> klass, size_t hash)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the class was found and removed, false otherwise.
  bool Remove(const char* descriptor)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the strong root, false if it already exists.
  bool InsertStrongRoot(ObjPtr<mirror::Object> obj)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the oat file, false if it already exists.
  bool InsertOatFile(const OatFile* oat_file)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Combines all of the tables into one class set.
  size_t WriteToMemory(uint8_t* ptr) const
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read a table from ptr and put it at the front of the class set.
  size_t ReadFromMemory(uint8_t* ptr)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
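  // A hedged round-trip sketch (buffer sizing and sharing are the caller's concern; `buffer` and
  // `other_table` are illustrative):
  //
  //   size_t written = table->WriteToMemory(buffer);      // serialize the combined class set
  //   size_t read = other_table->ReadFromMemory(buffer);  // prepend that set to its class sets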

  // Add a class set to the front of classes.
  void AddClassSet(ClassSet&& set)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear strong roots (other than classes themselves).
  void ClearStrongRoots()
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ReaderWriterMutex& GetLock() {
    return lock_;
  }

 private:
  // Only copies classes.
  void CopyWithoutLocks(const ClassTable& source_table) NO_THREAD_SAFETY_ANALYSIS;
  void InsertWithoutLocks(ObjPtr<mirror::Class> klass) NO_THREAD_SAFETY_ANALYSIS;

  size_t CountDefiningLoaderClasses(ObjPtr<mirror::ClassLoader> defining_loader,
                                    const ClassSet& set) const
      REQUIRES(lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if we inserted the oat file, false if it already exists.
  bool InsertOatFileLocked(const OatFile* oat_file)
      REQUIRES(lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lock to guard inserting and removing.
  mutable ReaderWriterMutex lock_;
  // We have a vector to help prevent dirty pages after the zygote forks by calling FreezeSnapshot.
  std::vector<ClassSet> classes_ GUARDED_BY(lock_);
  // Extra strong roots that can be either dex files or dex caches. Dex files used by the class
  // loader that may not be owned by the class loader must be kept strongly live. Dex caches are
  // also kept live to prevent them from being unloaded while they still contain classes.
  std::vector<GcRoot<mirror::Object>> strong_roots_ GUARDED_BY(lock_);
  // Keep track of oat files with GC roots associated with dex caches in `strong_roots_`.
  std::vector<const OatFile*> oat_files_ GUARDED_BY(lock_);

  friend class ImageWriter;  // for InsertWithoutLocks.
};

}  // namespace art

#endif  // ART_RUNTIME_CLASS_TABLE_H_