1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_RUNTIME_GC_ROOT_H_
18 #define ART_RUNTIME_GC_ROOT_H_
19 
20 #include "base/macros.h"
21 #include "base/mutex.h"       // For Locks::mutator_lock_.
22 #include "mirror/object_reference.h"
23 
24 namespace art {
25 class ArtField;
26 class ArtMethod;
27 template<class MirrorType> class ObjPtr;
28 
29 namespace mirror {
30 class Object;
31 }  // namespace mirror
32 
// Forward declaration; the class is defined later in this file.
template <size_t kBufferSize>
class BufferedRootVisitor;

// Dependent on pointer size so that we don't have frames that are too big on 64 bit.
// constexpr (rather than const) guarantees a compile-time constant usable as an
// array bound / template argument and avoids any per-TU runtime storage concerns.
static constexpr size_t kDefaultBufferedRootCount = 1024 / sizeof(void*);
38 
// Classification of a GC root. Several of the categories exist to feed hprof's
// conversion to its heap-tag representation (see the notes below).
enum RootType {
  kRootUnknown = 0,
  kRootJNIGlobal,
  kRootJNILocal,
  kRootJavaFrame,
  kRootNativeStack,
  kRootStickyClass,
  kRootThreadBlock,
  kRootMonitorUsed,
  kRootThreadObject,
  kRootInternedString,
  kRootFinalizing,  // used for HPROF's conversion to HprofHeapTag
  kRootDebugger,
  kRootReferenceCleanup,  // used for HPROF's conversion to HprofHeapTag
  kRootVMInternal,
  kRootJNIMonitor,
};
// Pretty-printer for RootType; defined out of line.
std::ostream& operator<<(std::ostream& os, const RootType& root_type);
57 
// Describes a root's type and owning thread; thread_id_ and type_ are only consumed by hprof.
59 class RootInfo {
60  public:
61   // Thread id 0 is for non thread roots.
62   explicit RootInfo(RootType type, uint32_t thread_id = 0)
type_(type)63      : type_(type), thread_id_(thread_id) {
64   }
65   RootInfo(const RootInfo&) = default;
~RootInfo()66   virtual ~RootInfo() {
67   }
GetType()68   RootType GetType() const {
69     return type_;
70   }
GetThreadId()71   uint32_t GetThreadId() const {
72     return thread_id_;
73   }
Describe(std::ostream & os)74   virtual void Describe(std::ostream& os) const {
75     os << "Type=" << type_ << " thread_id=" << thread_id_;
76   }
77   std::string ToString() const;
78 
79  private:
80   const RootType type_;
81   const uint32_t thread_id_;
82 };
83 
84 inline std::ostream& operator<<(std::ostream& os, const RootInfo& root_info) {
85   root_info.Describe(os);
86   return os;
87 }
88 
89 // Not all combinations of flags are valid. You may not visit all roots as well as the new roots
90 // (no logical reason to do this). You also may not start logging new roots and stop logging new
91 // roots (also no logical reason to do this).
92 //
93 // The precise flag ensures that more metadata is supplied. An example is vreg data for compiled
94 // method frames.
enum VisitRootFlags : uint8_t {
  kVisitRootFlagAllRoots = 0x1,              // Visit all roots.
  kVisitRootFlagNewRoots = 0x2,              // Visit only roots logged as new.
  kVisitRootFlagStartLoggingNewRoots = 0x4,  // Begin logging newly-added roots.
  kVisitRootFlagStopLoggingNewRoots = 0x8,   // Stop logging newly-added roots.
  kVisitRootFlagClearRootLog = 0x10,         // Discard the accumulated new-root log.
  kVisitRootFlagClassLoader = 0x20,          // Presumably restricts to class-loader roots — confirm at call sites.
  // NOTE: 0x40 is unused.
  kVisitRootFlagPrecise = 0x80,              // Request extra metadata, e.g. vreg data for compiled frames.
};
104 
105 class RootVisitor {
106  public:
~RootVisitor()107   virtual ~RootVisitor() { }
108 
109   // Single root version, not overridable.
VisitRoot(mirror::Object ** root,const RootInfo & info)110   ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)
111       REQUIRES_SHARED(Locks::mutator_lock_) {
112     VisitRoots(&root, 1, info);
113   }
114 
115   // Single root version, not overridable.
VisitRootIfNonNull(mirror::Object ** root,const RootInfo & info)116   ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)
117       REQUIRES_SHARED(Locks::mutator_lock_) {
118     if (*root != nullptr) {
119       VisitRoot(root, info);
120     }
121   }
122 
123   virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
124       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
125 
126   virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
127                           const RootInfo& info)
128       REQUIRES_SHARED(Locks::mutator_lock_) = 0;
129 };
130 
131 // Only visits roots one at a time, doesn't handle updating roots. Used when performance isn't
132 // critical.
133 class SingleRootVisitor : public RootVisitor {
134  private:
VisitRoots(mirror::Object *** roots,size_t count,const RootInfo & info)135   void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) OVERRIDE
136       REQUIRES_SHARED(Locks::mutator_lock_) {
137     for (size_t i = 0; i < count; ++i) {
138       VisitRoot(*roots[i], info);
139     }
140   }
141 
VisitRoots(mirror::CompressedReference<mirror::Object> ** roots,size_t count,const RootInfo & info)142   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
143                           const RootInfo& info) OVERRIDE
144       REQUIRES_SHARED(Locks::mutator_lock_) {
145     for (size_t i = 0; i < count; ++i) {
146       VisitRoot(roots[i]->AsMirrorPtr(), info);
147     }
148   }
149 
150   virtual void VisitRoot(mirror::Object* root, const RootInfo& info) = 0;
151 };
152 
153 class GcRootSource {
154  public:
GcRootSource()155   GcRootSource()
156       : field_(nullptr), method_(nullptr) {
157   }
GcRootSource(ArtField * field)158   explicit GcRootSource(ArtField* field)
159       : field_(field), method_(nullptr) {
160   }
GcRootSource(ArtMethod * method)161   explicit GcRootSource(ArtMethod* method)
162       : field_(nullptr), method_(method) {
163   }
GetArtField()164   ArtField* GetArtField() const {
165     return field_;
166   }
GetArtMethod()167   ArtMethod* GetArtMethod() const {
168     return method_;
169   }
HasArtField()170   bool HasArtField() const {
171     return field_ != nullptr;
172   }
HasArtMethod()173   bool HasArtMethod() const {
174     return method_ != nullptr;
175   }
176 
177  private:
178   ArtField* const field_;
179   ArtMethod* const method_;
180 
181   DISALLOW_COPY_AND_ASSIGN(GcRootSource);
182 };
183 
// Holder for a (possibly null) GC root referring to a MirrorType. The
// reference is stored in compressed form; reads go through an optional read
// barrier.
template<class MirrorType>
class GcRoot {
 public:
  // Returns the root's value, applying a read barrier unless kReadBarrierOption
  // opts out. gc_root_source, if supplied, records where this root is embedded.
  // Defined out of line.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Reports this root (which must be non-null) to the visitor. The visitor may
  // update root_ in place, but must never null it out (DCHECKed after the
  // visit).
  void VisitRoot(RootVisitor* visitor, const RootInfo& info) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNull());
    mirror::CompressedReference<mirror::Object>* roots[1] = { &root_ };
    visitor->VisitRoots(roots, 1u, info);
    DCHECK(!IsNull());
  }

  // Reports this root to the visitor only when it is non-null.
  void VisitRootIfNonNull(RootVisitor* visitor, const RootInfo& info) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsNull()) {
      VisitRoot(visitor, info);
    }
  }

  // Raw access to the stored compressed reference, bypassing read barriers.
  ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() {
    return &root_;
  }

  ALWAYS_INLINE bool IsNull() const {
    // It's safe to null-check it without a read barrier.
    return root_.IsNull();
  }

  ALWAYS_INLINE GcRoot() {}
  // Value constructors are defined out of line.
  explicit ALWAYS_INLINE GcRoot(MirrorType* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  explicit ALWAYS_INLINE GcRoot(ObjPtr<MirrorType> ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Root visitors take pointers to root_ and place them in CompressedReference** arrays. We use a
  // CompressedReference<mirror::Object> here since it violates strict aliasing requirements to
  // cast CompressedReference<MirrorType>* to CompressedReference<mirror::Object>*.
  mutable mirror::CompressedReference<mirror::Object> root_;

  template <size_t kBufferSize> friend class BufferedRootVisitor;
};
229 
230 // Simple data structure for buffered root visiting to avoid virtual dispatch overhead. Currently
231 // only for CompressedReferences since these are more common than the Object** roots which are only
232 // for thread local roots.
233 template <size_t kBufferSize>
234 class BufferedRootVisitor {
235  public:
BufferedRootVisitor(RootVisitor * visitor,const RootInfo & root_info)236   BufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
237       : visitor_(visitor), root_info_(root_info), buffer_pos_(0) {
238   }
239 
~BufferedRootVisitor()240   ~BufferedRootVisitor() {
241     Flush();
242   }
243 
244   template <class MirrorType>
VisitRootIfNonNull(GcRoot<MirrorType> & root)245   ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root)
246       REQUIRES_SHARED(Locks::mutator_lock_) {
247     if (!root.IsNull()) {
248       VisitRoot(root);
249     }
250   }
251 
252   template <class MirrorType>
VisitRootIfNonNull(mirror::CompressedReference<MirrorType> * root)253   ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root)
254       REQUIRES_SHARED(Locks::mutator_lock_) {
255     if (!root->IsNull()) {
256       VisitRoot(root);
257     }
258   }
259 
260   template <class MirrorType>
VisitRoot(GcRoot<MirrorType> & root)261   void VisitRoot(GcRoot<MirrorType>& root) REQUIRES_SHARED(Locks::mutator_lock_) {
262     VisitRoot(root.AddressWithoutBarrier());
263   }
264 
265   template <class MirrorType>
VisitRoot(mirror::CompressedReference<MirrorType> * root)266   void VisitRoot(mirror::CompressedReference<MirrorType>* root)
267       REQUIRES_SHARED(Locks::mutator_lock_) {
268     if (UNLIKELY(buffer_pos_ >= kBufferSize)) {
269       Flush();
270     }
271     roots_[buffer_pos_++] = root;
272   }
273 
Flush()274   void Flush() REQUIRES_SHARED(Locks::mutator_lock_) {
275     visitor_->VisitRoots(roots_, buffer_pos_, root_info_);
276     buffer_pos_ = 0;
277   }
278 
279  private:
280   RootVisitor* const visitor_;
281   RootInfo root_info_;
282   mirror::CompressedReference<mirror::Object>* roots_[kBufferSize];
283   size_t buffer_pos_;
284 };
285 
286 class UnbufferedRootVisitor {
287  public:
UnbufferedRootVisitor(RootVisitor * visitor,const RootInfo & root_info)288   UnbufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
289       : visitor_(visitor), root_info_(root_info) {}
290 
291   template <class MirrorType>
VisitRootIfNonNull(GcRoot<MirrorType> & root)292   ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root) const
293       REQUIRES_SHARED(Locks::mutator_lock_) {
294     if (!root.IsNull()) {
295       VisitRoot(root);
296     }
297   }
298 
299   template <class MirrorType>
VisitRootIfNonNull(mirror::CompressedReference<MirrorType> * root)300   ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root) const
301       REQUIRES_SHARED(Locks::mutator_lock_) {
302     if (!root->IsNull()) {
303       VisitRoot(root);
304     }
305   }
306 
307   template <class MirrorType>
VisitRoot(GcRoot<MirrorType> & root)308   void VisitRoot(GcRoot<MirrorType>& root) const REQUIRES_SHARED(Locks::mutator_lock_) {
309     VisitRoot(root.AddressWithoutBarrier());
310   }
311 
312   template <class MirrorType>
VisitRoot(mirror::CompressedReference<MirrorType> * root)313   void VisitRoot(mirror::CompressedReference<MirrorType>* root) const
314       REQUIRES_SHARED(Locks::mutator_lock_) {
315     visitor_->VisitRoots(&root, 1, root_info_);
316   }
317 
318  private:
319   RootVisitor* const visitor_;
320   RootInfo root_info_;
321 };
322 
323 }  // namespace art
324 
325 #endif  // ART_RUNTIME_GC_ROOT_H_
326