/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkWeakRefCnt_DEFINED
#define SkWeakRefCnt_DEFINED

#include "SkRefCnt.h"
#include "../private/SkAtomics.h"

/** \class SkWeakRefCnt

    SkWeakRefCnt is the base class for objects that may be shared by multiple
    objects. When an existing strong owner wants to share a reference, it calls
    ref(). When a strong owner wants to release its reference, it calls
    unref(). When the shared object's strong reference count goes to zero as
    the result of an unref() call, its (virtual) weak_dispose() method is
    called. It is an error for the destructor to be called explicitly (or via
    the object going out of scope on the stack or calling delete) if
    getRefCnt() > 1.

    In addition to strong ownership, an owner may instead obtain a weak
    reference by calling weak_ref(). A call to weak_ref() must be balanced by a
    call to weak_unref(). To obtain a strong reference from a weak reference,
    call try_ref(). If try_ref() returns true, the owner's pointer is now also
    a strong reference on which unref() must be called. Note that this does not
    affect the original weak reference; weak_unref() must still be called. When
    the weak reference count goes to zero, the object is deleted. While the
    weak reference count is positive and the strong reference count is zero the
    object still exists, but will be in the disposed state. It is up to the
    object to define what this means.

    Note that a strong reference implicitly implies a weak reference. As a
    result, it is allowable for the owner of a strong ref to call try_ref().
    This will have the same effect as calling ref(), but may be more expensive.

    Example:

        SkWeakRefCnt* myRef = strongRef;
        myRef->weak_ref();
        ... // strongRef->unref() may or may not be called
        if (myRef->try_ref()) {
            ... // use myRef
            myRef->unref();
        } else {
            // myRef is in the disposed state
        }
        myRef->weak_unref();
*/
class SK_API SkWeakRefCnt : public SkRefCnt {
public:
    /** Default construct, initializing the reference counts to 1.
        The strong references collectively hold one weak reference. When the
        strong reference count goes to zero, the collectively held weak
        reference is released.
    */
    SkWeakRefCnt() : SkRefCnt(), fWeakCnt(1) {}

    /** Destruct, asserting that the weak reference count is 1.
    */
    virtual ~SkWeakRefCnt() {
#ifdef SK_DEBUG
        SkASSERT(fWeakCnt == 1);
        fWeakCnt = 0;
#endif
    }

    /** Return the weak reference count.
    */
    int32_t getWeakCnt() const { return fWeakCnt; }

#ifdef SK_DEBUG
    void validate() const {
        this->INHERITED::validate();
        SkASSERT(fWeakCnt > 0);
    }
#endif

    /** Creates a strong reference from a weak reference, if possible. The
        caller must already be an owner. If try_ref() returns true the owner
        is in possession of an additional strong reference. Both the original
        reference and new reference must be properly unreferenced. If try_ref()
        returns false, no strong reference could be created and the owner's
        reference is in the same state as before the call.
    */
    bool SK_WARN_UNUSED_RESULT try_ref() const {
        if (sk_atomic_conditional_inc(&fRefCnt) != 0) {
            // Acquire barrier (L/SL), if not provided above.
            // Prevents subsequent code from happening before the increment.
            sk_membar_acquire__after_atomic_conditional_inc();
            return true;
        }
        return false;
    }

    /** Increment the weak reference count. Must be balanced by a call to
        weak_unref().
    */
    void weak_ref() const {
        SkASSERT(fRefCnt > 0);
        SkASSERT(fWeakCnt > 0);
        sk_atomic_inc(&fWeakCnt);  // No barrier required.
    }

    /** Decrement the weak reference count. If the weak reference count is 1
        before the decrement, then call delete on the object. Note that if this
        is the case, then the object needs to have been allocated via new, and
        not on the stack.
    */
    void weak_unref() const {
        SkASSERT(fWeakCnt > 0);
        // Release barrier (SL/S), if not provided below.
        if (sk_atomic_dec(&fWeakCnt) == 1) {
            // Acquire barrier (L/SL), if not provided above.
            // Prevents code in destructor from happening before the decrement.
            sk_membar_acquire__after_atomic_dec();
#ifdef SK_DEBUG
            // so our destructor won't complain
            fWeakCnt = 1;
#endif
            this->INHERITED::internal_dispose();
        }
    }

    /** Returns true if there are no strong references to the object. When this
        is the case all future calls to try_ref() will return false.
    */
    bool weak_expired() const {
        return fRefCnt == 0;
    }

protected:
    /** Called when the strong reference count goes to zero. This allows the
        object to free any resources it may be holding. Weak references may
        still exist and their level of allowed access to the object is defined
        by the object's class.
    */
    virtual void weak_dispose() const {
    }

private:
    /** Called when the strong reference count goes to zero. Calls weak_dispose
        on the object and releases the implicit weak reference held
        collectively by the strong references.
    */
    void internal_dispose() const override {
        weak_dispose();
        weak_unref();
    }

    /* Invariant: fWeakCnt = #weak + (fRefCnt > 0 ? 1 : 0) */
    mutable int32_t fWeakCnt;

    typedef SkRefCnt INHERITED;
};

#endif
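
/*  Illustrative sketch (not part of the original header): a hypothetical
    subclass and caller showing the weak_ref()/try_ref()/weak_unref()
    protocol documented above. The names MyResource and cache_example are
    invented for illustration only; in a standalone .cpp this would
    #include "SkWeakRefCnt.h". The sketch is guarded with #if 0 so it is
    never compiled as part of the header.
*/
#if 0
class MyResource : public SkWeakRefCnt {
protected:
    // Called once when the strong count reaches zero; weak references may
    // still exist, so only release resources here. The object itself is not
    // deleted until the weak count also reaches zero.
    void weak_dispose() const override {
        // e.g. free large buffers, close handles, ...
    }
};

// The caller holds a strong reference to 'res' on entry.
void cache_example(MyResource* res) {
    res->weak_ref();            // take a weak reference (e.g. for a cache)

    // ... time passes; other strong owners may have called unref() ...

    if (res->try_ref()) {       // try to promote the weak ref to a strong ref
        // Success: we now hold an additional strong reference.
        // ... use res ...
        res->unref();           // balance the try_ref()
    } else {
        // res is in the disposed state; it cannot be revived.
    }

    res->weak_unref();          // balance the weak_ref()
}
#endif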