1 /*
2  * Copyright 2011 Google Inc.
3  *
4  * Use of this source code is governed by a BSD-style license that can be
5  * found in the LICENSE file.
6  */
7 
8 #include "SkBitmapCache.h"
9 #include "SkMutex.h"
10 #include "SkPixelRef.h"
11 #include "SkTraceEvent.h"
12 
13 //#define SK_SUPPORT_LEGACY_UNBALANCED_PIXELREF_LOCKCOUNT
14 //#define SK_TRACE_PIXELREF_LIFETIME
15 
16 #include "SkNextID.h"
17 
ImageID()18 uint32_t SkNextID::ImageID() {
19     static uint32_t gID = 0;
20     uint32_t id;
21     // Loop in case our global wraps around, as we never want to return a 0.
22     do {
23         id = sk_atomic_fetch_add(&gID, 2u) + 2;  // Never set the low bit.
24     } while (0 == id);
25     return id;
26 }
27 
28 ///////////////////////////////////////////////////////////////////////////////
29 
30 // just need a > 0 value, so pick a funny one to aid in debugging
31 #define SKPIXELREF_PRELOCKED_LOCKCOUNT     123456789
32 
// Returns a copy of 'info' whose alpha type has been canonicalized for its
// color type (e.g. forcing opaque where alpha is impossible). Asserts in
// debug builds if the combination is invalid.
static SkImageInfo validate_info(const SkImageInfo& info) {
    SkAlphaType canonicalAT = info.alphaType();
    SkAssertResult(SkColorTypeValidateAlphaType(info.colorType(), info.alphaType(),
                                                &canonicalAT));
    return info.makeAlphaType(canonicalAT);
}
38 
39 #ifdef SK_TRACE_PIXELREF_LIFETIME
40     static int32_t gInstCounter;
41 #endif
42 
// Constructs a pixel ref over a validated copy of 'info' (alpha type is
// canonicalized via validate_info). Starts out unlocked, mutable, not
// pre-locked, not in the bitmap cache, and with no generation ID assigned.
SkPixelRef::SkPixelRef(const SkImageInfo& info)
    : fInfo(validate_info(info))
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    , fStableID(SkNextID::ImageID())  // stable ID survives genID invalidation
#endif

{
#ifdef SK_TRACE_PIXELREF_LIFETIME
    SkDebugf(" pixelref %d\n", sk_atomic_inc(&gInstCounter));
#endif
    fRec.zero();               // no pixels locked yet
    fLockCount = 0;
    this->needsNewGenID();     // genID is assigned lazily in getGenerationID()
    fMutability = kMutable;
    fPreLocked = false;
    fAddedToCache.store(false);
}
60 
// Destructor: verifies lock balance (unless the legacy unbalanced-lock build
// flag is set) and fires any pending genID-change listeners before the
// generation ID disappears with the object.
SkPixelRef::~SkPixelRef() {
#ifndef SK_SUPPORT_LEGACY_UNBALANCED_PIXELREF_LOCKCOUNT
    // Either we were pre-locked for our whole life, or every lock was unlocked.
    SkASSERT(SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount || 0 == fLockCount);
#endif

#ifdef SK_TRACE_PIXELREF_LIFETIME
    SkDebugf("~pixelref %d\n", sk_atomic_dec(&gInstCounter) - 1);
#endif
    this->callGenIDChangeListeners();
}
71 
// Marks the generation ID as stale: 0 means "assign a fresh ID on the next
// getGenerationID() call". Not threadsafe; callers must own the ref.
void SkPixelRef::needsNewGenID() {
    fTaggedGenID.store(0);
    SkASSERT(!this->genIDIsUnique()); // This method isn't threadsafe, so the assert should be fine.
}
76 
cloneGenID(const SkPixelRef & that)77 void SkPixelRef::cloneGenID(const SkPixelRef& that) {
78     // This is subtle.  We must call that.getGenerationID() to make sure its genID isn't 0.
79     uint32_t genID = that.getGenerationID();
80 
81     // Neither ID is unique any more.
82     // (These & ~1u are actually redundant.  that.getGenerationID() just did it for us.)
83     this->fTaggedGenID.store(genID & ~1u);
84     that. fTaggedGenID.store(genID & ~1u);
85 
86     // This method isn't threadsafe, so these asserts should be fine.
87     SkASSERT(!this->genIDIsUnique());
88     SkASSERT(!that. genIDIsUnique());
89 }
90 
validate_pixels_ctable(const SkImageInfo & info,const SkColorTable * ctable)91 static void validate_pixels_ctable(const SkImageInfo& info, const SkColorTable* ctable) {
92     if (info.isEmpty()) {
93         return; // can't require ctable if the dimensions are empty
94     }
95     if (kIndex_8_SkColorType == info.colorType()) {
96         SkASSERT(ctable);
97     } else {
98         SkASSERT(nullptr == ctable);
99     }
100 }
101 
// Marks this pixel ref as permanently locked onto the given pixels: lock and
// unlock become no-ops and fRec always points at this storage. Must only be
// called from a subclass constructor, before any lock/unlock traffic.
void SkPixelRef::setPreLocked(void* pixels, size_t rowBytes, SkColorTable* ctable) {
    SkASSERT(pixels);
    validate_pixels_ctable(fInfo, ctable);
    // only call me in your constructor, otherwise fLockCount tracking can get
    // out of sync.
    fRec.fPixels = pixels;
    fRec.fColorTable = ctable;
    fRec.fRowBytes = rowBytes;
    // Sentinel value flags the pre-locked state in lock-balance asserts.
    fLockCount = SKPIXELREF_PRELOCKED_LOCKCOUNT;
    fPreLocked = true;
}
113 
// Increments fLockCount only on success
// Core lock path; caller must already hold fMutex. On the 0->1 transition,
// asks the subclass (onNewLockPixels) to fill fRec. Returns true iff pixels
// are available; fLockCount is left unchanged on failure.
bool SkPixelRef::lockPixelsInsideMutex() {
    fMutex.assertHeld();

    if (1 == ++fLockCount) {
        // First locker: fRec must be empty, then the subclass populates it.
        SkASSERT(fRec.isZero());
        if (!this->onNewLockPixels(&fRec)) {
            fRec.zero();
            fLockCount -= 1;    // we return fLockCount unchanged if we fail.
            return false;
        }
    }
    if (fRec.fPixels) {
        validate_pixels_ctable(fInfo, fRec.fColorTable);
        return true;
    }
    // no pixels, so we failed (somehow)
    --fLockCount;
    return false;
}
134 
// For historical reasons, we always inc fLockCount, even if we return false.
// It would be nice to change this (it seems), and only inc if we actually succeed...
// Public lock entry point. Pre-locked refs skip the mutex entirely (their
// pixels are fixed for life). Returns true iff fRec.fPixels is valid.
bool SkPixelRef::lockPixels() {
    SkASSERT(!fPreLocked || SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount);

    if (!fPreLocked) {
        // Trace events bracket only the mutex acquisition, to surface
        // lock contention in profiles.
        TRACE_EVENT_BEGIN0("skia", "SkPixelRef::lockPixelsMutex");
        SkAutoMutexAcquire  ac(fMutex);
        TRACE_EVENT_END0("skia", "SkPixelRef::lockPixelsMutex");
        SkDEBUGCODE(int oldCount = fLockCount;)
        bool success = this->lockPixelsInsideMutex();
        // lockPixelsInsideMutex only increments the count if it succeeds.
        SkASSERT(oldCount + (int)success == fLockCount);

        if (!success) {
            // For compatibility with SkBitmap calling lockPixels, we still want to increment
            // fLockCount even if we failed. If we updated SkBitmap we could remove this oddity.
            fLockCount += 1;
            return false;
        }
    }
    if (fRec.fPixels) {
        validate_pixels_ctable(fInfo, fRec.fColorTable);
        return true;
    }
    return false;
}
162 
lockPixels(LockRec * rec)163 bool SkPixelRef::lockPixels(LockRec* rec) {
164     if (this->lockPixels()) {
165         *rec = fRec;
166         return true;
167     }
168     return false;
169 }
170 
// Public unlock entry point, balancing lockPixels(). Pre-locked refs are a
// no-op. On the 1->0 transition, notifies the subclass and clears fRec.
void SkPixelRef::unlockPixels() {
    SkASSERT(!fPreLocked || SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount);

    if (!fPreLocked) {
        SkAutoMutexAcquire  ac(fMutex);

        SkASSERT(fLockCount > 0);
        if (0 == --fLockCount) {
            // don't call onUnlockPixels unless onLockPixels succeeded
            if (fRec.fPixels) {
                this->onUnlockPixels();
                fRec.zero();
            } else {
                // lockPixels failed but (for historical reasons) still bumped
                // the count; nothing to release here.
                SkASSERT(fRec.isZero());
            }
        }
    }
}
189 
// Newer lock API: asks for pixels matching 'request' and fills 'result' with
// pixels plus an unlock proc/context. Currently only whole-image requests are
// supported. Returns true iff result->fPixels is valid.
bool SkPixelRef::requestLock(const LockRequest& request, LockResult* result) {
    SkASSERT(result);
    if (request.fSize.isEmpty()) {
        return false;
    }
    // until we support subsets, we have to check this...
    if (request.fSize.width() != fInfo.width() || request.fSize.height() != fInfo.height()) {
        return false;
    }

    if (fPreLocked) {
        // Pre-locked pixels never go away, so no unlock callback is needed.
        result->fUnlockProc = nullptr;
        result->fUnlockContext = nullptr;
        result->fCTable = fRec.fColorTable;
        result->fPixels = fRec.fPixels;
        result->fRowBytes = fRec.fRowBytes;
        result->fSize.set(fInfo.width(), fInfo.height());
    } else {
        SkAutoMutexAcquire  ac(fMutex);
        if (!this->onRequestLock(request, result)) {
            return false;
        }
    }
    if (result->fPixels) {
        validate_pixels_ctable(fInfo, result->fCTable);
        return true;
    }
    return false;
}
219 
// Whether pixels returned by lockPixels may be written to; delegates to the
// subclass (base default is writable).
bool SkPixelRef::lockPixelsAreWritable() const {
    return this->onLockPixelsAreWritable();
}
223 
// Default implementation: locked pixels are writable unless a subclass
// overrides this to say otherwise.
bool SkPixelRef::onLockPixelsAreWritable() const {
    return true;
}
227 
// Returns the generation ID, lazily assigning a fresh one (via a lock-free
// compare-exchange) if none is set. The stored value carries a "unique" tag
// in its low bit; the returned value always has that bit masked off.
uint32_t SkPixelRef::getGenerationID() const {
    uint32_t id = fTaggedGenID.load();
    if (0 == id) {
        // Low bit set = "this genID is unique to this pixel ref (so far)".
        uint32_t next = SkNextID::ImageID() | 1u;
        if (fTaggedGenID.compare_exchange(&id, next)) {
            id = next;  // There was no race or we won the race.  fTaggedGenID is next now.
        } else {
            // We lost a race to set fTaggedGenID. compare_exchange() filled id with the winner.
        }
        // We can't quite SkASSERT(this->genIDIsUnique()). It could be non-unique
        // if we got here via the else path (pretty unlikely, but possible).
    }
    return id & ~1u;  // Mask off bottom unique bit.
}
242 
addGenIDChangeListener(GenIDChangeListener * listener)243 void SkPixelRef::addGenIDChangeListener(GenIDChangeListener* listener) {
244     if (nullptr == listener || !this->genIDIsUnique()) {
245         // No point in tracking this if we're not going to call it.
246         delete listener;
247         return;
248     }
249     *fGenIDChangeListeners.append() = listener;
250 }
251 
// we need to be called *before* the genID gets changed or zerod
// Fires all registered genID-change listeners (once each) and notifies the
// bitmap cache if this genID was cached. Listeners are destroyed afterwards
// regardless of whether they fired.
void SkPixelRef::callGenIDChangeListeners() {
    // We don't invalidate ourselves if we think another SkPixelRef is sharing our genID.
    if (this->genIDIsUnique()) {
        for (int i = 0; i < fGenIDChangeListeners.count(); i++) {
            fGenIDChangeListeners[i]->onChange();
        }

        // TODO: SkAtomic could add "old_value = atomic.xchg(new_value)" to make this clearer.
        if (fAddedToCache.load()) {
            SkNotifyBitmapGenIDIsStale(this->getGenerationID());
            fAddedToCache.store(false);
        }
    }
    // Listeners get at most one shot, so whether these triggered or not, blow them away.
    fGenIDChangeListeners.deleteAll();
}
269 
// Call after mutating the pixels: fires listeners against the OLD genID
// (they must run before the ID changes), invalidates the genID, then lets
// the subclass react.
void SkPixelRef::notifyPixelsChanged() {
#ifdef SK_DEBUG
    if (this->isImmutable()) {
        SkDebugf("========== notifyPixelsChanged called on immutable pixelref");
    }
#endif
    // Order matters: listeners first (see callGenIDChangeListeners), then
    // mark the genID stale.
    this->callGenIDChangeListeners();
    this->needsNewGenID();
    this->onNotifyPixelsChanged();
}
280 
changeAlphaType(SkAlphaType at)281 void SkPixelRef::changeAlphaType(SkAlphaType at) {
282     *const_cast<SkImageInfo*>(&fInfo) = fInfo.makeAlphaType(at);
283 }
284 
// Permanently marks this pixel ref's contents as immutable.
void SkPixelRef::setImmutable() {
    fMutability = kImmutable;
}
288 
// Marks this ref immutable AND pins its generation ID to an externally
// supplied value (stored as-is, including any tag bit the caller provides).
void SkPixelRef::setImmutableWithID(uint32_t genID) {
    /*
     *  We are forcing the genID to match an external value. The caller must ensure that this
     *  value does not conflict with other content.
     *
     *  One use is to force this pixelref's id to match an SkImage's id
     */
    fMutability = kImmutable;
    fTaggedGenID.store(genID);
}
299 
// Marks the ref immutable until restoreMutability() is called. Must not be
// used on a ref that is already permanently immutable.
void SkPixelRef::setTemporarilyImmutable() {
    SkASSERT(fMutability != kImmutable);
    fMutability = kTemporarilyImmutable;
}
304 
// Undoes setTemporarilyImmutable(). Must never be used to un-freeze a
// permanently immutable ref.
void SkPixelRef::restoreMutability() {
    SkASSERT(fMutability != kImmutable);
    fMutability = kMutable;
}
309 
// Copies (a subset of) our pixels into 'dst' converted to color type 'ct';
// delegates to the subclass. Base default (onReadPixels) returns false.
bool SkPixelRef::readPixels(SkBitmap* dst, SkColorType ct, const SkIRect* subset) {
    return this->onReadPixels(dst, ct, subset);
}
313 
314 ///////////////////////////////////////////////////////////////////////////////////////////////////
315 
// Default implementation: reading pixels is unsupported unless a subclass
// overrides this.
bool SkPixelRef::onReadPixels(SkBitmap* dst, SkColorType, const SkIRect* subset) {
    return false;
}
319 
// Default implementation: subclasses may override to react to pixel changes.
void SkPixelRef::onNotifyPixelsChanged() { }
321 
// Default implementation: no encoded form of the pixels is available.
// Overrides return a ref'ed SkData the caller must unref.
SkData* SkPixelRef::onRefEncodedData() {
    return nullptr;
}
325 
// Default implementation: YUV plane access is unsupported unless a subclass
// overrides this.
bool SkPixelRef::onGetYUV8Planes(SkISize sizes[3], void* planes[3], size_t rowBytes[3],
                                 SkYUVColorSpace* colorSpace) {
    return false;
}
330 
// Default implementation: size of the backing allocation is unknown (0).
// Subclasses that own their storage should override.
size_t SkPixelRef::getAllocatedSizeInBytes() const {
    return 0;
}
334 
// LockResult unlock callback used by the legacy onRequestLock path below:
// balances both the lock and the ref taken in onRequestLock.
static void unlock_legacy_result(void* ctx) {
    SkPixelRef* pr = (SkPixelRef*)ctx;
    pr->unlockPixels();
    pr->unref();    // balancing the Ref in onRequestLock
}
340 
// Default requestLock implementation, bridging to the legacy lock/unlock
// protocol. Caller (requestLock) already holds fMutex. On success, 'result'
// holds the full-image pixels plus an unlock proc that releases both the
// lock and the extra ref taken here.
bool SkPixelRef::onRequestLock(const LockRequest& request, LockResult* result) {
    if (!this->lockPixelsInsideMutex()) {
        return false;
    }

    result->fUnlockProc = unlock_legacy_result;
    result->fUnlockContext = SkRef(this);   // this is balanced in our fUnlockProc
    result->fCTable = fRec.fColorTable;
    result->fPixels = fRec.fPixels;
    result->fRowBytes = fRec.fRowBytes;
    result->fSize.set(fInfo.width(), fInfo.height());
    return true;
}
354