/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrLayerCache_DEFINED
#define GrLayerCache_DEFINED

#include "GrAtlas.h"
#include "GrRect.h"

#include "SkChecksum.h"
#include "SkImageFilter.h"
#include "SkMessageBus.h"
#include "SkPicture.h"
#include "SkTDynamicHash.h"

// Set to 0 to disable caching of hoisted layers
#define GR_CACHE_HOISTED_LAYERS 0

// GrPictureInfo stores the atlas plots used by a single picture. A single
// plot may be used to store layers from multiple pictures.
struct GrPictureInfo {
public:
    static const int kNumPlots = 4;

    // for SkTDynamicHash - just use the pictureID as the hash key
    static const uint32_t& GetKey(const GrPictureInfo& pictInfo) { return pictInfo.fPictureID; }
    static uint32_t Hash(const uint32_t& key) { return SkChecksum::Mix(key); }

    // GrPictureInfo proper
    GrPictureInfo(uint32_t pictureID) : fPictureID(pictureID) {
#if !GR_CACHE_HOISTED_LAYERS
        memset(fPlotUses, 0, sizeof(fPlotUses));
#endif
    }

#if !GR_CACHE_HOISTED_LAYERS
    void incPlotUsage(int plotID) {
        SkASSERT(plotID < kNumPlots);
        fPlotUses[plotID]++;
    }

    void decPlotUsage(int plotID) {
        SkASSERT(plotID < kNumPlots);
        SkASSERT(fPlotUses[plotID] > 0);
        fPlotUses[plotID]--;
    }

    int plotUsage(int plotID) const {
        SkASSERT(plotID < kNumPlots);
        return fPlotUses[plotID];
    }
#endif

    const uint32_t fPictureID;
    GrAtlas::ClientPlotUsage fPlotUsage;

#if !GR_CACHE_HOISTED_LAYERS
private:
    int fPlotUses[kNumPlots];
#endif
};
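
// A minimal sketch of how the GetKey/Hash interface above is consumed by the
// cache's picture hash (illustrative only; it assumes SkTDynamicHash's usual
// find()/add() calls and is not part of this header):
//
//   SkTDynamicHash<GrPictureInfo, uint32_t> pictureHash;
//   GrPictureInfo* pictInfo = pictureHash.find(pictureID);
//   if (NULL == pictInfo) {
//       pictInfo = SkNEW_ARGS(GrPictureInfo, (pictureID));
//       pictureHash.add(pictInfo);
//   }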

// GrCachedLayer encapsulates the caching information for a single saveLayer.
//
// Atlased layers get a ref to the backing GrTexture while non-atlased layers
// get a ref to the GrTexture in which they reside. In both cases 'fRect'
// contains the layer's extent in its texture.
// Atlased layers also get a pointer to the plot in which they reside.
// For non-atlased layers, the lock field just corresponds to locking in
// the resource cache. For atlased layers, it implements an additional level
// of locking to allow atlased layers to be reused multiple times.
struct GrCachedLayer {
public:
    // For SkTDynamicHash
    struct Key {
        Key(uint32_t pictureID, const SkMatrix& initialMat,
            const unsigned* key, int keySize, bool copyKey = false)
            : fKeySize(keySize)
            , fFreeKey(copyKey) {
            fIDMatrix.fPictureID = pictureID;
            fIDMatrix.fInitialMat = initialMat;
            fIDMatrix.fInitialMat.getType(); // force initialization of type so hashes match

            if (copyKey) {
                unsigned* tempKey = SkNEW_ARRAY(unsigned, keySize);
                memcpy(tempKey, key, keySize * sizeof(unsigned));
                fKey = tempKey;
            } else {
                fKey = key;
            }

            // The pictureID/matrix portion needs to be tightly packed.
            GR_STATIC_ASSERT(sizeof(IDMatrix) == sizeof(uint32_t) +                        // pictureID
                                                 9 * sizeof(SkScalar) + sizeof(uint32_t)); // matrix
        }

        ~Key() {
            if (fFreeKey) {
                SkDELETE_ARRAY(fKey);
            }
        }

        bool operator==(const Key& other) const {
            if (fKeySize != other.fKeySize) {
                return false;
            }
            return fIDMatrix.fPictureID == other.fIDMatrix.fPictureID &&
                   fIDMatrix.fInitialMat.cheapEqualTo(other.fIDMatrix.fInitialMat) &&
                   !memcmp(fKey, other.fKey, fKeySize * sizeof(int));
        }

        uint32_t pictureID() const { return fIDMatrix.fPictureID; }

        // TODO: remove these when GrCachedLayer & ReplacementInfo fuse
        const unsigned* key() const { SkASSERT(fFreeKey); return fKey; }
        int keySize() const { SkASSERT(fFreeKey); return fKeySize; }

        uint32_t hash() const {
            uint32_t hash = SkChecksum::Murmur3(reinterpret_cast<const uint32_t*>(fKey),
                                                fKeySize * sizeof(int));
            return SkChecksum::Murmur3(reinterpret_cast<const uint32_t*>(&fIDMatrix),
                                       sizeof(IDMatrix), hash);
        }

    private:
        struct IDMatrix {
            // ID of the picture of which this layer is a part
            uint32_t fPictureID;
            // The initial matrix passed into drawPicture
            SkMatrix fInitialMat;
        } fIDMatrix;

        const unsigned* fKey;
        const int       fKeySize;
        bool            fFreeKey;
    };

    static const Key& GetKey(const GrCachedLayer& layer) { return layer.fKey; }
    static uint32_t Hash(const Key& key) { return key.hash(); }
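
    // Note on Key ownership (a reading aid, not additional API): a transient
    // lookup Key may alias the caller's key data (the default copyKey = false),
    // while the Key stored inside a GrCachedLayer copies it (copyKey = true, as
    // the constructor below does) so it outlives the caller's buffer. With
    // illustrative names:
    //
    //   GrCachedLayer::Key lookup(pictID, initialMat, keyData, keyCount);        // aliases keyData
    //   GrCachedLayer::Key stored(pictID, initialMat, keyData, keyCount, true);  // owns a copy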

    // GrCachedLayer proper
    GrCachedLayer(uint32_t pictureID, unsigned start, unsigned stop,
                  const SkIRect& srcIR, const SkIRect& dstIR,
                  const SkMatrix& ctm,
                  const unsigned* key, int keySize,
                  const SkPaint* paint)
        : fKey(pictureID, ctm, key, keySize, true)
        , fStart(start)
        , fStop(stop)
        , fSrcIR(srcIR)
        , fDstIR(dstIR)
        , fOffset(SkIPoint::Make(0, 0))
        , fPaint(paint ? SkNEW_ARGS(SkPaint, (*paint)) : NULL)
        , fFilter(NULL)
        , fTexture(NULL)
        , fRect(SkIRect::MakeEmpty())
        , fPlot(NULL)
        , fUses(0)
        , fLocked(false) {
        SkASSERT(SK_InvalidGenID != pictureID);

        if (fPaint) {
            if (fPaint->getImageFilter()) {
                fFilter = SkSafeRef(fPaint->getImageFilter());
                fPaint->setImageFilter(NULL);
            }
        }
    }

    ~GrCachedLayer() {
        SkSafeUnref(fTexture);
        SkSafeUnref(fFilter);
        SkDELETE(fPaint);
    }

    uint32_t pictureID() const { return fKey.pictureID(); }
    // TODO: remove these when GrCachedLayer & ReplacementInfo fuse
    const unsigned* key() const { return fKey.key(); }
    int keySize() const { return fKey.keySize(); }

    unsigned start() const { return fStart; }
    // TODO: make bound debug only
    const SkIRect& srcIR() const { return fSrcIR; }
    const SkIRect& dstIR() const { return fDstIR; }
    unsigned stop() const { return fStop; }
    void setTexture(GrTexture* texture, const SkIRect& rect) {
        SkRefCnt_SafeAssign(fTexture, texture);
        fRect = rect;
        if (!fTexture) {
            fLocked = false;
        }
    }
    GrTexture* texture() { return fTexture; }
    const SkPaint* paint() const { return fPaint; }
    const SkImageFilter* filter() const { return fFilter; }
    const SkIRect& rect() const { return fRect; }

    void setOffset(const SkIPoint& offset) { fOffset = offset; }
    const SkIPoint& offset() const { return fOffset; }

    void setPlot(GrPlot* plot) {
        SkASSERT(NULL == plot || NULL == fPlot);
        fPlot = plot;
    }
    GrPlot* plot() { return fPlot; }

    bool isAtlased() const { return SkToBool(fPlot); }

    void setLocked(bool locked) { fLocked = locked; }
    bool locked() const { return fLocked; }

    SkDEBUGCODE(const GrPlot* plot() const { return fPlot; })
    SkDEBUGCODE(void validate(const GrTexture* backingTexture) const;)

private:
    const Key fKey;

    // The "saveLayer" operation index of the cached layer
    const unsigned fStart;
    // The final "restore" operation index of the cached layer
    const unsigned fStop;

    // The layer's src rect (i.e., the portion of the source scene required
    // for filtering).
    const SkIRect fSrcIR;
    // The layer's dest rect (i.e., where it will land in device space)
    const SkIRect fDstIR;
    // Offset sometimes required by image filters
    SkIPoint fOffset;

    // The paint used when dropping the layer down into the owning canvas.
    // Can be NULL. This class makes a copy for itself.
    SkPaint* fPaint;

    // The imagefilter that needs to be applied to the layer prior to it being
    // composited with the rest of the scene.
    const SkImageFilter* fFilter;

    // fTexture is a ref on the atlasing texture for atlased layers and a
    // ref on a GrTexture for non-atlased layers.
    GrTexture* fTexture;

    // For both atlased and non-atlased layers 'fRect' contains the bound of
    // the layer in whichever texture it resides. It is empty when 'fTexture'
    // is NULL.
    SkIRect fRect;

    // For atlased layers, fPlot stores the atlas plot in which the layer rests.
    // It is always NULL for non-atlased layers.
    GrPlot* fPlot;

    // The number of actively hoisted layers using this cached image (e.g.,
    // extant GrHoistedLayers pointing at this object). This object will
    // be unlocked when the use count reaches 0.
    int fUses;

    // For non-atlased layers 'fLocked' should always match 'fTexture'
    // (i.e., if there is a texture it is locked).
    // For atlased layers, 'fLocked' is true if the layer is in a plot and
    // actively required for rendering. If the layer is in a plot but not
    // actively required for rendering, then 'fLocked' is false. If the
    // layer isn't in a plot then it can never be locked.
    bool fLocked;

    void addUse() { ++fUses; }
    void removeUse() { SkASSERT(fUses > 0); --fUses; }
    int uses() const { return fUses; }

    friend class GrLayerCache;  // for access to usage methods
    friend class TestingAccess; // for testing
};
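
// The invariants spelled out in the GrCachedLayer field comments above can be
// summarized as follows (a reading aid only, not additional API):
//
//   non-atlased layer: plot() is NULL, texture() refs the layer's own
//                      GrTexture, locked() matches (texture() != NULL)
//   atlased layer:     plot() is non-NULL, texture() refs the shared atlas
//                      texture, locked() means "currently required for rendering"
//
// In both cases rect() is the layer's bounds within texture() and is empty
// while texture() is NULL.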

// The GrLayerCache caches pre-computed saveLayers for later rendering.
// Non-atlased layers are stored in their own GrTexture while the atlased
// layers share a single GrTexture.
// Unlike the GrFontCache, the GrLayerCache only has one GrAtlas (for 8888),
// whose plots are shared by the layers of all pictures. As such, the
// GrLayerCache roughly combines the functionality of the GrFontCache and
// GrTextStrike classes.
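//
// A rough sketch of the expected client flow (hypothetical caller code; the
// hoisting logic that actually drives this lives outside this header, and
// 'cache', 'desc', etc. are stand-ins):
//
//   bool needsRendering;
//   GrCachedLayer* layer = cache->findLayerOrCreate(pictID, start, stop, srcIR, dstIR,
//                                                   initialMat, key, keySize, paint);
//   if (cache->tryToAtlas(layer, desc, &needsRendering) ||
//       cache->lock(layer, desc, &needsRendering)) {
//       if (needsRendering) {
//           // ... render ops [layer->start(), layer->stop()] of the picture
//           //     into layer->rect() of layer->texture() ...
//       }
//       cache->addUse(layer);
//       // ... composite the cached layer into the destination ...
//       cache->removeUse(layer);   // with no remaining uses the layer can be recycled
//   }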
class GrLayerCache {
public:
    GrLayerCache(GrContext*);
    ~GrLayerCache();

    // As a cache, the GrLayerCache can be ordered to free up all its cached
    // elements by the GrContext
    void freeAll();

    GrCachedLayer* findLayer(uint32_t pictureID, const SkMatrix& ctm,
                             const unsigned* key, int keySize);
    GrCachedLayer* findLayerOrCreate(uint32_t pictureID,
                                     int start, int stop,
                                     const SkIRect& srcIR,
                                     const SkIRect& dstIR,
                                     const SkMatrix& initialMat,
                                     const unsigned* key, int keySize,
                                     const SkPaint* paint);

    // Attempt to place 'layer' in the atlas. Return true on success; false on failure.
    // When true is returned, 'needsRendering' will indicate if the layer must be (re)drawn.
    // Additionally, the GPU resources will be locked.
    bool tryToAtlas(GrCachedLayer* layer, const GrSurfaceDesc& desc, bool* needsRendering);

    // Attempt to lock the GPU resources required for a layer. Return true on success;
    // false on failure. When true is returned 'needsRendering' will indicate if the
    // layer must be (re)drawn.
    // Note that atlased layers should already have been locked and rendered so only
    // free-floating layers will have 'needsRendering' set.
    // Currently, this path always uses a new scratch texture for non-atlased layers
    // and (thus) doesn't cache anything. This can yield a lot of re-rendering.
    // TODO: allow rediscovery of free-floating layers that are still in the resource cache.
    bool lock(GrCachedLayer* layer, const GrSurfaceDesc& desc, bool* needsRendering);

    // addUse is just here to keep the API symmetric
    void addUse(GrCachedLayer* layer) { layer->addUse(); }
    void removeUse(GrCachedLayer* layer) {
        layer->removeUse();
        if (layer->uses() == 0) {
            // If no one cares about the layer allow it to be recycled.
            this->unlock(layer);
        }
    }

    // Cleanup after any SkPicture deletions
    void processDeletedPictures();

    SkDEBUGCODE(void validate() const;)

#ifdef SK_DEVELOPER
    void writeLayersToDisk(const SkString& dirName);
#endif

    static bool PlausiblyAtlasable(int width, int height) {
        return width <= kPlotWidth && height <= kPlotHeight;
    }

#if !GR_CACHE_HOISTED_LAYERS
    void purgeAll();
#endif

private:
    static const int kAtlasTextureWidth = 1024;
    static const int kAtlasTextureHeight = 1024;

    static const int kNumPlotsX = 2;
    static const int kNumPlotsY = 2;

    static const int kPlotWidth = kAtlasTextureWidth / kNumPlotsX;
    static const int kPlotHeight = kAtlasTextureHeight / kNumPlotsY;

    GrContext*             fContext;  // pointer back to owning context
    SkAutoTDelete<GrAtlas> fAtlas;    // TODO: could lazily allocate

    // We cache this information here (rather than, say, on the owning picture)
    // because we want to be able to clean it up as needed (e.g., if a picture
    // is leaked and never cleans itself up we still want to be able to
    // remove the GrPictureInfo once its layers are purged from all the atlas
    // plots).
    SkTDynamicHash<GrPictureInfo, uint32_t> fPictureHash;

    SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key> fLayerHash;

    SkMessageBus<SkPicture::DeletionMessage>::Inbox fPictDeletionInbox;

    // This implements a plot-centric locking mechanism (since the atlas
    // backing texture is always locked). Each layer that is locked (i.e.,
    // needed for the current rendering) in a plot increments the plot lock
    // count for that plot. Similarly, once a rendering is complete all the
    // layers used in it decrement the lock count for the used plots.
    // Plots with a 0 lock count are open for recycling/purging.
    int fPlotLocks[kNumPlotsX * kNumPlotsY];

    // Inform the cache that layer's cached image is not currently required
    void unlock(GrCachedLayer* layer);

    void initAtlas();
    GrCachedLayer* createLayer(uint32_t pictureID, int start, int stop,
                               const SkIRect& srcIR, const SkIRect& dstIR,
                               const SkMatrix& initialMat,
                               const unsigned* key, int keySize,
                               const SkPaint* paint);

    // Remove all the layers (and unlock any resources) associated with 'pictureID'
    void purge(uint32_t pictureID);

    void purgePlot(GrPlot* plot);

    // Try to find a purgeable plot and clear it out. Return true if a plot
    // was purged; false otherwise.
    bool purgePlot();

    void incPlotLock(int plotIdx) { ++fPlotLocks[plotIdx]; }
    void decPlotLock(int plotIdx) {
        SkASSERT(fPlotLocks[plotIdx] > 0);
        --fPlotLocks[plotIdx];
    }

    // for testing
    friend class TestingAccess;
    int numLayers() const { return fLayerHash.count(); }
};

#endif