/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "Test.h"
#include "SkBitmapCache.h"
#include "SkCanvas.h"
#include "SkDiscardableMemoryPool.h"
#include "SkGraphics.h"
#include "SkPicture.h"
#include "SkPictureRecorder.h"
#include "SkResourceCache.h"
#include "SkSurface.h"
#include "SkTypes.h"

////////////////////////////////////////////////////////////////////////////////////////

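// Allocate pixels for |bitmap| according to |info|. Index-8 bitmaps need a color table,
// so build a placeholder one; otherwise use |allocator| when supplied, falling back to a
// default heap allocation.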
static void make_bitmap(SkBitmap* bitmap, const SkImageInfo& info, SkBitmap::Allocator* allocator) {
    if (info.colorType() == kIndex_8_SkColorType) {
        bitmap->setInfo(info);
        SkPMColor ctStorage[256];
        memset(ctStorage, 0xFF, sizeof(ctStorage)); // init with opaque-white for the moment
        SkAutoTUnref<SkColorTable> ctable(new SkColorTable(ctStorage, 256));
        bitmap->allocPixels(allocator, ctable);
    } else if (allocator) {
        bitmap->setInfo(info);
        allocator->allocPixelRef(bitmap, 0);
    } else {
        bitmap->allocPixels(info);
    }
}

// https://bug.skia.org/2894
DEF_TEST(BitmapCache_add_rect, reporter) {
    SkResourceCache::DiscardableFactory factory = SkResourceCache::GetDiscardableFactory();
    SkBitmap::Allocator* allocator = SkBitmapCache::GetAllocator();

    SkAutoTDelete<SkResourceCache> cache;
    if (factory) {
        cache.reset(new SkResourceCache(factory));
    } else {
        const size_t byteLimit = 100 * 1024;
        cache.reset(new SkResourceCache(byteLimit));
    }
    SkBitmap cachedBitmap;
    make_bitmap(&cachedBitmap, SkImageInfo::MakeN32Premul(5, 5), allocator);
    cachedBitmap.setImmutable();

    SkBitmap bm;
    SkIRect rect = SkIRect::MakeWH(5, 5);
    uint32_t cachedID = cachedBitmap.getGenerationID();
    SkPixelRef* cachedPR = cachedBitmap.pixelRef();

    // Wrong subset size
    REPORTER_ASSERT(reporter, !SkBitmapCache::Add(cachedPR, SkIRect::MakeWH(4, 6), cachedBitmap, cache));
    REPORTER_ASSERT(reporter, !SkBitmapCache::Find(cachedID, rect, &bm, cache));
    // Wrong offset value
    REPORTER_ASSERT(reporter, !SkBitmapCache::Add(cachedPR, SkIRect::MakeXYWH(-1, 0, 5, 5), cachedBitmap, cache));
    REPORTER_ASSERT(reporter, !SkBitmapCache::Find(cachedID, rect, &bm, cache));

    // Should not be in the cache
    REPORTER_ASSERT(reporter, !SkBitmapCache::Find(cachedID, rect, &bm, cache));

    REPORTER_ASSERT(reporter, SkBitmapCache::Add(cachedPR, rect, cachedBitmap, cache));
    // Should be in the cache, we just added it
    REPORTER_ASSERT(reporter, SkBitmapCache::Find(cachedID, rect, &bm, cache));
}

#include "SkMipMap.h"

enum LockedState {
    kNotLocked,
    kLocked,
};

enum CachedState {
    kNotInCache,
    kInCache,
};

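// Assert that |data| has the expected ref count, cache residency, and lock state
// (locked here means data->data() returns a non-null pointer).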
static void check_data(skiatest::Reporter* reporter, const SkCachedData* data,
                       int refcnt, CachedState cacheState, LockedState lockedState) {
    REPORTER_ASSERT(reporter, data->testing_only_getRefCnt() == refcnt);
    REPORTER_ASSERT(reporter, data->testing_only_isInCache() == (kInCache == cacheState));
    bool isLocked = (data->data() != nullptr);
    REPORTER_ASSERT(reporter, isLocked == (lockedState == kLocked));
}

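// Exercise SkMipMapCache against a local cache: a miss before AddAndRef, a hit afterwards,
// ref-count/lock bookkeeping via check_data, and eviction through purgeAll().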
static void test_mipmapcache(skiatest::Reporter* reporter, SkResourceCache* cache) {
    cache->purgeAll();

    SkBitmap src;
    src.allocN32Pixels(5, 5);
    src.setImmutable();

    const SkMipMap* mipmap = SkMipMapCache::FindAndRef(SkBitmapCacheDesc::Make(src), cache);
    REPORTER_ASSERT(reporter, nullptr == mipmap);

    mipmap = SkMipMapCache::AddAndRef(src, cache);
    REPORTER_ASSERT(reporter, mipmap);

    {
        const SkMipMap* mm = SkMipMapCache::FindAndRef(SkBitmapCacheDesc::Make(src), cache);
        REPORTER_ASSERT(reporter, mm);
        REPORTER_ASSERT(reporter, mm == mipmap);
        mm->unref();
    }

    check_data(reporter, mipmap, 2, kInCache, kLocked);

    mipmap->unref();
    // tricky, since technically after this I'm no longer an owner, but since the cache is
    // local, I know it won't get purged behind my back
    check_data(reporter, mipmap, 1, kInCache, kNotLocked);

    // find us again
    mipmap = SkMipMapCache::FindAndRef(SkBitmapCacheDesc::Make(src), cache);
    check_data(reporter, mipmap, 2, kInCache, kLocked);

    cache->purgeAll();
    check_data(reporter, mipmap, 1, kNotInCache, kLocked);

    mipmap->unref();
}

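// Verify that destroying a bitmap's pixels (via reset()) notifies the cache and purges the
// mipmap entry keyed to that bitmap.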
static void test_mipmap_notify(skiatest::Reporter* reporter, SkResourceCache* cache) {
    const int N = 3;
    SkBitmap src[N];
    for (int i = 0; i < N; ++i) {
        src[i].allocN32Pixels(5, 5);
        src[i].setImmutable();
        SkMipMapCache::AddAndRef(src[i], cache)->unref();
    }

    for (int i = 0; i < N; ++i) {
        const SkMipMap* mipmap = SkMipMapCache::FindAndRef(SkBitmapCacheDesc::Make(src[i]), cache);
        if (cache) {
            // if cache is null, we're working on the global cache, and other threads might purge
            // it, making this check fragile.
            REPORTER_ASSERT(reporter, mipmap);
        }
        SkSafeUnref(mipmap);

        src[i].reset(); // delete the underlying pixelref, which *should* remove us from the cache

        mipmap = SkMipMapCache::FindAndRef(SkBitmapCacheDesc::Make(src[i]), cache);
        REPORTER_ASSERT(reporter, !mipmap);
    }
}

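// Same as test_mipmap_notify, but for SkBitmapCache entries keyed on the source bitmap's
// generation ID.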
static void test_bitmap_notify(skiatest::Reporter* reporter, SkResourceCache* cache) {
    const SkIRect subset = SkIRect::MakeWH(5, 5);
    const int N = 3;
    SkBitmap src[N], dst[N];
    for (int i = 0; i < N; ++i) {
        src[i].allocN32Pixels(5, 5);
        src[i].setImmutable();
        dst[i].allocN32Pixels(5, 5);
        dst[i].setImmutable();
        SkBitmapCache::Add(src[i].pixelRef(), subset, dst[i], cache);
    }

    for (int i = 0; i < N; ++i) {
        const uint32_t genID = src[i].getGenerationID();
        SkBitmap result;
        bool found = SkBitmapCache::Find(genID, subset, &result, cache);
        if (cache) {
            // if cache is null, we're working on the global cache, and other threads might purge
            // it, making this check fragile.
            REPORTER_ASSERT(reporter, found);
        }

        src[i].reset(); // delete the underlying pixelref, which *should* remove us from the cache

        found = SkBitmapCache::Find(genID, subset, &result, cache);
        REPORTER_ASSERT(reporter, !found);
    }
}

#include "SkDiscardableMemoryPool.h"

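// Back the cache's discardable memory with a test-local pool so the test can force a purge
// (dumpPool) and observe RAM usage directly.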
static SkDiscardableMemoryPool* gPool = nullptr;
static SkDiscardableMemory* pool_factory(size_t bytes) {
    SkASSERT(gPool);
    return gPool->create(bytes);
}

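// For each supported color type: add a (genID, subset) -> bitmap entry, verify it can be
// found (repeatedly), drop the discardable pixels when a factory is in use and confirm the
// entry is gone, then re-add and find it again.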
static void testBitmapCache_discarded_bitmap(skiatest::Reporter* reporter, SkResourceCache* cache,
                                              SkResourceCache::DiscardableFactory factory) {
    SkBitmap::Allocator* allocator = cache->allocator();
    const SkColorType testTypes[] = {
        kAlpha_8_SkColorType,
        kRGB_565_SkColorType,
        kRGBA_8888_SkColorType,
        kBGRA_8888_SkColorType,
        kIndex_8_SkColorType,
        kGray_8_SkColorType
    };
    for (const SkColorType testType : testTypes) {
        SkBitmap cachedBitmap;
        make_bitmap(&cachedBitmap, SkImageInfo::Make(5, 5, testType, kPremul_SkAlphaType),
                    allocator);
        cachedBitmap.setImmutable();
        cachedBitmap.unlockPixels();

        SkBitmap bm;
        SkIRect rect = SkIRect::MakeWH(5, 5);

        // Add a bitmap to the cache.
        REPORTER_ASSERT(reporter, SkBitmapCache::Add(cachedBitmap.pixelRef(), rect, cachedBitmap,
                                                     cache));
        REPORTER_ASSERT(reporter, SkBitmapCache::Find(cachedBitmap.getGenerationID(), rect, &bm,
                                                      cache));

        // Finding more than once works fine.
        REPORTER_ASSERT(reporter, SkBitmapCache::Find(cachedBitmap.getGenerationID(), rect, &bm,
                                                      cache));
        bm.unlockPixels();

        // Drop the pixels in the bitmap.
        if (factory) {
            REPORTER_ASSERT(reporter, gPool->getRAMUsed() > 0);
            gPool->dumpPool();

            // The bitmap is not in the cache since it has been dropped.
            REPORTER_ASSERT(reporter, !SkBitmapCache::Find(cachedBitmap.getGenerationID(), rect,
                                                           &bm, cache));
        }

        make_bitmap(&cachedBitmap, SkImageInfo::Make(5, 5, testType, kPremul_SkAlphaType),
                    allocator);
        cachedBitmap.setImmutable();
        cachedBitmap.unlockPixels();

        // We can add the bitmap back to the cache and find it again.
        REPORTER_ASSERT(reporter, SkBitmapCache::Add(cachedBitmap.pixelRef(), rect, cachedBitmap,
                                                     cache));
        REPORTER_ASSERT(reporter, SkBitmapCache::Find(cachedBitmap.getGenerationID(), rect, &bm,
                                                      cache));
    }
    test_mipmapcache(reporter, cache);
    test_bitmap_notify(reporter, cache);
    test_mipmap_notify(reporter, cache);
}

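// Run the discarded-bitmap tests against both a byte-limited cache and a cache backed by a
// discardable memory pool.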
DEF_TEST(BitmapCache_discarded_bitmap, reporter) {
    const size_t byteLimit = 100 * 1024;
    {
        SkResourceCache cache(byteLimit);
        testBitmapCache_discarded_bitmap(reporter, &cache, nullptr);
    }
    {
        SkAutoTUnref<SkDiscardableMemoryPool> pool(
                SkDiscardableMemoryPool::Create(byteLimit, nullptr));
        gPool = pool.get();
        SkResourceCache::DiscardableFactory factory = pool_factory;
        SkResourceCache cache(factory);
        testBitmapCache_discarded_bitmap(reporter, &cache, factory);
    }
}

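// Draw the image produced by |buildImage| with a high-quality-filtered transform so the
// bitmap cache gets populated, then delete the image and assert that its entries were purged.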
static void test_discarded_image(skiatest::Reporter* reporter, const SkMatrix& transform,
                                 SkImage* (*buildImage)()) {
    SkAutoTUnref<SkSurface> surface(SkSurface::NewRasterN32Premul(10, 10));
    SkCanvas* canvas = surface->getCanvas();

    // SkBitmapCache is global, so other threads could be evicting our bitmaps. Loop a few times
    // to mitigate this risk.
    const unsigned kRepeatCount = 42;
    for (unsigned i = 0; i < kRepeatCount; ++i) {
        SkAutoCanvasRestore acr(canvas, true);

        SkAutoTUnref<SkImage> image(buildImage());

        // always use high quality to ensure caching when scaled
        SkPaint paint;
        paint.setFilterQuality(kHigh_SkFilterQuality);

        // draw the image (with a transform, to tickle different code paths) to ensure
        // any associated resources get cached
        canvas->concat(transform);
        canvas->drawImage(image, 0, 0, &paint);

        auto imageId = image->uniqueID();

        // delete the image
        image.reset(nullptr);

        // all resources should have been purged
        SkBitmap result;
        REPORTER_ASSERT(reporter, !SkBitmapCache::Find(imageId, &result));
    }
}

// Verify that associated bitmap cache entries are purged on SkImage destruction.
DEF_TEST(BitmapCache_discarded_image, reporter) {
    // Cache entries associated with SkImages fall into two categories:
    //
    // 1) generated image bitmaps (managed by the image cacherator)
    // 2) scaled/resampled bitmaps (cached when HQ filters are used)
    //
    // To exercise the first cache type, we use generated/picture-backed SkImages.
    // To exercise the latter, we draw scaled bitmap images using HQ filters.

    const SkMatrix xforms[] = {
        SkMatrix::MakeScale(1, 1),
        SkMatrix::MakeScale(1.7f, 0.5f),
    };

    for (size_t i = 0; i < SK_ARRAY_COUNT(xforms); ++i) {
        test_discarded_image(reporter, xforms[i], []() {
            SkAutoTUnref<SkSurface> surface(SkSurface::NewRasterN32Premul(10, 10));
            surface->getCanvas()->clear(SK_ColorCYAN);
            return surface->newImageSnapshot();
        });

        test_discarded_image(reporter, xforms[i], []() {
            SkPictureRecorder recorder;
            SkCanvas* canvas = recorder.beginRecording(10, 10);
            canvas->clear(SK_ColorCYAN);
            SkAutoTUnref<SkPicture> picture(recorder.endRecording());
            return SkImage::NewFromPicture(picture, SkISize::Make(10, 10), nullptr, nullptr);
        });
    }
}