/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkImage_Base.h"
#include "SkBitmap.h"
#include "SkBitmapProcShader.h"
#include "SkCanvas.h"
#include "SkColorTable.h"
#include "SkConvertPixels.h"
#include "SkData.h"
#include "SkImageInfoPriv.h"
#include "SkImagePriv.h"
#include "SkPixelRef.h"
#include "SkSurface.h"
#include "SkTLazy.h"

#if SK_SUPPORT_GPU
#include "GrContext.h"
#include "GrTextureAdjuster.h"
#include "SkGr.h"
#endif

// fixes https://bug.skia.org/5096
static bool is_not_subset(const SkBitmap& bm) {
    SkASSERT(bm.pixelRef());
    SkISize dim = SkISize::Make(bm.pixelRef()->width(), bm.pixelRef()->height());
    SkASSERT(dim != bm.dimensions() || bm.pixelRefOrigin().isZero());
    return dim == bm.dimensions();
}

class SkImage_Raster : public SkImage_Base {
public:
    // Reject dimension/colorType/alphaType/rowBytes combinations that cannot describe a valid
    // raster image. On success, optionally reports the minimum pixel storage size via minSize.
    static bool ValidArgs(const SkImageInfo& info, size_t rowBytes, size_t* minSize) {
        const int maxDimension = SK_MaxS32 >> 2;

        if (info.width() <= 0 || info.height() <= 0) {
            return false;
        }
        if (info.width() > maxDimension || info.height() > maxDimension) {
            return false;
        }
        if ((unsigned)info.colorType() > (unsigned)kLastEnum_SkColorType) {
            return false;
        }
        if ((unsigned)info.alphaType() > (unsigned)kLastEnum_SkAlphaType) {
            return false;
        }

        if (kUnknown_SkColorType == info.colorType()) {
            return false;
        }
        if (!info.validRowBytes(rowBytes)) {
            return false;
        }

        size_t size = info.computeByteSize(rowBytes);
        if (SkImageInfo::ByteSizeOverflowed(size)) {
            return false;
        }

        if (minSize) {
            *minSize = size;
        }
        return true;
    }

    SkImage_Raster(const SkImageInfo&, sk_sp<SkData>, size_t rb,
                   uint32_t id = kNeedNewImageUniqueID);
    ~SkImage_Raster() override;

    SkImageInfo onImageInfo() const override {
        return fBitmap.info();
    }

    bool onReadPixels(const SkImageInfo&, void*, size_t, int srcX, int srcY,
                      CachingHint) const override;
    bool onPeekPixels(SkPixmap*) const override;
    const SkBitmap* onPeekBitmap() const override { return &fBitmap; }

#if SK_SUPPORT_GPU
    sk_sp<GrTextureProxy> asTextureProxyRef(GrContext*, const GrSamplerState&,
                                            SkScalar scaleAdjust[2]) const override;
#endif

    bool getROPixels(SkBitmap*, CachingHint) const override;
    sk_sp<SkImage> onMakeSubset(const SkIRect&) const override;

    SkPixelRef* getPixelRef() const { return fBitmap.pixelRef(); }

    bool onAsLegacyBitmap(SkBitmap*) const override;

    SkImage_Raster(const SkBitmap& bm, bool bitmapMayBeMutable = false)
        : INHERITED(bm.width(), bm.height(),
                    is_not_subset(bm) ? bm.getGenerationID()
                                      : (uint32_t)kNeedNewImageUniqueID)
        , fBitmap(bm)
    {
        SkASSERT(bitmapMayBeMutable || fBitmap.isImmutable());
    }

    sk_sp<SkImage> onMakeColorTypeAndColorSpace(SkColorType, sk_sp<SkColorSpace>) const override;

    bool onIsValid(GrContext* context) const override { return true; }
    void notifyAddedToRasterCache() const override {
        // We explicitly DON'T want to call INHERITED::notifyAddedToRasterCache. That ties the
        // lifetime of derived/cached resources to the image. In this case, we only want cached
        // data (eg mips) tied to the lifetime of the underlying pixelRef.
        SkASSERT(fBitmap.pixelRef());
        fBitmap.pixelRef()->notifyAddedToCache();
    }

#if SK_SUPPORT_GPU
    sk_sp<GrTextureProxy> refPinnedTextureProxy(uint32_t* uniqueID) const override;
    bool onPinAsTexture(GrContext*) const override;
    void onUnpinAsTexture(GrContext*) const override;
#endif

private:
    SkBitmap fBitmap;

#if SK_SUPPORT_GPU
    mutable sk_sp<GrTextureProxy> fPinnedProxy;
    mutable int32_t fPinnedCount = 0;
    mutable uint32_t fPinnedUniqueID = 0;
#endif

    typedef SkImage_Base INHERITED;
};

///////////////////////////////////////////////////////////////////////////////

// Called when the installed pixels are released; drops the SkData ref handed off in the
// constructor below.
static void release_data(void* addr, void* context) {
    SkData* data = static_cast<SkData*>(context);
    data->unref();
}

SkImage_Raster::SkImage_Raster(const SkImageInfo& info, sk_sp<SkData> data, size_t rowBytes,
                               uint32_t id)
    : INHERITED(info.width(), info.height(), id)
{
    void* addr = const_cast<void*>(data->data());

    fBitmap.installPixels(info, addr, rowBytes, release_data, data.release());
    fBitmap.setImmutable();
}

SkImage_Raster::~SkImage_Raster() {
#if SK_SUPPORT_GPU
    SkASSERT(nullptr == fPinnedProxy.get());  // want the caller to have manually unpinned
#endif
}

bool SkImage_Raster::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRowBytes,
                                  int srcX, int srcY, CachingHint) const {
    SkBitmap shallowCopy(fBitmap);
    return shallowCopy.readPixels(dstInfo, dstPixels, dstRowBytes, srcX, srcY);
}

bool SkImage_Raster::onPeekPixels(SkPixmap* pm) const {
    return fBitmap.peekPixels(pm);
}

bool SkImage_Raster::getROPixels(SkBitmap* dst, CachingHint) const {
    *dst = fBitmap;
    return true;
}

#if SK_SUPPORT_GPU
sk_sp<GrTextureProxy> SkImage_Raster::asTextureProxyRef(GrContext* context,
                                                        const GrSamplerState& params,
                                                        SkScalar scaleAdjust[2]) const {
    if (!context) {
        return nullptr;
    }

    uint32_t uniqueID;
    sk_sp<GrTextureProxy> tex = this->refPinnedTextureProxy(&uniqueID);
    if (tex) {
        GrTextureAdjuster adjuster(context, fPinnedProxy, fBitmap.alphaType(), fPinnedUniqueID,
                                   fBitmap.colorSpace());
        return adjuster.refTextureProxyForParams(params, scaleAdjust);
    }

    return GrRefCachedBitmapTextureProxy(context, fBitmap, params, scaleAdjust);
}
#endif

#if SK_SUPPORT_GPU

sk_sp<GrTextureProxy> SkImage_Raster::refPinnedTextureProxy(uint32_t* uniqueID) const {
    if (fPinnedProxy) {
        SkASSERT(fPinnedCount > 0);
        SkASSERT(fPinnedUniqueID != 0);
        *uniqueID = fPinnedUniqueID;
        return fPinnedProxy;
    }
    return nullptr;
}

bool SkImage_Raster::onPinAsTexture(GrContext* ctx) const {
    if (fPinnedProxy) {
        SkASSERT(fPinnedCount > 0);
        SkASSERT(fPinnedUniqueID != 0);
    } else {
        SkASSERT(fPinnedCount == 0);
        SkASSERT(fPinnedUniqueID == 0);
        fPinnedProxy = GrRefCachedBitmapTextureProxy(ctx, fBitmap, GrSamplerState::ClampNearest(),
                                                     nullptr);
        if (!fPinnedProxy) {
            return false;
        }
        fPinnedUniqueID = fBitmap.getGenerationID();
    }
    // Note: we only increment if the texture was successfully pinned
    ++fPinnedCount;
    return true;
}

void SkImage_Raster::onUnpinAsTexture(GrContext* ctx) const {
    // Note: we always decrement, even if fPinnedProxy is null
    SkASSERT(fPinnedCount > 0);
    SkASSERT(fPinnedUniqueID != 0);

    if (0 == --fPinnedCount) {
        fPinnedProxy.reset(nullptr);
        fPinnedUniqueID = 0;
    }
}
#endif

sk_sp<SkImage> SkImage_Raster::onMakeSubset(const SkIRect& subset) const {
    SkImageInfo info = fBitmap.info().makeWH(subset.width(), subset.height());
    SkBitmap bitmap;
    if (!bitmap.tryAllocPixels(info)) {
        return nullptr;
    }

    void* dst = bitmap.getPixels();
    void* src = fBitmap.getAddr(subset.x(), subset.y());
    if (!dst || !src) {
        SkDEBUGFAIL("SkImage_Raster::onMakeSubset with nullptr src or dst");
        return nullptr;
    }

    SkRectMemcpy(dst, bitmap.rowBytes(), src, fBitmap.rowBytes(), bitmap.rowBytes(),
                 subset.height());

    bitmap.setImmutable();
    return MakeFromBitmap(bitmap);
}

///////////////////////////////////////////////////////////////////////////////

sk_sp<SkImage> MakeRasterCopyPriv(const SkPixmap& pmap, uint32_t id) {
    size_t size;
    if (!SkImage_Raster::ValidArgs(pmap.info(), pmap.rowBytes(), &size) || !pmap.addr()) {
        return nullptr;
    }

    // Here we actually make a copy of the caller's pixel data
    sk_sp<SkData> data(SkData::MakeWithCopy(pmap.addr(), size));
    return sk_make_sp<SkImage_Raster>(pmap.info(), std::move(data), pmap.rowBytes(), id);
}

sk_sp<SkImage> SkImage::MakeRasterCopy(const SkPixmap& pmap) {
    return MakeRasterCopyPriv(pmap, kNeedNewImageUniqueID);
}

sk_sp<SkImage> SkImage::MakeRasterData(const SkImageInfo& info, sk_sp<SkData> data,
                                       size_t rowBytes) {
    size_t size;
    if (!SkImage_Raster::ValidArgs(info, rowBytes, &size) || !data) {
        return nullptr;
    }

    // did they give us enough data?
    if (data->size() < size) {
        return nullptr;
    }

    return sk_make_sp<SkImage_Raster>(info, std::move(data), rowBytes);
}

sk_sp<SkImage> SkImage::MakeFromRaster(const SkPixmap& pmap, RasterReleaseProc proc,
                                       ReleaseContext ctx) {
    size_t size;
    if (!SkImage_Raster::ValidArgs(pmap.info(), pmap.rowBytes(), &size) || !pmap.addr()) {
        return nullptr;
    }

    sk_sp<SkData> data(SkData::MakeWithProc(pmap.addr(), size, proc, ctx));
    return sk_make_sp<SkImage_Raster>(pmap.info(), std::move(data), pmap.rowBytes());
}

sk_sp<SkImage> SkMakeImageFromRasterBitmapPriv(const SkBitmap& bm, SkCopyPixelsMode cpm,
                                               uint32_t idForCopy) {
    if (kAlways_SkCopyPixelsMode == cpm || (!bm.isImmutable() && kNever_SkCopyPixelsMode != cpm)) {
        SkPixmap pmap;
        if (bm.peekPixels(&pmap)) {
            return MakeRasterCopyPriv(pmap, idForCopy);
        } else {
            return sk_sp<SkImage>();
        }
    }

    return sk_make_sp<SkImage_Raster>(bm, kNever_SkCopyPixelsMode == cpm);
}

sk_sp<SkImage> SkMakeImageFromRasterBitmap(const SkBitmap& bm, SkCopyPixelsMode cpm) {
    if (!SkImageInfoIsValid(bm.info()) || bm.rowBytes() < bm.info().minRowBytes()) {
        return nullptr;
    }

    return SkMakeImageFromRasterBitmapPriv(bm, cpm, kNeedNewImageUniqueID);
}

const SkPixelRef* SkBitmapImageGetPixelRef(const SkImage* image) {
    return ((const SkImage_Raster*)image)->getPixelRef();
}

bool SkImage_Raster::onAsLegacyBitmap(SkBitmap* bitmap) const {
    // When we're a snapshot from a surface, our bitmap may not be marked immutable
    // even though logically we always are. In that case we can't physically share our
    // pixelref, since the caller might call setImmutable() themselves
    // (thus changing our state).
    if (fBitmap.isImmutable()) {
        SkIPoint origin = fBitmap.pixelRefOrigin();
        bitmap->setInfo(fBitmap.info(), fBitmap.rowBytes());
        bitmap->setPixelRef(sk_ref_sp(fBitmap.pixelRef()), origin.x(), origin.y());
        return true;
    }
    return this->INHERITED::onAsLegacyBitmap(bitmap);
}

///////////////////////////////////////////////////////////////////////////////

sk_sp<SkImage> SkImage_Raster::onMakeColorTypeAndColorSpace(SkColorType targetCT,
                                                            sk_sp<SkColorSpace> targetCS) const {
    SkPixmap src;
    SkAssertResult(fBitmap.peekPixels(&src));

    SkBitmap dst;
    dst.allocPixels(fBitmap.info().makeColorType(targetCT).makeColorSpace(targetCS));

    SkAssertResult(dst.writePixels(src));
    dst.setImmutable();
    return SkImage::MakeFromBitmap(dst);
}