/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/core/SkBitmap.h"
#include "include/core/SkCanvas.h"
#include "include/core/SkData.h"
#include "include/core/SkPixelRef.h"
#include "include/core/SkSurface.h"
#include "include/private/SkImageInfoPriv.h"
#include "src/codec/SkColorTable.h"
#include "src/core/SkCompressedDataUtils.h"
#include "src/core/SkConvertPixels.h"
#include "src/core/SkImagePriv.h"
#include "src/core/SkTLazy.h"
#include "src/image/SkImage_Base.h"
#include "src/shaders/SkBitmapProcShader.h"

#if SK_SUPPORT_GPU
#include "include/gpu/GrContext.h"
#include "src/gpu/GrTextureAdjuster.h"
#include "src/gpu/SkGr.h"
#endif

// Returns true when the bitmap covers its entire pixelRef, i.e. it is not a subset of a larger
// allocation. Fixes https://bug.skia.org/5096.
static bool is_not_subset(const SkBitmap& bm) {
    SkASSERT(bm.pixelRef());
    SkISize dim = SkISize::Make(bm.pixelRef()->width(), bm.pixelRef()->height());
    SkASSERT(dim != bm.dimensions() || bm.pixelRefOrigin().isZero());
    return dim == bm.dimensions();
}

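// An SkImage backed by CPU-resident pixels held in an SkBitmap. The image can optionally be
// "pinned" as a GPU texture (see onPinAsTexture/onUnpinAsTexture below) so that repeated draws
// to a GPU-backed canvas reuse the same upload.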
class SkImage_Raster : public SkImage_Base {
public:
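    // Validates that (info, rowBytes) describe a raster image this class can wrap. On success,
    // optionally reports the minimum byte size of the pixel allocation via minSize.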
    static bool ValidArgs(const SkImageInfo& info, size_t rowBytes, size_t* minSize) {
        const int maxDimension = SK_MaxS32 >> 2;

        // TODO(mtklein): eliminate anything here that setInfo() has already checked.
        SkBitmap dummy;
        if (!dummy.setInfo(info, rowBytes)) {
            return false;
        }

        if (info.width() <= 0 || info.height() <= 0) {
            return false;
        }
        if (info.width() > maxDimension || info.height() > maxDimension) {
            return false;
        }
        if ((unsigned)info.colorType() > (unsigned)kLastEnum_SkColorType) {
            return false;
        }
        if ((unsigned)info.alphaType() > (unsigned)kLastEnum_SkAlphaType) {
            return false;
        }

        if (kUnknown_SkColorType == info.colorType()) {
            return false;
        }
        if (!info.validRowBytes(rowBytes)) {
            return false;
        }

        size_t size = info.computeByteSize(rowBytes);
        if (SkImageInfo::ByteSizeOverflowed(size)) {
            return false;
        }

        if (minSize) {
            *minSize = size;
        }
        return true;
    }

    SkImage_Raster(const SkImageInfo&, sk_sp<SkData>, size_t rb,
                   uint32_t id = kNeedNewImageUniqueID);
    ~SkImage_Raster() override;

    bool onReadPixels(const SkImageInfo&, void*, size_t, int srcX, int srcY,
                      CachingHint) const override;
    bool onPeekPixels(SkPixmap*) const override;
    const SkBitmap* onPeekBitmap() const override { return &fBitmap; }

#if SK_SUPPORT_GPU
    GrSurfaceProxyView refView(GrRecordingContext*, GrMipMapped) const override;
#endif

    bool getROPixels(SkBitmap*, CachingHint) const override;
    sk_sp<SkImage> onMakeSubset(GrRecordingContext*, const SkIRect&) const override;

    SkPixelRef* getPixelRef() const { return fBitmap.pixelRef(); }

    bool onAsLegacyBitmap(SkBitmap*) const override;

    SkImage_Raster(const SkBitmap& bm, bool bitmapMayBeMutable = false)
            : INHERITED(bm.info(),
                        is_not_subset(bm) ? bm.getGenerationID() : (uint32_t)kNeedNewImageUniqueID)
            , fBitmap(bm) {
        SkASSERT(bitmapMayBeMutable || fBitmap.isImmutable());
    }

    sk_sp<SkImage> onMakeColorTypeAndColorSpace(GrRecordingContext*,
                                                SkColorType, sk_sp<SkColorSpace>) const override;

    sk_sp<SkImage> onReinterpretColorSpace(sk_sp<SkColorSpace>) const override;

    bool onIsValid(GrContext* context) const override { return true; }
    void notifyAddedToRasterCache() const override {
        // We explicitly DON'T want to call INHERITED::notifyAddedToRasterCache. That ties the
        // lifetime of derived/cached resources to the image. In this case, we only want cached
        // data (eg mips) tied to the lifetime of the underlying pixelRef.
        SkASSERT(fBitmap.pixelRef());
        fBitmap.pixelRef()->notifyAddedToCache();
    }

#if SK_SUPPORT_GPU
    GrSurfaceProxyView refPinnedView(GrRecordingContext* context,
                                     uint32_t* uniqueID) const override;
    bool onPinAsTexture(GrContext*) const override;
    void onUnpinAsTexture(GrContext*) const override;
#endif

private:
    SkBitmap fBitmap;

#if SK_SUPPORT_GPU
    mutable GrSurfaceProxyView fPinnedView;
    mutable int32_t fPinnedCount = 0;
    mutable uint32_t fPinnedUniqueID = 0;
#endif

    typedef SkImage_Base INHERITED;
};

///////////////////////////////////////////////////////////////////////////////

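// Release proc installed on fBitmap's pixels: unrefs the SkData that owns the pixel memory once
// the pixelRef no longer needs it.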
static void release_data(void* addr, void* context) {
    SkData* data = static_cast<SkData*>(context);
    data->unref();
}

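// Wraps (without copying) the pixel memory owned by `data` and marks the bitmap immutable.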
SkImage_Raster::SkImage_Raster(const SkImageInfo& info, sk_sp<SkData> data, size_t rowBytes,
                               uint32_t id)
        : INHERITED(info, id) {
    void* addr = const_cast<void*>(data->data());

    fBitmap.installPixels(info, addr, rowBytes, release_data, data.release());
    fBitmap.setImmutable();
}

SkImage_Raster::~SkImage_Raster() {
#if SK_SUPPORT_GPU
    SkASSERT(!fPinnedView);  // want the caller to have manually unpinned
#endif
}

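// Delegates to SkBitmap::readPixels() through a shallow copy of fBitmap; any conversion to
// dstInfo is handled by the bitmap machinery.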
bool SkImage_Raster::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRowBytes,
                                  int srcX, int srcY, CachingHint) const {
    SkBitmap shallowCopy(fBitmap);
    return shallowCopy.readPixels(dstInfo, dstPixels, dstRowBytes, srcX, srcY);
}

bool SkImage_Raster::onPeekPixels(SkPixmap* pm) const {
    return fBitmap.peekPixels(pm);
}

bool SkImage_Raster::getROPixels(SkBitmap* dst, CachingHint) const {
    *dst = fBitmap;
    return true;
}

#if SK_SUPPORT_GPU
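// Returns a texture view of this raster image: the pinned view if one exists, otherwise a
// (possibly cached) upload of fBitmap.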
GrSurfaceProxyView SkImage_Raster::refView(GrRecordingContext* context,
                                           GrMipMapped mipMapped) const {
    if (!context) {
        return {};
    }

    uint32_t uniqueID;
    if (GrSurfaceProxyView view = this->refPinnedView(context, &uniqueID)) {
        GrTextureAdjuster adjuster(context, std::move(view), fBitmap.info().colorInfo(),
                                   fPinnedUniqueID);
        return adjuster.view(mipMapped);
    }

    return GrRefCachedBitmapView(context, fBitmap, mipMapped);
}
#endif

#if SK_SUPPORT_GPU

GrSurfaceProxyView SkImage_Raster::refPinnedView(GrRecordingContext*, uint32_t* uniqueID) const {
    if (fPinnedView) {
        SkASSERT(fPinnedCount > 0);
        SkASSERT(fPinnedUniqueID != 0);
        *uniqueID = fPinnedUniqueID;
        return fPinnedView;
    }
    return {};
}

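// Pins the bitmap's pixels as a GPU texture. Pins are ref-counted: the first pin uploads (or
// fetches from the cache) the texture view, and the matching unpin that drops the count to zero
// releases it.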
bool SkImage_Raster::onPinAsTexture(GrContext* ctx) const {
    if (fPinnedView) {
        SkASSERT(fPinnedCount > 0);
        SkASSERT(fPinnedUniqueID != 0);
    } else {
        SkASSERT(fPinnedCount == 0);
        SkASSERT(fPinnedUniqueID == 0);
        fPinnedView = GrRefCachedBitmapView(ctx, fBitmap, GrMipMapped::kNo);
        if (!fPinnedView) {
            return false;
        }
        SkASSERT(fPinnedView.asTextureProxy());
        fPinnedUniqueID = fBitmap.getGenerationID();
    }
    // Note: we only increment if the texture was successfully pinned
    ++fPinnedCount;
    return true;
}

void SkImage_Raster::onUnpinAsTexture(GrContext* ctx) const {
    // Note: we always decrement, even if fPinnedView is null
    SkASSERT(fPinnedCount > 0);
    SkASSERT(fPinnedUniqueID != 0);

    if (0 == --fPinnedCount) {
        fPinnedView = GrSurfaceProxyView();
        fPinnedUniqueID = 0;
    }
}
#endif

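// Copies the requested subset into a freshly allocated, immutable bitmap and wraps it in a new
// image; the subset never shares pixels with the original.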
sk_sp<SkImage> SkImage_Raster::onMakeSubset(GrRecordingContext*, const SkIRect& subset) const {
    SkImageInfo info = fBitmap.info().makeDimensions(subset.size());
    SkBitmap bitmap;
    if (!bitmap.tryAllocPixels(info)) {
        return nullptr;
    }

    void* dst = bitmap.getPixels();
    void* src = fBitmap.getAddr(subset.x(), subset.y());
    if (!dst || !src) {
        SkDEBUGFAIL("SkImage_Raster::onMakeSubset with nullptr src or dst");
        return nullptr;
    }

    SkRectMemcpy(dst, bitmap.rowBytes(), src, fBitmap.rowBytes(), bitmap.rowBytes(),
                 subset.height());

    bitmap.setImmutable();
    return MakeFromBitmap(bitmap);
}

///////////////////////////////////////////////////////////////////////////////

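// Copies the pixmap's pixels into a new SkData and wraps the copy in an SkImage_Raster.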
sk_sp<SkImage> MakeRasterCopyPriv(const SkPixmap& pmap, uint32_t id) {
    size_t size;
    if (!SkImage_Raster::ValidArgs(pmap.info(), pmap.rowBytes(), &size) || !pmap.addr()) {
        return nullptr;
    }

    // Here we actually make a copy of the caller's pixel data
    sk_sp<SkData> data(SkData::MakeWithCopy(pmap.addr(), size));
    return sk_make_sp<SkImage_Raster>(pmap.info(), std::move(data), pmap.rowBytes(), id);
}

sk_sp<SkImage> SkImage::MakeRasterCopy(const SkPixmap& pmap) {
    return MakeRasterCopyPriv(pmap, kNeedNewImageUniqueID);
}

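// Wraps the caller's SkData without copying; the image shares ownership of the data, so the
// pixels stay alive for as long as the image does.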
sk_sp<SkImage> SkImage::MakeRasterData(const SkImageInfo& info, sk_sp<SkData> data,
                                       size_t rowBytes) {
    size_t size;
    if (!SkImage_Raster::ValidArgs(info, rowBytes, &size) || !data) {
        return nullptr;
    }

    // did they give us enough data?
    if (data->size() < size) {
        return nullptr;
    }

    return sk_make_sp<SkImage_Raster>(info, std::move(data), rowBytes);
}

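// Decompresses `data` into a freshly allocated N32 bitmap and wraps it as an immutable raster
// image.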
// TODO: this could be improved to decode and make use of the mipmap
// levels potentially present in the compressed data. For now, any
// mipmap levels are discarded.
sk_sp<SkImage> SkImage::MakeRasterFromCompressed(sk_sp<SkData> data,
                                                 int width, int height,
                                                 CompressionType type) {
    size_t expectedSize = SkCompressedFormatDataSize(type, { width, height }, false);
    if (!data || data->size() < expectedSize) {
        return nullptr;
    }

    SkAlphaType at = SkCompressionTypeIsOpaque(type) ? kOpaque_SkAlphaType
                                                     : kPremul_SkAlphaType;

    SkImageInfo ii = SkImageInfo::MakeN32(width, height, at);

    if (!SkImage_Raster::ValidArgs(ii, ii.minRowBytes(), nullptr)) {
        return nullptr;
    }

    SkBitmap bitmap;
    if (!bitmap.tryAllocPixels(ii)) {
        return nullptr;
    }

    if (!SkDecompress(std::move(data), { width, height }, type, &bitmap)) {
        return nullptr;
    }

    bitmap.setImmutable();
    return MakeFromBitmap(bitmap);
}

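// Wraps the caller's pixels without copying; `proc` (with `ctx`) is invoked once the image no
// longer needs the pixel memory. A minimal usage sketch (the buffer, dimensions, and release
// callback below are hypothetical and must be supplied by the caller):
//
//   SkPixmap pmap(SkImageInfo::MakeN32Premul(width, height), pixels, rowBytes);
//   sk_sp<SkImage> image = SkImage::MakeFromRaster(pmap, myReleaseProc, myReleaseCtx);
//   // `pixels` must stay valid until myReleaseProc fires.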
sk_sp<SkImage> SkImage::MakeFromRaster(const SkPixmap& pmap, RasterReleaseProc proc,
                                       ReleaseContext ctx) {
    size_t size;
    if (!SkImage_Raster::ValidArgs(pmap.info(), pmap.rowBytes(), &size) || !pmap.addr()) {
        return nullptr;
    }

    sk_sp<SkData> data(SkData::MakeWithProc(pmap.addr(), size, proc, ctx));
    return sk_make_sp<SkImage_Raster>(pmap.info(), std::move(data), pmap.rowBytes());
}

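// Honors the requested copy-pixels mode: copies when copying is forced (kAlways) or when the
// bitmap is mutable and copying isn't forbidden (kNever); otherwise shares the bitmap's pixelRef.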
sk_sp<SkImage> SkMakeImageFromRasterBitmapPriv(const SkBitmap& bm, SkCopyPixelsMode cpm,
                                               uint32_t idForCopy) {
    if (kAlways_SkCopyPixelsMode == cpm || (!bm.isImmutable() && kNever_SkCopyPixelsMode != cpm)) {
        SkPixmap pmap;
        if (bm.peekPixels(&pmap)) {
            return MakeRasterCopyPriv(pmap, idForCopy);
        } else {
            return sk_sp<SkImage>();
        }
    }

    return sk_make_sp<SkImage_Raster>(bm, kNever_SkCopyPixelsMode == cpm);
}

sk_sp<SkImage> SkMakeImageFromRasterBitmap(const SkBitmap& bm, SkCopyPixelsMode cpm) {
    if (!SkImageInfoIsValid(bm.info()) || bm.rowBytes() < bm.info().minRowBytes()) {
        return nullptr;
    }

    return SkMakeImageFromRasterBitmapPriv(bm, cpm, kNeedNewImageUniqueID);
}

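// The caller must already know the image is raster-backed; this downcast is not checked.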
const SkPixelRef* SkBitmapImageGetPixelRef(const SkImage* image) {
    return ((const SkImage_Raster*)image)->getPixelRef();
}

bool SkImage_Raster::onAsLegacyBitmap(SkBitmap* bitmap) const {
    // When we're a snapshot from a surface, our bitmap may not be marked immutable
    // even though logically we always are. In that case we can't physically share our
    // pixelref, since the caller might call setImmutable() themselves
    // (thus changing our state).
    if (fBitmap.isImmutable()) {
        SkIPoint origin = fBitmap.pixelRefOrigin();
        bitmap->setInfo(fBitmap.info(), fBitmap.rowBytes());
        bitmap->setPixelRef(sk_ref_sp(fBitmap.pixelRef()), origin.x(), origin.y());
        return true;
    }
    return this->INHERITED::onAsLegacyBitmap(bitmap);
}

///////////////////////////////////////////////////////////////////////////////

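// Allocates a new bitmap in the target color type/color space and converts the pixels via
// SkBitmap::writePixels(), then wraps the result in a new immutable image.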
sk_sp<SkImage> SkImage_Raster::onMakeColorTypeAndColorSpace(GrRecordingContext*,
                                                            SkColorType targetCT,
                                                            sk_sp<SkColorSpace> targetCS) const {
    SkPixmap src;
    SkAssertResult(fBitmap.peekPixels(&src));

    SkBitmap dst;
    dst.allocPixels(fBitmap.info().makeColorType(targetCT).makeColorSpace(targetCS));

    SkAssertResult(dst.writePixels(src));
    dst.setImmutable();
    return SkImage::MakeFromBitmap(dst);
}

sk_sp<SkImage> SkImage_Raster::onReinterpretColorSpace(sk_sp<SkColorSpace> newCS) const {
    // TODO: If our bitmap is immutable, then we could theoretically create another image sharing
    // our pixelRef. That doesn't work (without more invasive logic), because the image gets its
    // gen ID from the bitmap, which gets it from the pixelRef.
    SkPixmap pixmap = fBitmap.pixmap();
    pixmap.setColorSpace(std::move(newCS));
    return SkImage::MakeRasterCopy(pixmap);
}