1 | /* |
2 | * Copyright 2012 Google Inc. |
3 | * |
4 | * Use of this source code is governed by a BSD-style license that can be |
5 | * found in the LICENSE file. |
6 | */ |
7 | |
8 | #include "include/core/SkBitmap.h" |
9 | #include "include/core/SkCanvas.h" |
10 | #include "include/core/SkData.h" |
11 | #include "include/core/SkPixelRef.h" |
12 | #include "include/core/SkSurface.h" |
13 | #include "include/private/SkImageInfoPriv.h" |
14 | #include "src/codec/SkColorTable.h" |
15 | #include "src/core/SkCompressedDataUtils.h" |
16 | #include "src/core/SkConvertPixels.h" |
17 | #include "src/core/SkImagePriv.h" |
18 | #include "src/core/SkTLazy.h" |
19 | #include "src/image/SkImage_Base.h" |
20 | #include "src/shaders/SkBitmapProcShader.h" |
21 | |
22 | #if SK_SUPPORT_GPU |
23 | #include "src/gpu/GrTextureAdjuster.h" |
24 | #include "src/gpu/SkGr.h" |
25 | #endif |
26 | |
// Guards against https://bug.skia.org/5096: a bitmap that is a subset of its pixelRef shares
// the pixelRef's generation ID, so that ID must not be reused as the image's unique ID.
28 | static bool is_not_subset(const SkBitmap& bm) { |
29 | SkASSERT(bm.pixelRef()); |
30 | SkISize dim = SkISize::Make(bm.pixelRef()->width(), bm.pixelRef()->height()); |
31 | SkASSERT(dim != bm.dimensions() || bm.pixelRefOrigin().isZero()); |
32 | return dim == bm.dimensions(); |
33 | } |
34 | |
35 | class SkImage_Raster : public SkImage_Base { |
36 | public: |
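    // Checks that (info, rowBytes) describe a raster image we can represent: non-empty
    // dimensions within range, known color/alpha types, and row bytes that pass
    // SkImageInfo::validRowBytes() without overflowing the total byte size. On success,
    // *minSize (if provided) receives the minimum number of bytes of pixel data required.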
37 | static bool ValidArgs(const SkImageInfo& info, size_t rowBytes, size_t* minSize) { |
38 | const int maxDimension = SK_MaxS32 >> 2; |
39 | |
40 | // TODO(mtklein): eliminate anything here that setInfo() has already checked. |
41 | SkBitmap dummy; |
42 | if (!dummy.setInfo(info, rowBytes)) { |
43 | return false; |
44 | } |
45 | |
46 | if (info.width() <= 0 || info.height() <= 0) { |
47 | return false; |
48 | } |
49 | if (info.width() > maxDimension || info.height() > maxDimension) { |
50 | return false; |
51 | } |
52 | if ((unsigned)info.colorType() > (unsigned)kLastEnum_SkColorType) { |
53 | return false; |
54 | } |
55 | if ((unsigned)info.alphaType() > (unsigned)kLastEnum_SkAlphaType) { |
56 | return false; |
57 | } |
58 | |
59 | if (kUnknown_SkColorType == info.colorType()) { |
60 | return false; |
61 | } |
62 | if (!info.validRowBytes(rowBytes)) { |
63 | return false; |
64 | } |
65 | |
66 | size_t size = info.computeByteSize(rowBytes); |
67 | if (SkImageInfo::ByteSizeOverflowed(size)) { |
68 | return false; |
69 | } |
70 | |
71 | if (minSize) { |
72 | *minSize = size; |
73 | } |
74 | return true; |
75 | } |
76 | |
77 | SkImage_Raster(const SkImageInfo&, sk_sp<SkData>, size_t rb, |
78 | uint32_t id = kNeedNewImageUniqueID); |
79 | ~SkImage_Raster() override; |
80 | |
81 | bool onReadPixels(const SkImageInfo&, void*, size_t, int srcX, int srcY, |
82 | CachingHint) const override; |
83 | bool onPeekPixels(SkPixmap*) const override; |
84 | const SkBitmap* onPeekBitmap() const override { return &fBitmap; } |
85 | |
86 | #if SK_SUPPORT_GPU |
87 | GrSurfaceProxyView refView(GrRecordingContext*, GrMipmapped) const override; |
88 | #endif |
89 | |
90 | bool getROPixels(SkBitmap*, CachingHint) const override; |
91 | sk_sp<SkImage> onMakeSubset(const SkIRect&, GrDirectContext*) const override; |
92 | |
93 | SkPixelRef* getPixelRef() const { return fBitmap.pixelRef(); } |
94 | |
95 | bool onAsLegacyBitmap(SkBitmap*) const override; |
96 | |
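    // Wraps an existing bitmap without copying its pixels. When the bitmap is not a subset of
    // its pixelRef, the bitmap's generation ID doubles as the image's unique ID; otherwise a
    // new ID is assigned (see is_not_subset() above).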
97 | SkImage_Raster(const SkBitmap& bm, bool bitmapMayBeMutable = false) |
98 | : INHERITED(bm.info(), |
99 | is_not_subset(bm) ? bm.getGenerationID() : (uint32_t)kNeedNewImageUniqueID) |
100 | , fBitmap(bm) { |
101 | SkASSERT(bitmapMayBeMutable || fBitmap.isImmutable()); |
102 | } |
103 | |
104 | sk_sp<SkImage> onMakeColorTypeAndColorSpace(SkColorType, sk_sp<SkColorSpace>, |
105 | GrDirectContext*) const override; |
106 | |
107 | sk_sp<SkImage> onReinterpretColorSpace(sk_sp<SkColorSpace>) const override; |
108 | |
109 | bool onIsValid(GrRecordingContext* context) const override { return true; } |
110 | void notifyAddedToRasterCache() const override { |
        // We explicitly DON'T want to call INHERITED::notifyAddedToRasterCache. That ties the
        // lifetime of derived/cached resources to the image. In this case, we only want cached
        // data (e.g. mipmaps) tied to the lifetime of the underlying pixelRef.
114 | SkASSERT(fBitmap.pixelRef()); |
115 | fBitmap.pixelRef()->notifyAddedToCache(); |
116 | } |
117 | |
118 | #if SK_SUPPORT_GPU |
119 | GrSurfaceProxyView refPinnedView(GrRecordingContext* context, |
120 | uint32_t* uniqueID) const override; |
121 | bool onPinAsTexture(GrRecordingContext*) const override; |
122 | void onUnpinAsTexture(GrRecordingContext*) const override; |
123 | #endif |
124 | |
125 | SkMipmap* onPeekMips() const override { return fBitmap.fMips.get(); } |
126 | |
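    // Returns a new image sharing our pixels, carrying either the supplied mip chain or, if
    // none was supplied, one freshly built from our pixels.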
127 | sk_sp<SkImage> onMakeWithMipmaps(sk_sp<SkMipmap> mips) const override { |
128 | auto img = new SkImage_Raster(fBitmap); |
129 | if (mips) { |
130 | img->fBitmap.fMips = std::move(mips); |
131 | } else { |
132 | img->fBitmap.fMips.reset(SkMipmap::Build(fBitmap.pixmap(), nullptr)); |
133 | } |
134 | return sk_sp<SkImage>(img); |
135 | } |
136 | |
137 | private: |
138 | SkBitmap fBitmap; |
139 | |
140 | #if SK_SUPPORT_GPU |
141 | mutable GrSurfaceProxyView fPinnedView; |
142 | mutable int32_t fPinnedCount = 0; |
143 | mutable uint32_t fPinnedUniqueID = 0; |
144 | #endif |
145 | |
    using INHERITED = SkImage_Base;
147 | }; |
148 | |
149 | /////////////////////////////////////////////////////////////////////////////// |
150 | |
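// Release proc installed alongside the SkData-backed pixels below: unrefs the SkData once the
// pixelRef no longer needs the memory.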
151 | static void release_data(void* addr, void* context) { |
152 | SkData* data = static_cast<SkData*>(context); |
153 | data->unref(); |
154 | } |
155 | |
156 | SkImage_Raster::SkImage_Raster(const SkImageInfo& info, sk_sp<SkData> data, size_t rowBytes, |
157 | uint32_t id) |
158 | : INHERITED(info, id) { |
159 | void* addr = const_cast<void*>(data->data()); |
160 | |
161 | fBitmap.installPixels(info, addr, rowBytes, release_data, data.release()); |
162 | fBitmap.setImmutable(); |
163 | } |
164 | |
165 | SkImage_Raster::~SkImage_Raster() { |
166 | #if SK_SUPPORT_GPU |
167 | SkASSERT(!fPinnedView); // want the caller to have manually unpinned |
168 | #endif |
169 | } |
170 | |
171 | bool SkImage_Raster::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRowBytes, |
172 | int srcX, int srcY, CachingHint) const { |
173 | SkBitmap shallowCopy(fBitmap); |
174 | return shallowCopy.readPixels(dstInfo, dstPixels, dstRowBytes, srcX, srcY); |
175 | } |
176 | |
177 | bool SkImage_Raster::onPeekPixels(SkPixmap* pm) const { |
178 | return fBitmap.peekPixels(pm); |
179 | } |
180 | |
181 | bool SkImage_Raster::getROPixels(SkBitmap* dst, CachingHint) const { |
182 | *dst = fBitmap; |
183 | return true; |
184 | } |
185 | |
186 | #if SK_SUPPORT_GPU |
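// Returns a texture view of this image: if the image is currently pinned, the pinned view is
// adjusted to the requested mip status; otherwise the bitmap is uploaded (and possibly cached)
// via GrRefCachedBitmapView.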
187 | GrSurfaceProxyView SkImage_Raster::refView(GrRecordingContext* context, |
188 | GrMipmapped mipMapped) const { |
189 | if (!context) { |
190 | return {}; |
191 | } |
192 | |
193 | uint32_t uniqueID; |
194 | if (GrSurfaceProxyView view = this->refPinnedView(context, &uniqueID)) { |
195 | GrTextureAdjuster adjuster(context, std::move(view), fBitmap.info().colorInfo(), |
196 | fPinnedUniqueID); |
197 | return adjuster.view(mipMapped); |
198 | } |
199 | |
200 | return GrRefCachedBitmapView(context, fBitmap, mipMapped); |
201 | } |
202 | #endif |
203 | |
204 | #if SK_SUPPORT_GPU |
205 | |
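// Pinning keeps a texture copy of the bitmap alive across uses. The first pin uploads the
// bitmap and records its generation ID; subsequent pins just bump a ref count, and the view is
// dropped once the count returns to zero.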
206 | GrSurfaceProxyView SkImage_Raster::refPinnedView(GrRecordingContext*, uint32_t* uniqueID) const { |
207 | if (fPinnedView) { |
208 | SkASSERT(fPinnedCount > 0); |
209 | SkASSERT(fPinnedUniqueID != 0); |
210 | *uniqueID = fPinnedUniqueID; |
211 | return fPinnedView; |
212 | } |
213 | return {}; |
214 | } |
215 | |
216 | bool SkImage_Raster::onPinAsTexture(GrRecordingContext* rContext) const { |
217 | if (fPinnedView) { |
218 | SkASSERT(fPinnedCount > 0); |
219 | SkASSERT(fPinnedUniqueID != 0); |
220 | } else { |
221 | SkASSERT(fPinnedCount == 0); |
222 | SkASSERT(fPinnedUniqueID == 0); |
223 | fPinnedView = GrRefCachedBitmapView(rContext, fBitmap, GrMipmapped::kNo); |
224 | if (!fPinnedView) { |
225 | return false; |
226 | } |
227 | SkASSERT(fPinnedView.asTextureProxy()); |
228 | fPinnedUniqueID = fBitmap.getGenerationID(); |
229 | } |
230 | // Note: we only increment if the texture was successfully pinned |
231 | ++fPinnedCount; |
232 | return true; |
233 | } |
234 | |
235 | void SkImage_Raster::onUnpinAsTexture(GrRecordingContext*) const { |
    // Note: we always decrement, even if fPinnedView is null
237 | SkASSERT(fPinnedCount > 0); |
238 | SkASSERT(fPinnedUniqueID != 0); |
239 | |
240 | if (0 == --fPinnedCount) { |
241 | fPinnedView = GrSurfaceProxyView(); |
242 | fPinnedUniqueID = 0; |
243 | } |
244 | } |
245 | #endif |
246 | |
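// Subsetting a raster image always copies: the requested rect is copied into a freshly
// allocated, immutable bitmap rather than sharing the original pixelRef.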
247 | sk_sp<SkImage> SkImage_Raster::onMakeSubset(const SkIRect& subset, GrDirectContext*) const { |
248 | SkImageInfo info = fBitmap.info().makeDimensions(subset.size()); |
249 | SkBitmap bitmap; |
250 | if (!bitmap.tryAllocPixels(info)) { |
251 | return nullptr; |
252 | } |
253 | |
254 | void* dst = bitmap.getPixels(); |
255 | void* src = fBitmap.getAddr(subset.x(), subset.y()); |
256 | if (!dst || !src) { |
        SkDEBUGFAIL("SkImage_Raster::onMakeSubset with nullptr src or dst");
258 | return nullptr; |
259 | } |
260 | |
261 | SkRectMemcpy(dst, bitmap.rowBytes(), src, fBitmap.rowBytes(), bitmap.rowBytes(), |
262 | subset.height()); |
263 | |
264 | bitmap.setImmutable(); |
265 | return MakeFromBitmap(bitmap); |
266 | } |
267 | |
268 | /////////////////////////////////////////////////////////////////////////////// |
269 | |
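// Makes an image backed by a private copy of the caller's pixels, optionally tagged with a
// caller-supplied unique ID.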
270 | sk_sp<SkImage> MakeRasterCopyPriv(const SkPixmap& pmap, uint32_t id) { |
271 | size_t size; |
272 | if (!SkImage_Raster::ValidArgs(pmap.info(), pmap.rowBytes(), &size) || !pmap.addr()) { |
273 | return nullptr; |
274 | } |
275 | |
276 | // Here we actually make a copy of the caller's pixel data |
277 | sk_sp<SkData> data(SkData::MakeWithCopy(pmap.addr(), size)); |
278 | return sk_make_sp<SkImage_Raster>(pmap.info(), std::move(data), pmap.rowBytes(), id); |
279 | } |
280 | |
281 | sk_sp<SkImage> SkImage::MakeRasterCopy(const SkPixmap& pmap) { |
282 | return MakeRasterCopyPriv(pmap, kNeedNewImageUniqueID); |
283 | } |
284 | |
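// Wraps the caller's SkData as image pixels without copying; the data must hold at least
// info.computeByteSize(rowBytes) bytes. A caller-side sketch (names are illustrative only):
//
//   SkImageInfo info = SkImageInfo::MakeN32Premul(width, height);
//   sk_sp<SkData> pixels = SkData::MakeUninitialized(info.computeMinByteSize());
//   // ... fill pixels->writable_data() ...
//   sk_sp<SkImage> image = SkImage::MakeRasterData(info, pixels, info.minRowBytes());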
285 | sk_sp<SkImage> SkImage::MakeRasterData(const SkImageInfo& info, sk_sp<SkData> data, |
286 | size_t rowBytes) { |
287 | size_t size; |
288 | if (!SkImage_Raster::ValidArgs(info, rowBytes, &size) || !data) { |
289 | return nullptr; |
290 | } |
291 | |
292 | // did they give us enough data? |
293 | if (data->size() < size) { |
294 | return nullptr; |
295 | } |
296 | |
297 | return sk_make_sp<SkImage_Raster>(info, std::move(data), rowBytes); |
298 | } |
299 | |
300 | // TODO: this could be improved to decode and make use of the mipmap |
301 | // levels potentially present in the compressed data. For now, any |
302 | // mipmap levels are discarded. |
303 | sk_sp<SkImage> SkImage::MakeRasterFromCompressed(sk_sp<SkData> data, |
304 | int width, int height, |
305 | CompressionType type) { |
306 | size_t expectedSize = SkCompressedFormatDataSize(type, { width, height }, false); |
307 | if (!data || data->size() < expectedSize) { |
308 | return nullptr; |
309 | } |
310 | |
311 | SkAlphaType at = SkCompressionTypeIsOpaque(type) ? kOpaque_SkAlphaType |
312 | : kPremul_SkAlphaType; |
313 | |
314 | SkImageInfo ii = SkImageInfo::MakeN32(width, height, at); |
315 | |
316 | if (!SkImage_Raster::ValidArgs(ii, ii.minRowBytes(), nullptr)) { |
317 | return nullptr; |
318 | } |
319 | |
320 | SkBitmap bitmap; |
321 | if (!bitmap.tryAllocPixels(ii)) { |
322 | return nullptr; |
323 | } |
324 | |
325 | if (!SkDecompress(std::move(data), { width, height }, type, &bitmap)) { |
326 | return nullptr; |
327 | } |
328 | |
329 | bitmap.setImmutable(); |
330 | return MakeFromBitmap(bitmap); |
331 | } |
332 | |
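// Wraps the caller's pixels without copying. The memory must stay valid until the supplied
// RasterReleaseProc fires with the given ReleaseContext, which happens once the image no longer
// needs the pixels.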
333 | sk_sp<SkImage> SkImage::MakeFromRaster(const SkPixmap& pmap, RasterReleaseProc proc, |
334 | ReleaseContext ctx) { |
335 | size_t size; |
336 | if (!SkImage_Raster::ValidArgs(pmap.info(), pmap.rowBytes(), &size) || !pmap.addr()) { |
337 | return nullptr; |
338 | } |
339 | |
340 | sk_sp<SkData> data(SkData::MakeWithProc(pmap.addr(), size, proc, ctx)); |
341 | return sk_make_sp<SkImage_Raster>(pmap.info(), std::move(data), pmap.rowBytes()); |
342 | } |
343 | |
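// Turns a bitmap into an image according to the requested copy mode: kAlways, or a mutable
// bitmap with any mode other than kNever, forces a deep copy; otherwise the image wraps the
// bitmap's pixelRef directly.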
344 | sk_sp<SkImage> SkMakeImageFromRasterBitmapPriv(const SkBitmap& bm, SkCopyPixelsMode cpm, |
345 | uint32_t idForCopy) { |
346 | if (kAlways_SkCopyPixelsMode == cpm || (!bm.isImmutable() && kNever_SkCopyPixelsMode != cpm)) { |
347 | SkPixmap pmap; |
348 | if (bm.peekPixels(&pmap)) { |
349 | return MakeRasterCopyPriv(pmap, idForCopy); |
350 | } else { |
            return nullptr;
352 | } |
353 | } |
354 | |
355 | return sk_make_sp<SkImage_Raster>(bm, kNever_SkCopyPixelsMode == cpm); |
356 | } |
357 | |
358 | sk_sp<SkImage> SkMakeImageFromRasterBitmap(const SkBitmap& bm, SkCopyPixelsMode cpm) { |
359 | if (!SkImageInfoIsValid(bm.info()) || bm.rowBytes() < bm.info().minRowBytes()) { |
360 | return nullptr; |
361 | } |
362 | |
363 | return SkMakeImageFromRasterBitmapPriv(bm, cpm, kNeedNewImageUniqueID); |
364 | } |
365 | |
366 | const SkPixelRef* SkBitmapImageGetPixelRef(const SkImage* image) { |
367 | return ((const SkImage_Raster*)image)->getPixelRef(); |
368 | } |
369 | |
370 | bool SkImage_Raster::onAsLegacyBitmap(SkBitmap* bitmap) const { |
    // When we're a snapshot from a surface, our bitmap may not be marked immutable
    // even though logically it always is. In that case we can't physically share our
    // pixelRef, since the caller might call setImmutable() themselves (thus changing
    // our state).
375 | if (fBitmap.isImmutable()) { |
376 | SkIPoint origin = fBitmap.pixelRefOrigin(); |
377 | bitmap->setInfo(fBitmap.info(), fBitmap.rowBytes()); |
378 | bitmap->setPixelRef(sk_ref_sp(fBitmap.pixelRef()), origin.x(), origin.y()); |
379 | return true; |
380 | } |
381 | return this->INHERITED::onAsLegacyBitmap(bitmap); |
382 | } |
383 | |
384 | /////////////////////////////////////////////////////////////////////////////// |
385 | |
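// Color type / color space conversion for raster images happens on the CPU: allocate a bitmap
// with the target info and let writePixels() convert the pixels into it.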
386 | sk_sp<SkImage> SkImage_Raster::onMakeColorTypeAndColorSpace(SkColorType targetCT, |
387 | sk_sp<SkColorSpace> targetCS, |
388 | GrDirectContext*) const { |
389 | SkPixmap src; |
390 | SkAssertResult(fBitmap.peekPixels(&src)); |
391 | |
392 | SkBitmap dst; |
393 | dst.allocPixels(fBitmap.info().makeColorType(targetCT).makeColorSpace(targetCS)); |
394 | |
395 | SkAssertResult(dst.writePixels(src)); |
396 | dst.setImmutable(); |
397 | return SkImage::MakeFromBitmap(dst); |
398 | } |
399 | |
400 | sk_sp<SkImage> SkImage_Raster::onReinterpretColorSpace(sk_sp<SkColorSpace> newCS) const { |
401 | // TODO: If our bitmap is immutable, then we could theoretically create another image sharing |
402 | // our pixelRef. That doesn't work (without more invasive logic), because the image gets its |
403 | // gen ID from the bitmap, which gets it from the pixelRef. |
404 | SkPixmap pixmap = fBitmap.pixmap(); |
405 | pixmap.setColorSpace(std::move(newCS)); |
406 | return SkImage::MakeRasterCopy(pixmap); |
407 | } |
408 | |