/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/image/SkImage_GpuBase.h"

#include "include/core/SkPromiseImageTexture.h"
#include "include/gpu/GrBackendSurface.h"
#include "include/gpu/GrContext.h"
#include "include/private/GrRecordingContext.h"
#include "src/core/SkBitmapCache.h"
#include "src/core/SkTLList.h"
#include "src/gpu/GrClip.h"
#include "src/gpu/GrContextPriv.h"
#include "src/gpu/GrImageInfo.h"
#include "src/gpu/GrProxyProvider.h"
#include "src/gpu/GrRecordingContextPriv.h"
#include "src/gpu/GrRenderTargetContext.h"
#include "src/gpu/GrTexture.h"
#include "src/gpu/GrTextureAdjuster.h"
#include "src/gpu/effects/GrYUVtoRGBEffect.h"
#include "src/image/SkImage_Gpu.h"
#include "src/image/SkReadPixelsRec.h"

SkImage_GpuBase::SkImage_GpuBase(sk_sp<GrContext> context, SkISize size, uint32_t uniqueID,
                                 SkColorType ct, SkAlphaType at, sk_sp<SkColorSpace> cs)
        : INHERITED(SkImageInfo::Make(size, ct, at, std::move(cs)), uniqueID)
        , fContext(std::move(context)) {}

//////////////////////////////////////////////////////////////////////////////////////////////////

#if GR_TEST_UTILS
void SkImage_GpuBase::resetContext(sk_sp<GrContext> newContext) {
    SkASSERT(fContext->priv().matches(newContext.get()));
    fContext = newContext;
}
#endif

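// Verifies that a client-supplied backend texture, together with the requested color/alpha types,
// describes a combination Ganesh can wrap: the texture, its backend format, and the color info
// must all be valid, and the GrColorType must be compatible with the backend format.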
bool SkImage_GpuBase::ValidateBackendTexture(const GrCaps* caps, const GrBackendTexture& tex,
                                             GrColorType grCT, SkColorType ct, SkAlphaType at,
                                             sk_sp<SkColorSpace> cs) {
    if (!tex.isValid()) {
        return false;
    }
    SkColorInfo info(ct, at, cs);
    if (!SkColorInfoIsValid(info)) {
        return false;
    }
    GrBackendFormat backendFormat = tex.getBackendFormat();
    if (!backendFormat.isValid()) {
        return false;
    }

    return caps->areColorTypeAndFormatCompatible(grCT, backendFormat);
}

bool SkImage_GpuBase::ValidateCompressedBackendTexture(const GrCaps* caps,
                                                       const GrBackendTexture& tex,
                                                       SkAlphaType at) {
    if (!tex.isValid() || tex.width() <= 0 || tex.height() <= 0) {
        return false;
    }

    if (tex.width() > caps->maxTextureSize() || tex.height() > caps->maxTextureSize()) {
        return false;
    }

    if (at == kUnknown_SkAlphaType) {
        return false;
    }

    GrBackendFormat backendFormat = tex.getBackendFormat();
    if (!backendFormat.isValid()) {
        return false;
    }

    if (!caps->isFormatCompressed(backendFormat)) {
        return false;
    }

    return true;
}

//////////////////////////////////////////////////////////////////////////////////////////////////

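// Reads this GPU-backed image's pixels into a raster SkBitmap. Requires a direct context; when
// caching is allowed, the result is added to (and later served from) the SkBitmapCache.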
bool SkImage_GpuBase::getROPixels(SkBitmap* dst, CachingHint chint) const {
    auto direct = fContext->priv().asDirectContext();
    if (!direct) {
        // DDL TODO: buffer up the readback so it occurs when the DDL is drawn?
        return false;
    }

    const auto desc = SkBitmapCacheDesc::Make(this);
    if (SkBitmapCache::Find(desc, dst)) {
        SkASSERT(dst->isImmutable());
        SkASSERT(dst->getPixels());
        return true;
    }

    SkBitmapCache::RecPtr rec = nullptr;
    SkPixmap pmap;
    if (kAllow_CachingHint == chint) {
        rec = SkBitmapCache::Alloc(desc, this->imageInfo(), &pmap);
        if (!rec) {
            return false;
        }
    } else {
        if (!dst->tryAllocPixels(this->imageInfo()) || !dst->peekPixels(&pmap)) {
            return false;
        }
    }

    const GrSurfaceProxyView* view = this->view(direct);
    SkASSERT(view);
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(
            fContext->priv().caps(), this->colorType(), view->proxy()->backendFormat());

    auto sContext = GrSurfaceContext::Make(direct, *view, grColorType, this->alphaType(),
                                           this->refColorSpace());
    if (!sContext) {
        return false;
    }

    if (!sContext->readPixels(pmap.info(), pmap.writable_addr(), pmap.rowBytes(), {0, 0})) {
        return false;
    }

    if (rec) {
        SkBitmapCache::Add(std::move(rec), dst);
        this->notifyAddedToRasterCache();
    }
    return true;
}

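// Makes a new GPU image backed by an exact-fit copy of the requested subset of this image's
// texture.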
sk_sp<SkImage> SkImage_GpuBase::onMakeSubset(GrRecordingContext* context,
                                             const SkIRect& subset) const {
    if (!context || !fContext->priv().matches(context)) {
        return nullptr;
    }

    const GrSurfaceProxyView* view = this->view(context);
    SkASSERT(view && view->proxy());

    auto copyView = GrSurfaceProxyView::Copy(context, *view, GrMipMapped::kNo, subset,
                                             SkBackingFit::kExact, view->proxy()->isBudgeted());

    if (!copyView) {
        return nullptr;
    }

    // MDB: this call is okay because we know the copy was made with SkBackingFit::kExact
    return sk_make_sp<SkImage_Gpu>(fContext, kNeedNewImageUniqueID, std::move(copyView),
                                   this->colorType(), this->alphaType(), this->refColorSpace());
}

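// Reads a rectangle of pixels beginning at (srcX, srcY) into the client's buffer. Only possible
// with a direct context, i.e. not while recording a DDL.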
bool SkImage_GpuBase::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRB,
                                   int srcX, int srcY, CachingHint) const {
    auto direct = fContext->priv().asDirectContext();
    if (!direct) {
        // DDL TODO: buffer up the readback so it occurs when the DDL is drawn?
        return false;
    }

    if (!SkImageInfoValidConversion(dstInfo, this->imageInfo())) {
        return false;
    }

    const GrSurfaceProxyView* view = this->view(direct);
    SkASSERT(view);
    GrColorType grColorType = SkColorTypeAndFormatToGrColorType(
            fContext->priv().caps(), this->colorType(), view->proxy()->backendFormat());

    auto sContext = GrSurfaceContext::Make(direct, *view, grColorType, this->alphaType(),
                                           this->refColorSpace());
    if (!sContext) {
        return false;
    }

    return sContext->readPixels(dstInfo, dstPixels, dstRB, {srcX, srcY});
}

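// Returns a view of this image's texture for the given context, using GrTextureAdjuster to
// satisfy the requested mipmap state.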
GrSurfaceProxyView SkImage_GpuBase::refView(GrRecordingContext* context,
                                            GrMipMapped mipMapped) const {
    if (!context || !fContext->priv().matches(context)) {
        SkASSERT(0);
        return {};
    }

    GrTextureAdjuster adjuster(fContext.get(), *this->view(context), this->imageInfo().colorInfo(),
                               this->uniqueID());
    return adjuster.view(mipMapped);
}

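// Returns the backend texture that backs this image, instantiating the proxy if necessary.
// Optionally flushes pending work on the surface and reports the surface origin.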
GrBackendTexture SkImage_GpuBase::onGetBackendTexture(bool flushPendingGrContextIO,
                                                      GrSurfaceOrigin* origin) const {
    auto direct = fContext->priv().asDirectContext();
    if (!direct) {
        // This image was created with a DDL context and cannot be instantiated.
        return GrBackendTexture(); // invalid
    }

    const GrSurfaceProxyView* view = this->view(direct);
    SkASSERT(view && *view);
    GrSurfaceProxy* proxy = view->proxy();

    if (!proxy->isInstantiated()) {
        auto resourceProvider = direct->priv().resourceProvider();

        if (!proxy->instantiate(resourceProvider)) {
            return GrBackendTexture(); // invalid
        }
    }

    GrTexture* texture = proxy->peekTexture();
    if (texture) {
        if (flushPendingGrContextIO) {
            direct->priv().flushSurface(proxy);
        }
        if (origin) {
            *origin = view->origin();
        }
        return texture->getBackendTexture();
    }
    return GrBackendTexture(); // invalid
}

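// Returns the GrTexture backing this image, instantiating the proxy if needed. Returns nullptr
// for images created during DDL recording, since those cannot be instantiated here.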
GrTexture* SkImage_GpuBase::getTexture() const {
    GrTextureProxy* proxy = this->peekProxy();
    if (proxy && proxy->isInstantiated()) {
        return proxy->peekTexture();
    }

    auto direct = fContext->priv().asDirectContext();
    if (!direct) {
        // This image was created with a DDL context and cannot be instantiated.
        return nullptr;
    }

    const GrSurfaceProxyView* view = this->view(direct);
    SkASSERT(view && *view && !view->proxy()->isInstantiated());

    if (!view->proxy()->instantiate(direct->priv().resourceProvider())) {
        return nullptr;
    }

    return view->proxy()->peekTexture();
}

bool SkImage_GpuBase::onIsValid(GrContext* context) const {
    // The base class has already checked that context isn't abandoned (if it's not nullptr)
    if (fContext->priv().abandoned()) {
        return false;
    }

    if (context && !fContext->priv().matches(context)) {
        return false;
    }

    return true;
}

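// Wraps each client-provided YUVA plane texture in a borrowed, read-only proxy view and checks
// that every plane's format actually supplies the channel referenced by its SkYUVAIndex.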
bool SkImage_GpuBase::MakeTempTextureProxies(GrContext* ctx, const GrBackendTexture yuvaTextures[],
                                             int numTextures, const SkYUVAIndex yuvaIndices[4],
                                             GrSurfaceOrigin imageOrigin,
                                             GrSurfaceProxyView tempViews[4]) {
    GrProxyProvider* proxyProvider = ctx->priv().proxyProvider();
    for (int textureIndex = 0; textureIndex < numTextures; ++textureIndex) {
        const GrBackendFormat& backendFormat = yuvaTextures[textureIndex].getBackendFormat();
        if (!backendFormat.isValid()) {
            return false;
        }

        SkASSERT(yuvaTextures[textureIndex].isValid());

        auto proxy = proxyProvider->wrapBackendTexture(yuvaTextures[textureIndex],
                                                       kBorrow_GrWrapOwnership,
                                                       GrWrapCacheable::kNo, kRead_GrIOType);
        if (!proxy) {
            return false;
        }
        tempViews[textureIndex] =
                GrSurfaceProxyView(std::move(proxy), imageOrigin, GrSwizzle("rgba"));

        // Check that each texture contains the channel data for the corresponding YUVA index
        auto formatChannelMask = backendFormat.channelMask();
        if (formatChannelMask & kGray_SkColorChannelFlag) {
            formatChannelMask |= kRGB_SkColorChannelFlags;
        }
        for (int yuvaIndex = 0; yuvaIndex < SkYUVAIndex::kIndexCount; ++yuvaIndex) {
            if (yuvaIndices[yuvaIndex].fIndex == textureIndex) {
                uint32_t channelAsMask = 1 << static_cast<int>(yuvaIndices[yuvaIndex].fChannel);
                if (!(channelAsMask & formatChannelMask)) {
                    return false;
                }
            }
        }
    }

    return true;
}

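// Draws the YUVA planes into the destination render target context with GrYUVtoRGBEffect in
// kSrc mode, applying an optional color space transformation along the way.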
bool SkImage_GpuBase::RenderYUVAToRGBA(GrContext* ctx, GrRenderTargetContext* renderTargetContext,
                                       const SkRect& rect, SkYUVColorSpace yuvColorSpace,
                                       sk_sp<GrColorSpaceXform> colorSpaceXform,
                                       GrSurfaceProxyView views[4],
                                       const SkYUVAIndex yuvaIndices[4]) {
    SkASSERT(renderTargetContext);
    if (!renderTargetContext->asSurfaceProxy()) {
        return false;
    }

    GrPaint paint;
    paint.setPorterDuffXPFactory(SkBlendMode::kSrc);

    const auto& caps = *ctx->priv().caps();
    auto fp = GrYUVtoRGBEffect::Make(views, yuvaIndices, yuvColorSpace,
                                     GrSamplerState::Filter::kNearest, caps);
    if (colorSpaceXform) {
        fp = GrColorSpaceXformEffect::Make(std::move(fp), std::move(colorSpaceXform));
    }
    paint.addColorFragmentProcessor(std::move(fp));

    renderTargetContext->drawRect(GrNoClip(), std::move(paint), GrAA::kNo, SkMatrix::I(), rect);
    return true;
}

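// Creates the lazy proxy that backs a promise image. Instantiation is deferred to the
// PromiseLazyInstantiateCallback defined below, which invokes the client's fulfill, release, and
// done procs.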
sk_sp<GrTextureProxy> SkImage_GpuBase::MakePromiseImageLazyProxy(
        GrContext* context, int width, int height,
        GrBackendFormat backendFormat, GrMipMapped mipMapped,
        PromiseImageTextureFulfillProc fulfillProc, PromiseImageTextureReleaseProc releaseProc,
        PromiseImageTextureDoneProc doneProc, PromiseImageTextureContext textureContext,
        PromiseImageApiVersion version) {
    SkASSERT(context);
    SkASSERT(width > 0 && height > 0);
    SkASSERT(doneProc);

    if (!fulfillProc || !releaseProc) {
        doneProc(textureContext);
        return nullptr;
    }

    if (mipMapped == GrMipMapped::kYes &&
        GrTextureTypeHasRestrictedSampling(backendFormat.textureType())) {
        // It is invalid to have a GL_TEXTURE_EXTERNAL or GL_TEXTURE_RECTANGLE and have mips as
        // well.
        doneProc(textureContext);
        return nullptr;
    }

    /**
     * This class is the lazy instantiation callback for promise images. It manages calling the
     * client's Fulfill, Release, and Done procs. It attempts to reuse a GrTexture instance in
     * cases where the client provides the same SkPromiseImageTexture as Fulfill results for
     * multiple SkImages. The created GrTexture is given a key based on a unique ID associated
     * with the SkPromiseImageTexture.
     *
     * The GrTexture idle proc mechanism is used to call the Release and Done procs. We use this
     * instead of the GrSurface release proc because the GrTexture is cached and therefore may
     * outlive the proxy into which this callback is installed.
     *
     * A key invalidation message is installed on the SkPromiseImageTexture so that the GrTexture
     * is deleted once it can no longer be used to instantiate a proxy.
     */
    class PromiseLazyInstantiateCallback {
    public:
        PromiseLazyInstantiateCallback(PromiseImageTextureFulfillProc fulfillProc,
                                       PromiseImageTextureReleaseProc releaseProc,
                                       PromiseImageTextureDoneProc doneProc,
                                       PromiseImageTextureContext context,
                                       PromiseImageApiVersion version)
                : fFulfillProc(fulfillProc)
                , fReleaseProc(releaseProc)
                , fVersion(version) {
            fDoneCallback = sk_make_sp<GrRefCntedCallback>(doneProc, context);
        }
        PromiseLazyInstantiateCallback(PromiseLazyInstantiateCallback&&) = default;
        PromiseLazyInstantiateCallback(const PromiseLazyInstantiateCallback&) {
            // Because we get wrapped in std::function we must be copyable. But we should never
            // be copied.
            SkASSERT(false);
        }
        PromiseLazyInstantiateCallback& operator=(PromiseLazyInstantiateCallback&&) = default;
        PromiseLazyInstantiateCallback& operator=(const PromiseLazyInstantiateCallback&) {
            SkASSERT(false);
            return *this;
        }

        ~PromiseLazyInstantiateCallback() {
            // Our destructor can run on any thread. We trigger the unref of fTexture by message.
            // This unreffed texture pointer is a real problem! When the context has been
            // abandoned, the GrTexture pointed to by this pointer is deleted! Due to virtual
            // inheritance any manipulation of this pointer at that point will cause a crash.
            // For now we "work around" the problem by just passing it, untouched, into the
            // message bus, but this is very fragile.
            // In the future the GrSurface class hierarchy refactoring should eliminate this
            // difficulty by removing the virtual inheritance.
            if (fTexture) {
                SkMessageBus<GrTextureFreedMessage>::Post({fTexture, fTextureContextID});
            }
        }

        GrSurfaceProxy::LazyCallbackResult operator()(GrResourceProvider* resourceProvider) {
            // We use the unique key in a way that is unrelated to the SkImage-based key that the
            // proxy may receive, hence kUnsynced.
            static constexpr auto kKeySyncMode =
                    GrSurfaceProxy::LazyInstantiationKeyMode::kUnsynced;

            // In order to make the SkImage "thread safe" we rely on holding an extra ref to the
            // texture in the callback and signalling the unref via a message to the resource
            // cache. We need to extend the callback's lifetime to that of the proxy.
            static constexpr auto kReleaseCallbackOnInstantiation = false;

            // Our proxy is getting instantiated for the second+ time. We are only allowed to call
            // Fulfill once. So return our cached result.
            if (fTexture) {
                return {sk_ref_sp(fTexture), kReleaseCallbackOnInstantiation, kKeySyncMode};
            } else if (fFulfillProcFailed) {
                // We've already called fulfill and it failed. Our contract says that we should
                // only call each callback once.
                return {};
            }
            SkASSERT(fDoneCallback);
            PromiseImageTextureContext textureContext = fDoneCallback->context();
            sk_sp<SkPromiseImageTexture> promiseTexture = fFulfillProc(textureContext);
            // From here on out our contract is that the release proc must be called, even if
            // the return from fulfill was invalid or we fail for some other reason.
            auto releaseCallback = sk_make_sp<GrRefCntedCallback>(fReleaseProc, textureContext);
            if (!promiseTexture) {
                fFulfillProcFailed = true;
                return {};
            }

            const GrBackendTexture& backendTexture = promiseTexture->backendTexture();
            if (!backendTexture.isValid()) {
                return {};
            }

            sk_sp<GrTexture> tex;
            static const GrUniqueKey::Domain kDomain = GrUniqueKey::GenerateDomain();
            GrUniqueKey key;
            GrUniqueKey::Builder builder(&key, kDomain, 1, "promise");
            builder[0] = promiseTexture->uniqueID();
            builder.finish();
            // A texture with this key may already exist from a different instance of this lazy
            // callback. This could happen if the client fulfills a promise image with a texture
            // that was previously used to fulfill a different promise image.
            if (auto surf = resourceProvider->findByUniqueKey<GrSurface>(key)) {
                tex = sk_ref_sp(surf->asTexture());
                SkASSERT(tex);
            } else {
                if ((tex = resourceProvider->wrapBackendTexture(
                             backendTexture, kBorrow_GrWrapOwnership, GrWrapCacheable::kYes,
                             kRead_GrIOType))) {
                    tex->resourcePriv().setUniqueKey(key);
                } else {
                    return {};
                }
            }
            auto releaseIdleState = fVersion == PromiseImageApiVersion::kLegacy
                                            ? GrTexture::IdleState::kFinished
                                            : GrTexture::IdleState::kFlushed;
            tex->addIdleProc(std::move(releaseCallback), releaseIdleState);
            tex->addIdleProc(std::move(fDoneCallback), GrTexture::IdleState::kFinished);
            promiseTexture->addKeyToInvalidate(tex->getContext()->priv().contextID(), key);
            fTexture = tex.get();
            // We need to hold on to the GrTexture in case our proxy gets reinstantiated. However,
            // we can't unref in our destructor because we may be on another thread then. So we
            // let the cache know it is waiting on an unref message. We will send that message
            // from our destructor.
            GrContext* context = fTexture->getContext();
            context->priv().getResourceCache()->insertDelayedTextureUnref(fTexture);
            fTextureContextID = context->priv().contextID();
            return {std::move(tex), kReleaseCallbackOnInstantiation, kKeySyncMode};
        }

    private:
        PromiseImageTextureFulfillProc fFulfillProc;
        PromiseImageTextureReleaseProc fReleaseProc;
        sk_sp<GrRefCntedCallback> fDoneCallback;
        GrTexture* fTexture = nullptr;
        uint32_t fTextureContextID = SK_InvalidUniqueID;
        PromiseImageApiVersion fVersion;
        bool fFulfillProcFailed = false;
    } callback(fulfillProc, releaseProc, doneProc, textureContext, version);

    GrProxyProvider* proxyProvider = context->priv().proxyProvider();

    // Ganesh assumes that, when wrapping a mipmapped backend texture from a client, its mipmaps
    // are fully fleshed out.
    GrMipMapsStatus mipMapsStatus = (GrMipMapped::kYes == mipMapped)
            ? GrMipMapsStatus::kValid : GrMipMapsStatus::kNotAllocated;

    // We pass kReadOnly here since we should treat content of the client's texture as immutable.
    // The promise API provides no way for the client to indicate that the texture is protected.
    return proxyProvider->createLazyProxy(
            std::move(callback), backendFormat, {width, height}, GrRenderable::kNo, 1, mipMapped,
            mipMapsStatus, GrInternalSurfaceFlags::kReadOnly, SkBackingFit::kExact, SkBudgeted::kNo,
            GrProtected::kNo, GrSurfaceProxy::UseAllocator::kYes);
}