// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4 | |
#ifndef FLUTTER_FLOW_SKIA_GPU_OBJECT_H_
#define FLUTTER_FLOW_SKIA_GPU_OBJECT_H_
7 | |
#include <deque>
#include <mutex>
#include <queue>

#include "flutter/fml/memory/ref_counted.h"
#include "flutter/fml/memory/weak_ptr.h"
#include "flutter/fml/task_runner.h"
#include "third_party/skia/include/core/SkRefCnt.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"
16 | |
namespace flutter {
18 | |
19 | // A queue that holds Skia objects that must be destructed on the given task |
20 | // runner. |
21 | class SkiaUnrefQueue : public fml::RefCountedThreadSafe<SkiaUnrefQueue> { |
22 | public: |
23 | void Unref(SkRefCnt* object); |
24 | |
25 | // Usually, the drain is called automatically. However, during IO manager |
26 | // shutdown (when the platform side reference to the OpenGL context is about |
27 | // to go away), we may need to pre-emptively drain the unref queue. It is the |
28 | // responsibility of the caller to ensure that no further unrefs are queued |
29 | // after this call. |
30 | void Drain(); |
31 | |
32 | private: |
33 | const fml::RefPtr<fml::TaskRunner> task_runner_; |
34 | const fml::TimeDelta drain_delay_; |
35 | std::mutex mutex_; |
36 | std::deque<SkRefCnt*> objects_; |
37 | bool drain_pending_; |
38 | fml::WeakPtr<GrDirectContext> context_; |
39 | |
40 | // The `GrDirectContext* context` is only used for signaling Skia to |
41 | // performDeferredCleanup. It can be nullptr when such signaling is not needed |
42 | // (e.g., in unit tests). |
43 | SkiaUnrefQueue(fml::RefPtr<fml::TaskRunner> task_runner, |
44 | fml::TimeDelta delay, |
45 | fml::WeakPtr<GrDirectContext> context = {}); |
46 | |
47 | ~SkiaUnrefQueue(); |
48 | |
49 | FML_FRIEND_REF_COUNTED_THREAD_SAFE(SkiaUnrefQueue); |
50 | FML_FRIEND_MAKE_REF_COUNTED(SkiaUnrefQueue); |
51 | FML_DISALLOW_COPY_AND_ASSIGN(SkiaUnrefQueue); |
52 | }; |
53 | |
54 | /// An object whose deallocation needs to be performed on an specific unref |
55 | /// queue. The template argument U need to have a call operator that returns |
56 | /// that unref queue. |
57 | template <class T> |
58 | class SkiaGPUObject { |
59 | public: |
60 | using SkiaObjectType = T; |
61 | |
62 | SkiaGPUObject() = default; |
63 | SkiaGPUObject(sk_sp<SkiaObjectType> object, fml::RefPtr<SkiaUnrefQueue> queue) |
64 | : object_(std::move(object)), queue_(std::move(queue)) { |
65 | FML_DCHECK(object_); |
66 | } |
67 | SkiaGPUObject(SkiaGPUObject&&) = default; |
68 | ~SkiaGPUObject() { reset(); } |
69 | |
70 | SkiaGPUObject& operator=(SkiaGPUObject&&) = default; |
71 | |
72 | sk_sp<SkiaObjectType> get() const { return object_; } |
73 | |
74 | void reset() { |
75 | if (object_ && queue_) { |
76 | queue_->Unref(object_.release()); |
77 | } |
78 | queue_ = nullptr; |
79 | FML_DCHECK(object_ == nullptr); |
80 | } |
81 | |
82 | private: |
83 | sk_sp<SkiaObjectType> object_; |
84 | fml::RefPtr<SkiaUnrefQueue> queue_; |
85 | |
86 | FML_DISALLOW_COPY_AND_ASSIGN(SkiaGPUObject); |
87 | }; |
88 | |
}  // namespace flutter

#endif  // FLUTTER_FLOW_SKIA_GPU_OBJECT_H_
92 |