/src/skia/include/private/SkWeakRefCnt.h
Line | Count | Source |
1 | | /* |
2 | | * Copyright 2012 Google Inc. |
3 | | * |
4 | | * Use of this source code is governed by a BSD-style license that can be |
5 | | * found in the LICENSE file. |
6 | | */ |
7 | | |
8 | | #ifndef SkWeakRefCnt_DEFINED |
9 | | #define SkWeakRefCnt_DEFINED |
10 | | |
11 | | #include "include/core/SkRefCnt.h" |
12 | | #include "include/core/SkTypes.h" |
13 | | |
14 | | #include <atomic> |
15 | | #include <cstdint> |
16 | | |
17 | | /** \class SkWeakRefCnt |
18 | | |
19 | | SkWeakRefCnt is the base class for objects that may be shared by multiple |
20 | | owners. When an existing strong owner wants to share a reference, it calls |
21 | | ref(). When a strong owner wants to release its reference, it calls |
22 | | unref(). When the shared object's strong reference count goes to zero as |
23 | | the result of an unref() call, its (virtual) weak_dispose method is called. |
24 | | It is an error for the destructor to be called explicitly (or via the |
25 | | object going out of scope on the stack or calling delete) if |
26 | | getRefCnt() > 1. |
27 | | |
28 | | In addition to strong ownership, an owner may instead obtain a weak |
29 | | reference by calling weak_ref(). A call to weak_ref() must be balanced by a |
30 | | call to weak_unref(). To obtain a strong reference from a weak reference, |
31 | | call try_ref(). If try_ref() returns true, the owner's pointer is now also |
32 | | a strong reference on which unref() must be called. Note that this does not |
33 | | affect the original weak reference; weak_unref() must still be called. When |
34 | | the weak reference count goes to zero, the object is deleted. While the |
35 | | weak reference count is positive and the strong reference count is zero, the |
36 | | object still exists, but will be in the disposed state. It is up to the |
37 | | object to define what this means. |
38 | | |
39 | | Note that a strong reference implicitly implies a weak reference. As a |
40 | | result, it is allowable for the owner of a strong ref to call try_ref(). |
41 | | This will have the same effect as calling ref(), but may be more expensive. |
42 | | |
43 | | Example: |
44 | | |
45 | | SkWeakRefCnt* myRef = strongRef; myRef->weak_ref(); |
46 | | ... // strongRef->unref() may or may not be called |
47 | | if (myRef->try_ref()) { |
48 | | ... // use myRef, then release the strong reference |
49 | | myRef->unref(); |
50 | | } else { |
51 | | // myRef is in the disposed state |
52 | | } |
53 | | myRef->weak_unref(); |
54 | | */ |
55 | | class SK_API SkWeakRefCnt : public SkRefCnt { |
56 | | public: |
57 | | /** Default construct, initializing the reference counts to 1. |
58 | | The strong references collectively hold one weak reference. When the |
59 | | strong reference count goes to zero, the collectively held weak |
60 | | reference is released. |
61 | | */ |
62 | 52.2k | SkWeakRefCnt() : SkRefCnt(), fWeakCnt(1) {} |
63 | | |
64 | | /** Destruct, asserting that the weak reference count is 1. |
65 | | */ |
66 | 0 | ~SkWeakRefCnt() override { |
67 | 0 | #ifdef SK_DEBUG |
68 | 0 | SkASSERT(getWeakCnt() == 1); |
69 | 0 | fWeakCnt.store(0, std::memory_order_relaxed); |
70 | 0 | #endif |
71 | 0 | } Unexecuted instantiation: SkWeakRefCnt::~SkWeakRefCnt()
72 | | |
73 | | #ifdef SK_DEBUG |
74 | | /** Return the weak reference count. */ |
75 | 0 | int32_t getWeakCnt() const { |
76 | 0 | return fWeakCnt.load(std::memory_order_relaxed); |
77 | 0 | } |
78 | | #endif |
79 | | |
80 | | private: |
81 | | /** If fRefCnt is 0, returns 0. |
82 | | * Otherwise increments fRefCnt, acquires, and returns the old value. |
83 | | */ |
84 | 0 | int32_t atomic_conditional_acquire_strong_ref() const { |
85 | 0 | int32_t prev = fRefCnt.load(std::memory_order_relaxed); |
86 | 0 | do { |
87 | 0 | if (0 == prev) { |
88 | 0 | break; |
89 | 0 | } |
90 | 0 | } while(!fRefCnt.compare_exchange_weak(prev, prev+1, std::memory_order_acquire, |
91 | 0 | std::memory_order_relaxed)); |
92 | 0 | return prev; |
93 | 0 | } |
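    /* The loop above is the lock-free "increment only if non-zero" idiom. A
       minimal standalone sketch of the same pattern, with hypothetical names
       and assuming only <atomic> and <cstdint>:

           // Returns the previous count; 0 means no strong ref was acquired.
           int32_t acquire_if_live(std::atomic<int32_t>& cnt) {
               int32_t prev = cnt.load(std::memory_order_relaxed);
               while (prev != 0 &&
                      !cnt.compare_exchange_weak(prev, prev + 1,
                                                 std::memory_order_acquire,
                                                 std::memory_order_relaxed)) {
                   // compare_exchange_weak reloaded prev on failure; retry.
               }
               return prev;
           }

       compare_exchange_weak may fail spuriously, so the retry loop is needed
       even when the observed value has not changed. */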
94 | | |
95 | | public: |
96 | | /** Creates a strong reference from a weak reference, if possible. The |
97 | | caller must already be an owner. If try_ref() returns true the owner |
98 | | is in possession of an additional strong reference. Both the original |
99 | | reference and new reference must be properly unreferenced. If try_ref() |
100 | | returns false, no strong reference could be created and the owner's |
101 | | reference is in the same state as before the call. |
102 | | */ |
103 | 0 | [[nodiscard]] bool try_ref() const { |
104 | 0 | if (atomic_conditional_acquire_strong_ref() != 0) { |
105 | 0 | // Acquire barrier (L/SL), if not provided above. |
106 | 0 | // Prevents subsequent code from happening before the increment. |
107 | 0 | return true; |
108 | 0 | } |
109 | 0 | return false; |
110 | 0 | } |
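    /* A typical promotion from a weak owner wraps the result of try_ref() in
       a strong smart pointer. Sketch only; fWeakObj is a hypothetical member
       holding a weak-referenced object:

           if (fWeakObj->try_ref()) {
               sk_sp<SkWeakRefCnt> strong(fWeakObj);  // adopts the ref acquired by try_ref()
               // ... use the object while the strong reference is held ...
           }   // sk_sp destructor calls unref(), balancing try_ref()
    */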
111 | | |
112 | | /** Increment the weak reference count. Must be balanced by a call to |
113 | | weak_unref(). |
114 | | */ |
115 | 0 | void weak_ref() const { |
116 | 0 | SkASSERT(getRefCnt() > 0); |
117 | 0 | SkASSERT(getWeakCnt() > 0); |
118 | 0 | // No barrier required. |
119 | 0 | (void)fWeakCnt.fetch_add(+1, std::memory_order_relaxed); |
120 | 0 | } |
121 | | |
122 | | /** Decrement the weak reference count. If the weak reference count is 1 |
123 | | before the decrement, then call delete on the object. Note that if this |
124 | | is the case, then the object needs to have been allocated via new, and |
125 | | not on the stack. |
126 | | */ |
127 | 52.2k | void weak_unref() const { |
128 | 52.2k | SkASSERT(getWeakCnt() > 0); |
129 | | // A release here acts in place of all releases we "should" have been doing in ref(). |
130 | 52.2k | if (1 == fWeakCnt.fetch_add(-1, std::memory_order_acq_rel)) { |
131 | | // Like try_ref(), the acquire is only needed on success, to make sure |
132 | | // code in internal_dispose() doesn't happen before the decrement. |
133 | | #ifdef SK_DEBUG |
134 | | // so our destructor won't complain |
135 | | fWeakCnt.store(1, std::memory_order_relaxed); |
136 | | #endif |
137 | 52.2k | this->INHERITED::internal_dispose(); |
138 | 52.2k | } |
139 | 52.2k | } |
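    /* A weak owner brackets the lifetime of its handle with weak_ref() and
       weak_unref(). A hypothetical minimal wrapper (a sketch, not part of
       Skia; copy/move support omitted for brevity):

           class WeakHandle {
           public:
               // obj must still be strongly referenced by the caller here,
               // since weak_ref() asserts a positive strong count in debug.
               explicit WeakHandle(SkWeakRefCnt* obj) : fObj(obj) { fObj->weak_ref(); }
               ~WeakHandle() { fObj->weak_unref(); }

               // On success the caller owns a new strong ref and must unref() it.
               SkWeakRefCnt* tryGet() const { return fObj->try_ref() ? fObj : nullptr; }

           private:
               SkWeakRefCnt* fObj;
           };
    */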
140 | | |
141 | | /** Returns true if there are no strong references to the object. When this |
142 | | is the case all future calls to try_ref() will return false. |
143 | | */ |
144 | 0 | bool weak_expired() const { |
145 | 0 | return fRefCnt.load(std::memory_order_relaxed) == 0; |
146 | 0 | } |
147 | | |
148 | | protected: |
149 | | /** Called when the strong reference count goes to zero. This allows the |
150 | | object to free any resources it may be holding. Weak references may |
151 | | still exist and their level of allowed access to the object is defined |
152 | | by the object's class. |
153 | | */ |
154 | 52.2k | virtual void weak_dispose() const { |
155 | 52.2k | } |
156 | | |
157 | | private: |
158 | | /** Called when the strong reference count goes to zero. Calls weak_dispose |
159 | | on the object and releases the implicit weak reference held |
160 | | collectively by the strong references. |
161 | | */ |
162 | 52.2k | void internal_dispose() const override { |
163 | 52.2k | weak_dispose(); |
164 | 52.2k | weak_unref(); |
165 | 52.2k | } |
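    /* Subclasses commonly override weak_dispose() to drop expensive resources
       as soon as the last strong reference goes away, while the object itself
       stays allocated until the last weak reference is released. Hypothetical
       sketch (illustrative names, assumes <memory> and <cstdint>):

           class CachedImage : public SkWeakRefCnt {
           protected:
               void weak_dispose() const override {
                   // Strong count hit zero: free the pixels, keep the metadata
                   // so weak owners can still observe the disposed state.
                   fPixels.reset();
                   this->SkWeakRefCnt::weak_dispose();
               }
           private:
               mutable std::unique_ptr<uint8_t[]> fPixels;
           };
    */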
166 | | |
167 | | /* Invariant: fWeakCnt = #weak + (fRefCnt > 0 ? 1 : 0) */ |
168 | | mutable std::atomic<int32_t> fWeakCnt; |
169 | | |
170 | | using INHERITED = SkRefCnt; |
171 | | }; |
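/* End-to-end usage sketch (hypothetical, not part of this header). A class
   derived from SkWeakRefCnt starts with one strong and one weak reference;
   strong owners use ref()/unref() (or sk_sp<>), while weak owners bracket
   their access with weak_ref()/weak_unref() and promote with try_ref():

       #include "include/core/SkRefCnt.h"
       #include "include/private/SkWeakRefCnt.h"

       class Node : public SkWeakRefCnt {};

       void example() {
           sk_sp<Node> strong(new Node());  // strong count 1, weak count 1

           Node* weak = strong.get();
           weak->weak_ref();                // weak count 2

           strong.reset();                  // strong count 0: weak_dispose() runs and
                                            // the collectively held weak ref is released

           if (weak->try_ref()) {           // fails here: the object is disposed
               weak->unref();
           }
           weak->weak_unref();              // weak count 0: the object is deleted
       }
*/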
172 | | |
173 | | #endif |