// SkRefCnt.h revision 06ca8ec87cf6fab57cadd043a5ac18c4154a4129
/*
 * Copyright 2006 The Android Open Source Project
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkRefCnt_DEFINED
#define SkRefCnt_DEFINED

#include "../private/SkTLogic.h"
#include "SkTypes.h"
#include <atomic>
#include <functional>
#include <memory>
#include <type_traits>
#include <utility>

#define SK_SUPPORT_TRANSITION_TO_SP_INTERFACES

/** \class SkRefCntBase

    SkRefCntBase is the base class for objects that may be shared by multiple
    objects. When an existing owner wants to share a reference, it calls ref().
    When an owner wants to release its reference, it calls unref(). When the
    shared object's reference count goes to zero as the result of an unref()
    call, its (virtual) destructor is called. It is an error for the
    destructor to be called explicitly (or via the object going out of scope on
    the stack or calling delete) if getRefCnt() > 1.
*/
class SK_API SkRefCntBase : SkNoncopyable {
public:
    /** Default construct, initializing the reference count to 1.
    */
    SkRefCntBase() : fRefCnt(1) {}

    /** Destruct, asserting that the reference count is 1.
    */
    virtual ~SkRefCntBase() {
#ifdef SK_DEBUG
        SkASSERTF(getRefCnt() == 1, "fRefCnt was %d", getRefCnt());
        // illegal value, to catch us if we reuse after delete
        fRefCnt.store(0, std::memory_order_relaxed);
#endif
    }

#ifdef SK_DEBUG
    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const {
        return fRefCnt.load(std::memory_order_relaxed);
    }

    void validate() const {
        SkASSERT(getRefCnt() > 0);
    }
#endif

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owners' actions are complete.
     */
    bool unique() const {
        if (1 == fRefCnt.load(std::memory_order_acquire)) {
            // The acquire barrier is only really needed if we return true.  It
            // prevents code conditioned on the result of unique() from running
            // until previous owners are all totally done calling unref().
            return true;
        }
        return false;
    }
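
    // Illustrative sketch (not part of this header's API; the names below are
    // hypothetical): unique() is the hook for copy-on-write style checks.
    //
    //    void mutateInPlaceIfPossible(SkRefCntBase* obj) {
    //        if (obj->unique()) {
    //            // Sole owner: safe to mutate in place. The acquire barrier
    //            // guarantees all prior owners' writes are now visible.
    //        } else {
    //            // Shared: mutate a copy instead.
    //        }
    //    }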

    /** Increment the reference count. Must be balanced by a call to unref().
    */
    void ref() const {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Android employs some special subclasses that enable the fRefCnt to
        // go to zero, but not below, prior to reusing the object.  This breaks
        // the use of unique() on such objects and as such should be removed
        // once the Android code is fixed.
        SkASSERT(getRefCnt() >= 0);
#else
        SkASSERT(getRefCnt() > 0);
#endif
        // No barrier required.
        (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
    */
    void unref() const {
        SkASSERT(getRefCnt() > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

protected:
    /**
     *  Allow subclasses to call this if they've overridden internal_dispose
     *  so they can reset fRefCnt before the destructor is called. Should only
     *  be called right before calling through to inherited internal_dispose()
     *  or before calling the destructor.
     */
    void internal_dispose_restore_refcnt_to_1() const {
#ifdef SK_DEBUG
        SkASSERT(0 == getRefCnt());
        fRefCnt.store(1, std::memory_order_relaxed);
#endif
    }

private:
    /**
     *  Called when the ref count goes to 0.
     */
    virtual void internal_dispose() const {
        this->internal_dispose_restore_refcnt_to_1();
        delete this;
    }

    // The following friends are those which override internal_dispose()
    // and conditionally call SkRefCnt::internal_dispose().
    friend class SkWeakRefCnt;

    mutable std::atomic<int32_t> fRefCnt;

    typedef SkNoncopyable INHERITED;
};
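
// Illustrative sketch of the ownership contract (MyResource is hypothetical,
// not part of this header): an object is created with a count of 1, each
// additional owner calls ref(), and every owner eventually calls unref().
//
//    class MyResource : public SkRefCntBase {};
//
//    MyResource* res = new MyResource;   // count == 1; creator owns a ref
//    res->ref();                         // second owner,  count == 2
//    res->unref();                       // one owner left, count == 1
//    res->unref();                       // count == 0, ~MyResource() runs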

#ifdef SK_REF_CNT_MIXIN_INCLUDE
// It is the responsibility of the following include to define the type SkRefCnt.
// This SkRefCnt should normally derive from SkRefCntBase.
#include SK_REF_CNT_MIXIN_INCLUDE
#else
class SK_API SkRefCnt : public SkRefCntBase {
    // "#include SK_REF_CNT_MIXIN_INCLUDE" doesn't work with this build system.
    #if defined(GOOGLE3)
    public:
        void deref() const { this->unref(); }
    #endif
};
#endif

///////////////////////////////////////////////////////////////////////////////

/** Helper macro to safely assign one SkRefCnt[TS]* to another, checking for
    null on each side of the assignment, and ensuring that ref() is called
    before unref(), in case the two pointers point to the same object.
 */
#define SkRefCnt_SafeAssign(dst, src)   \
    do {                                \
        if (src) src->ref();            \
        if (dst) dst->unref();          \
        dst = src;                      \
    } while (0)
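
// Illustrative sketch (Widget and fPaint are hypothetical): because src is
// ref'ed before dst is unref'ed, assigning an object to itself through two
// aliases never drops the count to zero mid-assignment.
//
//    void Widget::setPaint(SkRefCnt* paint) {
//        SkRefCnt_SafeAssign(fPaint, paint);   // safe even if fPaint == paint
//    }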

/** Call obj->ref() and return obj. The obj must not be nullptr.
 */
template <typename T> static inline T* SkRef(T* obj) {
    SkASSERT(obj);
    obj->ref();
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->ref() and return obj.
 */
template <typename T> static inline T* SkSafeRef(T* obj) {
    if (obj) {
        obj->ref();
    }
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->unref().
 */
template <typename T> static inline void SkSafeUnref(T* obj) {
    if (obj) {
        obj->unref();
    }
}

template <typename T> static inline void SkSafeSetNull(T*& obj) {
    if (obj) {
        obj->unref();
        obj = nullptr;
    }
}
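
// Illustrative sketch (Layer and fBackdrop are hypothetical): these helpers
// compress the common ref-on-store / unref-on-clear pattern.
//
//    void Layer::setBackdrop(SkRefCnt* b) {
//        SkRefCnt* old = fBackdrop;
//        fBackdrop = SkSafeRef(b);    // ref the new value, tolerating null
//        SkSafeUnref(old);            // drop the old value, tolerating null
//    }
//
//    Layer::~Layer() {
//        SkSafeSetNull(fBackdrop);    // unref and null out in one step
//    }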

///////////////////////////////////////////////////////////////////////////////

template <typename T> struct SkTUnref {
    void operator()(T* t) { t->unref(); }
};

/**
 *  Utility class that simply unref's its argument in the destructor.
 */
template <typename T> class SkAutoTUnref : public std::unique_ptr<T, SkTUnref<T>> {
public:
    explicit SkAutoTUnref(T* obj = nullptr) : std::unique_ptr<T, SkTUnref<T>>(obj) {}

    operator T*() const { return this->get(); }

#if defined(SK_BUILD_FOR_ANDROID_FRAMEWORK)
    // Need to update graphics/Shader.cpp.
    T* detach() { return this->release(); }
#endif
};
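
// Illustrative sketch: SkAutoTUnref takes over an existing reference and
// unrefs it when the scope ends, so early returns cannot leak. (makeResource
// is a hypothetical factory returning an already-ref'ed pointer.)
//
//    {
//        SkAutoTUnref<SkRefCnt> holder(makeResource());
//        if (!holder.get()) { return; }   // nothing held, nothing leaked
//        // ... use holder.get() ...
//    }                                    // unref() called automatically
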
// Can't use the #define trick below to guard a bare SkAutoTUnref(...) because it's templated. :(

class SkAutoUnref : public SkAutoTUnref<SkRefCnt> {
public:
    SkAutoUnref(SkRefCnt* obj) : SkAutoTUnref<SkRefCnt>(obj) {}
};
#define SkAutoUnref(...) SK_REQUIRE_LOCAL_VAR(SkAutoUnref)

// This is a variant of SkRefCnt that's Not Virtual, so weighs 4 bytes instead of 8 or 16.
// There's only benefit to using this if the deriving class does not otherwise need a vtable.
template <typename Derived>
class SkNVRefCnt : SkNoncopyable {
public:
    SkNVRefCnt() : fRefCnt(1) {}
    ~SkNVRefCnt() { SkASSERTF(1 == getRefCnt(), "NVRefCnt was %d", getRefCnt()); }

    // Implementation is pretty much the same as SkRefCntBase. All required barriers are the same:
    //   - unique() needs acquire when it returns true, and no barrier if it returns false;
    //   - ref() doesn't need any barrier;
    //   - unref() needs a release barrier, and an acquire if it's going to call delete.

    bool unique() const { return 1 == fRefCnt.load(std::memory_order_acquire); }
    void ref() const { (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed); }
    void unref() const {
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // restore the 1 for our destructor's assert
            SkDEBUGCODE(fRefCnt.store(1, std::memory_order_relaxed));
            delete (const Derived*)this;
        }
    }
    void deref() const { this->unref(); }

private:
    mutable std::atomic<int32_t> fRefCnt;
    int32_t getRefCnt() const {
        return fRefCnt.load(std::memory_order_relaxed);
    }
};
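
// Illustrative sketch: SkNVRefCnt uses CRTP so unref() can delete through the
// most-derived type without a virtual destructor. PlainBlob is hypothetical.
//
//    class PlainBlob : public SkNVRefCnt<PlainBlob> {
//        // No vtable needed; the ref count adds only 4 bytes.
//    };
//
//    PlainBlob* blob = new PlainBlob;   // count == 1
//    blob->unref();                     // deletes as PlainBlob, not as base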

///////////////////////////////////////////////////////////////////////////////////////////////////

/**
 *  Shared pointer class to wrap classes that support a ref()/unref() interface.
 *
 *  This can be used for classes inheriting from SkRefCnt, but it also works for other
 *  classes that match the interface yet make different internal choices: e.g. the hosted class
 *  may have thread-safe ref/unref, but that is not assumed/imposed by sk_sp.
 */
template <typename T> class sk_sp {
    /** Supports safe bool idiom. Obsolete with explicit operator bool. */
    using unspecified_bool_type = T* sk_sp::*;
public:
    using element_type = T;

    constexpr sk_sp() : fPtr(nullptr) {}
    constexpr sk_sp(std::nullptr_t) : fPtr(nullptr) {}

    /**
     *  Shares the underlying object by calling ref(), so that both the argument and the newly
     *  created sk_sp have a reference to it.
     */
    sk_sp(const sk_sp<T>& that) : fPtr(SkSafeRef(that.get())) {}
    template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
    sk_sp(const sk_sp<U>& that) : fPtr(SkSafeRef(that.get())) {}

    /**
     *  Move the underlying object from the argument to the newly created sk_sp. Afterwards only
     *  the new sk_sp will have a reference to the object, and the argument will point to null.
     *  No call to ref() or unref() will be made.
     */
    sk_sp(sk_sp<T>&& that) : fPtr(that.release()) {}
    template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
    sk_sp(sk_sp<U>&& that) : fPtr(that.release()) {}

    /**
     *  Adopt the bare pointer into the newly created sk_sp.
     *  No call to ref() or unref() will be made.
     */
    explicit sk_sp(T* obj) : fPtr(obj) {}

    /**
     *  Calls unref() on the underlying object pointer.
     */
    ~sk_sp() {
        SkSafeUnref(fPtr);
        SkDEBUGCODE(fPtr = nullptr);
    }

    sk_sp<T>& operator=(std::nullptr_t) { this->reset(); return *this; }

    /**
     *  Shares the underlying object referenced by the argument by calling ref() on it. If this
     *  sk_sp previously had a reference to an object (i.e. not null) it will call unref() on that
     *  object.
     */
    sk_sp<T>& operator=(const sk_sp<T>& that) {
        this->reset(SkSafeRef(that.get()));
        return *this;
    }
    template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
    sk_sp<T>& operator=(const sk_sp<U>& that) {
        this->reset(SkSafeRef(that.get()));
        return *this;
    }

    /**
     *  Move the underlying object from the argument to the sk_sp. If the sk_sp previously held
     *  a reference to another object, unref() will be called on that object. No call to ref()
     *  will be made.
     */
    sk_sp<T>& operator=(sk_sp<T>&& that) {
        this->reset(that.release());
        return *this;
    }
    template <typename U, typename = skstd::enable_if_t<std::is_convertible<U*, T*>::value>>
    sk_sp<T>& operator=(sk_sp<U>&& that) {
        this->reset(that.release());
        return *this;
    }

    T& operator*() const {
        SkASSERT(this->get() != nullptr);
        return *this->get();
    }

    // MSVC 2013 does not work correctly with explicit operator bool.
    // https://chromium-cpp.appspot.com/#core-blacklist
    // When explicit operator bool can be used, remove operator! and operator unspecified_bool_type.
    //explicit operator bool() const { return this->get() != nullptr; }
    operator unspecified_bool_type() const { return this->get() ? &sk_sp::fPtr : nullptr; }
    bool operator!() const { return this->get() == nullptr; }

    T* get() const { return fPtr; }
    T* operator->() const { return fPtr; }

    /**
     *  Adopt the new bare pointer, and call unref() on any previously held object (if not null).
     *  No call to ref() will be made.
     */
    void reset(T* ptr = nullptr) {
        // Calling fPtr->unref() may call this->~() or this->reset(T*).
        // http://wg21.cmeerw.net/lwg/issue998
        // http://wg21.cmeerw.net/lwg/issue2262
        T* oldPtr = fPtr;
        fPtr = ptr;
        SkSafeUnref(oldPtr);
    }

    /**
     *  Return the bare pointer, and set the internal object pointer to nullptr.
     *  The caller must assume ownership of the object, and manage its reference count directly.
     *  No call to unref() will be made.
     */
    T* SK_WARN_UNUSED_RESULT release() {
        T* ptr = fPtr;
        fPtr = nullptr;
        return ptr;
    }

    void swap(sk_sp<T>& that) /*noexcept*/ {
        using std::swap;
        swap(fPtr, that.fPtr);
    }

private:
    T*  fPtr;
};

template <typename T> inline void swap(sk_sp<T>& a, sk_sp<T>& b) /*noexcept*/ {
    a.swap(b);
}

template <typename T, typename U> inline bool operator==(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() == b.get();
}
template <typename T> inline bool operator==(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return !a;
}
template <typename T> inline bool operator==(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return !b;
}

template <typename T, typename U> inline bool operator!=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() != b.get();
}
template <typename T> inline bool operator!=(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return static_cast<bool>(a);
}
template <typename T> inline bool operator!=(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return static_cast<bool>(b);
}

template <typename T, typename U> inline bool operator<(const sk_sp<T>& a, const sk_sp<U>& b) {
    // Provide defined total order on sk_sp.
    // http://wg21.cmeerw.net/lwg/issue1297
    // http://wg21.cmeerw.net/lwg/issue1401
    return std::less<skstd::common_type_t<T*, U*>>()(a.get(), b.get());
}
template <typename T> inline bool operator<(const sk_sp<T>& a, std::nullptr_t) {
    return std::less<T*>()(a.get(), nullptr);
}
template <typename T> inline bool operator<(std::nullptr_t, const sk_sp<T>& b) {
    return std::less<T*>()(nullptr, b.get());
}

template <typename T, typename U> inline bool operator<=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(b < a);
}
template <typename T> inline bool operator<=(const sk_sp<T>& a, std::nullptr_t) {
    return !(nullptr < a);
}
template <typename T> inline bool operator<=(std::nullptr_t, const sk_sp<T>& b) {
    return !(b < nullptr);
}

template <typename T, typename U> inline bool operator>(const sk_sp<T>& a, const sk_sp<U>& b) {
    return b < a;
}
template <typename T> inline bool operator>(const sk_sp<T>& a, std::nullptr_t) {
    return nullptr < a;
}
template <typename T> inline bool operator>(std::nullptr_t, const sk_sp<T>& b) {
    return b < nullptr;
}

template <typename T, typename U> inline bool operator>=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(a < b);
}
template <typename T> inline bool operator>=(const sk_sp<T>& a, std::nullptr_t) {
    return !(a < nullptr);
}
template <typename T> inline bool operator>=(std::nullptr_t, const sk_sp<T>& b) {
    return !(nullptr < b);
}

template <typename T, typename... Args>
sk_sp<T> sk_make_sp(Args&&... args) {
    return sk_sp<T>(new T(std::forward<Args>(args)...));
}
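
// Illustrative sketch of typical sk_sp usage (MyType is hypothetical):
// sk_make_sp constructs the object and adopts its initial reference, copies
// share via ref(), and moves transfer ownership with no count traffic.
//
//    sk_sp<MyType> a = sk_make_sp<MyType>();   // count == 1
//    sk_sp<MyType> b = a;                      // copy: ref(), count == 2
//    sk_sp<MyType> c = std::move(a);           // move: no ref/unref, a is null
//    b = nullptr;                              // unref(), count == 1
//                                              // c's destructor drops the last ref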

#ifdef SK_SUPPORT_TRANSITION_TO_SP_INTERFACES

/*
 *  Returns a sk_sp wrapping the provided ptr AND calls ref on it (if not null).
 *
 *  This is different from the semantics of the constructor for sk_sp, which just wraps the ptr,
 *  effectively "adopting" it.
 *
 *  This function may be helpful while we convert callers from ptr-based to sk_sp-based parameters.
 */
template <typename T> sk_sp<T> sk_ref_sp(T* obj) {
    return sk_sp<T>(SkSafeRef(obj));
}
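
// Illustrative sketch of the share-vs-adopt distinction (MyType and both
// factory functions are hypothetical):
//
//    MyType* raw = getSharedObject();        // caller does not own a ref
//    sk_sp<MyType> shared = sk_ref_sp(raw);  // ref(): count goes up by one
//
//    MyType* owned = createObject();         // caller owns the initial ref
//    sk_sp<MyType> adopted(owned);           // no ref(): sk_sp adopts it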

#endif

#endif