SkRefCnt.h revision 2691d76a06e1af6282f8b3a3140cc93361be10c4
/*
 * Copyright 2006 The Android Open Source Project
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkRefCnt_DEFINED
#define SkRefCnt_DEFINED

#include "../private/SkAtomics.h"
#include "../private/SkTLogic.h"
#include "SkTypes.h"
#include <functional>
#include <memory>
#include <utility>

#define SK_SUPPORT_TRANSITION_TO_SP_INTERFACES

/** \class SkRefCntBase

    SkRefCntBase is the base class for objects that may be shared by multiple
    objects. When an existing owner wants to share a reference, it calls ref().
    When an owner wants to release its reference, it calls unref(). When the
    shared object's reference count goes to zero as the result of an unref()
    call, its (virtual) destructor is called. It is an error for the
    destructor to be called explicitly (or via the object going out of scope on
    the stack or calling delete) if getRefCnt() > 1.
*/
class SK_API SkRefCntBase : SkNoncopyable {
public:
    /** Default construct, initializing the reference count to 1.
    */
    SkRefCntBase() : fRefCnt(1) {}

    /** Destruct, asserting that the reference count is 1.
    */
    virtual ~SkRefCntBase() {
#ifdef SK_DEBUG
        SkASSERTF(fRefCnt == 1, "fRefCnt was %d", fRefCnt);
        fRefCnt = 0;    // illegal value, to catch us if we reuse after delete
#endif
    }

#ifdef SK_DEBUG
    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const { return fRefCnt; }
#endif

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owner's actions are complete.
     */
    bool unique() const {
        if (1 == sk_atomic_load(&fRefCnt, sk_memory_order_acquire)) {
            // The acquire barrier is only really needed if we return true.  It
            // prevents code conditioned on the result of unique() from running
            // until previous owners are all totally done calling unref().
            return true;
        }
        return false;
    }

    /** Increment the reference count. Must be balanced by a call to unref().
    */
    void ref() const {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Android employs some special subclasses that enable the fRefCnt to
        // go to zero, but not below, prior to reusing the object.  This breaks
        // the use of unique() on such objects and as such should be removed
        // once the Android code is fixed.
        SkASSERT(fRefCnt >= 0);
#else
        SkASSERT(fRefCnt > 0);
#endif
        // Relaxed is sufficient: taking an additional reference does not need
        // to synchronize-with any other thread's memory operations.
        (void)sk_atomic_fetch_add(&fRefCnt, +1, sk_memory_order_relaxed);  // No barrier required.
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
    */
    void unref() const {
        SkASSERT(fRefCnt > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == sk_atomic_fetch_add(&fRefCnt, -1, sk_memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

#ifdef SK_DEBUG
    /** Assert that the object still holds at least one reference. */
    void validate() const {
        SkASSERT(fRefCnt > 0);
    }
#endif

protected:
    /**
     *  Allow subclasses to call this if they've overridden internal_dispose
     *  so they can reset fRefCnt before the destructor is called. Should only
     *  be called right before calling through to inherited internal_dispose()
     *  or before calling the destructor.
     */
    void internal_dispose_restore_refcnt_to_1() const {
#ifdef SK_DEBUG
        // Restore the count to 1 so ~SkRefCntBase's debug assert passes.
        SkASSERT(0 == fRefCnt);
        fRefCnt = 1;
#endif
    }

private:
    /**
     *  Called when the ref count goes to 0.
     */
    virtual void internal_dispose() const {
        this->internal_dispose_restore_refcnt_to_1();
        delete this;
    }

    // The following friends are those which override internal_dispose()
    // and conditionally call SkRefCnt::internal_dispose().
    friend class SkWeakRefCnt;

    // mutable so the ref/unref bookkeeping can be done through const pointers.
    mutable int32_t fRefCnt;

    typedef SkNoncopyable INHERITED;
};

#ifdef SK_REF_CNT_MIXIN_INCLUDE
// It is the responsibility of the following include to define the type SkRefCnt.
// This SkRefCnt should normally derive from SkRefCntBase.
#include SK_REF_CNT_MIXIN_INCLUDE
#else
class SK_API SkRefCnt : public SkRefCntBase { };
#endif

///////////////////////////////////////////////////////////////////////////////

/** Helper macro to safely assign one SkRefCnt[TS]* to another, checking for
    null on each side of the assignment, and ensuring that ref() is called
    before unref(), in case the two pointers point to the same object.
 */
#define SkRefCnt_SafeAssign(dst, src)   \
    do {                                \
        if (src) src->ref();            \
        if (dst) dst->unref();          \
        dst = src;                      \
    } while (0)


/** Call obj->ref() and return obj. The obj must not be nullptr.
 */
template <typename T> static inline T* SkRef(T* obj) {
    SkASSERT(obj);
    obj->ref();
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->ref() and return obj.
 */
template <typename T> static inline T* SkSafeRef(T* obj) {
    if (obj) {
        obj->ref();
    }
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->unref()
 */
template <typename T> static inline void SkSafeUnref(T* obj) {
    if (obj) {
        obj->unref();
    }
}

/** If the argument is non-null, unref it and set the caller's pointer to null. */
template<typename T> static inline void SkSafeSetNull(T*& obj) {
    if (obj) {
        obj->unref();
        obj = nullptr;
    }
}

///////////////////////////////////////////////////////////////////////////////

/** Deleter functor that calls unref() instead of delete, for use with
 *  std::unique_ptr over ref-counted types.
 */
template <typename T> struct SkTUnref {
    void operator()(T* t) { t->unref(); }
};

/**
 *  Utility class that simply unref's its argument in the destructor.
 */
template <typename T> class SkAutoTUnref : public std::unique_ptr<T, SkTUnref<T>> {
public:
    explicit SkAutoTUnref(T* obj = nullptr) : std::unique_ptr<T, SkTUnref<T>>(obj) {}

    // Legacy spelling of release(): hand the pointer back without unreffing.
    T* detach() { return this->release(); }
    operator T*() const { return this->get(); }

    // Android's std::unique_ptr's operator bool() is sometimes not explicit...
    // so override it with our own explicitly explicit version.
    explicit operator bool() const { return this->get() != nullptr; }
};
// Can't use the #define trick below to guard a bare SkAutoTUnref(...) because it's templated. :(

class SkAutoUnref : public SkAutoTUnref<SkRefCnt> {
public:
    SkAutoUnref(SkRefCnt* obj) : SkAutoTUnref<SkRefCnt>(obj) {}
};
// Guard against declaring a temporary (SkAutoUnref(ptr);) instead of a named local.
#define SkAutoUnref(...) SK_REQUIRE_LOCAL_VAR(SkAutoUnref)

// This is a variant of SkRefCnt that's Not Virtual, so weighs 4 bytes instead of 8 or 16.
// There's only benefit to using this if the deriving class does not otherwise need a vtable.
template <typename Derived>
class SkNVRefCnt : SkNoncopyable {
public:
    SkNVRefCnt() : fRefCnt(1) {}
    ~SkNVRefCnt() { SkASSERTF(1 == fRefCnt, "NVRefCnt was %d", fRefCnt); }

    // Implementation is pretty much the same as SkRefCntBase. All required barriers are the same:
    //   - unique() needs acquire when it returns true, and no barrier if it returns false;
    //   - ref() doesn't need any barrier;
    //   - unref() needs a release barrier, and an acquire if it's going to call delete.

    bool unique() const { return 1 == sk_atomic_load(&fRefCnt, sk_memory_order_acquire); }
    void ref() const { (void)sk_atomic_fetch_add(&fRefCnt, +1, sk_memory_order_relaxed); }
    void unref() const {
        if (1 == sk_atomic_fetch_add(&fRefCnt, -1, sk_memory_order_acq_rel)) {
            SkDEBUGCODE(fRefCnt = 1;)   // restore the 1 for our destructor's assert
            // CRTP: no vtable, so cast to the derived type to run its destructor.
            delete (const Derived*)this;
        }
    }
    void deref() const { this->unref(); }

private:
    mutable int32_t fRefCnt;
};

///////////////////////////////////////////////////////////////////////////////////////////////////

/**
 *  Shared pointer class to wrap classes that support a ref()/unref() interface.
 *
 *  This can be used for classes inheriting from SkRefCnt, but it also works for other
 *  classes that match the interface, but have different internal choices: e.g. the hosted class
 *  may have its ref/unref be thread-safe, but that is not assumed/imposed by sk_sp.
 */
template <typename T> class sk_sp {
    /** Supports safe bool idiom. Obsolete with explicit operator bool. */
    using unspecified_bool_type = T* sk_sp::*;
public:
    using element_type = T;

    sk_sp() : fPtr(nullptr) {}
    sk_sp(std::nullptr_t) : fPtr(nullptr) {}

    /**
     *  Shares the underlying object by calling ref(), so that both the argument and the newly
     *  created sk_sp both have a reference to it.
     */
    sk_sp(const sk_sp<T>& that) : fPtr(SkSafeRef(that.get())) {}
    // Converting copy constructor: participates only when U* converts to T*.
    template <typename U, typename = skstd::enable_if_t<skstd::is_convertible<U*, T*>::value>>
    sk_sp(const sk_sp<U>& that) : fPtr(SkSafeRef(that.get())) {}

    /**
     *  Move the underlying object from the argument to the newly created sk_sp. Afterwards only
     *  the new sk_sp will have a reference to the object, and the argument will point to null.
     *  No call to ref() or unref() will be made.
     */
    sk_sp(sk_sp<T>&& that) : fPtr(that.release()) {}
    // Converting move constructor: participates only when U* converts to T*.
    template <typename U, typename = skstd::enable_if_t<skstd::is_convertible<U*, T*>::value>>
    sk_sp(sk_sp<U>&& that) : fPtr(that.release()) {}

    /**
     *  Adopt the bare pointer into the newly created sk_sp.
     *  No call to ref() or unref() will be made.
     */
    explicit sk_sp(T* obj) : fPtr(obj) {}

    /**
     *  Calls unref() on the underlying object pointer.
     */
    ~sk_sp() {
        SkSafeUnref(fPtr);
        SkDEBUGCODE(fPtr = nullptr);
    }

    sk_sp<T>& operator=(std::nullptr_t) { this->reset(); return *this; }

    /**
     *  Shares the underlying object referenced by the argument by calling ref() on it. If this
     *  sk_sp previously had a reference to an object (i.e. not null) it will call unref() on that
     *  object.
     */
    sk_sp<T>& operator=(const sk_sp<T>& that) {
        // ref-before-reset keeps self-assignment safe.
        this->reset(SkSafeRef(that.get()));
        return *this;
    }
    template <typename U, typename = skstd::enable_if_t<skstd::is_convertible<U*, T*>::value>>
    sk_sp<T>& operator=(const sk_sp<U>& that) {
        this->reset(SkSafeRef(that.get()));
        return *this;
    }

    /**
     *  Move the underlying object from the argument to the sk_sp. If the sk_sp previously held
     *  a reference to another object, unref() will be called on that object. No call to ref()
     *  will be made.
     */
    sk_sp<T>& operator=(sk_sp<T>&& that) {
        this->reset(that.release());
        return *this;
    }
    template <typename U, typename = skstd::enable_if_t<skstd::is_convertible<U*, T*>::value>>
    sk_sp<T>& operator=(sk_sp<U>&& that) {
        this->reset(that.release());
        return *this;
    }

    T& operator*() const {
        SkASSERT(this->get() != nullptr);
        return *this->get();
    }

    // MSVC 2013 does not work correctly with explicit operator bool.
    // https://chromium-cpp.appspot.com/#core-blacklist
    // When explicit operator bool can be used, remove operator! and operator unspecified_bool_type.
    //explicit operator bool() const { return this->get() != nullptr; }
    operator unspecified_bool_type() const { return this->get() ? &sk_sp::fPtr : nullptr; }
    bool operator!() const { return this->get() == nullptr; }

    T* get() const { return fPtr; }
    T* operator->() const { return fPtr; }

    /**
     *  Adopt the new bare pointer, and call unref() on any previously held object (if not null).
     *  No call to ref() will be made.
     */
    void reset(T* ptr = nullptr) {
        // Calling fPtr->unref() may call this->~() or this->reset(T*).
        // http://wg21.cmeerw.net/lwg/issue998
        // http://wg21.cmeerw.net/lwg/issue2262
        // So update fPtr BEFORE unreffing the old pointer.
        T* oldPtr = fPtr;
        fPtr = ptr;
        SkSafeUnref(oldPtr);
    }

    /**
     *  Return the bare pointer, and set the internal object pointer to nullptr.
     *  The caller must assume ownership of the object, and manage its reference count directly.
     *  No call to unref() will be made.
     */
    T* SK_WARN_UNUSED_RESULT release() {
        T* ptr = fPtr;
        fPtr = nullptr;
        return ptr;
    }

    void swap(sk_sp<T>& that) /*noexcept*/ {
        using std::swap;
        swap(fPtr, that.fPtr);
    }

private:
    T* fPtr;
};

template <typename T> inline void swap(sk_sp<T>& a, sk_sp<T>& b) /*noexcept*/ {
    a.swap(b);
}

template <typename T, typename U> inline bool operator==(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() == b.get();
}
template <typename T> inline bool operator==(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return !a;
}
template <typename T> inline bool operator==(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return !b;
}

template <typename T, typename U> inline bool operator!=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() != b.get();
}
template <typename T> inline bool operator!=(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return static_cast<bool>(a);
}
template <typename T> inline bool operator!=(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return static_cast<bool>(b);
}

template <typename T, typename U> inline bool operator<(const sk_sp<T>& a, const sk_sp<U>& b) {
    // Provide defined total order on sk_sp.
    // http://wg21.cmeerw.net/lwg/issue1297
    // http://wg21.cmeerw.net/lwg/issue1401 .
    return std::less<skstd::common_type_t<T*, U*>>()(a.get(), b.get());
}
template <typename T> inline bool operator<(const sk_sp<T>& a, std::nullptr_t) {
    return std::less<T*>()(a.get(), nullptr);
}
template <typename T> inline bool operator<(std::nullptr_t, const sk_sp<T>& b) {
    return std::less<T*>()(nullptr, b.get());
}

template <typename T, typename U> inline bool operator<=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(b < a);
}
template <typename T> inline bool operator<=(const sk_sp<T>& a, std::nullptr_t) {
    return !(nullptr < a);
}
template <typename T> inline bool operator<=(std::nullptr_t, const sk_sp<T>& b) {
    return !(b < nullptr);
}

template <typename T, typename U> inline bool operator>(const sk_sp<T>& a, const sk_sp<U>& b) {
    return b < a;
}
template <typename T> inline bool operator>(const sk_sp<T>& a, std::nullptr_t) {
    return nullptr < a;
}
template <typename T> inline bool operator>(std::nullptr_t, const sk_sp<T>& b) {
    return b < nullptr;
}

template <typename T, typename U> inline bool operator>=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(a < b);
}
template <typename T> inline bool operator>=(const sk_sp<T>& a, std::nullptr_t) {
    return !(a < nullptr);
}
template <typename T> inline bool operator>=(std::nullptr_t, const sk_sp<T>& b) {
    return !(nullptr < b);
}

/** Construct a T with the given arguments and return it wrapped in an sk_sp,
 *  which adopts the initial reference.
 */
template <typename T, typename... Args>
sk_sp<T> sk_make_sp(Args&&... args) {
    return sk_sp<T>(new T(std::forward<Args>(args)...));
}

#ifdef SK_SUPPORT_TRANSITION_TO_SP_INTERFACES

/*
 *  Returns a sk_sp wrapping the provided ptr AND calls ref on it (if not null).
 *
 *  This is different than the semantics of the constructor for sk_sp, which just wraps the ptr,
 *  effectively "adopting" it.
 *
 *  This function may be helpful while we convert callers from ptr-based to sk_sp-based parameters.
 */
template <typename T> sk_sp<T> sk_ref_sp(T* obj) {
    return sk_sp<T>(SkSafeRef(obj));
}

#endif

#endif