// SkRefCnt.h — revision bbd60689bb080fd147565dbc171f19a6ef177997
/*
 * Copyright 2006 The Android Open Source Project
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkRefCnt_DEFINED
#define SkRefCnt_DEFINED

#include "../private/SkAtomics.h"
#include "../private/SkTLogic.h"
#include "SkTypes.h"
#include <functional>
#include <memory>
#include <utility>

// Enables the sk_ref_sp() helper at the bottom of this file, used while
// callers migrate from bare-pointer to sk_sp-based interfaces.
#define SK_SUPPORT_TRANSITION_TO_SP_INTERFACES

/** \class SkRefCntBase

    SkRefCntBase is the base class for objects that may be shared by multiple
    objects. When an existing owner wants to share a reference, it calls ref().
    When an owner wants to release its reference, it calls unref(). When the
    shared object's reference count goes to zero as the result of an unref()
    call, its (virtual) destructor is called. It is an error for the
    destructor to be called explicitly (or via the object going out of scope on
    the stack or calling delete) if getRefCnt() > 1.
*/
class SK_API SkRefCntBase : SkNoncopyable {
public:
    /** Default construct, initializing the reference count to 1.
    */
    SkRefCntBase() : fRefCnt(1) {}

    /** Destruct, asserting that the reference count is 1.
    */
    virtual ~SkRefCntBase() {
#ifdef SK_DEBUG
        SkASSERTF(fRefCnt == 1, "fRefCnt was %d", fRefCnt);
        fRefCnt = 0;    // illegal value, to catch us if we reuse after delete
#endif
    }

#ifdef SK_DEBUG
    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const { return fRefCnt; }
#endif

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owner's actions are complete.
     */
    bool unique() const {
        if (1 == sk_atomic_load(&fRefCnt, sk_memory_order_acquire)) {
            // The acquire barrier is only really needed if we return true.  It
            // prevents code conditioned on the result of unique() from running
            // until previous owners are all totally done calling unref().
            return true;
        }
        return false;
    }

    /** Increment the reference count. Must be balanced by a call to unref().
    */
    void ref() const {
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
        // Android employs some special subclasses that enable the fRefCnt to
        // go to zero, but not below, prior to reusing the object.  This breaks
        // the use of unique() on such objects and as such should be removed
        // once the Android code is fixed.
        SkASSERT(fRefCnt >= 0);
#else
        SkASSERT(fRefCnt > 0);
#endif
        // Relaxed is sufficient: taking a new reference doesn't publish data.
        (void)sk_atomic_fetch_add(&fRefCnt, +1, sk_memory_order_relaxed);  // No barrier required.
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
    */
    void unref() const {
        SkASSERT(fRefCnt > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == sk_atomic_fetch_add(&fRefCnt, -1, sk_memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

#ifdef SK_DEBUG
    /** Debug-only check that this object has not already been released. */
    void validate() const {
        SkASSERT(fRefCnt > 0);
    }
#endif

protected:
    /**
     *  Allow subclasses to call this if they've overridden internal_dispose
     *  so they can reset fRefCnt before the destructor is called. Should only
     *  be called right before calling through to inherited internal_dispose()
     *  or before calling the destructor.
     */
    void internal_dispose_restore_refcnt_to_1() const {
#ifdef SK_DEBUG
        SkASSERT(0 == fRefCnt);
        fRefCnt = 1;    // restore to 1 so the assert in ~SkRefCntBase() passes
#endif
    }

private:
    /**
     *  Called when the ref count goes to 0.
     */
    virtual void internal_dispose() const {
        this->internal_dispose_restore_refcnt_to_1();
        delete this;
    }

    // The following friends are those which override internal_dispose()
    // and conditionally call SkRefCnt::internal_dispose().
    friend class SkWeakRefCnt;

    // mutable so that ref()/unref() can be called on const objects.
    mutable int32_t fRefCnt;

    typedef SkNoncopyable INHERITED;
};

#ifdef SK_REF_CNT_MIXIN_INCLUDE
// It is the responsibility of the following include to define the type SkRefCnt.
// This SkRefCnt should normally derive from SkRefCntBase.
#include SK_REF_CNT_MIXIN_INCLUDE
#else
class SK_API SkRefCnt : public SkRefCntBase { };
#endif

///////////////////////////////////////////////////////////////////////////////

/** Helper macro to safely assign one SkRefCnt[TS]* to another, checking for
    null in on each side of the assignment, and ensuring that ref() is called
    before unref(), in case the two pointers point to the same object.
 */
#define SkRefCnt_SafeAssign(dst, src)   \
    do {                                \
        if (src) src->ref();            \
        if (dst) dst->unref();          \
        dst = src;                      \
    } while (0)


/** Call obj->ref() and return obj. The obj must not be nullptr.
 */
template <typename T> static inline T* SkRef(T* obj) {
    SkASSERT(obj);
    obj->ref();
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->ref() and return obj.
 */
template <typename T> static inline T* SkSafeRef(T* obj) {
    if (obj) {
        obj->ref();
    }
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->unref()
 */
template <typename T> static inline void SkSafeUnref(T* obj) {
    if (obj) {
        obj->unref();
    }
}

/** If obj is non-null, call obj->unref() and set the caller's pointer to nullptr. */
template<typename T> static inline void SkSafeSetNull(T*& obj) {
    if (obj) {
        obj->unref();
        obj = nullptr;
    }
}

///////////////////////////////////////////////////////////////////////////////

// Deleter for std::unique_ptr that calls unref() rather than delete.
template <typename T> struct SkTUnref {
    void operator()(T* t) { t->unref(); }
};

/**
 *  Utility class that simply unref's its argument in the destructor.
 */
template <typename T> class SkAutoTUnref : public std::unique_ptr<T, SkTUnref<T>> {
public:
    explicit SkAutoTUnref(T* obj = nullptr) : std::unique_ptr<T, SkTUnref<T>>(obj) {}

    // Implicit conversion to the bare pointer, for legacy call sites.
    operator T*() const { return this->get(); }

#if defined(SK_BUILD_FOR_ANDROID_FRAMEWORK)
    // Need to update graphics/Shader.cpp.
    T* detach() { return this->release(); }
#endif

    // Android's std::unique_ptr's operator bool() is sometimes not explicit...
    // so override it with our own explicitly explicit version.
    explicit operator bool() const { return this->get() != nullptr; }
};
// Can't use the #define trick below to guard a bare SkAutoTUnref(...) because it's templated. :(

class SkAutoUnref : public SkAutoTUnref<SkRefCnt> {
public:
    SkAutoUnref(SkRefCnt* obj) : SkAutoTUnref<SkRefCnt>(obj) {}
};
// Require that a SkAutoUnref be bound to a named local variable (not a temporary).
#define SkAutoUnref(...) SK_REQUIRE_LOCAL_VAR(SkAutoUnref)

// This is a variant of SkRefCnt that's Not Virtual, so weighs 4 bytes instead of 8 or 16.
// There's only benefit to using this if the deriving class does not otherwise need a vtable.
template <typename Derived>
class SkNVRefCnt : SkNoncopyable {
public:
    SkNVRefCnt() : fRefCnt(1) {}
    ~SkNVRefCnt() { SkASSERTF(1 == fRefCnt, "NVRefCnt was %d", fRefCnt); }

    // Implementation is pretty much the same as SkRefCntBase. All required barriers are the same:
    //   - unique() needs acquire when it returns true, and no barrier if it returns false;
    //   - ref() doesn't need any barrier;
    //   - unref() needs a release barrier, and an acquire if it's going to call delete.

    bool unique() const { return 1 == sk_atomic_load(&fRefCnt, sk_memory_order_acquire); }
    void ref() const { (void)sk_atomic_fetch_add(&fRefCnt, +1, sk_memory_order_relaxed); }
    void unref() const {
        if (1 == sk_atomic_fetch_add(&fRefCnt, -1, sk_memory_order_acq_rel)) {
            SkDEBUGCODE(fRefCnt = 1;)   // restore the 1 for our destructor's assert
            // CRTP: no vtable, so cast down to the derived type before deleting.
            delete (const Derived*)this;
        }
    }
    void deref() const { this->unref(); }

private:
    mutable int32_t fRefCnt;
};

///////////////////////////////////////////////////////////////////////////////////////////////////

/**
 *  Shared pointer class to wrap classes that support a ref()/unref() interface.
 *
 *  This can be used for classes inheriting from SkRefCnt, but it also works for other
 *  classes that match the interface, but have different internal choices: e.g. the hosted class
 *  may have its ref/unref be thread-safe, but that is not assumed/imposed by sk_sp.
 */
template <typename T> class sk_sp {
    /** Supports safe bool idiom. Obsolete with explicit operator bool. */
    using unspecified_bool_type = T* sk_sp::*;
public:
    using element_type = T;

    sk_sp() : fPtr(nullptr) {}
    sk_sp(std::nullptr_t) : fPtr(nullptr) {}

    /**
     *  Shares the underlying object by calling ref(), so that both the argument and the newly
     *  created sk_sp both have a reference to it.
     */
    sk_sp(const sk_sp<T>& that) : fPtr(SkSafeRef(that.get())) {}
    // The SFINAE guard limits this converting constructor to pointer-compatible U.
    template <typename U, typename = skstd::enable_if_t<skstd::is_convertible<U*, T*>::value>>
    sk_sp(const sk_sp<U>& that) : fPtr(SkSafeRef(that.get())) {}

    /**
     *  Move the underlying object from the argument to the newly created sk_sp. Afterwards only
     *  the new sk_sp will have a reference to the object, and the argument will point to null.
     *  No call to ref() or unref() will be made.
     */
    sk_sp(sk_sp<T>&& that) : fPtr(that.release()) {}
    template <typename U, typename = skstd::enable_if_t<skstd::is_convertible<U*, T*>::value>>
    sk_sp(sk_sp<U>&& that) : fPtr(that.release()) {}

    /**
     *  Adopt the bare pointer into the newly created sk_sp.
     *  No call to ref() or unref() will be made.
     */
    explicit sk_sp(T* obj) : fPtr(obj) {}

    /**
     *  Calls unref() on the underlying object pointer.
     */
    ~sk_sp() {
        SkSafeUnref(fPtr);
        SkDEBUGCODE(fPtr = nullptr);
    }

    sk_sp<T>& operator=(std::nullptr_t) { this->reset(); return *this; }

    /**
     *  Shares the underlying object referenced by the argument by calling ref() on it. If this
     *  sk_sp previously had a reference to an object (i.e. not null) it will call unref() on that
     *  object.
     */
    sk_sp<T>& operator=(const sk_sp<T>& that) {
        // ref-before-reset makes self-assignment safe: the new reference is
        // taken before reset() drops the old one.
        this->reset(SkSafeRef(that.get()));
        return *this;
    }
    template <typename U, typename = skstd::enable_if_t<skstd::is_convertible<U*, T*>::value>>
    sk_sp<T>& operator=(const sk_sp<U>& that) {
        this->reset(SkSafeRef(that.get()));
        return *this;
    }

    /**
     *  Move the underlying object from the argument to the sk_sp. If the sk_sp previously held
     *  a reference to another object, unref() will be called on that object. No call to ref()
     *  will be made.
     */
    sk_sp<T>& operator=(sk_sp<T>&& that) {
        this->reset(that.release());
        return *this;
    }
    template <typename U, typename = skstd::enable_if_t<skstd::is_convertible<U*, T*>::value>>
    sk_sp<T>& operator=(sk_sp<U>&& that) {
        this->reset(that.release());
        return *this;
    }

    /** Dereference. Asserts (in debug) that the pointer is non-null. */
    T& operator*() const {
        SkASSERT(this->get() != nullptr);
        return *this->get();
    }

    // MSVC 2013 does not work correctly with explicit operator bool.
    // https://chromium-cpp.appspot.com/#core-blacklist
    // When explicit operator bool can be used, remove operator! and operator unspecified_bool_type.
    //explicit operator bool() const { return this->get() != nullptr; }
    operator unspecified_bool_type() const { return this->get() ? &sk_sp::fPtr : nullptr; }
    bool operator!() const { return this->get() == nullptr; }

    T* get() const { return fPtr; }
    T* operator->() const { return fPtr; }

    /**
     *  Adopt the new bare pointer, and call unref() on any previously held object (if not null).
     *  No call to ref() will be made.
     */
    void reset(T* ptr = nullptr) {
        // Calling fPtr->unref() may call this->~() or this->reset(T*).
        // http://wg21.cmeerw.net/lwg/issue998
        // http://wg21.cmeerw.net/lwg/issue2262
        T* oldPtr = fPtr;
        fPtr = ptr;
        SkSafeUnref(oldPtr);
    }

    /**
     *  Return the bare pointer, and set the internal object pointer to nullptr.
     *  The caller must assume ownership of the object, and manage its reference count directly.
     *  No call to unref() will be made.
     */
    T* SK_WARN_UNUSED_RESULT release() {
        T* ptr = fPtr;
        fPtr = nullptr;
        return ptr;
    }

    /** Exchange pointers with another sk_sp; no ref counts change. */
    void swap(sk_sp<T>& that) /*noexcept*/ {
        using std::swap;
        swap(fPtr, that.fPtr);
    }

private:
    T*  fPtr;
};

/** Non-member swap, found via ADL by generic code. */
template <typename T> inline void swap(sk_sp<T>& a, sk_sp<T>& b) /*noexcept*/ {
    a.swap(b);
}

// Equality compares the managed pointers, not the pointed-to values.
template <typename T, typename U> inline bool operator==(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() == b.get();
}
template <typename T> inline bool operator==(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return !a;
}
template <typename T> inline bool operator==(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return !b;
}

template <typename T, typename U> inline bool operator!=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() != b.get();
}
template <typename T> inline bool operator!=(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return static_cast<bool>(a);
}
template <typename T> inline bool operator!=(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return static_cast<bool>(b);
}

// std::less is used because raw relational comparison of unrelated
// pointers is unspecified; std::less guarantees a total order.
template <typename T, typename U> inline bool operator<(const sk_sp<T>& a, const sk_sp<U>& b) {
    // Provide defined total order on sk_sp.
    // http://wg21.cmeerw.net/lwg/issue1297
    // http://wg21.cmeerw.net/lwg/issue1401 .
    return std::less<skstd::common_type_t<T*, U*>>()(a.get(), b.get());
}
template <typename T> inline bool operator<(const sk_sp<T>& a, std::nullptr_t) {
    return std::less<T*>()(a.get(), nullptr);
}
template <typename T> inline bool operator<(std::nullptr_t, const sk_sp<T>& b) {
    return std::less<T*>()(nullptr, b.get());
}

// The remaining relational operators are all derived from operator<.
template <typename T, typename U> inline bool operator<=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(b < a);
}
template <typename T> inline bool operator<=(const sk_sp<T>& a, std::nullptr_t) {
    return !(nullptr < a);
}
template <typename T> inline bool operator<=(std::nullptr_t, const sk_sp<T>& b) {
    return !(b < nullptr);
}

template <typename T, typename U> inline bool operator>(const sk_sp<T>& a, const sk_sp<U>& b) {
    return b < a;
}
template <typename T> inline bool operator>(const sk_sp<T>& a, std::nullptr_t) {
    return nullptr < a;
}
template <typename T> inline bool operator>(std::nullptr_t, const sk_sp<T>& b) {
    return b < nullptr;
}

template <typename T, typename U> inline bool operator>=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(a < b);
}
template <typename T> inline bool operator>=(const sk_sp<T>& a, std::nullptr_t) {
    return !(a < nullptr);
}
template <typename T> inline bool operator>=(std::nullptr_t, const sk_sp<T>& b) {
    return !(nullptr < b);
}

/**
 *  Construct a T with the given arguments and return it wrapped in an sk_sp,
 *  analogous to std::make_shared. The sk_sp adopts the initial reference.
 */
template <typename T, typename... Args>
sk_sp<T> sk_make_sp(Args&&... args) {
    return sk_sp<T>(new T(std::forward<Args>(args)...));
}

#ifdef SK_SUPPORT_TRANSITION_TO_SP_INTERFACES

/*
 *  Returns a sk_sp wrapping the provided ptr AND calls ref on it (if not null).
 *
 *  This is different than the semantics of the constructor for sk_sp, which just wraps the ptr,
 *  effectively "adopting" it.
 *
 *  This function may be helpful while we convert callers from ptr-based to sk_sp-based parameters.
 */
template <typename T> sk_sp<T> sk_ref_sp(T* obj) {
    return sk_sp<T>(SkSafeRef(obj));
}

#endif

#endif