SkPixelRef.cpp revision 3ae4701fe8a55880da4d75510f2eb1b57729196b
/*
 * Copyright 2011 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkBitmapCache.h"
#include "SkMutex.h"
#include "SkPixelRef.h"
#include "SkTraceEvent.h"

//#define SK_SUPPORT_LEGACY_UNBALANCED_PIXELREF_LOCKCOUNT
//#define SK_TRACE_PIXELREF_LIFETIME

// A ring of pre-created mutexes, handed out round-robin to pixelrefs that
// don't supply their own. Sharding across several mutexes reduces contention
// versus a single global lock, without paying for one mutex per pixelref.
#ifdef SK_BUILD_FOR_WIN32
    // We don't have SK_BASE_MUTEX_INIT on Windows.

    // must be a power-of-2. undef to just use 1 mutex
    #define PIXELREF_MUTEX_RING_COUNT 32
    static SkBaseMutex gPixelRefMutexRing[PIXELREF_MUTEX_RING_COUNT];

#else
    static SkBaseMutex gPixelRefMutexRing[] = {
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,

        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,

        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,

        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
        SK_BASE_MUTEX_INIT, SK_BASE_MUTEX_INIT,
    };
    // must be a power-of-2. undef to just use 1 mutex
    #define PIXELREF_MUTEX_RING_COUNT SK_ARRAY_COUNT(gPixelRefMutexRing)

#endif

// Returns the next mutex from the ring. The power-of-2 count lets us wrap the
// monotonically increasing index with a cheap mask instead of a modulo.
static SkBaseMutex* get_default_mutex() {
    static int32_t gPixelRefMutexRingIndex;

    SkASSERT(SkIsPow2(PIXELREF_MUTEX_RING_COUNT));

    // atomic_inc might be overkill here. It may be fine if once in a while
    // we hit a race-condition and two subsequent calls get the same index...
    int index = sk_atomic_inc(&gPixelRefMutexRingIndex);
    return &gPixelRefMutexRing[index & (PIXELREF_MUTEX_RING_COUNT - 1)];
}

///////////////////////////////////////////////////////////////////////////////

// Produces a process-unique, never-zero generation ID. IDs advance by 2 so
// the low bit stays clear: getGenerationID() below reserves the low bit as a
// "this ID is unique to one pixelref" tag.
static uint32_t next_gen_id() {
    static uint32_t gNextGenID = 0;
    uint32_t genID;
    // Loop in case our global wraps around, as we never want to return a 0.
    do {
        genID = sk_atomic_fetch_add(&gNextGenID, 2u) + 2;  // Never set the low bit.
    } while (0 == genID);
    return genID;
}

///////////////////////////////////////////////////////////////////////////////

// Installs the mutex guarding fRec/fLockCount; NULL means "pick one from the
// default ring". Only called from the constructors.
void SkPixelRef::setMutex(SkBaseMutex* mutex) {
    if (NULL == mutex) {
        mutex = get_default_mutex();
    }
    fMutex = mutex;
}

// just need a > 0 value, so pick a funny one to aid in debugging
#define SKPIXELREF_PRELOCKED_LOCKCOUNT 123456789

// Canonicalizes the alpha type for the given color type (e.g. collapsing
// impossible combinations); asserts if the combination is outright invalid.
static SkImageInfo validate_info(const SkImageInfo& info) {
    SkAlphaType newAlphaType = info.alphaType();
    SkAssertResult(SkColorTypeValidateAlphaType(info.colorType(), info.alphaType(), &newAlphaType));
    return info.makeAlphaType(newAlphaType);
}

#ifdef SK_TRACE_PIXELREF_LIFETIME
    static int32_t gInstCounter;
#endif

SkPixelRef::SkPixelRef(const SkImageInfo& info)
    : fInfo(validate_info(info))
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    , fStableID(next_gen_id())
#endif

{
#ifdef SK_TRACE_PIXELREF_LIFETIME
    SkDebugf(" pixelref %d\n", sk_atomic_inc(&gInstCounter));
#endif
    this->setMutex(NULL);
    fRec.zero();
    fLockCount = 0;
    this->needsNewGenID();
    fMutability = kMutable;
    fPreLocked = false;
    fAddedToCache.store(false);
}

// Same as above, but lets the caller supply the mutex (NULL falls back to the
// default ring via setMutex).
SkPixelRef::SkPixelRef(const SkImageInfo& info, SkBaseMutex* mutex)
    : fInfo(validate_info(info))
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
    , fStableID(next_gen_id())
#endif
{
#ifdef SK_TRACE_PIXELREF_LIFETIME
    SkDebugf(" pixelref %d\n", sk_atomic_inc(&gInstCounter));
#endif
    this->setMutex(mutex);
    fRec.zero();
    fLockCount = 0;
    this->needsNewGenID();
    fMutability = kMutable;
    fPreLocked = false;
    fAddedToCache.store(false);
}

SkPixelRef::~SkPixelRef() {
#ifndef SK_SUPPORT_LEGACY_UNBALANCED_PIXELREF_LOCKCOUNT
    // At destruction the lock count must be balanced (0), or the pre-locked
    // sentinel which is never decremented.
    SkASSERT(SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount || 0 == fLockCount);
#endif

#ifdef SK_TRACE_PIXELREF_LIFETIME
    SkDebugf("~pixelref %d\n", sk_atomic_dec(&gInstCounter) - 1);
#endif
    // Fire listeners now, while the genID is still valid (see the comment on
    // callGenIDChangeListeners below).
    this->callGenIDChangeListeners();
}

// Clears the cached genID; a fresh one is lazily minted on the next
// getGenerationID() call.
void SkPixelRef::needsNewGenID() {
    fTaggedGenID.store(0);
    SkASSERT(!this->genIDIsUnique());  // This method isn't threadsafe, so the assert should be fine.
}

// Makes this pixelref share 'that' pixelref's generation ID. Both sides lose
// the "unique" low-bit tag, since the ID now belongs to two objects.
void SkPixelRef::cloneGenID(const SkPixelRef& that) {
    // This is subtle.  We must call that.getGenerationID() to make sure its genID isn't 0.
    uint32_t genID = that.getGenerationID();

    // Neither ID is unique any more.
    // (These & ~1u are actually redundant.  that.getGenerationID() just did it for us.)
    this->fTaggedGenID.store(genID & ~1u);
    that. fTaggedGenID.store(genID & ~1u);

    // This method isn't threadsafe, so these asserts should be fine.
    SkASSERT(!this->genIDIsUnique());
    SkASSERT(!that. genIDIsUnique());
}

// Debug-only invariant: index-8 pixels must come with a color table, and no
// other color type may have one.
static void validate_pixels_ctable(const SkImageInfo& info, const SkColorTable* ctable) {
    if (info.isEmpty()) {
        return; // can't require ctable if the dimensions are empty
    }
    if (kIndex_8_SkColorType == info.colorType()) {
        SkASSERT(ctable);
    } else {
        SkASSERT(NULL == ctable);
    }
}

// Marks this pixelref as permanently locked onto caller-provided pixels.
// The sentinel lock count makes lockPixels/unlockPixels no-ops thereafter.
void SkPixelRef::setPreLocked(void* pixels, size_t rowBytes, SkColorTable* ctable) {
#ifndef SK_IGNORE_PIXELREF_SETPRELOCKED
    SkASSERT(pixels);
    validate_pixels_ctable(fInfo, ctable);
    // only call me in your constructor, otherwise fLockCount tracking can get
    // out of sync.
    fRec.fPixels = pixels;
    fRec.fColorTable = ctable;
    fRec.fRowBytes = rowBytes;
    fLockCount = SKPIXELREF_PRELOCKED_LOCKCOUNT;
    fPreLocked = true;
#endif
}

// Increments fLockCount only on success
bool SkPixelRef::lockPixelsInsideMutex() {
    fMutex->assertHeld();

    // Only the first locker actually asks the subclass to produce pixels;
    // nested locks just bump the count.
    if (1 == ++fLockCount) {
        SkASSERT(fRec.isZero());
        if (!this->onNewLockPixels(&fRec)) {
            fRec.zero();
            fLockCount -= 1;    // we return fLockCount unchanged if we fail.
            return false;
        }
    }
    if (fRec.fPixels) {
        validate_pixels_ctable(fInfo, fRec.fColorTable);
        return true;
    }
    return false;
}

// For historical reasons, we always inc fLockCount, even if we return false.
// It would be nice to change this (it seems), and only inc if we actually succeed...
bool SkPixelRef::lockPixels() {
    SkASSERT(!fPreLocked || SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount);

    if (!fPreLocked) {
        // The begin/end pair brackets only the mutex acquisition, so traces
        // show time spent waiting for the lock, not time spent holding it.
        TRACE_EVENT_BEGIN0("skia", "SkPixelRef::lockPixelsMutex");
        SkAutoMutexAcquire ac(*fMutex);
        TRACE_EVENT_END0("skia", "SkPixelRef::lockPixelsMutex");
        SkDEBUGCODE(int oldCount = fLockCount;)
        bool success = this->lockPixelsInsideMutex();
        // lockPixelsInsideMutex only increments the count if it succeeds.
        SkASSERT(oldCount + (int)success == fLockCount);

        if (!success) {
            // For compatibility with SkBitmap calling lockPixels, we still want to increment
            // fLockCount even if we failed. If we updated SkBitmap we could remove this oddity.
            fLockCount += 1;
            return false;
        }
    }
    if (fRec.fPixels) {
        validate_pixels_ctable(fInfo, fRec.fColorTable);
        return true;
    }
    return false;
}

// Convenience overload: locks and, on success, copies out the lock record.
bool SkPixelRef::lockPixels(LockRec* rec) {
    if (this->lockPixels()) {
        *rec = fRec;
        return true;
    }
    return false;
}

void SkPixelRef::unlockPixels() {
    SkASSERT(!fPreLocked || SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount);

    // Pre-locked pixelrefs keep their pixels forever; nothing to do.
    if (!fPreLocked) {
        SkAutoMutexAcquire ac(*fMutex);

        SkASSERT(fLockCount > 0);
        if (0 == --fLockCount) {
            // don't call onUnlockPixels unless onLockPixels succeeded
            if (fRec.fPixels) {
                this->onUnlockPixels();
                fRec.zero();
            } else {
                SkASSERT(fRec.isZero());
            }
        }
    }
}

// Newer-style lock API: on success fills 'result' (including an unlock proc
// the caller must invoke). Currently only full-size requests are supported.
bool SkPixelRef::requestLock(const LockRequest& request, LockResult* result) {
    SkASSERT(result);
    if (request.fSize.isEmpty()) {
        return false;
    }
    // until we support subsets, we have to check this...
    if (request.fSize.width() != fInfo.width() || request.fSize.height() != fInfo.height()) {
        return false;
    }

    if (fPreLocked) {
        // Pre-locked pixels never go away, so no unlock proc is needed.
        result->fUnlockProc = NULL;
        result->fUnlockContext = NULL;
        result->fCTable = fRec.fColorTable;
        result->fPixels = fRec.fPixels;
        result->fRowBytes = fRec.fRowBytes;
        result->fSize.set(fInfo.width(), fInfo.height());
    } else {
        SkAutoMutexAcquire ac(*fMutex);
        if (!this->onRequestLock(request, result)) {
            return false;
        }
    }
    if (result->fPixels) {
        validate_pixels_ctable(fInfo, result->fCTable);
        return true;
    }
    return false;
}

bool SkPixelRef::lockPixelsAreWritable() const {
    return this->onLockPixelsAreWritable();
}

bool SkPixelRef::onLockPixelsAreWritable() const {
    return true;
}

// Lazily mints the generation ID. A freshly minted ID has its low bit set,
// tagging it as unique to this pixelref; the tag bit is masked off before
// returning, so callers always see an even ID.
uint32_t SkPixelRef::getGenerationID() const {
    uint32_t id = fTaggedGenID.load();
    if (0 == id) {
        uint32_t next = next_gen_id() | 1u;
        if (fTaggedGenID.compare_exchange(&id, next)) {
            id = next;  // There was no race or we won the race.  fTaggedGenID is next now.
        } else {
            // We lost a race to set fTaggedGenID. compare_exchange() filled id with the winner.
        }
        // We can't quite SkASSERT(this->genIDIsUnique()). It could be non-unique
        // if we got here via the else path (pretty unlikely, but possible).
    }
    return id & ~1u;  // Mask off bottom unique bit.
}

// Takes ownership of 'listener'; it is deleted immediately if there is no
// point tracking it (NULL, or our genID is already shared with another ref).
void SkPixelRef::addGenIDChangeListener(GenIDChangeListener* listener) {
    if (NULL == listener || !this->genIDIsUnique()) {
        // No point in tracking this if we're not going to call it.
        SkDELETE(listener);
        return;
    }
    *fGenIDChangeListeners.append() = listener;
}

// we need to be called *before* the genID gets changed or zerod
void SkPixelRef::callGenIDChangeListeners() {
    // We don't invalidate ourselves if we think another SkPixelRef is sharing our genID.
    if (this->genIDIsUnique()) {
        for (int i = 0; i < fGenIDChangeListeners.count(); i++) {
            fGenIDChangeListeners[i]->onChange();
        }

        // TODO: SkAtomic could add "old_value = atomic.xchg(new_value)" to make this clearer.
        if (fAddedToCache.load()) {
            SkNotifyBitmapGenIDIsStale(this->getGenerationID());
            fAddedToCache.store(false);
        }
    }
    // Listeners get at most one shot, so whether these triggered or not, blow them away.
    fGenIDChangeListeners.deleteAll();
}

// Call after mutating the pixels: fires (and discards) listeners, invalidates
// the genID, and gives the subclass a chance to react.
void SkPixelRef::notifyPixelsChanged() {
#ifdef SK_DEBUG
    if (this->isImmutable()) {
        SkDebugf("========== notifyPixelsChanged called on immutable pixelref");
    }
#endif
    this->callGenIDChangeListeners();
    this->needsNewGenID();
    this->onNotifyPixelsChanged();
}

// fInfo is logically const for the object's lifetime; alpha type is the one
// field allowed to change, hence the const_cast.
void SkPixelRef::changeAlphaType(SkAlphaType at) {
    *const_cast<SkImageInfo*>(&fInfo) = fInfo.makeAlphaType(at);
}

void SkPixelRef::setImmutable() {
    fMutability = kImmutable;
}
void SkPixelRef::setTemporarilyImmutable() {
    SkASSERT(fMutability != kImmutable);
    fMutability = kTemporarilyImmutable;
}

void SkPixelRef::restoreMutability() {
    SkASSERT(fMutability != kImmutable);
    fMutability = kMutable;
}

bool SkPixelRef::readPixels(SkBitmap* dst, const SkIRect* subset) {
    return this->onReadPixels(dst, subset);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

// Default implementations for the virtual hooks; subclasses override as needed.

bool SkPixelRef::onReadPixels(SkBitmap* dst, const SkIRect* subset) {
    return false;
}

void SkPixelRef::onNotifyPixelsChanged() { }

SkData* SkPixelRef::onRefEncodedData() {
    return NULL;
}

bool SkPixelRef::onGetYUV8Planes(SkISize sizes[3], void* planes[3], size_t rowBytes[3],
                                 SkYUVColorSpace* colorSpace) {
    return false;
}

size_t SkPixelRef::getAllocatedSizeInBytes() const {
    return 0;
}

// Unlock proc used by the default onRequestLock: undoes the lock and the ref
// taken below.
static void unlock_legacy_result(void* ctx) {
    SkPixelRef* pr = (SkPixelRef*)ctx;
    pr->unlockPixels();
    pr->unref();    // balancing the Ref in onRequestLock
}

// Default requestLock implementation, bridging to the legacy lockPixels path.
// Called with fMutex already held (see requestLock above).
bool SkPixelRef::onRequestLock(const LockRequest& request, LockResult* result) {
    if (!this->lockPixelsInsideMutex()) {
        return false;
    }

    result->fUnlockProc = unlock_legacy_result;
    result->fUnlockContext = SkRef(this);   // this is balanced in our fUnlockProc
    result->fCTable = fRec.fColorTable;
    result->fPixels = fRec.fPixels;
    result->fRowBytes = fRec.fRowBytes;
    result->fSize.set(fInfo.width(), fInfo.height());
    return true;
}