Searched refs: old_state (Results 1 - 8 of 8) sorted by relevance

/bionic/libc/bionic/
H A Dpthread_detach.cpp40 ThreadJoinState old_state = THREAD_NOT_JOINED; local
41 while (old_state == THREAD_NOT_JOINED &&
42 !atomic_compare_exchange_weak(&thread->join_state, &old_state, THREAD_DETACHED)) {
45 if (old_state == THREAD_NOT_JOINED) {
47 } else if (old_state == THREAD_EXITED_NOT_JOINED) {
H A Dpthread_join.cpp44 ThreadJoinState old_state = THREAD_NOT_JOINED; local
45 while ((old_state == THREAD_NOT_JOINED || old_state == THREAD_EXITED_NOT_JOINED) &&
46 !atomic_compare_exchange_weak(&thread->join_state, &old_state, THREAD_JOINED)) {
49 if (old_state == THREAD_DETACHED || old_state == THREAD_JOINED) {
H A Dpthread_rwlock.cpp263 static inline __always_inline bool __can_acquire_read_lock(int old_state, argument
267 bool cannot_apply = __state_owned_by_writer(old_state) ||
268 (writer_nonrecursive_preferred && __state_have_pending_writers(old_state));
273 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed); local
275 while (__predict_true(__can_acquire_read_lock(old_state, rwlock->writer_nonrecursive_preferred))) {
277 int new_state = old_state + STATE_READER_COUNT_CHANGE_STEP;
281 if (__predict_true(atomic_compare_exchange_weak_explicit(&rwlock->state, &old_state, new_state,
302 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed); local
303 if (__can_acquire_read_lock(old_state, rwlock->writer_nonrecursive_preferred)) {
324 old_state
350 __can_acquire_write_lock(int old_state) argument
355 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed); local
380 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed); local
465 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed); local
[all...]
H A Dpthread_mutex.cpp277 uint16_t old_state = unlocked; local
278 if (__predict_true(atomic_compare_exchange_strong_explicit(&mutex->state, &old_state,
380 uint16_t old_state) {
384 if (MUTEX_COUNTER_BITS_WILL_OVERFLOW(old_state)) {
398 uint16_t old_state,
406 return __futex_wait_ex(&mutex->state, shared, old_state, rel_timeout);
415 return __futex_wait_ex(&mutex->state, shared, (owner_tid << 16) | old_state, rel_timeout);
421 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed); local
422 uint16_t mtype = (old_state & MUTEX_TYPE_MASK);
423 uint16_t shared = (old_state
379 __recursive_increment(pthread_mutex_internal_t* mutex, uint16_t old_state) argument
395 __recursive_or_errorcheck_mutex_wait( pthread_mutex_internal_t* mutex, uint16_t shared, uint16_t old_state, const timespec* rel_timeout) argument
512 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed); local
533 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed); local
577 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed); local
[all...]
H A Dpthread_exit.cpp94 ThreadJoinState old_state = THREAD_NOT_JOINED; local
95 while (old_state == THREAD_NOT_JOINED &&
96 !atomic_compare_exchange_weak(&thread->join_state, &old_state, THREAD_EXITED_NOT_JOINED)) {
99 if (old_state == THREAD_DETACHED) {
H A Dpthread_cond.cpp175 unsigned int old_state = atomic_load_explicit(&cond->state, memory_order_relaxed); local
178 int status = __futex_wait_ex(&cond->state, cond->process_shared(), old_state, rel_timeout_or_null);
/bionic/libc/private/
H A Dbionic_lock.h55 LockState old_state = Unlocked; local
56 if (__predict_true(atomic_compare_exchange_strong_explicit(&state, &old_state,
/bionic/libc/stdio/
H A Dstdio_ext.cpp77 int old_state = _EXT(fp)->_stdio_handles_locking ? FSETLOCKING_INTERNAL : FSETLOCKING_BYCALLER; local
79 return old_state;
88 return old_state;

Completed in 576 milliseconds