Searched refs: atomic_load (Results 1 - 25 of 45), sorted by relevance

/external/compiler-rt/lib/asan/
asan_stack.cc 27 return atomic_load(&malloc_context_size, memory_order_acquire);
/external/libcxx/test/std/utilities/memory/util.smartptr/util.smartptr.shared.atomic/
atomic_load.pass.cpp 23 // atomic_load(const shared_ptr<T>* p)
33 std::shared_ptr<int> q = std::atomic_load(&p);
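
The two hits above are from libcxx's test for the free-function std::atomic_load overload that operates on a shared_ptr object itself. A minimal self-contained sketch of that usage (the stored value is chosen here purely for illustration):

    #include <cassert>
    #include <memory>

    int main() {
      std::shared_ptr<int> p = std::make_shared<int>(3);
      // Free-function overload declared in <memory>: atomically reads the
      // shared_ptr object itself, not the int it points to. (C++11;
      // deprecated since C++20 in favour of std::atomic<std::shared_ptr<T>>.)
      std::shared_ptr<int> q = std::atomic_load(&p);
      assert(q.get() == p.get() && *q == 3);
    }
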
/external/boringssl/src/crypto/
refcount_c11.c 39 uint32_t expected = atomic_load(count);
51 uint32_t expected = atomic_load(count);
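
Both refcount_c11.c hits read the current count before a compare-exchange retry loop. A rough sketch of that load-then-CAS shape, written here with std::atomic rather than BoringSSL's C11 _Atomic types, and with an illustrative saturation policy rather than BoringSSL's actual overflow handling:

    #include <atomic>
    #include <cstdint>

    void refcount_inc(std::atomic<uint32_t>* count) {
      // Read the current value, then retry the compare-exchange until no
      // other thread has changed the count in between.
      uint32_t expected = count->load(std::memory_order_relaxed);
      for (;;) {
        if (expected == UINT32_MAX) return;  // saturate instead of overflowing
        if (count->compare_exchange_weak(expected, expected + 1,
                                         std::memory_order_relaxed)) {
          return;
        }
        // On failure compare_exchange_weak updated `expected`; loop and retry.
      }
    }
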
/external/libcxx/test/std/atomics/atomics.types.operations/atomics.types.operations.req/
atomic_load.pass.cpp 17 // atomic_load(const volatile atomic<T>* obj);
21 // atomic_load(const atomic<T>* obj);
34 assert(std::atomic_load(&t) == T(1));
37 assert(std::atomic_load(&vt) == T(2));
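
This test covers the free-function form on std::atomic<T> itself, in both its const and const volatile overloads. A minimal sketch mirroring the asserted lines:

    #include <atomic>
    #include <cassert>

    int main() {
      std::atomic<int> t(1);
      volatile std::atomic<int> vt(2);
      // Free-function form; equivalent to the member load() with
      // sequentially consistent ordering.
      assert(std::atomic_load(&t) == 1);
      assert(std::atomic_load(&vt) == 2);
    }
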
/external/compiler-rt/lib/sanitizer_common/
sanitizer_lfstack.h 33 return (atomic_load(&head_, memory_order_relaxed) & kPtrMask) == 0;
37 u64 cmp = atomic_load(&head_, memory_order_relaxed);
49 u64 cmp = atomic_load(&head_, memory_order_acquire);
sanitizer_mutex.h 44 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);
56 if (atomic_load(&state_, memory_order_relaxed) == 0
100 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
132 CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);
150 u32 cmp = atomic_load(&state_, memory_order_relaxed);
164 u32 prev = atomic_load(&state_, memory_order_acquire);
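
The sanitizer_mutex.h hits show the usual test-and-test-and-set shape: spin on a cheap relaxed load and only attempt the acquire-ordered exchange once the lock looks free. A simplified std::atomic sketch of that pattern (not the sanitizer's actual SpinMutex, which adds backoff and debug checks):

    #include <atomic>

    class SpinLock {
     public:
      void lock() {
        for (;;) {
          // Relaxed read first: avoid hammering the cache line with
          // read-modify-write operations while the lock is held.
          while (state_.load(std::memory_order_relaxed) != 0) {
            // spin; a real implementation would pause/back off here
          }
          // Acquire ordering so the critical section sees the previous
          // owner's writes.
          if (state_.exchange(1, std::memory_order_acquire) == 0) return;
        }
      }
      void unlock() { state_.store(0, std::memory_order_release); }

     private:
      std::atomic<unsigned> state_{0};
    };
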
sanitizer_persistent_allocator.h 37 uptr cmp = atomic_load(&region_pos, memory_order_acquire);
38 uptr end = atomic_load(&region_end, memory_order_acquire);
sanitizer_addrhashmap.h 176 uptr addr1 = atomic_load(&c->addr, memory_order_acquire);
184 if (atomic_load(&b->add, memory_order_relaxed)) {
186 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
189 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
205 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
217 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
221 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
245 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
278 CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0);
289 uptr addr1 = atomic_load(
[all...]
sanitizer_libignore.h 73 const uptr n = atomic_load(&loaded_count_, memory_order_acquire);
sanitizer_quarantine.h 58 uptr GetSize() const { return atomic_load(&max_size_, memory_order_acquire); }
89 uptr min_size = atomic_load(&min_size_, memory_order_acquire);
128 return atomic_load(&size_, memory_order_relaxed);
sanitizer_stackdepot.cc 40 atomic_load(&hash_and_use_count, memory_order_relaxed) & kHashMask;
92 return atomic_load(&node_->hash_and_use_count, memory_order_relaxed) &
142 uptr v = atomic_load(p, memory_order_consume);
sanitizer_allocator.cc 59 if (atomic_load(&internal_allocator_initialized, memory_order_acquire) == 0) {
61 if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==
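
The two sanitizer_allocator.cc hits are the two halves of a double-checked initialization: an acquire load on the fast path, then a relaxed re-check once the init lock is held. A generic sketch of that shape (names and the init step are illustrative, not the sanitizer's):

    #include <atomic>
    #include <mutex>

    static void DoExpensiveInit() { /* hypothetical one-time setup */ }

    static std::atomic<int> initialized{0};
    static std::mutex init_mu;

    void EnsureInitialized() {
      // Fast path: acquire pairs with the release store below, so a thread
      // that observes 1 also observes everything DoExpensiveInit() wrote.
      if (initialized.load(std::memory_order_acquire) == 0) {
        std::lock_guard<std::mutex> lock(init_mu);
        // Re-check under the lock; relaxed is enough because the mutex
        // already orders this thread after any earlier initializer.
        if (initialized.load(std::memory_order_relaxed) == 0) {
          DoExpensiveInit();
          initialized.store(1, std::memory_order_release);
        }
      }
    }
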
sanitizer_stackdepotbase.h 77 uptr cmp = atomic_load(p, memory_order_relaxed);
103 uptr v = atomic_load(p, memory_order_consume);
148 uptr v = atomic_load(p, memory_order_consume);
170 uptr s = atomic_load(p, memory_order_relaxed);
sanitizer_coverage_libcdep.cc 267 uptr size = atomic_load(&pc_array_size, memory_order_relaxed);
277 CHECK_EQ(atomic_load(&pc_array_index, memory_order_relaxed), 0);
296 uptr size = atomic_load(&pc_array_size, memory_order_relaxed);
358 uptr range_end = atomic_load(&pc_array_index, memory_order_relaxed);
395 s32 guard_value = atomic_load(atomic_guard, memory_order_relaxed);
402 if (idx >= atomic_load(&pc_array_index, memory_order_acquire))
405 atomic_load(&pc_array_size, memory_order_acquire));
427 atomic_load(&cc_array_size, memory_order_acquire));
431 CHECK_EQ(atomic_load(&atomic_callee_cache[0], memory_order_relaxed), caller);
507 return atomic_load(
[all...]
sanitizer_atomic_clang_other.h 25 INLINE typename T::Type atomic_load(  (function definition in namespace __sanitizer)
sanitizer_atomic_clang_x86.h 28 INLINE typename T::Type atomic_load(  (function definition in namespace __sanitizer)
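
These two headers are where __sanitizer's own atomic_load is defined (one generic clang version, one x86-specific). Purely as an illustration of the idea, not the real implementation, a load wrapper over the clang/gcc __atomic builtin might look like:

    #include <cstdint>

    // Illustrative only: forward an atomic load to the compiler builtin with
    // the requested memory order (__ATOMIC_RELAXED, __ATOMIC_ACQUIRE, ...).
    inline uint32_t my_atomic_load(const volatile uint32_t* p, int memorder) {
      return __atomic_load_n(p, memorder);
    }

    // Usage sketch: uint32_t v = my_atomic_load(&flag, __ATOMIC_ACQUIRE);
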
sanitizer_libignore.cc 80 const uptr idx = atomic_load(&loaded_count_, memory_order_relaxed);
sanitizer_tls_get_addr.cc 93 atomic_load(&number_of_live_dtls, memory_order_relaxed));
/external/compiler-rt/lib/tsan/rtl/
tsan_mutex.cc 221 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
233 if (atomic_load(&state_, memory_order_relaxed) == kUnlocked) {
263 prev = atomic_load(&state_, memory_order_acquire);
284 CHECK_NE(atomic_load(&state_, memory_order_relaxed), 0);
tsan_fd.cc 58 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)
64 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {
78 uptr l1 = atomic_load(pl1, memory_order_consume);
127 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
139 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
/external/compiler-rt/lib/dfsan/
dfsan.cc 152 label = atomic_load(table_ent, memory_order_acquire);
291 atomic_load(&__dfsan_last_label, memory_order_relaxed);
299 atomic_load(&__dfsan_last_label, memory_order_relaxed);
/external/compiler-rt/lib/lsan/
lsan_interceptors.cc 216 while ((tid = atomic_load(&p->tid, memory_order_acquire)) == 0)
245 while (atomic_load(&p.tid, memory_order_acquire) != 0)
/external/compiler-rt/lib/tsan/tests/rtl/
tsan_mutex.cc 169 int *val = (int *)atomic_load(singleton, memory_order_acquire);
215 uptr v = atomic_load(&flag, memory_order_acquire);
tsan_test_util_linux.cc 328 Event* ev = (Event*)atomic_load(&impl->event, memory_order_acquire);
348 CHECK_EQ(atomic_load(&event, memory_order_relaxed), 0);
350 while (atomic_load(&event, memory_order_acquire) != 0)
/external/clang/lib/Headers/
stdatomic.h 134 #define atomic_load(object) __c11_atomic_load(object, __ATOMIC_SEQ_CST)  (macro definition)
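
The clang stdatomic.h hit shows why a bare atomic_load in C11 code is always sequentially consistent: the macro hard-codes __ATOMIC_SEQ_CST, and atomic_load_explicit is the variant that takes an ordering. The same split exists in C++, sketched here for comparison:

    #include <atomic>

    std::atomic<int> x{0};

    // load() with no argument defaults to seq_cst, mirroring the C11
    // atomic_load macro; the explicit-ordering form mirrors
    // atomic_load_explicit.
    int read_default() { return x.load(); }
    int read_acquire() { return x.load(std::memory_order_acquire); }
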

Completed in 425 milliseconds
