
Searched refs:atomic_load (Results 1 – 25 of 74) sorted by relevance


/external/compiler-rt/lib/sanitizer_common/
sanitizer_addrhashmap.h
176 uptr addr1 = atomic_load(&c->addr, memory_order_acquire); in acquire()
184 if (atomic_load(&b->add, memory_order_relaxed)) { in acquire()
186 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed); in acquire()
189 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
205 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
217 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed); in acquire()
221 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
245 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
278 CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0); in acquire()
289 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in release()
[all …]
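
The addrhashmap hits show a two-tier pattern: the lock-free probe loads each cell's key with memory_order_acquire (pairing with the release store that published it), while the probes that run after the bucket lock is taken drop to memory_order_relaxed, because the lock already provides the ordering. A minimal sketch of that idea with std::atomic (the types and names below are illustrative, not the real AddrHashMap):

    #include <atomic>
    #include <cstdint>
    #include <mutex>

    struct Cell { std::atomic<uintptr_t> addr{0}; int value = 0; };
    struct Bucket { std::mutex mtx; Cell cells[4]; };

    int *Lookup(Bucket *b, uintptr_t addr) {
      // Lock-free fast path: acquire pairs with the publishing release store.
      for (Cell &c : b->cells)
        if (c.addr.load(std::memory_order_acquire) == addr) return &c.value;
      // Slow path: under the bucket mutex a relaxed load is sufficient.
      std::lock_guard<std::mutex> lock(b->mtx);
      for (Cell &c : b->cells)
        if (c.addr.load(std::memory_order_relaxed) == addr) return &c.value;
      return nullptr;
    }
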
sanitizer_mutex.h
44 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1); in CheckLocked()
56 if (atomic_load(&state_, memory_order_relaxed) == 0 in LockSlow()
100 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked); in ~RWMutex()
132 CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked); in CheckLocked()
150 u32 cmp = atomic_load(&state_, memory_order_relaxed); in LockSlow()
164 u32 prev = atomic_load(&state_, memory_order_acquire); in ReadLockSlow()
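
These mutex hits illustrate the test-and-test-and-set idiom: the slow paths poll state_ with a cheap relaxed load and only attempt the acquiring operation once the lock looks free, which keeps the cache line in shared mode while spinning. A minimal sketch with std::atomic (the sanitizer atomic_load/atomic_exchange wrappers mirror C++11 atomics; this class is illustrative, not the real SpinMutex):

    #include <atomic>

    class SpinLock {
     public:
      void Lock() {
        for (;;) {
          // Read-only spin while held; relaxed, since no ordering is needed yet.
          while (state_.load(std::memory_order_relaxed) != 0) {}
          // Try to take the lock; acquire orders the critical section after it.
          if (state_.exchange(1, std::memory_order_acquire) == 0) return;
        }
      }
      void Unlock() { state_.store(0, std::memory_order_release); }
     private:
      std::atomic<int> state_{0};
    };
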
sanitizer_lfstack.h
33 return (atomic_load(&head_, memory_order_relaxed) & kPtrMask) == 0; in Empty()
37 u64 cmp = atomic_load(&head_, memory_order_relaxed); in Push()
49 u64 cmp = atomic_load(&head_, memory_order_acquire); in Pop()
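
The lock-free stack loads head_ relaxed in Push (the CAS revalidates it anyway) but acquire in Pop, so the popped node's contents are visible; as line 33 hints, the real head_ also packs an ABA generation counter above kPtrMask. A simplified Treiber-stack sketch without that counter (and therefore unsafe if nodes can be freed and reused concurrently):

    #include <atomic>

    struct Node { Node *next; int value; };

    class LFStack {
     public:
      void Push(Node *n) {
        Node *cmp = head_.load(std::memory_order_relaxed);  // CAS revalidates
        do {
          n->next = cmp;
        } while (!head_.compare_exchange_weak(cmp, n, std::memory_order_release,
                                              std::memory_order_relaxed));
      }
      Node *Pop() {
        // Acquire so the node's fields are visible once we own it.
        Node *cmp = head_.load(std::memory_order_acquire);
        while (cmp && !head_.compare_exchange_weak(cmp, cmp->next,
                                                   std::memory_order_acquire,
                                                   std::memory_order_acquire)) {}
        return cmp;
      }
     private:
      std::atomic<Node *> head_{nullptr};
    };
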
sanitizer_coverage_libcdep.cc
291 uptr size = atomic_load(&pc_array_size, memory_order_relaxed); in ReInit()
302 CHECK_EQ(atomic_load(&pc_array_index, memory_order_relaxed), 0); in ReInit()
321 uptr size = atomic_load(&pc_array_size, memory_order_relaxed); in Extend()
383 uptr range_end = atomic_load(&pc_array_index, memory_order_relaxed); in InitializeGuards()
420 s32 guard_value = atomic_load(atomic_guard, memory_order_relaxed); in Add()
427 if (idx >= atomic_load(&pc_array_index, memory_order_acquire)) in Add()
430 atomic_load(&pc_array_size, memory_order_acquire)); in Add()
453 atomic_load(&cc_array_size, memory_order_acquire)); in IndirCall()
457 CHECK_EQ(atomic_load(&atomic_callee_cache[0], memory_order_relaxed), caller); in IndirCall()
533 return atomic_load(&pc_array_index, memory_order_relaxed); in size()
[all …]
sanitizer_stackdepotbase.h
78 uptr cmp = atomic_load(p, memory_order_relaxed); in lock()
104 uptr v = atomic_load(p, memory_order_consume); in Put()
149 uptr v = atomic_load(p, memory_order_consume); in Get()
171 uptr s = atomic_load(p, memory_order_relaxed); in UnlockAll()
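
The stack-depot lines are notable for memory_order_consume: the bucket pointer is loaded with consume so that the dependent reads through it are ordered after the publishing store. A sketch of that lookup, written with acquire since mainstream compilers implement consume as acquire (the node layout is illustrative):

    #include <atomic>

    struct DepotNode { DepotNode *link; unsigned id; };

    DepotNode *Find(std::atomic<DepotNode *> *bucket, unsigned id) {
      // Acquire (standing in for consume) makes *s valid once s is seen.
      for (DepotNode *s = bucket->load(std::memory_order_acquire); s; s = s->link)
        if (s->id == id) return s;
      return nullptr;
    }
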
sanitizer_persistent_allocator.h
38 uptr cmp = atomic_load(&region_pos, memory_order_acquire); in tryAlloc()
39 uptr end = atomic_load(&region_end, memory_order_acquire); in tryAlloc()
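
tryAlloc is a lock-free bump-pointer fast path: acquire-load the cursor and the region end, bail out if the request does not fit, otherwise CAS the cursor forward. A sketch under those assumptions (the real allocator refills the region under a mutex when this returns null):

    #include <atomic>
    #include <cstdint>

    std::atomic<uintptr_t> region_pos{0};  // current allocation cursor
    std::atomic<uintptr_t> region_end{0};  // one past the usable region

    void *TryAlloc(uintptr_t size) {
      for (;;) {
        uintptr_t cmp = region_pos.load(std::memory_order_acquire);
        uintptr_t end = region_end.load(std::memory_order_acquire);
        if (cmp == 0 || cmp + size > end) return nullptr;  // needs a refill
        if (region_pos.compare_exchange_weak(cmp, cmp + size,
                                             std::memory_order_acq_rel))
          return reinterpret_cast<void *>(cmp);
      }
    }
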
sanitizer_quarantine.h
58 uptr GetSize() const { return atomic_load(&max_size_, memory_order_acquire); } in GetSize()
89 uptr min_size = atomic_load(&min_size_, memory_order_acquire); in Recycle()
130 return atomic_load(&size_, memory_order_relaxed); in Size()
sanitizer_stackdepot.cc
40 atomic_load(&hash_and_use_count, memory_order_relaxed) & kHashMask; in eq()
92 return atomic_load(&node_->hash_and_use_count, memory_order_relaxed) & in use_count()
142 uptr v = atomic_load(p, memory_order_consume); in StackDepotReverseMap()
sanitizer_allocator.cc
77 if (atomic_load(&internal_allocator_initialized, memory_order_acquire) == 0) {
79 if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==
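
Lines 77 and 79 are the two halves of double-checked initialization: an acquire load on the uncontended fast path, then a relaxed re-check once the init lock is held, since the lock itself orders the second check. A minimal sketch, assuming a std::mutex stands in for the sanitizer's internal spin mutex:

    #include <atomic>
    #include <mutex>

    std::atomic<int> internal_allocator_initialized{0};
    std::mutex init_mu;  // illustrative stand-in for the real lock

    void EnsureInitialized() {
      if (internal_allocator_initialized.load(std::memory_order_acquire) == 0) {
        std::lock_guard<std::mutex> lock(init_mu);
        if (internal_allocator_initialized.load(std::memory_order_relaxed) == 0) {
          // ... one-time setup of the internal allocator ...
          internal_allocator_initialized.store(1, std::memory_order_release);
        }
      }
    }
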
sanitizer_libignore.h
73 const uptr n = atomic_load(&loaded_count_, memory_order_acquire); in IsIgnored()
/external/libcxx/test/std/atomics/atomics.types.operations/atomics.types.operations.req/
atomic_load.pass.cpp
35 assert(std::atomic_load(&t) == T(1)); in operator ()()
38 assert(std::atomic_load(&vt) == T(2)); in operator ()()
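
This test exercises the free-function form of std::atomic_load, including its volatile overload; both are equivalent to calling load() with memory_order_seq_cst. A self-contained version of what the two assertions check:

    #include <atomic>
    #include <cassert>

    int main() {
      std::atomic<int> t(1);
      assert(std::atomic_load(&t) == 1);   // non-volatile overload

      volatile std::atomic<int> vt(2);
      assert(std::atomic_load(&vt) == 2);  // volatile overload
      return 0;
    }
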
/external/boringssl/src/crypto/
refcount_c11.c
39 uint32_t expected = atomic_load(count); in CRYPTO_refcount_inc()
51 uint32_t expected = atomic_load(count); in CRYPTO_refcount_dec_and_test_zero()
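
Both BoringSSL hits sit inside compare-and-swap loops: load the count, compute the new value, and retry if another thread raced; the increment saturates at the maximum so the counter can never wrap back to zero. A C++ paraphrase of that loop (the original uses C11 stdatomic on a CRYPTO_refcount_t):

    #include <atomic>
    #include <cstdint>

    void RefcountInc(std::atomic<uint32_t> *count) {
      uint32_t expected = count->load(std::memory_order_relaxed);
      while (expected != UINT32_MAX) {  // saturate rather than overflow
        if (count->compare_exchange_weak(expected, expected + 1,
                                         std::memory_order_relaxed))
          return;
        // expected was refreshed by the failed CAS; retry.
      }
    }
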
/external/compiler-rt/lib/tsan/rtl/
tsan_mutex.cc
224 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked); in ~Mutex()
236 if (atomic_load(&state_, memory_order_relaxed) == kUnlocked) { in Lock()
266 prev = atomic_load(&state_, memory_order_acquire); in ReadLock()
287 CHECK_NE(atomic_load(&state_, memory_order_relaxed), 0); in CheckLocked()
tsan_fd.cc
58 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) in ref()
64 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) { in unref()
78 uptr l1 = atomic_load(pl1, memory_order_consume); in fddesc()
133 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed); in FdOnFork()
145 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed); in FdLocation()
/external/oj-libjdwp/src/share/back/
vmDebug.c
64 return transport_is_open() && atomic_load(&hasSeenDebuggerActivity); in isDebuggerConnected()
87 jlong last_time = atomic_load(&lastDebuggerActivity); in VMDebug_lastDebuggerActivity()
/external/compiler-rt/lib/esan/
esan_sideline_linux.cpp
48 if (atomic_load(&Thread->SidelineExit, memory_order_relaxed) != 0) in handleSidelineSignal()
91 while (atomic_load(&TheThread->SidelineExit, memory_order_relaxed) == 0) in runSideline()
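
The sideline thread polls its exit flag with a relaxed load: the flag is a standalone signal, and any ordering with other data comes from the subsequent join, so the relaxed order is enough here. In miniature:

    #include <atomic>

    std::atomic<int> SidelineExit{0};

    void RunSideline() {
      while (SidelineExit.load(std::memory_order_relaxed) == 0) {
        // ... periodic sampling work ...
      }
    }
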
/external/compiler-rt/lib/dfsan/
dfsan.cc
182 label = atomic_load(table_ent, memory_order_acquire); in __dfsan_union()
321 atomic_load(&__dfsan_last_label, memory_order_relaxed); in dfsan_get_label_count()
329 atomic_load(&__dfsan_last_label, memory_order_relaxed); in dfsan_dump_labels()
/external/compiler-rt/lib/asan/
asan_thread.h
100 return !atomic_load(&stack_switching_, memory_order_relaxed) && in has_fake_stack()
107 if (atomic_load(&stack_switching_, memory_order_relaxed)) in fake_stack()
asan_stack.cc
27 return atomic_load(&malloc_context_size, memory_order_acquire); in GetMallocContextSize()
asan_thread.cc
125 if (atomic_load(&stack_switching_, memory_order_relaxed)) { in StartSwitchFiber()
145 if (!atomic_load(&stack_switching_, memory_order_relaxed)) { in FinishSwitchFiber()
163 if (!atomic_load(&stack_switching_, memory_order_acquire)) in GetStackBounds()
asan_allocator.cc
276 options->min_redzone = atomic_load(&min_redzone, memory_order_acquire); in GetOptions()
277 options->max_redzone = atomic_load(&max_redzone, memory_order_acquire); in GetOptions()
280 atomic_load(&alloc_dealloc_mismatch, memory_order_acquire); in GetOptions()
293 u32 min_rz = atomic_load(&min_redzone, memory_order_acquire); in ComputeRZLog()
294 u32 max_rz = atomic_load(&max_redzone, memory_order_acquire); in ComputeRZLog()
525 if (atomic_load(&alloc_dealloc_mismatch, memory_order_acquire)) { in Deallocate()
/external/libcxx/test/std/utilities/memory/util.smartptr/util.smartptr.shared.atomic/
atomic_load.pass.cpp
36 std::shared_ptr<int> q = std::atomic_load(&p); in main()
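
Here std::atomic_load (declared in <memory> for shared_ptr) takes an atomic snapshot of the pointer and its control block; since C++20 this overload set is deprecated in favor of std::atomic<std::shared_ptr<T>>. A self-contained version of the tested line:

    #include <cassert>
    #include <memory>

    int main() {
      std::shared_ptr<int> p = std::make_shared<int>(3);
      std::shared_ptr<int> q = std::atomic_load(&p);  // atomic copy of p
      assert(*q == 3);
      return 0;
    }
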
/external/llvm/test/CodeGen/PowerPC/
atomic-2.ll
93 define i64 @atomic_load(i64* %mem) nounwind {
95 ; CHECK: @atomic_load
/external/compiler-rt/lib/tsan/tests/rtl/
tsan_mutex.cc
169 int *val = (int *)atomic_load(singleton, memory_order_acquire); in singleton_thread()
215 uptr v = atomic_load(&flag, memory_order_acquire); in TEST()
/external/compiler-rt/lib/lsan/
lsan_interceptors.cc
221 while ((tid = atomic_load(&p->tid, memory_order_acquire)) == 0) in __lsan_thread_start_func()
258 while (atomic_load(&p.tid, memory_order_acquire) != 0) in INTERCEPTOR()
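
The lsan hits implement a thread-creation handshake: one side publishes a tid with a release store, and the other spins on an acquire load until the value changes, so the spinner also sees everything written before the publication. A sketch of that handshake, using std::thread in place of the intercepted pthread_create:

    #include <atomic>
    #include <thread>

    std::atomic<int> tid{0};

    void ChildBody() {
      // Wait until the parent has registered us and published a nonzero tid.
      while (tid.load(std::memory_order_acquire) == 0)
        std::this_thread::yield();
      // ... run with the registered tid ...
    }

    int main() {
      std::thread child(ChildBody);
      tid.store(42, std::memory_order_release);  // publish after registration
      child.join();
      return 0;
    }
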
