Searched refs:atomic_load_explicit (Results 1 – 10 of 10) sorted by relevance
/bionic/libc/bionic/
pthread_key.cpp
     80  uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);  in pthread_key_clean_all()
     90  atomic_load_explicit(&key_map[i].key_destructor, memory_order_relaxed));  in pthread_key_clean_all()
     95  if (atomic_load_explicit(&key_map[i].seq, memory_order_relaxed) != seq) {  in pthread_key_clean_all()
    120  uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);  in pthread_key_create()
    142  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);  in pthread_key_delete()
    156  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);  in pthread_getspecific()
    174  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);  in pthread_setspecific()
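
These pthread_key hits share one pattern: read the slot's sequence counter with a relaxed load, act on the slot, then re-read the counter to notice a concurrent pthread_key_delete()/pthread_key_create(). A minimal sketch of that check; key_slot, run_destructor_if_current and the seq encoding are simplifications for illustration, not bionic's actual definitions:

    #include <stdatomic.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef void (*key_destructor_t)(void*);

    struct key_slot {
      atomic_uintptr_t seq;                 /* bumped on every create/delete (assumed encoding) */
      _Atomic key_destructor_t destructor;  /* per-key destructor, may be NULL */
    };

    /* Run the destructor for 'value' only if the slot's seq is unchanged across
     * the reads, i.e. the key was not deleted or recreated while we looked. */
    static void run_destructor_if_current(struct key_slot* slot, void* value) {
      uintptr_t seq = atomic_load_explicit(&slot->seq, memory_order_relaxed);
      key_destructor_t dtor = atomic_load_explicit(&slot->destructor, memory_order_relaxed);
      if (dtor == NULL) return;
      if (atomic_load_explicit(&slot->seq, memory_order_relaxed) != seq) return;  /* key changed */
      dtor(value);
    }
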
pthread_once.cpp
     54  int old_value = atomic_load_explicit(once_control_ptr, memory_order_acquire);  in pthread_once()
     83  old_value = atomic_load_explicit(once_control_ptr, memory_order_acquire);  in pthread_once()
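
The two pthread_once hits are the fast path of a double-checked once: an acquire load so that, once the initializer's completion has been published with a release store, later callers also see everything the init routine wrote. A minimal sketch assuming a plain three-state int and a spin wait instead of bionic's futex-based slow path:

    #include <stdatomic.h>

    #define ONCE_NOT_STARTED 0
    #define ONCE_IN_PROGRESS 1
    #define ONCE_DONE        2

    static void once_sketch(atomic_int* once_control, void (*init_routine)(void)) {
      /* Fast path: acquire pairs with the release store below, so the caller
       * observes everything init_routine wrote before DONE was published. */
      if (atomic_load_explicit(once_control, memory_order_acquire) == ONCE_DONE) return;

      int expected = ONCE_NOT_STARTED;
      if (atomic_compare_exchange_strong_explicit(once_control, &expected, ONCE_IN_PROGRESS,
                                                  memory_order_acquire, memory_order_acquire)) {
        init_routine();
        atomic_store_explicit(once_control, ONCE_DONE, memory_order_release);
      } else {
        /* Another thread is initializing; spin until it publishes DONE
         * (the real implementation waits on a futex instead of spinning). */
        while (atomic_load_explicit(once_control, memory_order_acquire) != ONCE_DONE) {
        }
      }
    }
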
system_properties.cpp
    344  uint_least32_t off = atomic_load_explicit(off_p, memory_order_consume);  in to_prop_bt()
    349  uint_least32_t off = atomic_load_explicit(off_p, memory_order_consume);  in to_prop_info()
    385  uint_least32_t left_offset = atomic_load_explicit(&current->left, memory_order_relaxed);  in find_prop_bt()
    401  … uint_least32_t right_offset = atomic_load_explicit(&current->right, memory_order_relaxed);  in find_prop_bt()
    439  … uint_least32_t children_offset = atomic_load_explicit(&current->children, memory_order_relaxed);  in find_property()
    465  uint_least32_t prop_offset = atomic_load_explicit(&current->prop, memory_order_relaxed);  in find_property()
    552  uint_least32_t left_offset = atomic_load_explicit(&trie->left, memory_order_relaxed);  in foreach_property()
    558  uint_least32_t prop_offset = atomic_load_explicit(&trie->prop, memory_order_relaxed);  in foreach_property()
    565  uint_least32_t children_offset = atomic_load_explicit(&trie->children, memory_order_relaxed);  in foreach_property()
    571  uint_least32_t right_offset = atomic_load_explicit(&trie->right, memory_order_relaxed);  in foreach_property()
    [all …]
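
system_properties stores its trie as 32-bit offsets into a shared memory area rather than raw pointers; these hits load an offset and turn it back into a pointer, using memory_order_consume where the reader goes on to dereference a freshly published node. A rough sketch of that offset-to-pointer step; the prop_bt fields and prop_area_base here are simplified stand-ins for the real layout:

    #include <stdatomic.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Simplified trie node: children/left/right/prop are offsets into the
     * shared area, 0 meaning "no node". */
    typedef struct prop_bt {
      atomic_uint_least32_t left;
      atomic_uint_least32_t right;
      atomic_uint_least32_t children;
      atomic_uint_least32_t prop;
    } prop_bt;

    /* Base of the mapped property area (set up elsewhere in the real code). */
    static char* prop_area_base;

    static prop_bt* to_prop_bt(atomic_uint_least32_t* off_p) {
      /* consume: the data dependency from 'off' to the dereference is enough to
       * see the node's initialized contents once its offset has been published. */
      uint_least32_t off = atomic_load_explicit(off_p, memory_order_consume);
      return off ? (prop_bt*)(prop_area_base + off) : NULL;
    }
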
__cxa_guard.cpp
     82  int old_value = atomic_load_explicit(&gv->state, memory_order_relaxed);  in __cxa_guard_acquire()
    113  old_value = atomic_load_explicit(&gv->state, memory_order_relaxed);  in __cxa_guard_acquire()
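
__cxa_guard_acquire is the guard for C++ static-local initialization: a relaxed load to look at the current state, then a CAS to claim the initialization, or a wait if another thread got there first. A simplified sketch that spins where the real code waits on a futex; the three state constants are assumptions, not the actual guard encoding:

    #include <stdatomic.h>
    #include <stdbool.h>

    #define GUARD_UNINITIALIZED 0
    #define GUARD_INITIALIZING  1
    #define GUARD_INITIALIZED   2

    /* Returns true if the caller should run the static initializer. */
    static bool guard_acquire_sketch(atomic_int* state) {
      int old_value = atomic_load_explicit(state, memory_order_relaxed);
      for (;;) {
        if (old_value == GUARD_INITIALIZED) {
          atomic_thread_fence(memory_order_acquire);  /* see the constructed object */
          return false;
        }
        if (old_value == GUARD_UNINITIALIZED &&
            atomic_compare_exchange_weak_explicit(state, &old_value, GUARD_INITIALIZING,
                                                  memory_order_acquire, memory_order_relaxed)) {
          return true;  /* this thread runs the constructor, then stores GUARD_INITIALIZED */
        }
        /* Another thread is initializing, or the CAS lost a race: re-read and retry.
         * (The real code blocks on a futex instead of spinning.) */
        old_value = atomic_load_explicit(state, memory_order_relaxed);
      }
    }
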
pthread_rwlock.cpp
    257  if (atomic_load_explicit(&rwlock->state, memory_order_relaxed) != 0) {  in pthread_rwlock_destroy()
    273  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_tryrdlock()
    292  if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) == __get_thread()->tid) {  in __pthread_rwlock_timedrdlock()
    302  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_timedrdlock()
    355  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_trywrlock()
    371  if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) == __get_thread()->tid) {  in __pthread_rwlock_timedwrlock()
    380  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_timedwrlock()
    465  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in pthread_rwlock_unlock()
    467  if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) != __get_thread()->tid) {  in pthread_rwlock_unlock()
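
The rwlock hits start every lock/unlock attempt from a relaxed load of the packed state word (the CAS that follows supplies the real ordering), plus relaxed loads of writer_tid to catch a thread locking against itself. A cut-down tryrdlock, assuming state is just a reader count with a negative value meaning writer-held, without bionic's pending-waiter flags:

    #include <stdatomic.h>
    #include <stdbool.h>

    /* Simplified: state >= 0 is the number of readers, negative means writer-held. */
    static bool try_read_lock_sketch(atomic_int* state) {
      int old_state = atomic_load_explicit(state, memory_order_relaxed);
      while (old_state >= 0) {
        /* acquire on success so the reader sees writes made before the last unlock. */
        if (atomic_compare_exchange_weak_explicit(state, &old_state, old_state + 1,
                                                  memory_order_acquire, memory_order_relaxed)) {
          return true;
        }
        /* old_state was refreshed by the failed CAS; retry while no writer holds it. */
      }
      return false;  /* writer currently holds the lock */
    }
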
semaphore.cpp
    100  return (atomic_load_explicit(sem_count_ptr, memory_order_relaxed) & SEMCOUNT_SHARED_MASK);  in SEM_GET_SHARED()
    144  unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);  in __sem_dec()
    164  unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);  in __sem_trydec()
    188  unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);  in __sem_inc()
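
All four semaphore hits seed a CAS loop with a relaxed load of the packed count word. A trywait-style sketch that ignores the shared flag bionic packs into that word:

    #include <stdatomic.h>
    #include <stdbool.h>

    /* Decrement the semaphore if it is currently positive; never blocks. */
    static bool sem_trydec_sketch(atomic_uint* sem_count_ptr) {
      unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);
      do {
        if (old_value == 0) {
          return false;  /* nothing to take */
        }
        /* acquire so the waiter sees whatever the poster published before sem_post. */
      } while (!atomic_compare_exchange_weak_explicit(sem_count_ptr, &old_value, old_value - 1,
                                                      memory_order_acquire, memory_order_relaxed));
      return true;
    }

The relaxed initial load is only a hint: the compare-and-exchange re-validates the value and provides the acquire ordering on success, which is why these loops (and the mutex/rwlock ones above) can all start from memory_order_relaxed.
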
pthread_mutex.cpp
    414  uint32_t owner_tid = atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed);  in __recursive_or_errorcheck_mutex_wait()
    421  uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in __pthread_mutex_lock_with_timeout()
    432  if (tid == atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed)) {  in __pthread_mutex_lock_with_timeout()
    499  old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in __pthread_mutex_lock_with_timeout()
    512  uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in pthread_mutex_lock()
    533  uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in pthread_mutex_unlock()
    545  if ( tid != atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed) ) {  in pthread_mutex_unlock()
    577  uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in pthread_mutex_trylock()
    591  if (tid == atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed)) {  in pthread_mutex_trylock()
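
Here the relaxed loads do two jobs: seed the state-word CAS loops, and read owner_tid for the recursive/errorcheck ownership tests (only the thread that already holds the mutex can see its own tid there, so relaxed is enough). A cut-down trylock for a plain non-recursive mutex; the two state constants are simplifications of bionic's packed state word:

    #include <errno.h>
    #include <stdatomic.h>

    #define MUTEX_UNLOCKED 0
    #define MUTEX_LOCKED   1

    /* Returns 0 on success, EBUSY if already held. */
    static int mutex_trylock_sketch(atomic_ushort* state) {
      unsigned short old_state = atomic_load_explicit(state, memory_order_relaxed);
      if (old_state != MUTEX_UNLOCKED) {
        return EBUSY;
      }
      unsigned short expected = MUTEX_UNLOCKED;
      /* acquire on success: the new owner sees everything the previous owner
       * wrote before its release in pthread_mutex_unlock. */
      if (atomic_compare_exchange_strong_explicit(state, &expected, MUTEX_LOCKED,
                                                  memory_order_acquire, memory_order_relaxed)) {
        return 0;
      }
      return EBUSY;
    }
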
pthread_cond.cpp
    111  return COND_IS_SHARED(atomic_load_explicit(&state, memory_order_relaxed));  in process_shared()
    115  return COND_GET_CLOCK(atomic_load_explicit(&state, memory_order_relaxed));  in get_clock()
    175  unsigned int old_state = atomic_load_explicit(&cond->state, memory_order_relaxed);  in __pthread_cond_timedwait_relative()
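
The condition-variable state is a single word with the process-shared flag and clock choice packed into its low bits; these hits are relaxed loads that merely unpack those bits. A sketch with an assumed bit layout (the mask names and values here are illustrative, not bionic's):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <time.h>

    /* Assumed layout: bit 0 = process-shared, bit 1 = use CLOCK_MONOTONIC,
     * remaining bits = wakeup sequence counter. */
    #define COND_SHARED_MASK 0x1u
    #define COND_CLOCK_MASK  0x2u

    static bool cond_is_shared(atomic_uint* state) {
      return (atomic_load_explicit(state, memory_order_relaxed) & COND_SHARED_MASK) != 0;
    }

    static clockid_t cond_clock(atomic_uint* state) {
      return (atomic_load_explicit(state, memory_order_relaxed) & COND_CLOCK_MASK)
                 ? CLOCK_MONOTONIC : CLOCK_REALTIME;
    }
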
/bionic/tests/
stdatomic_test.cpp
    100  ASSERT_EQ(123, atomic_load_explicit(&i, memory_order_relaxed));  in TEST()
    207  yval = atomic_load_explicit(&a->y, memory_order_acquire);  in reader()
    208  zval = atomic_load_explicit(&a->z, memory_order_relaxed);  in reader()
    209  xval = atomic_load_explicit(&a->x, memory_order_relaxed);  in reader()
    247  EXPECT_EQ(atomic_load_explicit(&a.x, memory_order_consume), BIG + 1);  in TEST()
    248  EXPECT_EQ(atomic_load_explicit(&a.y, memory_order_seq_cst), BIG + 1);  in TEST()
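
The test's reader does an acquire load of one field and relaxed loads of the others to verify that a writer's release store publishes everything written before it. A self-contained message-passing version of the same check (names data/ready are mine, not the test's):

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_int data;   /* payload, written before the flag is released */
    static atomic_int ready;  /* flag: release-stored by writer, acquire-loaded by reader */

    static void* writer(void* arg) {
      (void)arg;
      atomic_store_explicit(&data, 123, memory_order_relaxed);
      atomic_store_explicit(&ready, 1, memory_order_release);  /* publish */
      return NULL;
    }

    static void* reader(void* arg) {
      (void)arg;
      /* Spin until the flag is visible; acquire pairs with the release above. */
      while (atomic_load_explicit(&ready, memory_order_acquire) == 0) {
      }
      /* Relaxed is now enough: the acquire/release pair already ordered 'data'. */
      printf("reader saw %d\n", atomic_load_explicit(&data, memory_order_relaxed));
      return NULL;
    }

    int main(void) {
      pthread_t w, r;
      pthread_create(&w, NULL, writer, NULL);
      pthread_create(&r, NULL, reader, NULL);
      pthread_join(w, NULL);
      pthread_join(r, NULL);
      return 0;
    }
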
/bionic/libc/include/
stdatomic.h
     75  using std::atomic_load_explicit;
    423  #define atomic_load_explicit(object, order) \  macro
    448  #define atomic_load_explicit(object, order) \  macro
    498  #define atomic_load_explicit(object, order) \  macro
    531  atomic_load_explicit(object, memory_order_seq_cst)
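
The header exposes atomic_load_explicit three different ways depending on how it is compiled (the C++ <atomic> using-declaration, then two builtin-backed macro definitions), and the last hit shows that plain atomic_load is simply the _explicit form with memory_order_seq_cst. A small demo of that equivalence from the caller's side:

    #include <assert.h>
    #include <stdatomic.h>

    int main(void) {
      atomic_int counter = 123;
      /* The plain form defaults to seq_cst, so these two reads are equivalent. */
      int a = atomic_load(&counter);
      int b = atomic_load_explicit(&counter, memory_order_seq_cst);
      assert(a == b && a == 123);
      return 0;
    }
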