/art/adbconnection/ |
D | adbconnection.h |
    152 std::atomic<bool> shutting_down_;
    155 std::atomic<bool> agent_loaded_;
    158 std::atomic<bool> agent_listening_;
    162 std::atomic<bool> agent_has_socket_;
    164 std::atomic<bool> sent_agent_fds_;
    166 std::atomic<bool> performed_handshake_;
    170 std::atomic<uint32_t> next_ddm_id_;
|
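A minimal sketch of the pattern these fields suggest: boolean flags that one thread publishes and other threads poll, plus a counter bumped with fetch_add so each caller gets a unique DDM id. The class and method names below are illustrative, not ART's actual adbconnection API.

    #include <atomic>
    #include <cstdint>

    // Illustrative sketch only. One thread flips the flag with a release store;
    // readers use acquire loads so prior writes become visible to them.
    class ConnectionState {
     public:
      void MarkAgentLoaded() { agent_loaded_.store(true, std::memory_order_release); }
      bool IsAgentLoaded() const { return agent_loaded_.load(std::memory_order_acquire); }

      // Unique id generator: relaxed ordering suffices because callers only
      // need distinct values, not any ordering between them.
      uint32_t NextDdmId() { return next_ddm_id_.fetch_add(1, std::memory_order_relaxed); }

     private:
      std::atomic<bool> agent_loaded_{false};
      std::atomic<uint32_t> next_ddm_id_{1};
    };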
/art/test/1908-suspend-native-resume-self/ |
D | native_suspend_resume.cc |
    33 std::atomic<bool> done(false);
    34 std::atomic<bool> started(false);
    35 std::atomic<bool> resumed(false);
    36 std::atomic<bool> resuming(false);
|
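The suspend tests listed here and in the entries below all coordinate a native worker with the controlling thread through plain std::atomic<bool> flags. A self-contained sketch of that handshake, assuming default (seq_cst) ordering as the tests use:

    #include <atomic>
    #include <thread>

    std::atomic<bool> started(false);
    std::atomic<bool> done(false);

    // The worker announces that it is running, then spins until it is released.
    void Worker() {
      started.store(true);
      while (!done.load()) {
        std::this_thread::yield();
      }
    }

    int main() {
      std::thread worker(Worker);
      while (!started.load()) {  // wait until the worker has reported in
        std::this_thread::yield();
      }
      done.store(true);          // release the worker
      worker.join();
      return 0;
    }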
/art/test/1942-suspend-raw-monitor-exit/ |
D | native_suspend_monitor.cc |
    34 std::atomic<bool> should_pause(true);
    35 std::atomic<bool> paused(false);
    36 std::atomic<bool> done(false);
    37 std::atomic<bool> locked(false);
|
/art/test/1920-suspend-native-monitor/ |
D | native_suspend_monitor.cc |
    33 std::atomic<bool> should_pause(false);
    34 std::atomic<bool> paused(false);
    35 std::atomic<bool> done(false);
    36 std::atomic<bool> started(false);
|
/art/test/1921-suspend-native-recursive-monitor/ |
D | native_suspend_recursive_monitor.cc |
    33 std::atomic<bool> should_pause(false);
    34 std::atomic<bool> paused(false);
    35 std::atomic<bool> done(false);
    36 std::atomic<bool> started(false);
|
/art/test/1905-suspend-native/ |
D | native_suspend.cc | 33 std::atomic<bool> done(false); 34 std::atomic<bool> started(false);
|
/art/runtime/mirror/ |
D | dex_cache.h |
    77 static void Initialize(std::atomic<DexCachePair<T>>* dex_cache);
    100 static void Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache);
    118 using TypeDexCacheType = std::atomic<TypeDexCachePair>;
    121 using StringDexCacheType = std::atomic<StringDexCachePair>;
    124 using FieldDexCacheType = std::atomic<FieldDexCachePair>;
    127 using MethodDexCacheType = std::atomic<MethodDexCachePair>;
    130 using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
    451 static NativeDexCachePair<T> GetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
    455 static void SetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array,
    516 std::atomic<ConversionPair64>* target) { in AtomicLoadRelaxed16B()
    [all …]
|
D | dex_cache-inl.h |
    49 inline void DexCachePair<T>::Initialize(std::atomic<DexCachePair<T>>* dex_cache) { in Initialize()
    66 inline void NativeDexCachePair<T>::Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache) { in Initialize()
    276 NativeDexCachePair<T> DexCache::GetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array, in GetNativePair()
    279 auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array); in GetNativePair()
    284 auto* array = reinterpret_cast<std::atomic<ConversionPair32>*>(pair_array); in GetNativePair()
    291 void DexCache::SetNativePair(std::atomic<NativeDexCachePair<T>>* pair_array, in SetNativePair()
    295 auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array); in SetNativePair()
    299 auto* array = reinterpret_cast<std::atomic<ConversionPair32>*>(pair_array); in SetNativePair()
    309 inline void VisitDexCachePairs(std::atomic<DexCachePair<T>>* pairs, in VisitDexCachePairs()
|
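The dex cache rows above keep small {index, object} pairs inside a single std::atomic slot so a reader never observes a torn pair; the wider native pairs are additionally funneled through ConversionPair64/ConversionPair32 casts. A minimal sketch of the underlying idea, using a hypothetical Pair type rather than ART's DexCachePair:

    #include <atomic>
    #include <cstdint>
    #include <iostream>

    // Both halves live in one atomic object, so index and value always match.
    struct Pair {
      uint32_t index;
      uint32_t value;
    };

    std::atomic<Pair> slot{Pair{0, 0}};

    int main() {
      slot.store(Pair{7, 42});
      Pair p = slot.load();
      std::cout << p.index << " -> " << p.value << '\n';
      // An 8-byte pair is normally lock-free on 64-bit targets; wider pairs may
      // not be, which is why larger cache entries need a different representation.
      std::cout << "lock-free: " << slot.is_lock_free() << '\n';
      return 0;
    }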
D | var_handle.cc |
    305 std::atomic<T>* atom = reinterpret_cast<std::atomic<T>*>(addr); in Access()
    319 std::atomic<T>* atom = reinterpret_cast<std::atomic<T>*>(addr); in Access()
    344 std::atomic<T>* atom = reinterpret_cast<std::atomic<T>*>(addr); in Access()
    366 std::atomic<T>* atom = reinterpret_cast<std::atomic<T>*>(addr); in Access()
    388 std::atomic<T>* atom = reinterpret_cast<std::atomic<T>*>(addr); in Access()
    409 std::atomic<T>* atom = reinterpret_cast<std::atomic<T>*>(addr); in Access()
    426 std::atomic<T>* atom = reinterpret_cast<std::atomic<T>*>(addr); in Apply()
    437 std::atomic<T>* atom = reinterpret_cast<std::atomic<T>*>(addr); in Apply()
    475 std::atomic<T>* const atom = reinterpret_cast<std::atomic<T>*>(addr); in Access()
    504 std::atomic<T>* atom = reinterpret_cast<std::atomic<T>*>(addr); in Access()
    [all …]
|
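Every match in var_handle.cc has the same shape: a raw field address is reinterpreted as std::atomic<T>* and then accessed atomically. A hedged sketch of that shape, under the assumption (which the runtime has to guarantee elsewhere, e.g. via size and alignment checks) that std::atomic<T> shares T's size and representation; the helper name below is made up for illustration:

    #include <atomic>
    #include <cstdint>

    // Treat the memory at 'addr' as an atomic T and attempt a compare-and-set.
    // Only valid if std::atomic<T> matches T's size, alignment, and layout.
    template <typename T>
    bool CompareAndSetField(void* addr, T expected, T desired) {
      std::atomic<T>* atom = reinterpret_cast<std::atomic<T>*>(addr);
      return atom->compare_exchange_strong(expected, desired);
    }

    int main() {
      int32_t field = 5;  // stands in for a field embedded in an object
      bool swapped = CompareAndSetField<int32_t>(&field, 5, 9);
      return (swapped && field == 9) ? 0 : 1;
    }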
/art/test/1951-monitor-enter-no-suspend/ |
D | raw_monitor.cc |
    69 static std::atomic<bool> started(false);
    70 static std::atomic<bool> resumed(false);
    71 static std::atomic<bool> progress(false);
|
/art/test/136-daemon-jni-shutdown/ |
D | daemon_jni_shutdown.cc | 29 static volatile std::atomic<bool> vm_was_shutdown(false); 32 static std::atomic<int> barrier_count(kThreadCount + 1);
|
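The daemon-shutdown test pairs a shutdown flag with an atomic counter initialized to kThreadCount + 1, i.e. a simple countdown barrier. A sketch of such a barrier built on fetch_sub with a spin-then-yield wait; this illustrates the counter's role, not the test's actual logic:

    #include <atomic>
    #include <thread>
    #include <vector>

    constexpr int kThreadCount = 4;
    std::atomic<int> barrier_count(kThreadCount + 1);  // workers + the main thread

    // Each participant decrements once, then waits until everyone has arrived.
    void AwaitBarrier() {
      barrier_count.fetch_sub(1);
      while (barrier_count.load() > 0) {
        std::this_thread::yield();
      }
    }

    int main() {
      std::vector<std::thread> workers;
      for (int i = 0; i < kThreadCount; ++i) {
        workers.emplace_back(AwaitBarrier);
      }
      AwaitBarrier();  // main thread is the extra party counted above
      for (std::thread& t : workers) {
        t.join();
      }
      return 0;
    }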
/art/libartbase/base/ |
D | atomic.h |
    37 class PACKED(sizeof(T)) Atomic : public std::atomic<T> { in PACKED()
    39 Atomic<T>() : std::atomic<T>(T()) { } in PACKED()
    41 explicit Atomic<T>(T value) : std::atomic<T>(value) { } in PACKED()
|
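ART's Atomic<T> is a thin subclass of std::atomic<T> that mainly adds packing and a value-initializing default constructor. A simplified sketch without the PACKED attribute; the relaxed accessor names below are illustrative additions, not necessarily the class's real interface:

    #include <atomic>

    // Inherits every std::atomic<T> operation; only the constructors and the
    // explicitly named relaxed accessors are added here.
    template <typename T>
    class Atomic : public std::atomic<T> {
     public:
      Atomic() : std::atomic<T>(T()) {}
      explicit Atomic(T value) : std::atomic<T>(value) {}

      T LoadRelaxed() const { return this->load(std::memory_order_relaxed); }
      void StoreRelaxed(T desired) { this->store(desired, std::memory_order_relaxed); }
    };

    // Usage: Atomic<int> counter(0); counter.StoreRelaxed(counter.LoadRelaxed() + 1);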
D | aborting.h | 27 extern std::atomic<unsigned int> gAborting;
|
/art/openjdkjvmti/ |
D | alloc_manager.h |
    103 std::atomic<bool> allocations_paused_ever_ = false;
    104 std::atomic<art::Thread*> allocations_paused_thread_ = nullptr;
    105 std::atomic<bool> callback_enabled_ = false;
|
D | ti_allocator.h | 56 static std::atomic<jlong> allocated;
|
/art/runtime/javaheapprof/ |
D | javaheapsampler.h | 83 std::atomic<bool> enabled_; 86 std::atomic<int> p_sampling_interval_{4 * 1024};
|
/art/test/2031-zygote-compiled-frame-deopt/ |
D | native-wait.cc | 30 std::atomic<bool> native_waiting = false; 31 std::atomic<bool> native_wait = false;
|
/art/runtime/ |
D | monitor.h |
    325 std::atomic<size_t> num_waiters_;
    330 std::atomic<Thread*> owner_;
    383 std::atomic<Thread*> lock_owner_;  // *lock_owner_ may no longer exist!
    384 std::atomic<ArtMethod*> lock_owner_method_;
    385 std::atomic<uint32_t> lock_owner_dex_pc_;
    386 std::atomic<uintptr_t> lock_owner_sum_;
    389 std::atomic<Thread*> lock_owner_request_;
|
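Monitor keeps its owner and the diagnostic lock_owner_* fields in std::atomic so other threads can read them (e.g. for contention reporting) without holding the monitor itself. A sketch of the core idea of an atomic owner slot claimed by compare-and-swap; Thread here is a stand-in type and this is not ART's actual monitor algorithm:

    #include <atomic>

    struct Thread {};  // stand-in for art::Thread

    class OwnerSlot {
     public:
      // Claim ownership by swinging owner_ from nullptr to self.
      bool TryClaim(Thread* self) {
        Thread* expected = nullptr;
        return owner_.compare_exchange_strong(expected, self, std::memory_order_acquire);
      }
      // Only the current owner should call this; a real monitor would verify.
      void Release() { owner_.store(nullptr, std::memory_order_release); }
      Thread* Owner() const { return owner_.load(std::memory_order_relaxed); }

     private:
      std::atomic<Thread*> owner_{nullptr};
    };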
/art/test/2011-stack-walk-concurrent-instrument/ |
D | stack_walk_concurrent.cc | 30 std::atomic<bool> instrument_waiting = false; 31 std::atomic<bool> instrumented = false;
|
/art/libartbase/base/metrics/ |
D | metrics.h |
    277 std::atomic<value_t> value_;
    278 static_assert(std::atomic<value_t>::is_always_lock_free);
    324 std::atomic<count_t> count_;
    325 static_assert(std::atomic<count_t>::is_always_lock_free);
    387 std::array<std::atomic<value_t>, num_buckets_> buckets_;
    388 static_assert(std::atomic<value_t>::is_always_lock_free);
    430 std::atomic<T> value_;
|
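The metrics classes pair each atomic field with static_assert(std::atomic<...>::is_always_lock_free) so the build breaks rather than silently falling back to a mutex-based atomic. A minimal counter in that style, assuming relaxed increments because only the final tally is reported:

    #include <atomic>
    #include <cstdint>

    class Counter {
     public:
      // Relaxed is sufficient: increments from different threads only need to
      // be counted, not ordered against other memory operations.
      void Add(uint64_t delta) { value_.fetch_add(delta, std::memory_order_relaxed); }
      uint64_t Value() const { return value_.load(std::memory_order_relaxed); }

     private:
      using value_t = uint64_t;
      std::atomic<value_t> value_{0};
      static_assert(std::atomic<value_t>::is_always_lock_free);
    };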
/art/test/2029-contended-monitors/ |
D | info.txt | 2 atomic. Also checks j.u.c. increments. Can be configured to print execution
|
/art/runtime/jit/ |
D | jit_memory_region_test.cc |
    146 std::atomic<int32_t>* shared = reinterpret_cast<std::atomic<int32_t>*>( in TestUnmapWritableAfterFork()
    244 std::atomic<int32_t>* shared = reinterpret_cast<std::atomic<int32_t>*>( in TestMadviseDontFork()
    328 std::atomic<int32_t>* shared = reinterpret_cast<std::atomic<int32_t>*>( in TestFromSharedToPrivate()
|
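These JIT memory tests reinterpret a shared mapping as std::atomic<int32_t> so a parent and its forked child can signal each other. A stripped-down sketch of that trick using an anonymous MAP_SHARED page (Linux/POSIX only); the test fixture details are omitted:

    #include <atomic>
    #include <cstdint>
    #include <new>
    #include <sys/mman.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main() {
      // One shared page visible to both sides of the fork.
      void* addr = mmap(nullptr, sizeof(std::atomic<int32_t>), PROT_READ | PROT_WRITE,
                        MAP_SHARED | MAP_ANONYMOUS, -1, 0);
      if (addr == MAP_FAILED) return 1;
      // The tests cast the mapping directly; placement-new is used here for clarity.
      std::atomic<int32_t>* shared = new (addr) std::atomic<int32_t>(0);

      pid_t pid = fork();
      if (pid == 0) {                 // child: set the flag and exit
        shared->store(1);
        _exit(0);
      }
      while (shared->load() == 0) {}  // parent: wait for the child's store
      waitpid(pid, nullptr, 0);
      munmap(addr, sizeof(std::atomic<int32_t>));
      return 0;
    }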
/art/compiler/optimizing/ |
D | optimizing_compiler_stats.h | 183 for (std::atomic<uint32_t>& stat : compile_stats_) { in Reset() 189 std::atomic<uint32_t> compile_stats_[static_cast<size_t>(MethodCompilationStat::kLastStat)];
|
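The compiler statistics are a fixed array of relaxed atomic counters, one per MethodCompilationStat value, cleared by the Reset() loop shown above. A small sketch of that layout with a hypothetical Stat enum:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    enum class Stat : size_t { kCompiled, kInlined, kLastStat };

    class Stats {
     public:
      void Record(Stat stat) {
        counters_[static_cast<size_t>(stat)].fetch_add(1, std::memory_order_relaxed);
      }
      void Reset() {
        for (std::atomic<uint32_t>& counter : counters_) {
          counter = 0u;  // operator= performs an atomic store
        }
      }

     private:
      std::atomic<uint32_t> counters_[static_cast<size_t>(Stat::kLastStat)] = {};
    };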
/art/test/1339-dead-reference-safe/src/ |
D | DeadReferenceUnsafeTest.java | 17 import java.util.concurrent.atomic.AtomicInteger;
|
/art/test/2030-long-running-child/src/ |
D | Main.java | 16 import java.util.concurrent.atomic.AtomicInteger;
|