Searched refs:lock_ (Results 1 – 25 of 39) sorted by relevance

/art/runtime/jit/
jit_code_cache.h 97 size_t CodeCacheSize() REQUIRES(!lock_);
100 size_t DataCacheSize() REQUIRES(!lock_);
104 REQUIRES(!lock_);
108 REQUIRES(!lock_);
116 REQUIRES(!lock_);
120 REQUIRES(!lock_);
124 REQUIRES(!lock_);
148 REQUIRES(!lock_);
154 bool ContainsMethod(ArtMethod* method) REQUIRES(!lock_);
157 const void* GetJniStubCode(ArtMethod* method) REQUIRES(!lock_);
[all …]
jit.h 72 void DumpInfo(std::ostream& os) REQUIRES(!lock_);
77 REQUIRES(!lock_)
138 void DumpForSigQuit(std::ostream& os) REQUIRES(!lock_);
195 Histogram<uint64_t> memory_use_ GUARDED_BY(lock_);
196 Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
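
The header hits above all follow the same Clang thread-safety idiom used throughout ART: data members are GUARDED_BY(lock_), public methods that take the lock themselves are annotated REQUIRES(!lock_) (the caller must not already hold it), and internal helpers are annotated REQUIRES(lock_) (the caller must hold it). A minimal self-contained sketch of that idiom, with hypothetical names and stand-in macros (ART's own Mutex and MutexLock, which also take a Thread*, are declared in runtime/base/mutex.h), compiled with clang++ -Wthread-safety:

#include <cstddef>

// Stand-ins for ART's annotation macros.
#define CAPABILITY(x)     __attribute__((capability(x)))
#define SCOPED_CAPABILITY __attribute__((scoped_lockable))
#define GUARDED_BY(x)     __attribute__((guarded_by(x)))
#define REQUIRES(...)     __attribute__((requires_capability(__VA_ARGS__)))
#define ACQUIRE(...)      __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...)      __attribute__((release_capability(__VA_ARGS__)))

// Trivially annotated mutex, standing in for ART's Mutex.
class CAPABILITY("mutex") Mutex {
 public:
  void Lock() ACQUIRE() {}
  void Unlock() RELEASE() {}
};

// RAII guard, standing in for ART's MutexLock (which also takes a Thread*).
class SCOPED_CAPABILITY MutexLock {
 public:
  explicit MutexLock(Mutex& mu) ACQUIRE(mu) : mu_(mu) { mu_.Lock(); }
  ~MutexLock() RELEASE() { mu_.Unlock(); }
 private:
  Mutex& mu_;
};

// Hypothetical class written in the same shape as the headers above.
class Counter {
 public:
  // REQUIRES(!lock_): caller must NOT already hold lock_
  // (negative requirements are checked with -Wthread-safety-negative).
  void Increment() REQUIRES(!lock_) {
    MutexLock mu(lock_);
    IncrementLocked();
  }

  size_t Get() REQUIRES(!lock_) {
    MutexLock mu(lock_);
    return count_;
  }

 private:
  // REQUIRES(lock_): caller must already hold lock_.
  void IncrementLocked() REQUIRES(lock_) { ++count_; }

  Mutex lock_;
  size_t count_ GUARDED_BY(lock_) = 0;  // only accessed with lock_ held
};
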
/art/runtime/
barrier.h 47 void Pass(Thread* self) REQUIRES(!lock_);
50 void Wait(Thread* self) REQUIRES(!lock_);
61 void Increment(Thread* self, int delta) REQUIRES(!lock_);
65 bool Increment(Thread* self, int delta, uint32_t timeout_ms) REQUIRES(!lock_);
69 void Init(Thread* self, int count) REQUIRES(!lock_);
71 int GetCount(Thread* self) REQUIRES(!lock_);
74 void SetCountLocked(Thread* self, int count) REQUIRES(lock_);
77 int count_ GUARDED_BY(lock_);
79 Mutex lock_ ACQUIRED_AFTER(Locks::abort_lock_);
80 ConditionVariable condition_ GUARDED_BY(lock_);
linear_alloc.h 31 void* Alloc(Thread* self, size_t size) REQUIRES(!lock_);
32 void* AllocAlign16(Thread* self, size_t size) REQUIRES(!lock_);
35 void* Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) REQUIRES(!lock_);
39 T* AllocArray(Thread* self, size_t elements) REQUIRES(!lock_) { in AllocArray()
44 size_t GetUsedMemory() const REQUIRES(!lock_);
46 ArenaPool* GetArenaPool() REQUIRES(!lock_);
49 bool Contains(void* ptr) const REQUIRES(!lock_);
56 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
57 ArenaAllocator allocator_ GUARDED_BY(lock_);
class_table.h 145 REQUIRES(!lock_)
151 REQUIRES(!lock_)
156 REQUIRES(!lock_)
161 REQUIRES(!lock_)
166 REQUIRES(!lock_)
171 REQUIRES(!lock_)
176 REQUIRES(!lock_)
183 REQUIRES(!lock_)
189 REQUIRES(!lock_)
195 REQUIRES(!lock_)
[all …]
signal_catcher.h 63 void SetHaltFlag(bool new_value) REQUIRES(!lock_);
64 bool ShouldHalt() REQUIRES(!lock_);
65 int WaitForSignal(Thread* self, SignalSet& signals) REQUIRES(!lock_);
70 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
71 ConditionVariable cond_ GUARDED_BY(lock_);
72 bool halt_ GUARDED_BY(lock_);
73 pthread_t pthread_ GUARDED_BY(lock_);
74 Thread* thread_ GUARDED_BY(lock_);
linear_alloc.cc 23 LinearAlloc::LinearAlloc(ArenaPool* pool) : lock_("linear alloc"), allocator_(pool) { in LinearAlloc()
27 MutexLock mu(self, lock_); in Realloc()
32 MutexLock mu(self, lock_); in Alloc()
37 MutexLock mu(self, lock_); in AllocAlign16()
42 MutexLock mu(Thread::Current(), lock_); in GetUsedMemory()
47 MutexLock mu(Thread::Current(), lock_); in GetArenaPool()
52 MutexLock mu(Thread::Current(), lock_); in Contains()
barrier.cc 30 lock_("GC barrier lock", kThreadSuspendCountLock), in Barrier()
31 condition_("GC barrier condition", lock_) { in Barrier()
38 MutexLock mu(self, lock_); in Pass()
47 MutexLock mu(self, lock_); in Init()
53 MutexLock mu(self, lock_); in Increment()
73 MutexLock mu(self, lock_); in Increment()
94 MutexLock mu(self, lock_); in GetCount()
class_table.cc 25 ClassTable::ClassTable() : lock_("Class loader classes", kClassLoaderClassesLock) { in ClassTable()
32 WriterMutexLock mu(Thread::Current(), lock_); in FreezeSnapshot()
37 ReaderMutexLock mu(Thread::Current(), lock_); in Contains()
49 ReaderMutexLock mu(Thread::Current(), lock_); in LookupByDescriptor()
67 WriterMutexLock mu(Thread::Current(), lock_); in UpdateClass()
105 ReaderMutexLock mu(Thread::Current(), lock_); in NumZygoteClasses()
114 ReaderMutexLock mu(Thread::Current(), lock_); in NumNonZygoteClasses()
119 ReaderMutexLock mu(Thread::Current(), lock_); in NumReferencedZygoteClasses()
128 ReaderMutexLock mu(Thread::Current(), lock_); in NumReferencedNonZygoteClasses()
134 ReaderMutexLock mu(Thread::Current(), lock_); in Lookup()
[all …]
signal_catcher.cc 80 lock_("SignalCatcher lock"), in SignalCatcher()
81 cond_("SignalCatcher::cond_", lock_), in SignalCatcher()
95 MutexLock mu(self, lock_); in SignalCatcher()
110 MutexLock mu(Thread::Current(), lock_); in SetHaltFlag()
115 MutexLock mu(Thread::Current(), lock_); in ShouldHalt()
245 MutexLock mu(self, signal_catcher->lock_); in Run()
class_table-inl.h 29 ReaderMutexLock mu(Thread::Current(), lock_); in VisitRoots()
47 ReaderMutexLock mu(Thread::Current(), lock_); in VisitRoots()
65 ReaderMutexLock mu(Thread::Current(), lock_); in Visit()
78 ReaderMutexLock mu(Thread::Current(), lock_); in Visit()
137 WriterMutexLock mu(Thread::Current(), lock_); in RemoveStrongRoots()
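
The .cc hits show the other half of the pattern: each method body opens with a scoped RAII lock (MutexLock, or ReaderMutexLock/WriterMutexLock for ClassTable's ReaderWriterMutex) rather than explicit Lock()/Unlock() calls, so the lock is released on every return path. A rough standard-library equivalent of the ClassTable shape, using a hypothetical DescriptorTable and std::shared_mutex in place of ART's ReaderWriterMutex:

#include <cstddef>
#include <shared_mutex>
#include <string>
#include <unordered_map>

// Hypothetical lookup table mirroring ClassTable's locking shape:
// lookups take the lock shared, updates take it exclusively.
class DescriptorTable {
 public:
  bool Contains(const std::string& descriptor) const {
    std::shared_lock<std::shared_mutex> mu(lock_);  // ~ReaderMutexLock
    return table_.count(descriptor) != 0;
  }

  size_t Size() const {
    std::shared_lock<std::shared_mutex> mu(lock_);  // ~ReaderMutexLock
    return table_.size();
  }

  void Update(const std::string& descriptor, void* klass) {
    std::unique_lock<std::shared_mutex> mu(lock_);  // ~WriterMutexLock
    table_[descriptor] = klass;
  }

 private:
  // mutable, so const read paths such as Size() can still take the lock,
  // matching the "mutable Mutex lock_" members in several headers above.
  mutable std::shared_mutex lock_;
  std::unordered_map<std::string, void*> table_;
};
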
/art/runtime/gc/
task_processor.h 57 void AddTask(Thread* self, HeapTask* task) REQUIRES(!lock_);
58 HeapTask* GetTask(Thread* self) REQUIRES(!lock_);
59 void Start(Thread* self) REQUIRES(!lock_);
62 void Stop(Thread* self) REQUIRES(!lock_);
63 void RunAllTasks(Thread* self) REQUIRES(!lock_);
64 bool IsRunning() const REQUIRES(!lock_);
66 REQUIRES(!lock_);
67 Thread* GetRunningThread() const REQUIRES(!lock_);
77 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
78 ConditionVariable cond_ GUARDED_BY(lock_);
[all …]
task_processor.cc 26 : lock_("Task processor lock", kReferenceProcessorLock), in TaskProcessor()
27 cond_("Task processor condition", lock_), in TaskProcessor()
43 MutexLock mu(self, lock_); in AddTask()
50 MutexLock mu(self, lock_); in GetTask()
80 MutexLock mu(self, lock_); in UpdateTargetRunTime()
102 MutexLock mu(Thread::Current(), lock_); in IsRunning()
107 MutexLock mu(Thread::Current(), lock_); in GetRunningThread()
112 MutexLock mu(self, lock_); in Stop()
119 MutexLock mu(self, lock_); in Start()
/art/runtime/jdwp/
object_registry.h 69 REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_, !lock_);
73 REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_, !lock_);
78 REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_, !lock_);
82 REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_, !lock_);
85 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!lock_) { in Get()
93 void Clear() REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!lock_);
96 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!lock_);
99 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!lock_);
102 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!lock_);
105 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!lock_);
[all …]
object_registry.cc 37 : lock_("ObjectRegistry lock", kJdwpObjectRegistryLock), next_id_(1) { in ObjectRegistry()
38 Locks::AddToExpectedMutexesOnWeakRefAccess(&lock_); in ObjectRegistry()
42 Locks::RemoveFromExpectedMutexesOnWeakRefAccess(&lock_); in ~ObjectRegistry()
97 MutexLock mu(soa.Self(), lock_); in InternalAdd()
161 MutexLock mu(self, lock_); in Clear()
181 MutexLock mu(self, lock_); in InternalGet()
197 MutexLock mu(self, lock_); in GetJObject()
206 MutexLock mu(self, lock_); in DisableCollection()
214 MutexLock mu(self, lock_); in EnableCollection()
244 MutexLock mu(self, lock_); in IsCollected()
[all …]
/art/runtime/gc/space/
large_object_space.h 138 size_t AllocationSize(mirror::Object* obj, size_t* usable_size) REQUIRES(!lock_);
141 REQUIRES(!lock_);
142 size_t Free(Thread* self, mirror::Object* ptr) REQUIRES(!lock_);
143 void Walk(DlMallocSpace::WalkCallback, void* arg) OVERRIDE REQUIRES(!lock_);
147 std::pair<uint8_t*, uint8_t*> GetBeginEndAtomic() const OVERRIDE REQUIRES(!lock_);
157 bool IsZygoteLargeObject(Thread* self, mirror::Object* obj) const OVERRIDE REQUIRES(!lock_);
158 void SetAllLargeObjectsAsZygoteObjects(Thread* self) OVERRIDE REQUIRES(!lock_);
161 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
163 GUARDED_BY(lock_);
174 REQUIRES(lock_);
[all …]
dlmalloc_space.h 53 OVERRIDE REQUIRES(!lock_);
57 OVERRIDE REQUIRES(!lock_) { in Alloc()
67 REQUIRES(!lock_)
71 REQUIRES(!lock_)
89 REQUIRES(!lock_);
107 void Walk(WalkCallback callback, void* arg) OVERRIDE REQUIRES(!lock_);
150 REQUIRES(lock_);
dlmalloc_space.cc 135 MutexLock mu(self, lock_); in AllocWithGrowth()
170 MutexLock mu(self, lock_); in Free()
199 MutexLock mu(self, lock_); in FreeList()
220 MutexLock mu(self, lock_); in FreeList()
227 MutexLock mu(Thread::Current(), lock_); in Trim()
238 MutexLock mu(Thread::Current(), lock_); in Walk()
244 MutexLock mu(Thread::Current(), lock_); in GetFootprint()
249 MutexLock mu(Thread::Current(), lock_); in GetFootprintLimit()
254 MutexLock mu(Thread::Current(), lock_); in SetFootprintLimit()
267 MutexLock mu(Thread::Current(), lock_); in GetBytesAllocated()
[all …]
large_object_space.cc 50 MutexLock mu(Thread::Current(), lock_); in ~MemoryToolLargeObjectMapSpace()
127 lock_("large object map space lock", kAllocSpaceLock) {} in LargeObjectMapSpace()
148 MutexLock mu(self, lock_); in Alloc()
172 MutexLock mu(self, lock_); in IsZygoteLargeObject()
179 MutexLock mu(self, lock_); in SetAllLargeObjectsAsZygoteObjects()
186 MutexLock mu(self, lock_); in Free()
205 MutexLock mu(Thread::Current(), lock_); in AllocationSize()
227 MutexLock mu(Thread::Current(), lock_); in Walk()
237 if (lock_.IsExclusiveHeld(self)) { in Contains()
241 MutexLock mu(self, lock_); in Contains()
[all …]
/art/runtime/base/
timing_logger.h 37 void End() REQUIRES(!lock_);
38 void Reset() REQUIRES(!lock_);
39 void Dump(std::ostream& os) const REQUIRES(!lock_);
45 void SetName(const std::string& name) REQUIRES(!lock_);
46 void AddLogger(const TimingLogger& logger) REQUIRES(!lock_);
47 size_t GetIterations() const REQUIRES(!lock_);
62 REQUIRES(lock_);
63 void DumpHistogram(std::ostream &os) const REQUIRES(lock_);
68 std::set<Histogram<uint64_t>*, HistogramComparator> histograms_ GUARDED_BY(lock_);
71 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
[all …]
timing_logger.cc 43 lock_(lock_name_.c_str(), kDefaultMutexLevel, true) { in CumulativeLogger()
52 MutexLock mu(Thread::Current(), lock_); in SetName()
60 MutexLock mu(Thread::Current(), lock_); in End()
65 MutexLock mu(Thread::Current(), lock_); in Reset()
72 MutexLock mu(Thread::Current(), lock_); in AddLogger()
84 MutexLock mu(Thread::Current(), lock_); in GetIterations()
89 MutexLock mu(Thread::Current(), lock_); in Dump()
/art/runtime/gc/allocator/
rosalloc.h 55 size_t ByteSize(RosAlloc* rosalloc) const REQUIRES(rosalloc->lock_) { in ByteSize()
64 REQUIRES(rosalloc->lock_) { in SetByteSize()
73 void* End(RosAlloc* rosalloc) REQUIRES(rosalloc->lock_) { in End()
79 REQUIRES(rosalloc->lock_) { in IsLargerThanPageReleaseThreshold()
83 REQUIRES(rosalloc->lock_) { in IsAtEndOfSpace()
86 bool ShouldReleasePages(RosAlloc* rosalloc) REQUIRES(rosalloc->lock_) { in ShouldReleasePages()
103 void ReleasePages(RosAlloc* rosalloc) REQUIRES(rosalloc->lock_) { in ReleasePages()
722 AllocationTrackingSet<FreePageRun*, kAllocatorTagRosAlloc> free_page_runs_ GUARDED_BY(lock_);
755 GUARDED_BY(lock_);
758 Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
[all …]
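
The RosAlloc hits illustrate one more twist: an annotation can name a capability reached through a parameter, as in REQUIRES(rosalloc->lock_), so a helper object that owns no lock of its own can still document that the caller must hold the owning allocator's lock. A compact sketch of that shape with hypothetical Pool/Run names (same stand-in macros as the earlier sketch):

#include <cstddef>

#define CAPABILITY(x)  __attribute__((capability(x)))
#define GUARDED_BY(x)  __attribute__((guarded_by(x)))
#define REQUIRES(...)  __attribute__((requires_capability(__VA_ARGS__)))
#define ACQUIRE(...)   __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...)   __attribute__((release_capability(__VA_ARGS__)))

class CAPABILITY("mutex") Mutex {
 public:
  void Lock() ACQUIRE() {}
  void Unlock() RELEASE() {}
};

// Hypothetical owner of the lock and of the data it guards.
struct Pool {
  Mutex lock_;
  size_t bytes_in_use_ GUARDED_BY(lock_) = 0;
};

// Hypothetical helper with no lock of its own: the annotation names the
// owning pool's lock through the parameter, like REQUIRES(rosalloc->lock_).
class Run {
 public:
  size_t ByteSize(Pool* pool) const REQUIRES(pool->lock_) {
    return pool->bytes_in_use_;  // allowed: pool->lock_ is held by contract
  }
};
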
/art/compiler/dex/
dex_to_dex_compiler.h 93 BitVector* GetOrAddBitVectorForDex(const DexFile* dex_file) REQUIRES(lock_);
102 mutable Mutex lock_; variable
110 GUARDED_BY(lock_);
112 size_t num_code_items_ GUARDED_BY(lock_) = 0u;
/art/compiler/utils/
swap_space.h 39 void* Alloc(size_t size) REQUIRES(!lock_);
40 void Free(void* ptr, size_t size) REQUIRES(!lock_);
92 SpaceChunk NewFileChunk(size_t min_size) REQUIRES(lock_);
94 void RemoveChunk(FreeBySizeSet::const_iterator free_by_size_pos) REQUIRES(lock_);
95 void InsertChunk(const SpaceChunk& chunk) REQUIRES(lock_);
103 FreeByStartSet free_by_start_ GUARDED_BY(lock_);
105 FreeBySizeSet free_by_size_ GUARDED_BY(lock_);
107 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
dedupe_set-inl.h 60 lock_(lock_name_.c_str()),
71 const StoreKey* Add(Thread* self, size_t hash, const InKey& in_key) REQUIRES(!lock_) {
72 MutexLock lock(self, lock_);
84 void UpdateStats(Thread* self, Stats* global_stats) REQUIRES(!lock_) {
89 MutexLock lock(self, lock_);
179 Mutex lock_;
180 HashSet<HashedKey<StoreKey>, ShardEmptyFn, ShardHashFn, ShardPred> keys_ GUARDED_BY(lock_);
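
Finally, the DedupeSet hits hint at a sharded design: the hash passed to Add() selects a shard, and each shard carries its own lock_ and GUARDED_BY(lock_) key set, so unrelated insertions contend on different mutexes. A generic sketch of that per-shard-lock technique (hypothetical ShardedSet, not ART's actual DedupeSet):

#include <array>
#include <cstddef>
#include <mutex>
#include <unordered_set>

// Hypothetical sharded set: the caller-supplied hash picks a shard, so
// insertions that hash to different shards never contend on the same mutex.
template <typename T, size_t kNumShards = 4>
class ShardedSet {
 public:
  // Returns a stable pointer to the stored (possibly pre-existing) element.
  const T* Add(size_t hash, const T& value) {
    Shard& shard = shards_[hash % kNumShards];
    std::lock_guard<std::mutex> lock(shard.lock_);
    return &*shard.keys_.insert(value).first;
  }

 private:
  struct Shard {
    std::mutex lock_;
    std::unordered_set<T> keys_;  // guarded by lock_ above
  };
  std::array<Shard, kNumShards> shards_;
};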
