
Searched refs:self (Results 1 – 25 of 186) sorted by relevance


/art/runtime/
thread_list.cc
57 Thread* self = Thread::Current(); in ~ThreadList() local
58 MutexLock mu(self, *Locks::thread_list_lock_); in ~ThreadList()
59 contains = Contains(self); in ~ThreadList()
124 Thread* self = Thread::Current(); in DumpUnattachedThreads() local
132 MutexLock mu(self, *Locks::thread_list_lock_); in DumpUnattachedThreads()
151 void ThreadList::AssertThreadsAreSuspended(Thread* self, Thread* ignore1, Thread* ignore2) { in AssertThreadsAreSuspended() argument
152 MutexLock mu(self, *Locks::thread_list_lock_); in AssertThreadsAreSuspended()
153 MutexLock mu2(self, *Locks::thread_suspend_count_lock_); in AssertThreadsAreSuspended()
182 static void ThreadSuspendSleep(Thread* self, useconds_t* delay_us, useconds_t* total_delay_us) { in ThreadSuspendSleep() argument
199 Thread* self = Thread::Current(); in RunCheckpoint() local
[all …]
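
The thread_list.cc hits above all follow one idiom: the current Thread* is looked up once with Thread::Current() and then passed explicitly into every scoped MutexLock, so the lock can record its owner without another thread-local lookup. A minimal standalone sketch of that shape (plain C++ on std::mutex; the Thread, Mutex, and MutexLock classes here are simplified stand-ins, not the ART types):

```cpp
#include <cassert>
#include <mutex>

class Thread {
 public:
  static Thread* Current() {
    thread_local Thread current;  // stand-in for ART's thread-local Thread slot
    return &current;
  }
};

class Mutex {
 public:
  explicit Mutex(const char* name) : name_(name), owner_(nullptr) {}
  const char* GetName() const { return name_; }
  void ExclusiveLock(Thread* self) {
    mu_.lock();
    owner_ = self;  // record ownership against the caller-supplied self
  }
  void ExclusiveUnlock(Thread* self) {
    assert(owner_ == self && "unlocking a mutex owned by another thread");
    owner_ = nullptr;
    mu_.unlock();
  }
 private:
  const char* name_;
  std::mutex mu_;
  Thread* owner_;
};

// RAII helper mirroring the MutexLock usage in the hits above.
class MutexLock {
 public:
  MutexLock(Thread* self, Mutex& mu) : self_(self), mu_(mu) { mu_.ExclusiveLock(self_); }
  ~MutexLock() { mu_.ExclusiveUnlock(self_); }
 private:
  Thread* self_;
  Mutex& mu_;
};

int main() {
  Mutex thread_list_lock("thread list lock");
  Thread* self = Thread::Current();        // looked up once per function
  {
    MutexLock mu(self, thread_list_lock);  // scoped, ownership tied to self
  }
  return 0;
}
```

Passing the cached self into every lock operation is what makes ownership assertions like the ones in the other hits cheap to perform.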
monitor_test.cc
59 static void FillHeap(Thread* self, ClassLinker* class_linker, in FillHeap() argument
65 hsp->reset(new StackHandleScope<kMaxHandles>(self)); in FillHeap()
67 Handle<mirror::Class> c((*hsp)->NewHandle(class_linker->FindSystemClass(self, in FillHeap()
70 Handle<mirror::Class> ca((*hsp)->NewHandle(class_linker->FindSystemClass(self, in FillHeap()
77 mirror::ObjectArray<mirror::Object>::Alloc(self, ca.Get(), length / 4))); in FillHeap()
78 if (self->IsExceptionPending() || h.Get() == nullptr) { in FillHeap()
79 self->ClearException(); in FillHeap()
94 while (!self->IsExceptionPending()) { in FillHeap()
95 Handle<mirror::Object> h = (*hsp)->NewHandle<mirror::Object>(c->AllocObject(self)); in FillHeap()
96 if (!self->IsExceptionPending() && h.Get() != nullptr) { in FillHeap()
[all …]
barrier.cc
30 void Barrier::Pass(Thread* self) { in Pass() argument
31 MutexLock mu(self, lock_); in Pass()
32 SetCountLocked(self, count_ - 1); in Pass()
35 void Barrier::Wait(Thread* self) { in Wait() argument
36 Increment(self, -1); in Wait()
39 void Barrier::Init(Thread* self, int count) { in Init() argument
40 MutexLock mu(self, lock_); in Init()
41 SetCountLocked(self, count); in Init()
44 void Barrier::Increment(Thread* self, int delta) { in Increment() argument
45 MutexLock mu(self, lock_); in Increment()
[all …]
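
The barrier.cc hits show the API shape: Pass(), Wait(), Init(), and Increment() all take the caller's Thread* self and guard count_ with a scoped lock. A condition-variable sketch of that shape follows; the blocking wait inside Increment() is an assumption (the excerpt stops before it), and Thread is only a placeholder type here:

```cpp
#include <condition_variable>
#include <mutex>

struct Thread {};  // placeholder for the ART Thread* threaded through every call

class Barrier {
 public:
  explicit Barrier(int count) : count_(count) {}

  // Decrement the count without blocking; the last passer wakes any waiters.
  void Pass(Thread* self) {
    std::lock_guard<std::mutex> mu(lock_);
    SetCountLocked(self, count_ - 1);
  }

  // Decrement and block until the count reaches zero (blocking behaviour assumed).
  void Wait(Thread* self) { Increment(self, -1); }

  void Init(Thread* self, int count) {
    std::lock_guard<std::mutex> mu(lock_);
    SetCountLocked(self, count);
  }

  void Increment(Thread* self, int delta) {
    std::unique_lock<std::mutex> mu(lock_);
    SetCountLocked(self, count_ + delta);
    condition_.wait(mu, [this] { return count_ == 0; });
  }

 private:
  void SetCountLocked(Thread* /*self*/, int count) {
    count_ = count;
    if (count == 0) {
      condition_.notify_all();
    }
  }

  std::mutex lock_;
  std::condition_variable condition_;
  int count_;
};
```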
thread_pool.cc
49 Thread* self = Thread::Current(); in Run() local
51 thread_pool_->creation_barier_.Wait(self); in Run()
52 while ((task = thread_pool_->GetTask(self)) != NULL) { in Run()
53 task->Run(self); in Run()
68 void ThreadPool::AddTask(Thread* self, Task* task) { in AddTask() argument
69 MutexLock mu(self, task_queue_lock_); in AddTask()
73 task_queue_condition_.Signal(self); in AddTask()
90 Thread* self = Thread::Current(); in ThreadPool() local
96 creation_barier_.Wait(self); in ThreadPool()
107 Thread* self = Thread::Current(); in ~ThreadPool() local
[all …]
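
thread_pool.cc shows the two halves of a work queue: workers loop on GetTask()/Run(), and AddTask() pushes under task_queue_lock_ and signals task_queue_condition_. A self-contained std::thread sketch of that producer/consumer shape (not the ART ThreadPool; the shutdown handling and the std::function task type are assumptions made to keep it compilable):

```cpp
#include <condition_variable>
#include <deque>
#include <functional>
#include <mutex>
#include <thread>
#include <vector>

class ThreadPool {
 public:
  explicit ThreadPool(size_t num_threads) {
    for (size_t i = 0; i < num_threads; ++i) {
      workers_.emplace_back([this] {
        // An empty task (returned at shutdown) converts to false and ends the loop.
        while (std::function<void()> task = GetTask()) {
          task();
        }
      });
    }
  }

  void AddTask(std::function<void()> task) {
    std::lock_guard<std::mutex> mu(task_queue_lock_);
    tasks_.push_back(std::move(task));
    task_queue_condition_.notify_one();  // mirrors the Signal(self) in the hit above
  }

  ~ThreadPool() {
    {
      std::lock_guard<std::mutex> mu(task_queue_lock_);
      shutting_down_ = true;
    }
    task_queue_condition_.notify_all();
    for (std::thread& t : workers_) {
      t.join();
    }
  }

 private:
  std::function<void()> GetTask() {
    std::unique_lock<std::mutex> mu(task_queue_lock_);
    task_queue_condition_.wait(mu, [this] { return shutting_down_ || !tasks_.empty(); });
    if (tasks_.empty()) {
      return {};  // shutting down with nothing left to run
    }
    std::function<void()> task = std::move(tasks_.front());
    tasks_.pop_front();
    return task;
  }

  std::mutex task_queue_lock_;
  std::condition_variable task_queue_condition_;
  std::deque<std::function<void()>> tasks_;
  std::vector<std::thread> workers_;
  bool shutting_down_ = false;
};
```

A caller would construct `ThreadPool pool(4);` and then `pool.AddTask([] { /* work */ });`, mirroring the AddTask/StartWorkers calls in the test hits further down.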
monitor.cc
83 Monitor::Monitor(Thread* self, Thread* owner, mirror::Object* obj, int32_t hash_code) in Monitor() argument
94 monitor_id_(MonitorPool::ComputeMonitorId(this, self)) { in Monitor()
101 CHECK(owner == nullptr || owner == self || owner->IsSuspended()); in Monitor()
105 Monitor::Monitor(Thread* self, Thread* owner, mirror::Object* obj, int32_t hash_code, in Monitor() argument
123 CHECK(owner == nullptr || owner == self || owner->IsSuspended()); in Monitor()
137 bool Monitor::Install(Thread* self) { in Install() argument
138 MutexLock mu(self, monitor_lock_); // Uncontended mutex acquisition as monitor isn't yet public. in Install()
139 CHECK(owner_ == nullptr || owner_ == self || owner_->IsSuspended()); in Install()
233 void Monitor::Lock(Thread* self) { in Lock() argument
234 MutexLock mu(self, monitor_lock_); in Lock()
[all …]
thread_pool_test.cc
31 void Run(Thread* self) { in Run() argument
33 LOG(INFO) << "Running: " << *self; in Run()
62 Thread* self = Thread::Current(); in TEST_F() local
67 thread_pool.AddTask(self, new CountTask(&count)); in TEST_F()
69 thread_pool.StartWorkers(self); in TEST_F()
71 thread_pool.Wait(self, true, false); in TEST_F()
77 Thread* self = Thread::Current(); in TEST_F() local
82 thread_pool.AddTask(self, new CountTask(&count)); in TEST_F()
88 thread_pool.StartWorkers(self); in TEST_F()
90 thread_pool.StopWorkers(self); in TEST_F()
[all …]
barrier_test.cc
35 void Run(Thread* self) { in Run() argument
36 LOG(INFO) << "Before barrier" << *self; in Run()
38 barrier_->Wait(self); in Run()
40 LOG(INFO) << "After barrier" << *self; in Run()
62 Thread* self = Thread::Current(); in TEST_F() local
69 thread_pool.AddTask(self, new CheckWaitTask(&barrier, &count1, &count2)); in TEST_F()
71 thread_pool.StartWorkers(self); in TEST_F()
73 timeout_barrier.Increment(self, 1, 100); // sleep 100 msecs in TEST_F()
78 barrier.Wait(self); in TEST_F()
80 thread_pool.Wait(self, true, false); in TEST_F()
[all …]
monitor_pool_test.cc
39 static void VerifyMonitor(Monitor* mon, Thread* self) { in VerifyMonitor() argument
43 EXPECT_EQ(MonitorPool::ComputeMonitorId(mon, self), mon->GetMonitorId()); in VerifyMonitor()
58 Thread* self = Thread::Current(); in TEST_F() local
59 ScopedObjectAccess soa(self); in TEST_F()
74 Monitor* mon = MonitorPool::CreateMonitor(self, self, nullptr, static_cast<int32_t>(i)); in TEST_F()
77 VerifyMonitor(mon, self); in TEST_F()
85 VerifyMonitor(mon, self); in TEST_F()
87 MonitorPool::ReleaseMonitor(self, mon); in TEST_F()
97 Monitor* mon = MonitorPool::CreateMonitor(self, self, nullptr, in TEST_F()
101 VerifyMonitor(mon, self); in TEST_F()
[all …]
signal_catcher.cc
74 Thread* self = Thread::Current(); in SignalCatcher() local
75 MutexLock mu(self, lock_); in SignalCatcher()
77 cond_.Wait(self); in SignalCatcher()
133 Thread* self = Thread::Current(); in HandleSigQuit() local
134 Locks::mutator_lock_->AssertExclusiveHeld(self); in HandleSigQuit()
135 const char* old_cause = self->StartAssertNoThreadSuspension("Handling SIGQUIT"); in HandleSigQuit()
136 ThreadState old_state = self->SetStateUnsafe(kRunnable); in HandleSigQuit()
158 CHECK_EQ(self->SetStateUnsafe(old_state), kRunnable); in HandleSigQuit()
159 self->EndAssertNoThreadSuspension(old_cause); in HandleSigQuit()
163 if (self->ReadFlag(kCheckpointRequest)) { in HandleSigQuit()
[all …]
/art/runtime/entrypoints/quick/
quick_jni_entrypoints.cc
31 extern uint32_t JniMethodStart(Thread* self) { in JniMethodStart() argument
32 JNIEnvExt* env = self->GetJniEnv(); in JniMethodStart()
36 mirror::ArtMethod* native_method = self->GetManagedStack()->GetTopQuickFrame()->AsMirrorPtr(); in JniMethodStart()
39 self->TransitionFromRunnableToSuspended(kNative); in JniMethodStart()
44 extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self) { in JniMethodStartSynchronized() argument
45 self->DecodeJObject(to_lock)->MonitorEnter(self); in JniMethodStartSynchronized()
46 return JniMethodStart(self); in JniMethodStartSynchronized()
50 static void GoToRunnable(Thread* self) NO_THREAD_SAFETY_ANALYSIS { in GoToRunnable() argument
51 mirror::ArtMethod* native_method = self->GetManagedStack()->GetTopQuickFrame()->AsMirrorPtr(); in GoToRunnable()
54 self->TransitionFromSuspendedToRunnable(); in GoToRunnable()
[all …]
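
JniMethodStart() and JniMethodStartSynchronized() above grab a local-reference cookie from the thread's JNIEnv and move the thread out of the runnable state before native code runs; the matching end routine transitions back. A reduced sketch of that pairing (JNIEnvExt and Thread here are simplified stand-ins, and the cookie/local-reference bookkeeping is an assumption about intent, not a copy of the real implementation):

```cpp
#include <cstdint>
#include <vector>

enum ThreadState { kRunnable, kNative };

struct JNIEnvExt {               // simplified stand-in, not ART's JNIEnvExt
  std::vector<void*> locals;     // local reference table
  uint32_t local_ref_cookie = 0; // segment start for the current native frame
};

struct Thread {
  ThreadState state = kRunnable;
  JNIEnvExt env;
  JNIEnvExt* GetJniEnv() { return &env; }
  void TransitionFromRunnableToSuspended(ThreadState new_state) { state = new_state; }
  void TransitionFromSuspendedToRunnable() { state = kRunnable; }
};

// Returns the saved cookie; the matching end routine pops locals back to it.
uint32_t JniMethodStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  uint32_t saved_local_ref_cookie = env->local_ref_cookie;
  env->local_ref_cookie = static_cast<uint32_t>(env->locals.size());
  self->TransitionFromRunnableToSuspended(kNative);
  return saved_local_ref_cookie;
}

void JniMethodEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  self->TransitionFromSuspendedToRunnable();
  JNIEnvExt* env = self->GetJniEnv();
  env->locals.resize(env->local_ref_cookie);  // drop this frame's local refs
  env->local_ref_cookie = saved_local_ref_cookie;
}
```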
quick_throw_entrypoints.cc
35 extern "C" void artDeliverExceptionFromCode(mirror::Throwable* exception, Thread* self, in artDeliverExceptionFromCode() argument
45 FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); in artDeliverExceptionFromCode()
46 ThrowLocation throw_location = self->GetCurrentLocationForThrow(); in artDeliverExceptionFromCode()
48 self->ThrowNewException(throw_location, "Ljava/lang/NullPointerException;", in artDeliverExceptionFromCode()
51 self->SetException(throw_location, exception); in artDeliverExceptionFromCode()
53 self->QuickDeliverException(); in artDeliverExceptionFromCode()
57 extern "C" void artThrowNullPointerExceptionFromCode(Thread* self, in artThrowNullPointerExceptionFromCode() argument
60 FinishCalleeSaveFrameSetup(self, sp, Runtime::kSaveAll); in artThrowNullPointerExceptionFromCode()
61 self->NoteSignalBeingHandled(); in artThrowNullPointerExceptionFromCode()
62 ThrowLocation throw_location = self->GetCurrentLocationForThrow(); in artThrowNullPointerExceptionFromCode()
[all …]
quick_lock_entrypoints.cc
23 extern "C" int artLockObjectFromCode(mirror::Object* obj, Thread* self, in artLockObjectFromCode() argument
27 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); in artLockObjectFromCode()
29 ThrowLocation throw_location(self->GetCurrentLocationForThrow()); in artLockObjectFromCode()
35 obj = obj->MonitorEnter(self); // May block in artLockObjectFromCode()
36 CHECK(self->HoldsLock(obj)); in artLockObjectFromCode()
37 CHECK(!self->IsExceptionPending()); in artLockObjectFromCode()
39 obj->MonitorEnter(self); // May block in artLockObjectFromCode()
46 extern "C" int artUnlockObjectFromCode(mirror::Object* obj, Thread* self, in artUnlockObjectFromCode() argument
50 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); in artUnlockObjectFromCode()
52 ThrowLocation throw_location(self->GetCurrentLocationForThrow()); in artUnlockObjectFromCode()
[all …]
quick_field_entrypoints.cc
30 Thread* self, StackReference<mirror::ArtMethod>* sp) in artGet32StaticFromCode() argument
37 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); in artGet32StaticFromCode()
38 field = FindFieldFromCode<StaticPrimitiveRead, true>(field_idx, referrer, self, sizeof(int32_t)); in artGet32StaticFromCode()
47 Thread* self, StackReference<mirror::ArtMethod>* sp) in artGet64StaticFromCode() argument
54 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); in artGet64StaticFromCode()
55 field = FindFieldFromCode<StaticPrimitiveRead, true>(field_idx, referrer, self, sizeof(int64_t)); in artGet64StaticFromCode()
64 Thread* self, in artGetObjStaticFromCode() argument
72 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); in artGetObjStaticFromCode()
73 field = FindFieldFromCode<StaticObjectRead, true>(field_idx, referrer, self, in artGetObjStaticFromCode()
82 mirror::ArtMethod* referrer, Thread* self, in artGet32InstanceFromCode() argument
[all …]
quick_dexcache_entrypoints.cc
30 Thread* self, in artInitializeStaticStorageFromCode() argument
36 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); in artInitializeStaticStorageFromCode()
37 return ResolveVerifyAndClinit(type_idx, referrer, self, true, false); in artInitializeStaticStorageFromCode()
42 Thread* self, in artInitializeTypeFromCode() argument
46 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); in artInitializeTypeFromCode()
47 return ResolveVerifyAndClinit(type_idx, referrer, self, false, false); in artInitializeTypeFromCode()
52 Thread* self, in artInitializeTypeAndVerifyAccessFromCode() argument
56 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); in artInitializeTypeAndVerifyAccessFromCode()
57 return ResolveVerifyAndClinit(type_idx, referrer, self, false, true); in artInitializeTypeAndVerifyAccessFromCode()
62 Thread* self, in artResolveStringFromCode() argument
[all …]
quick_alloc_entrypoints.cc
30 uint32_t type_idx, mirror::ArtMethod* method, Thread* self, \
33 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
34 return AllocObjectFromCode<false, instrumented_bool>(type_idx, method, self, allocator_type); \
37 mirror::Class* klass, mirror::ArtMethod* method, Thread* self, \
40 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
41 return AllocObjectFromCodeResolved<instrumented_bool>(klass, method, self, allocator_type); \
44 mirror::Class* klass, mirror::ArtMethod* method, Thread* self, \
47 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
48 return AllocObjectFromCodeInitialized<instrumented_bool>(klass, method, self, allocator_type); \
51 uint32_t type_idx, mirror::ArtMethod* method, Thread* self, \
[all …]
/art/runtime/entrypoints/portable/
portable_jni_entrypoints.cc
25 extern "C" uint32_t art_portable_jni_method_start(Thread* self) in art_portable_jni_method_start() argument
27 JNIEnvExt* env = self->GetJniEnv(); in art_portable_jni_method_start()
30 self->TransitionFromRunnableToSuspended(kNative); in art_portable_jni_method_start()
34 extern "C" uint32_t art_portable_jni_method_start_synchronized(jobject to_lock, Thread* self) in art_portable_jni_method_start_synchronized() argument
36 self->DecodeJObject(to_lock)->MonitorEnter(self); in art_portable_jni_method_start_synchronized()
37 return art_portable_jni_method_start(self); in art_portable_jni_method_start_synchronized()
40 static void PopLocalReferences(uint32_t saved_local_ref_cookie, Thread* self) in PopLocalReferences() argument
42 JNIEnvExt* env = self->GetJniEnv(); in PopLocalReferences()
47 extern "C" void art_portable_jni_method_end(uint32_t saved_local_ref_cookie, Thread* self) in art_portable_jni_method_end() argument
49 self->TransitionFromSuspendedToRunnable(); in art_portable_jni_method_end()
[all …]
/art/runtime/base/
mutex.h
145 void RegisterAsLocked(Thread* self);
146 void RegisterAsUnlocked(Thread* self);
147 void CheckSafeToWait(Thread* self);
208 void ExclusiveLock(Thread* self) EXCLUSIVE_LOCK_FUNCTION();
209 void Lock(Thread* self) EXCLUSIVE_LOCK_FUNCTION() { ExclusiveLock(self); } in Lock() argument
212 bool ExclusiveTryLock(Thread* self) EXCLUSIVE_TRYLOCK_FUNCTION(true);
213 bool TryLock(Thread* self) EXCLUSIVE_TRYLOCK_FUNCTION(true) { return ExclusiveTryLock(self); } in TryLock() argument
216 void ExclusiveUnlock(Thread* self) UNLOCK_FUNCTION();
217 void Unlock(Thread* self) UNLOCK_FUNCTION() { ExclusiveUnlock(self); } in Unlock() argument
220 bool IsExclusiveHeld(const Thread* self) const;
[all …]
mutex-inl.h
76 static inline uint64_t SafeGetTid(const Thread* self) { in SafeGetTid() argument
77 if (self != NULL) { in SafeGetTid()
78 return static_cast<uint64_t>(self->GetTid()); in SafeGetTid()
108 inline void BaseMutex::RegisterAsLocked(Thread* self) { in RegisterAsLocked() argument
109 if (UNLIKELY(self == NULL)) { in RegisterAsLocked()
117 BaseMutex* held_mutex = self->GetHeldMutex(static_cast<LockLevel>(i)); in RegisterAsLocked()
134 self->SetHeldMutex(level_, this); in RegisterAsLocked()
138 inline void BaseMutex::RegisterAsUnlocked(Thread* self) { in RegisterAsUnlocked() argument
139 if (UNLIKELY(self == NULL)) { in RegisterAsUnlocked()
145 CHECK(self->GetHeldMutex(level_) == this) << "Unlocking on unacquired mutex: " << name_; in RegisterAsUnlocked()
[all …]
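
RegisterAsLocked() and RegisterAsUnlocked() above consult the thread's table of held mutexes, indexed by LockLevel. A sketch of that lock-hierarchy bookkeeping (the level names are illustrative, and the "no lock at the same or a lower level may already be held" rule is inferred from the checks visible here, so treat it as an assumption):

```cpp
#include <array>
#include <cassert>

// Illustrative levels only; ART defines a much longer LockLevel enum.
enum LockLevel {
  kLoggingLock = 0,
  kThreadSuspendCountLock,
  kThreadListLock,
  kMutatorLock,
  kLockLevelCount
};

class BaseMutex;

// Stand-in for Thread::GetHeldMutex()/SetHeldMutex(): one slot per level.
thread_local std::array<BaseMutex*, kLockLevelCount> held_mutexes{};

class BaseMutex {
 public:
  explicit BaseMutex(LockLevel level) : level_(level) {}

  // Assumed rule: acquiring a lock at this level is only legal if no lock at
  // the same or a lower level is already held, i.e. locks are taken in
  // decreasing level order.
  void RegisterAsLocked() {
    for (int i = level_; i >= 0; --i) {
      assert(held_mutexes[i] == nullptr &&
             "lock level violation: already holding a lock at this level or lower");
    }
    held_mutexes[level_] = this;
  }

  void RegisterAsUnlocked() {
    assert(held_mutexes[level_] == this && "unlocking an unacquired mutex");
    held_mutexes[level_] = nullptr;
  }

 private:
  const LockLevel level_;
};
```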
mutex.cc
150 void BaseMutex::CheckSafeToWait(Thread* self) { in CheckSafeToWait() argument
151 if (self == NULL) { in CheckSafeToWait()
156 CHECK(self->GetHeldMutex(level_) == this || level_ == kMonitorLock) in CheckSafeToWait()
161 BaseMutex* held_mutex = self->GetHeldMutex(static_cast<LockLevel>(i)); in CheckSafeToWait()
313 void Mutex::ExclusiveLock(Thread* self) { in ExclusiveLock() argument
314 DCHECK(self == NULL || self == Thread::Current()); in ExclusiveLock()
316 AssertNotHeld(self); in ExclusiveLock()
318 if (!recursive_ || !IsExclusiveHeld(self)) { in ExclusiveLock()
328 ScopedContentionRecorder scr(this, SafeGetTid(self), GetExclusiveOwnerTid()); in ExclusiveLock()
345 exclusive_owner_ = SafeGetTid(self); in ExclusiveLock()
[all …]
/art/runtime/arch/
stub_test.cc
61 Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) { in GetTlsPtr() argument
62 return &self->tlsPtr_; in GetTlsPtr()
66 size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) { in Invoke3() argument
67 return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr); in Invoke3()
71 size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self, in Invoke3WithReferrer() argument
75 self->PushManagedStackFragment(&fragment); in Invoke3WithReferrer()
126 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self), in Invoke3WithReferrer()
254 : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self), in Invoke3WithReferrer()
285 self->PopManagedStackFragment(fragment); in Invoke3WithReferrer()
295 Thread* self, mirror::ArtMethod* referrer, size_t hidden) { in Invoke3WithReferrerAndHidden() argument
[all …]
/art/runtime/gc/
reference_processor.cc
46 void ReferenceProcessor::DisableSlowPath(Thread* self) { in DisableSlowPath() argument
48 condition_.Broadcast(self); in DisableSlowPath()
51 mirror::Object* ReferenceProcessor::GetReferent(Thread* self, mirror::Reference* reference) { in GetReferent() argument
58 MutexLock mu(self, *Locks::reference_processor_lock_); in GetReferent()
85 condition_.WaitHoldingLocks(self); in GetReferent()
100 void ReferenceProcessor::StartPreservingReferences(Thread* self) { in StartPreservingReferences() argument
101 MutexLock mu(self, *Locks::reference_processor_lock_); in StartPreservingReferences()
105 void ReferenceProcessor::StopPreservingReferences(Thread* self) { in StopPreservingReferences() argument
106 MutexLock mu(self, *Locks::reference_processor_lock_); in StopPreservingReferences()
109 condition_.Broadcast(self); in StopPreservingReferences()
[all …]
heap-inl.h
39 inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self, mirror::Class* klass, in AllocObjectWithAllocator() argument
46 CHECK_EQ(self->GetState(), kRunnable); in AllocObjectWithAllocator()
47 self->AssertThreadSuspensionIsAllowable(); in AllocObjectWithAllocator()
53 obj = AllocLargeObject<kInstrumented, PreFenceVisitor>(self, &klass, byte_count, in AllocObjectWithAllocator()
59 self->ClearException(); in AllocObjectWithAllocator()
73 if (allocator == kAllocatorTypeTLAB && byte_count <= self->TlabSize()) { in AllocObjectWithAllocator()
74 obj = self->AllocTlab(byte_count); in AllocObjectWithAllocator()
88 obj = TryToAllocate<kInstrumented, false>(self, allocator, byte_count, &bytes_allocated, in AllocObjectWithAllocator()
92 obj = AllocateInternalWithGc(self, allocator, byte_count, &bytes_allocated, &usable_size, in AllocObjectWithAllocator()
98 if (!self->IsExceptionPending() && is_current_allocator && !after_is_current_allocator) { in AllocObjectWithAllocator()
[all …]
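
The heap-inl.h hits show the allocation fast path consulting self->TlabSize() and bump-allocating with self->AllocTlab() when the request fits in the thread-local allocation buffer. A small sketch of that idea (the Thread layout and the AllocObject wrapper are hypothetical simplifications, not the ART heap):

```cpp
#include <cstddef>
#include <cstdint>

// Reduced stand-in for the per-thread state behind TlabSize()/AllocTlab().
class Thread {
 public:
  size_t TlabSize() const { return static_cast<size_t>(tlab_end_ - tlab_pos_); }
  void* AllocTlab(size_t bytes) {  // caller has already checked bytes <= TlabSize()
    uint8_t* result = tlab_pos_;
    tlab_pos_ += bytes;
    return result;
  }
  void SetTlab(uint8_t* start, uint8_t* end) { tlab_pos_ = start; tlab_end_ = end; }
 private:
  uint8_t* tlab_pos_ = nullptr;
  uint8_t* tlab_end_ = nullptr;
};

// Hypothetical wrapper showing the fast path: a fit in the TLAB is a pointer
// bump with no locking; anything else would fall through to a slow path.
void* AllocObject(Thread* self, size_t byte_count) {
  if (byte_count <= self->TlabSize()) {
    return self->AllocTlab(byte_count);
  }
  return nullptr;  // slow path elided: refill the TLAB or allocate in the shared heap
}
```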
/art/runtime/interpreter/
interpreter.cc
27 static void UnstartedRuntimeJni(Thread* self, ArtMethod* method, in UnstartedRuntimeJni() argument
36 mirror::Class* array_class = runtime->GetClassLinker()->FindArrayClass(self, &element_class); in UnstartedRuntimeJni()
39 result->SetL(mirror::Array::Alloc<true>(self, array_class, length, in UnstartedRuntimeJni()
44 NthCallerVisitor visitor(self, 3); in UnstartedRuntimeJni()
52 StackHandleScope<1> hs(self); in UnstartedRuntimeJni()
63 result->SetL(receiver->Clone(self)); in UnstartedRuntimeJni()
65 receiver->NotifyAll(self); in UnstartedRuntimeJni()
75 StackHandleScope<2> hs(self); in UnstartedRuntimeJni()
78 result->SetL(Array::CreateMultiArray(self, h_class, h_dimensions)); in UnstartedRuntimeJni()
80 ScopedObjectAccessUnchecked soa(self); in UnstartedRuntimeJni()
[all …]
/art/tools/
cpplint.py
460 def __init__(self): argument
461 dict.__init__(self)
463 self._section = self._INITIAL_SECTION
465 self._last_header = ''
467 def CanonicalizeAlphabeticalOrder(self, header_path): argument
482 def IsInAlphabeticalOrder(self, header_path): argument
491 canonical_header = self.CanonicalizeAlphabeticalOrder(header_path)
492 if self._last_header > canonical_header:
494 self._last_header = canonical_header
497 def CheckNextIncludeOrder(self, header_type): argument
[all …]
/art/runtime/jdwp/
object_registry.cc
52 Thread* const self = Thread::Current(); in InternalAdd() local
53 StackHandleScope<1> hs(self); in InternalAdd()
59 ScopedObjectAccessUnchecked soa(self); in InternalAdd()
91 bool ObjectRegistry::ContainsLocked(Thread* self, mirror::Object* o, int32_t identity_hash_code, in ContainsLocked() argument
97 if (o == self->DecodeJObject(entry->jni_reference)) { in ContainsLocked()
108 Thread* self = Thread::Current(); in Clear() local
109 MutexLock mu(self, lock_); in Clear()
112 JNIEnv* env = self->GetJniEnv(); in Clear()
128 Thread* self = Thread::Current(); in InternalGet() local
129 MutexLock mu(self, lock_); in InternalGet()
[all …]
