Lines Matching refs:waitp

757   return x->waitp->how == y->waitp->how &&  in MuSameCondition()
758 Condition::GuaranteedEqual(x->waitp->cond, y->waitp->cond); in MuSameCondition()
852 static void CondVarEnqueue(SynchWaitParams *waitp);
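
The matches above and below appear to come from Abseil's absl/synchronization/mutex.cc; waitp always names a SynchWaitParams*, the per-wait parameter block that a waiting thread builds on its own stack (note the "local" annotations at 1571, 1851 and 2528) and that the queueing code reaches through PerThreadSynch::waitp. MuSameCondition() (757-758) is the simplest user: two queued waiters are interchangeable when they want the lock the same way and wait for provably equal conditions. A minimal sketch of that test, using hypothetical, stripped-down stand-ins for the internal types (the real SynchWaitParams also carries a timeout, the CondVar's Mutex, the thread record, a cv word and a contention timestamp, as the later matches show):

    // Hypothetical stand-ins for the internal types in mutex.cc.
    struct Condition {
      // GuaranteedEqual() is conservative: it may report two equal conditions
      // as unequal, never the reverse.  Pointer equality is the simplest
      // conservative approximation.
      static bool GuaranteedEqual(const Condition *a, const Condition *b) {
        return a == b;
      }
    };

    enum MuHow { kShared, kExclusive };  // stand-in; the real MuHow is a
                                         // pointer to a mode descriptor
                                         // (see the masks at 1914-1918)

    struct SynchWaitParams {
      MuHow how;               // how the lock is wanted: shared or exclusive
      const Condition *cond;   // condition to wait for, or nullptr
    };

    struct PerThreadSynch {
      SynchWaitParams *waitp;  // set while this thread is queued on a Mutex/CondVar
    };

    // Sketch of MuSameCondition() (757-758): two queued waiters are
    // interchangeable when they want the lock the same way and wait for
    // provably equal conditions.
    static bool MuSameConditionSketch(PerThreadSynch *x, PerThreadSynch *y) {
      return x->waitp->how == y->waitp->how &&
             Condition::GuaranteedEqual(x->waitp->cond, y->waitp->cond);
    }
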
874 SynchWaitParams *waitp, intptr_t mu, int flags) { in Enqueue() argument
877 if (waitp->cv_word != nullptr) { in Enqueue()
878 CondVarEnqueue(waitp); in Enqueue()
882 PerThreadSynch *s = waitp->thread; in Enqueue()
884 s->waitp == nullptr || // normal case in Enqueue()
885 s->waitp == waitp || // Fer()---transfer from condition variable in Enqueue()
888 s->waitp = waitp; in Enqueue()
942 } else if (waitp->how == kExclusive && in Enqueue()
943 Condition::GuaranteedEqual(waitp->cond, nullptr)) { in Enqueue()
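
Enqueue() (874-943) places a waiter on the Mutex's queue and is where waitp gets attached to the thread record. The matches show its two entry paths: a non-null waitp->cv_word means the caller is really parking on a CondVar, so the request is diverted to CondVarEnqueue() (877-878); otherwise waitp is stored into the thread's PerThreadSynch (882-888), with the check at 884-885 allowing s->waitp to already equal waitp when Fer() transfers a thread from a condition variable. Lines 942-943 single out the unconditional exclusive waiter, which the real code can queue more cheaply. A sketch of just that dispatch, with simplified stand-in types:

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    // Simplified stand-ins; the real types also carry queue links, priorities
    // and per-thread state.
    struct Condition {
      static bool GuaranteedEqual(const Condition *a, const Condition *b) {
        return a == b;
      }
    };
    enum MuHow { kShared, kExclusive };

    struct PerThreadSynch;
    struct SynchWaitParams {
      MuHow how;
      const Condition *cond;
      PerThreadSynch *thread;
      std::atomic<intptr_t> *cv_word;   // non-null: park on a CondVar instead
    };
    struct PerThreadSynch {
      SynchWaitParams *waitp;
    };

    static void CondVarEnqueue(SynchWaitParams *) {}  // stub; see the sketch
                                                      // after the 2509 matches

    // Sketch of the entry logic of Enqueue() (877-888) and the unconditional
    // exclusive-waiter check (942-943).
    static void EnqueueSketch(SynchWaitParams *waitp) {
      if (waitp->cv_word != nullptr) {   // the caller is really a CondVar waiter
        CondVarEnqueue(waitp);
        return;
      }
      PerThreadSynch *s = waitp->thread;
      // s->waitp may already equal waitp if Fer() transferred this thread from
      // a condition variable; anything else would be a reused record (884-885).
      assert(s->waitp == nullptr || s->waitp == waitp);
      s->waitp = waitp;                  // 888

      if (waitp->how == kExclusive &&
          Condition::GuaranteedEqual(waitp->cond, nullptr)) {  // 942-943
        // Unconditional exclusive waiter: the real code uses this case to pick
        // a cheaper queue position, since no condition must be re-evaluated.
      }
    }
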
1035 if (w->waitp->how == kExclusive) { // wake at most 1 writer in DequeueAllWakeable()
1113 if (!DecrementSynchSem(this, s, s->waitp->timeout)) { in Block()
1130 s->waitp->timeout = KernelTimeout::Never(); // timeout is satisfied in Block()
1131 s->waitp->cond = nullptr; // condition no longer relevant for wakeups in Block()
1134 ABSL_RAW_CHECK(s->waitp != nullptr || s->suppress_fatal_errors, in Block()
1136 s->waitp = nullptr; in Block()
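
Block() (1113-1136) parks the thread on its per-thread semaphore until an unlocker dequeues it. The matches pin down the waitp protocol: the semaphore wait uses waitp->timeout (1113); if the timeout fires while the thread is still queued, it marks the timeout as satisfied and drops the condition so nobody re-evaluates it on its behalf (1130-1131); once actually dequeued, s->waitp is cleared (1134-1136) because the SynchWaitParams lives on the waiter's stack. A sketch of that loop, with a stub standing in for DecrementSynchSem():

    #include <atomic>
    #include <cassert>

    // Simplified stand-ins for the types used by Block().
    struct Condition;
    struct KernelTimeout {
      static KernelTimeout Never() { return {}; }
    };
    struct SynchWaitParams {
      const Condition *cond;
      KernelTimeout timeout;
    };
    struct PerThreadSynch {
      SynchWaitParams *waitp = nullptr;
      enum State { kAvailable, kQueued };
      std::atomic<State> state{kAvailable};
    };

    // Stub for the per-thread semaphore wait; false would mean "timed out".
    static bool DecrementSynchSemSketch(PerThreadSynch *, KernelTimeout) {
      return true;
    }

    // Sketch of Block() (1113-1136): sleep until some unlocker dequeues us; a
    // timeout alone does not dequeue us, it only changes what we wait for.
    static void BlockSketch(PerThreadSynch *s) {
      while (s->state.load(std::memory_order_acquire) == PerThreadSynch::kQueued) {
        if (!DecrementSynchSemSketch(s, s->waitp->timeout)) {
          // The timeout fired, but only a lock holder may remove us from the
          // queue.  Record that the timeout is satisfied and stop asking others
          // to evaluate our condition (1130-1131).
          s->waitp->timeout = KernelTimeout::Never();
          s->waitp->cond = nullptr;
        }
      }
      assert(s->waitp != nullptr);   // mirrors the check at 1134
      s->waitp = nullptr;            // detach the caller's stack-allocated params (1136)
    }
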
1571 SynchWaitParams waitp( in AwaitCommon() local
1578 this->UnlockSlow(&waitp); in AwaitCommon()
1579 this->Block(waitp.thread); in AwaitCommon()
1582 this->LockSlowLoop(&waitp, flags); in AwaitCommon()
1583 bool res = waitp.cond != nullptr || // => cond known true from LockSlowLoop in AwaitCommon()
1851 SynchWaitParams waitp( in LockSlowWithDeadline() local
1858 this->UnlockSlow(&waitp); in LockSlowWithDeadline()
1859 this->Block(waitp.thread); in LockSlowWithDeadline()
1862 this->LockSlowLoop(&waitp, flags); in LockSlowWithDeadline()
1863 return waitp.cond != nullptr || // => cond known true from LockSlowLoop in LockSlowWithDeadline()
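
AwaitCommon() (1571-1583) and LockSlowWithDeadline() (1851-1863) produce near-identical matches because they share one skeleton: build a SynchWaitParams locally, pass it to UnlockSlow() so the release path can queue the caller, Block() until dequeued, then reacquire via LockSlowLoop(); on return, waitp.cond still being non-null means LockSlowLoop() already established the condition (the "cond known true" comments at 1583/1863), otherwise the caller re-evaluates it under the reacquired lock. A condensed control-flow sketch with stub stand-ins (the real functions also handle flags, deadlines and sanitizer annotations):

    #include <functional>

    // Stub stand-ins so the control flow compiles; these are placeholders for
    // internals of mutex.cc, not the real types.
    struct KernelTimeout {};
    struct Condition {
      std::function<bool()> eval;
      bool Eval() const { return eval(); }
    };
    struct PerThreadSynch {};
    struct SynchWaitParams {
      const Condition *cond;
      KernelTimeout timeout;
      PerThreadSynch *thread;
    };
    struct MutexSketch {
      void UnlockSlow(SynchWaitParams *) {}         // stub: release + queue caller
      void Block(PerThreadSynch *) {}               // stub: sleep until dequeued
      void LockSlowLoop(SynchWaitParams *, int) {}  // stub: reacquire the lock
    };

    static PerThreadSynch *CurrentThread() {        // hypothetical accessor
      static thread_local PerThreadSynch self;
      return &self;
    }

    // Condensed control flow of AwaitCommon() (1571-1583);
    // LockSlowWithDeadline() (1851-1863) has the same shape.
    static bool AwaitCommonSketch(MutexSketch *mu, const Condition &cond,
                                  KernelTimeout t, int flags) {
      SynchWaitParams waitp{&cond, t, CurrentThread()};   // the "local" at 1571
      mu->UnlockSlow(&waitp);           // release the lock and queue ourselves
      mu->Block(waitp.thread);          // sleep until dequeued (or timed out)
      mu->LockSlowLoop(&waitp, flags);  // reacquire; may prove the condition
      // A still-set waitp.cond means LockSlowLoop() saw the condition hold
      // ("cond known true", 1583/1863); otherwise re-evaluate under the lock.
      return waitp.cond != nullptr || cond.Eval();
    }
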
1901 void Mutex::LockSlowLoop(SynchWaitParams *waitp, int flags) { in LockSlowLoop() argument
1906 waitp->how == kExclusive? SYNCH_EV_LOCK: SYNCH_EV_READERLOCK); in LockSlowLoop()
1909 waitp->thread->waitp == nullptr || waitp->thread->suppress_fatal_errors, in LockSlowLoop()
1914 if ((v & waitp->how->slow_need_zero) == 0) { in LockSlowLoop()
1916 v, (waitp->how->fast_or | in LockSlowLoop()
1918 waitp->how->fast_add, in LockSlowLoop()
1920 if (waitp->cond == nullptr || in LockSlowLoop()
1921 EvalConditionAnnotated(waitp->cond, this, true, false, in LockSlowLoop()
1922 waitp->how == kShared)) { in LockSlowLoop()
1925 this->UnlockSlow(waitp); // got lock but condition false in LockSlowLoop()
1926 this->Block(waitp->thread); in LockSlowLoop()
1934 PerThreadSynch *new_h = Enqueue(nullptr, waitp, v, flags); in LockSlowLoop()
1938 if (waitp->how == kExclusive && (v & kMuReader) != 0) { in LockSlowLoop()
1947 waitp->thread->waitp = nullptr; in LockSlowLoop()
1949 } else if ((v & waitp->how->slow_inc_need_zero & in LockSlowLoop()
1964 if (waitp->cond == nullptr || in LockSlowLoop()
1965 EvalConditionAnnotated(waitp->cond, this, true, false, in LockSlowLoop()
1966 waitp->how == kShared)) { in LockSlowLoop()
1969 this->UnlockSlow(waitp); // got lock but condition false in LockSlowLoop()
1970 this->Block(waitp->thread); in LockSlowLoop()
1980 PerThreadSynch *new_h = Enqueue(h, waitp, v, flags); in LockSlowLoop()
1983 if (waitp->how == kExclusive && (v & kMuReader) != 0) { in LockSlowLoop()
1995 this->Block(waitp->thread); // wait until removed from list or timeout in LockSlowLoop()
2001 waitp->thread->waitp == nullptr || waitp->thread->suppress_fatal_errors, in LockSlowLoop()
2006 waitp->thread->waitp == nullptr || waitp->thread->suppress_fatal_errors, in LockSlowLoop()
2010 waitp->how == kExclusive? SYNCH_EV_LOCK_RETURNING : in LockSlowLoop()
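
LockSlowLoop() (1901-2010) is the contended-acquire loop, and waitp drives every branch in it: waitp->how supplies the lock-word masks (slow_need_zero, fast_or, fast_add at 1914-1918, slow_inc_need_zero at 1949), waitp->cond is evaluated once the lock is nominally held (1920-1922, 1964-1966), and a false condition sends the thread straight back to sleep via UnlockSlow()+Block() (1925-1926, 1969-1970); otherwise the thread enqueues itself (1934, 1980) and blocks until removed from the list or timed out (1995). A heavily condensed sketch of that shape, with a hypothetical, much-simplified lock word:

    #include <atomic>
    #include <cstdint>

    // Hypothetical simplified lock word: the real mu_ also packs reader counts,
    // wait/spin/designated-waker bits and a pointer to the waiter queue.
    struct MuHowS {
      intptr_t slow_need_zero;   // bits that must be zero to acquire in this mode
      intptr_t fast_or;          // bits OR'd in on acquisition
      intptr_t fast_add;         // amount added on acquisition (reader count)
    };
    using MuHow = const MuHowS *;

    struct Condition {
      bool (*eval)(void *);
      void *arg;
      bool Eval() const { return eval(arg); }
    };

    struct PerThreadSynch;
    struct SynchWaitParams {
      MuHow how;
      const Condition *cond;
      PerThreadSynch *thread;
    };

    struct MutexSketch {
      std::atomic<intptr_t> mu_{0};

      void UnlockSlow(SynchWaitParams *) {}   // stub: release + queue the caller
      void Block(PerThreadSynch *) {}         // stub: sleep until dequeued
      void EnqueueAndRelease(SynchWaitParams *, intptr_t) {}  // hypothetical
                          // helper standing in for Enqueue() + queue release

      // Condensed shape of LockSlowLoop() (1901-2010): either the lock word
      // lets us acquire directly, or we queue ourselves and block until an
      // unlocker dequeues us; in both cases a false condition sends us back
      // to sleep via UnlockSlow()+Block() (1925-1926, 1969-1970).
      void LockSlowLoopSketch(SynchWaitParams *waitp, int flags) {
        (void)flags;
        for (;;) {
          intptr_t v = mu_.load(std::memory_order_relaxed);
          if ((v & waitp->how->slow_need_zero) == 0) {                    // 1914
            intptr_t desired = (v | waitp->how->fast_or) + waitp->how->fast_add;
            if (mu_.compare_exchange_strong(v, desired,
                                            std::memory_order_acquire)) { // ~1916
              if (waitp->cond == nullptr || waitp->cond->Eval()) {        // 1920
                return;            // locked, and the condition (if any) holds
              }
              UnlockSlow(waitp);   // got lock but condition false (1925)
              Block(waitp->thread);                                       // 1926
            }
          } else {
            // Conflicting holders: add ourselves to the waiter queue
            // (1934/1980) and sleep until removed or timed out (1995).
            EnqueueAndRelease(waitp, v);
            Block(waitp->thread);
          }
        }
      }
    };
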
2020 void Mutex::UnlockSlow(SynchWaitParams *waitp) { in UnlockSlow() argument
2041 ABSL_RAW_CHECK(waitp == nullptr || waitp->thread->waitp == nullptr || in UnlockSlow()
2042 waitp->thread->suppress_fatal_errors, in UnlockSlow()
2050 waitp == nullptr) { in UnlockSlow()
2057 } else if ((v & (kMuReader | kMuWait)) == kMuReader && waitp == nullptr) { in UnlockSlow()
2072 ABSL_RAW_CHECK(waitp != nullptr, in UnlockSlow()
2084 do_enqueue = (waitp->cv_word == nullptr); in UnlockSlow()
2085 new_h = Enqueue(nullptr, waitp, new_readers, kMuIsCond); in UnlockSlow()
2115 if (waitp != nullptr) { // but waitp!=nullptr => must queue ourselves in UnlockSlow()
2116 PerThreadSynch *new_h = Enqueue(h, waitp, v, kMuIsCond); in UnlockSlow()
2141 if (h->next->waitp->how == kExclusive && in UnlockSlow()
2142 Condition::GuaranteedEqual(h->next->waitp->cond, nullptr)) { in UnlockSlow()
2155 } else if (w != nullptr && (w->waitp->how == kExclusive || h == old_h)) { in UnlockSlow()
2172 if (waitp != nullptr) { // we must queue ourselves and sleep in UnlockSlow()
2173 PerThreadSynch *new_h = Enqueue(h, waitp, v, kMuIsCond); in UnlockSlow()
2223 if (w_walk->waitp->cond == in UnlockSlow()
2225 (w_walk->waitp->cond != known_false && in UnlockSlow()
2228 EvalConditionIgnored(this, w_walk->waitp->cond))) { in UnlockSlow()
2233 if (w_walk->waitp->how == kExclusive) { in UnlockSlow()
2237 } else if (w_walk->waitp->how == kShared) { // wake if a reader in UnlockSlow()
2243 known_false = w_walk->waitp->cond; // remember last false condition in UnlockSlow()
2277 if (waitp != nullptr) { // we must queue ourselves and sleep in UnlockSlow()
2278 h = Enqueue(h, waitp, v, kMuIsCond); in UnlockSlow()
2301 int64_t enqueue_timestamp = wake_list->waitp->contention_start_cycles; in UnlockSlow()
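
UnlockSlow() (2020-2301) accounts for the most matches because it is both the contended release path and, when called with a non-null waitp (2041, 2072, 2115, 2172, 2277), the "unlock so that I can go wait" path used by Await and CondVar::Wait, in which case the unlocker must enqueue itself before releasing. Its central use of waitp fields is the scan at 2223-2243: walk the waiters, evaluate each condition unless it is the same object as the last condition seen to fail, and collect a wake list of roughly one writer or a run of readers (cf. "wake at most 1 writer" at 1035 and the kExclusive/kShared split at 2233-2237); 2301 then charges contention time from the woken waiter's contention_start_cycles. A much-simplified sketch of just the selection scan (the real code walks an intrusive circular list in place and also tracks designated wakers):

    #include <vector>

    // Stand-in types; the real scan walks an intrusive circular queue in place.
    struct Condition {
      bool (*eval)(void *);
      void *arg;
      bool Eval() const { return eval(arg); }
    };
    enum MuHow { kShared, kExclusive };
    struct SynchWaitParams {
      MuHow how;
      const Condition *cond;
    };
    struct PerThreadSynch {
      SynchWaitParams *waitp;
    };

    // Condensed sketch of the wake-selection scan in UnlockSlow() (2223-2243):
    // a waiter is wakeable if it has no condition, or its condition is not the
    // one we just saw fail and it now evaluates true; stop after one writer,
    // keep collecting readers, and remember the last false condition so it is
    // not re-evaluated for identical waiters.
    static std::vector<PerThreadSynch *> PickWaitersToWake(
        const std::vector<PerThreadSynch *> &waiters) {
      std::vector<PerThreadSynch *> wake_list;
      const Condition *known_false = nullptr;
      for (PerThreadSynch *w : waiters) {
        const Condition *c = w->waitp->cond;
        if (c == nullptr || (c != known_false && c->Eval())) {   // 2223-2228
          wake_list.push_back(w);
          if (w->waitp->how == kExclusive) {                     // 2233
            break;                 // wake at most one writer (cf. 1035)
          }
          // kShared: keep going; readers can be woken together (2237).
        } else {
          known_false = c;         // remember the last false condition (2243)
        }
      }
      return wake_list;
    }
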
2337 ABSL_RAW_CHECK(w->waitp->cond == nullptr, in Fer()
2339 ABSL_RAW_CHECK(!w->waitp->timeout.has_timeout(), in Fer()
2341 ABSL_RAW_CHECK(w->waitp->cv_word == nullptr, in Fer()
2352 kMuWriter | (w->waitp->how == kShared ? 0 : kMuReader); in Fer()
2361 PerThreadSynch *new_h = Enqueue(nullptr, w->waitp, v, kMuIsCond); in Fer()
2372 PerThreadSynch *new_h = Enqueue(h, w->waitp, v, kMuIsCond); in Fer()
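
Fer() (2337-2372) moves a waiter from a CondVar's queue onto the Mutex's queue without waking it. The checks at 2337-2341 state when that is legal: the waiter must have no wait condition, no pending timeout, and a cv_word already consumed by CondVarEnqueue(); 2352 computes which lock-word bits conflict with the waiter's requested mode, and depending on that the waiter is either handled immediately or enqueued on the mutex (2361, 2372). A trivial sketch of those preconditions with stand-in types:

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    // Stand-in types for the precondition checks at the top of Fer() (2337-2341).
    struct Condition;
    struct KernelTimeout {
      bool has_timeout() const { return false; }   // stub
    };
    struct SynchWaitParams {
      const Condition *cond;
      KernelTimeout timeout;
      std::atomic<intptr_t> *cv_word;
    };
    struct PerThreadSynch {
      SynchWaitParams *waitp;
    };

    // A CondVar waiter may be transferred to the Mutex only if nothing about
    // its wait still depends on the CondVar: no condition to re-evaluate
    // (2337), no pending deadline (2339), and a cv word already consumed by
    // CondVarEnqueue() (2341, cf. 2487).
    static void CheckFerPreconditions(PerThreadSynch *w) {
      assert(w->waitp->cond == nullptr);
      assert(!w->waitp->timeout.has_timeout());
      assert(w->waitp->cv_word == nullptr);
    }
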
2479 static void CondVarEnqueue(SynchWaitParams *waitp) { in CondVarEnqueue() argument
2486 std::atomic<intptr_t> *cv_word = waitp->cv_word; in CondVarEnqueue()
2487 waitp->cv_word = nullptr; in CondVarEnqueue()
2498 ABSL_RAW_CHECK(waitp->thread->waitp == nullptr, "waiting when shouldn't be"); in CondVarEnqueue()
2499 waitp->thread->waitp = waitp; // prepare ourselves for waiting in CondVarEnqueue()
2502 waitp->thread->next = waitp->thread; in CondVarEnqueue()
2504 waitp->thread->next = h->next; in CondVarEnqueue()
2505 h->next = waitp->thread; in CondVarEnqueue()
2507 waitp->thread->state.store(PerThreadSynch::kQueued, in CondVarEnqueue()
2509 cv_word->store((v & kCvEvent) | reinterpret_cast<intptr_t>(waitp->thread), in CondVarEnqueue()
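
CondVarEnqueue() (2479-2509) runs from Enqueue() while the mutex is still held (see the diversion at 877-878) and parks the caller on the condition variable instead: it consumes waitp->cv_word, clearing it so a later pass through Enqueue() takes the mutex path (2486-2487), attaches waitp to the thread record (2498-2499), splices the thread into the CondVar's circular waiter list (2502-2505), marks it kQueued (2507) and publishes the new list pointer back into the cv word (2509). A sketch of the splice, ignoring the flag and spin bits the real code packs into the low bits of the cv word:

    #include <atomic>
    #include <cstdint>

    struct SynchWaitParams;

    struct PerThreadSynch {
      SynchWaitParams *waitp = nullptr;
      PerThreadSynch *next = nullptr;   // circular waiter-list link
      enum State { kAvailable, kQueued };
      std::atomic<State> state{kAvailable};
    };

    struct SynchWaitParams {
      PerThreadSynch *thread;
      std::atomic<intptr_t> *cv_word;   // the CondVar's packed word
    };

    // Sketch of CondVarEnqueue() (2479-2509).  The stored pointer designates
    // the most recently queued waiter; its ->next is the oldest, so the list
    // is a singly linked ring.
    static void CondVarEnqueueSketch(SynchWaitParams *waitp) {
      std::atomic<intptr_t> *cv_word = waitp->cv_word;
      waitp->cv_word = nullptr;   // so Enqueue() won't divert here again (2487)

      intptr_t v = cv_word->load(std::memory_order_relaxed);
      PerThreadSynch *h = reinterpret_cast<PerThreadSynch *>(v);  // newest waiter

      waitp->thread->waitp = waitp;            // prepare ourselves for waiting (2499)
      if (h == nullptr) {
        waitp->thread->next = waitp->thread;   // only waiter: self-looped ring (2502)
      } else {
        waitp->thread->next = h->next;         // step in ahead of the oldest (2504)
        h->next = waitp->thread;               // previous newest links to us (2505)
      }
      waitp->thread->state.store(PerThreadSynch::kQueued,
                                 std::memory_order_relaxed);        // 2507
      cv_word->store(reinterpret_cast<intptr_t>(waitp->thread),     // 2509
                     std::memory_order_release);
    }
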
2528 SynchWaitParams waitp(mutex_how, nullptr, t, mutex, in WaitCommon() local
2533 mutex->UnlockSlow(&waitp); in WaitCommon()
2536 while (waitp.thread->state.load(std::memory_order_acquire) == in WaitCommon()
2538 if (!Mutex::DecrementSynchSem(mutex, waitp.thread, t)) { in WaitCommon()
2539 this->Remove(waitp.thread); in WaitCommon()
2544 ABSL_RAW_CHECK(waitp.thread->waitp != nullptr, "not waiting when should be"); in WaitCommon()
2545 waitp.thread->waitp = nullptr; // cleanup in WaitCommon()
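
CondVar::WaitCommon() (2528-2545) is what creates such a waiter in the first place: its local SynchWaitParams carries &cv_ as the cv_word (2528), so the mutex's UnlockSlow() release routes the caller into CondVarEnqueue(); the thread then loops on its semaphore until it leaves the kQueued state (2536-2538), removing itself from the CondVar if the wait times out (2539), and finally clears thread->waitp (2544-2545) before reacquiring the mutex. A condensed control-flow sketch with stub stand-ins:

    #include <atomic>
    #include <cstdint>

    struct KernelTimeout {};
    struct SynchWaitParams;

    struct PerThreadSynch {
      SynchWaitParams *waitp = nullptr;
      enum State { kAvailable, kQueued };
      std::atomic<State> state{kAvailable};
    };

    struct SynchWaitParams {
      PerThreadSynch *thread;
      std::atomic<intptr_t> *cv_word;   // non-null routes Enqueue() to the CondVar
      KernelTimeout timeout;
    };

    struct MutexSketch {
      void UnlockSlow(SynchWaitParams *) {}         // stub: release + CondVarEnqueue
      void LockSlowLoop(SynchWaitParams *, int) {}  // stub: reacquire the mutex
      // Stub for the semaphore wait: true = woken, false = timed out.
      static bool DecrementSynchSem(MutexSketch *, PerThreadSynch *,
                                    KernelTimeout) {
        return true;
      }
    };

    struct CondVarSketch {
      std::atomic<intptr_t> cv_{0};
      void Remove(PerThreadSynch *) {}   // stub: dequeue a timed-out waiter

      // Condensed control flow of WaitCommon() (2528-2545); returns true here
      // if the wait timed out.
      bool WaitCommonSketch(MutexSketch *mutex, PerThreadSynch *self,
                            KernelTimeout t) {
        bool rc = false;
        SynchWaitParams waitp{self, &cv_, t};        // the "local" at 2528
        mutex->UnlockSlow(&waitp);  // releases mu and lands us in CondVarEnqueue()
        while (waitp.thread->state.load(std::memory_order_acquire) ==
               PerThreadSynch::kQueued) {                            // 2536
          if (!MutexSketch::DecrementSynchSem(mutex, waitp.thread, t)) {  // 2538
            this->Remove(waitp.thread);   // timed out: take ourselves off the CV
            rc = true;
          }
        }
        waitp.thread->waitp = nullptr;   // cleanup (2544-2545)
        mutex->LockSlowLoop(&waitp, /*flags=*/0);   // reacquire before returning
        return rc;
      }
    };
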
2581 if (w->waitp->timeout.has_timeout() || w->waitp->cvmu == nullptr) { in Wakeup()
2584 Mutex *mu = w->waitp->cvmu; in Wakeup()
2589 w->waitp->cvmu->Fer(w); in Wakeup()
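
CondVar::Wakeup() (2581-2589) then chooses the delivery mechanism: a waiter with a timeout, or one with no associated Mutex (waitp->cvmu == nullptr), is woken through its semaphore and reacquires the lock itself, while the common case hands the waiter to the mutex with Fer() (2589) so it is only woken once the lock can actually be granted. A sketch of that dispatch; the semaphore call is a stub counterpart of the DecrementSynchSem() waits shown at 1113 and 2538:

    // Stub stand-ins for the dispatch in CondVar::Wakeup() (2581-2589).
    struct KernelTimeout {
      bool has_timeout() const { return false; }   // stub
    };
    struct MutexSketch;
    struct SynchWaitParams {
      KernelTimeout timeout;
      MutexSketch *cvmu;   // the Mutex the CondVar waiter will need to reacquire
    };
    struct PerThreadSynch {
      SynchWaitParams *waitp;
    };

    struct MutexSketch {
      void Fer(PerThreadSynch *) {}                       // stub: requeue on the Mutex
      static void IncrementSynchSem(MutexSketch *, PerThreadSynch *) {}  // stub:
                                       // post the waiter's per-thread semaphore
    };

    static void WakeupSketch(PerThreadSynch *w) {
      if (w->waitp->timeout.has_timeout() || w->waitp->cvmu == nullptr) {   // 2581
        // Timed (or mutex-less) waiter: wake it now; it reacquires the lock
        // itself back in WaitCommon().
        MutexSketch *mu = w->waitp->cvmu;                                   // 2584
        MutexSketch::IncrementSynchSem(mu, w);
      } else {
        w->waitp->cvmu->Fer(w);   // hand the waiter straight to the Mutex (2589)
      }
    }
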