Lines matching refs: u64

81                      u64 v1, u64 v2);
83 const u64 kShadowRodata = (u64)-1; // .rodata shadow marker
93 FastState(u64 tid, u64 epoch) { in FastState()
101 explicit FastState(u64 x) in FastState()
105 u64 raw() const { in raw()
109 u64 tid() const { in tid()
110 u64 res = (x_ & ~kIgnoreBit) >> kTidShift; in tid()
114 u64 TidWithIgnore() const { in TidWithIgnore()
115 u64 res = x_ >> kTidShift; in TidWithIgnore()
119 u64 epoch() const { in epoch()
120 u64 res = x_ & ((1ull << kClkBits) - 1); in epoch()
125 u64 old_epoch = epoch(); in IncrementEpoch()
138 x_ = (x_ & ~(kHistoryMask << kHistoryShift)) | (u64(hs) << kHistoryShift); in SetHistorySize()
151 u64 GetTracePos() const { in GetTracePos()
154 const u64 mask = (1ull << (kTracePartSizeBits + hs + 1)) - 1; in GetTracePos()
161 static const u64 kIgnoreBit = 1ull << 63;
162 static const u64 kFreedBit = 1ull << 63;
163 static const u64 kHistoryShift = kClkBits;
164 static const u64 kHistoryMask = 7;
165 u64 x_;
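
The FastState fragments above show a whole thread descriptor packed into one u64: the epoch in the low kClkBits bits, a 3-bit trace history size just above it, the tid below the top bit, and bit 63 reserved as the ignore flag. Below is a minimal, self-contained sketch of that layout; kClkBits = 42 and kTidBits = 13 are assumed illustrative widths (the real constants are defined elsewhere in the TSan headers), and FastStateSketch, GetHistorySize, and main are names invented for this example.

#include <cassert>
#include <cstdint>
#include <cstdio>

typedef uint64_t u64;

// Assumed illustrative widths; the real values come from the TSan headers.
const int kClkBits = 42;                      // epoch width
const int kTidBits = 13;                      // thread-id width
const int kTidShift = 64 - kTidBits - 1;      // tid sits just below bit 63
const u64 kIgnoreBit = 1ull << 63;            // "ignore accesses" flag
const u64 kHistoryShift = kClkBits;           // 3-bit history size above the epoch
const u64 kHistoryMask = 7;

struct FastStateSketch {
  u64 x_;

  FastStateSketch(u64 tid, u64 epoch) : x_((tid << kTidShift) | epoch) {}

  u64 tid() const { return (x_ & ~kIgnoreBit) >> kTidShift; }
  u64 epoch() const { return x_ & ((1ull << kClkBits) - 1); }

  // The epoch occupies the low bits, so bumping it is a plain increment.
  void IncrementEpoch() { x_ += 1; }

  void SetHistorySize(int hs) {
    x_ = (x_ & ~(kHistoryMask << kHistoryShift)) | (u64(hs) << kHistoryShift);
  }
  int GetHistorySize() const { return (int)((x_ >> kHistoryShift) & kHistoryMask); }
};

int main() {
  FastStateSketch s(/*tid=*/5, /*epoch=*/100);
  s.SetHistorySize(3);
  s.IncrementEpoch();
  assert(s.tid() == 5 && s.epoch() == 101 && s.GetHistorySize() == 3);
  printf("tid=%llu epoch=%llu hs=%d\n", (unsigned long long)s.tid(),
         (unsigned long long)s.epoch(), s.GetHistorySize());
  return 0;
}

Keeping everything in a single word appears to be the point of the design: the hot path can read or update a thread's state with one 64-bit load or store.
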
178 explicit Shadow(u64 x) in Shadow()
187 void SetAddr0AndSizeLog(u64 addr0, unsigned kAccessSizeLog) { in SetAddr0AndSizeLog()
219 u64 shifted_xor = (s1.x_ ^ s2.x_) >> kTidShift; in TidsAreEqual()
226 u64 masked_xor = ((s1.x_ ^ s2.x_) >> kClkBits) & 31; in Addr0AndSizeAreEqual()
233 u64 diff = s1.addr0() - s2.addr0(); in TwoRangesIntersect()
248 u64 ALWAYS_INLINE addr0() const { return (x_ >> kClkBits) & 7; } in addr0()
249 u64 ALWAYS_INLINE size() const { return 1ull << size_log(); } in size()
277 bool v = x_ & ((u64(kIsWrite ^ 1) << kReadShift) in IsBothReadsOrAtomic()
278 | (u64(kIsAtomic) << kAtomicShift)); in IsBothReadsOrAtomic()
285 <= u64((kIsWrite ^ 1) | (kIsAtomic << 1)); in IsRWNotWeaker()
293 >= u64((kIsWrite ^ 1) | (kIsAtomic << 1)); in IsRWWeakerOrEqual()
300 static const u64 kReadShift = 5 + kClkBits;
301 static const u64 kReadBit = 1ull << kReadShift;
302 static const u64 kAtomicShift = 6 + kClkBits;
303 static const u64 kAtomicBit = 1ull << kAtomicShift;
305 u64 size_log() const { return (x_ >> (3 + kClkBits)) & 3; } in size_log()
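
The Shadow fragments above extend the same word layout to individual memory accesses: the low 3 address bits (addr0) and the log2 of the access size sit directly above the epoch, with read and atomic flags above those, so a predicate such as Addr0AndSizeAreEqual reduces to an xor and a 5-bit mask. Here is a sketch under the same assumed kClkBits = 42; ShadowSketch and main are hypothetical names for the example.

#include <cassert>
#include <cstdint>

typedef uint64_t u64;

const int kClkBits = 42;   // assumed, as in the previous sketch

struct ShadowSketch {
  u64 x_;
  explicit ShadowSketch(u64 x = 0) : x_(x) {}

  // Pack the low 3 address bits and the size log right above the epoch bits.
  void SetAddr0AndSizeLog(u64 addr0, unsigned kAccessSizeLog) {
    assert(addr0 <= 7 && kAccessSizeLog <= 3);
    x_ |= ((kAccessSizeLog << 3) | addr0) << kClkBits;
  }

  u64 addr0() const { return (x_ >> kClkBits) & 7; }
  u64 size_log() const { return (x_ >> (3 + kClkBits)) & 3; }
  u64 size() const { return 1ull << size_log(); }

  // Both accesses touch the same offset with the same size iff the 5 bits
  // holding addr0 and size_log coincide.
  static bool Addr0AndSizeAreEqual(ShadowSketch s1, ShadowSketch s2) {
    return (((s1.x_ ^ s2.x_) >> kClkBits) & 31) == 0;
  }
};

int main() {
  ShadowSketch a, b;
  a.SetAddr0AndSizeLog(/*addr0=*/4, /*kAccessSizeLog=*/2);  // 4-byte access at offset 4
  b.SetAddr0AndSizeLog(/*addr0=*/4, /*kAccessSizeLog=*/2);
  assert(a.size() == 4 && ShadowSketch::Addr0AndSizeAreEqual(a, b));
  return 0;
}

TwoRangesIntersect, by contrast, works on the decoded addr0() and size() values, since overlap needs an actual range comparison rather than bit equality.
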
342 u64 fast_synch_epoch;
358 u64 *racy_shadow_addr;
359 u64 racy_state[2];
369 u64 stat[StatCnt];
407 explicit ThreadState(Context *ctx, int tid, int unique_id, u64 epoch,
437 u64 epoch0;
438 u64 epoch1;
502 u64 stat[StatCnt];
503 u64 int_alloc_cnt[MBlockTypeCount];
504 u64 int_alloc_siz[MBlockTypeCount];
535 u64 AddMutex(u64 id);
548 void AddDeadMutex(u64 id);
554 void RestoreStack(int tid, const u64 epoch, VarSizeStackTrace *stk,
570 void StatAggregate(u64 *dst, u64 *src);
571 void StatOutput(u64 *stat);
574 void ALWAYS_INLINE StatInc(ThreadState *thr, StatType typ, u64 n = 1) {
579 void ALWAYS_INLINE StatSet(ThreadState *thr, StatType typ, u64 n) { in StatSet()
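
The stat arrays and the StatInc/StatSet/StatAggregate/StatOutput declarations above suggest a simple scheme: each thread owns a u64 counter array indexed by StatType, and the arrays are summed when statistics are reported. A sketch of that scheme follows; the StatType values, kCollectStats flag, and ThreadStateSketch are assumptions made for illustration (the real enum lives elsewhere in the runtime).

#include <cstdint>
#include <cstdio>

typedef uint64_t u64;

enum StatType { StatMop, StatMopRead, StatMopWrite, StatCnt };  // assumed subset

const bool kCollectStats = true;   // assumed; counting may be compiled out

struct ThreadStateSketch {
  u64 stat[StatCnt] = {};          // per-thread counters
};

// Fast-path increment: no atomics needed because the array is thread-local.
inline void StatInc(ThreadStateSketch *thr, StatType typ, u64 n = 1) {
  if (kCollectStats)
    thr->stat[typ] += n;
}

inline void StatSet(ThreadStateSketch *thr, StatType typ, u64 n) {
  if (kCollectStats)
    thr->stat[typ] = n;
}

// Fold one thread's counters into a destination array (e.g. a global one).
void StatAggregate(u64 *dst, const u64 *src) {
  for (int i = 0; i < StatCnt; i++)
    dst[i] += src[i];
}

int main() {
  ThreadStateSketch t1, t2;
  u64 global[StatCnt] = {};
  StatInc(&t1, StatMopRead);
  StatInc(&t2, StatMopWrite, 3);
  StatAggregate(global, t1.stat);
  StatAggregate(global, t2.stat);
  printf("reads=%llu writes=%llu\n", (unsigned long long)global[StatMopRead],
         (unsigned long long)global[StatMopWrite]);
  return 0;
}

Because each array belongs to one thread, the increments stay off the contended path; aggregation only happens when stats are collected.
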
630 u64 *shadow_mem, Shadow cur);
742 EventType typ, u64 addr) { in TraceAddEvent()
749 u64 pos = fs.GetTracePos(); in TraceAddEvent()
759 Event ev = (u64)addr | ((u64)typ << 61); in TraceAddEvent()
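
The TraceAddEvent fragment on source line 759 packs an event into a single u64, with the event type in the top 3 bits and the address or PC in the low 61 bits, while GetTracePos (source line 151) picks the slot in the per-thread trace ring. Below is a small sketch of just the event encoding; the EventType values and the Pack/Unpack helper names are assumptions for the example, not the real trace API.

#include <cassert>
#include <cstdint>

typedef uint64_t u64;
typedef u64 Event;

enum EventType { EventTypeMop, EventTypeFuncEnter, EventTypeFuncExit };  // assumed subset

// Type in bits 61..63, address/PC in bits 0..60.
inline Event PackEvent(EventType typ, u64 addr) {
  assert(addr < (1ull << 61));   // the address must fit in the low 61 bits
  return (u64)addr | ((u64)typ << 61);
}

inline EventType UnpackType(Event ev) { return (EventType)(ev >> 61); }
inline u64 UnpackAddr(Event ev) { return ev & ((1ull << 61) - 1); }

int main() {
  Event ev = PackEvent(EventTypeFuncEnter, 0x400123);
  assert(UnpackType(ev) == EventTypeFuncEnter && UnpackAddr(ev) == 0x400123);
  return 0;
}
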