Lines matching refs: u64
110 ThreadState::ThreadState(Context *ctx, int tid, int unique_id, u64 epoch, in ThreadState()
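The `u64 epoch` in the ThreadState constructor above is the thread's scalar clock, kept as a plain 64-bit integer. A minimal sketch of that idea; the struct and method names here are illustrative, not the runtime's actual layout:

```cpp
#include <cstdint>

using u64 = uint64_t;

// Illustrative only: each thread carries a 64-bit scalar clock ("epoch")
// that advances as the thread performs tracked events; shadow words embed
// this epoch so per-thread ordering can be recovered later.
struct ThreadClockSketch {
  u64 epoch;
  explicit ThreadClockSketch(u64 start_epoch) : epoch(start_epoch) {}
  u64 Tick() { return ++epoch; }  // one tick per tracked event
};
```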
151 const u64 kMs2Ns = 1000 * 1000; in BackgroundThread()
172 u64 last_flush = NanoTime(); in BackgroundThread()
178 u64 now = NanoTime(); in BackgroundThread()
194 (u64)rss >> 20, (u64)last_rss >> 20, (u64)limit >> 20); in BackgroundThread()
199 VPrintf(1, "ThreadSanitizer: memory flushed RSS=%llu\n", (u64)rss>>20); in BackgroundThread()
210 u64 last = atomic_load(&ctx->last_symbolize_time_ns, in BackgroundThread()
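The BackgroundThread matches above outline its cadence: times are kept in nanoseconds as u64, kMs2Ns converts millisecond-granularity flags, and RSS figures are shifted right by 20 to print megabytes. A sketch of that pattern, with stub NanoTime/GetRSS/FlushShadowMemory standing in for the runtime's own helpers (the stub bodies are assumptions, not the real implementations):

```cpp
#include <cstdint>
#include <cstdio>
#include <time.h>

using u64 = uint64_t;

const u64 kMs2Ns = 1000 * 1000;  // milliseconds -> nanoseconds

// Stand-ins for the runtime's helpers (assumed, not the real bodies).
static u64 NanoTime() {
  timespec ts;
  clock_gettime(CLOCK_MONOTONIC, &ts);
  return (u64)ts.tv_sec * 1000000000ull + (u64)ts.tv_nsec;
}
static u64 GetRSS() { return 0; }   // would read the process RSS
static void FlushShadowMemory() {}  // would release the shadow range

// One iteration of the background loop: flush shadow memory when the
// flush interval elapsed or RSS crossed the limit; >>20 converts B -> MB.
static void BackgroundIterationSketch(u64 *last_flush, u64 flush_ms,
                                      u64 limit) {
  u64 now = NanoTime();
  u64 rss = GetRSS();
  if ((flush_ms && now - *last_flush > flush_ms * kMs2Ns) ||
      (limit && rss > limit)) {
    printf("flushing shadow: RSS=%llu MB\n", (unsigned long long)(rss >> 20));
    FlushShadowMemory();
    *last_flush = NanoTime();
  }
}
```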
538 Shadow LoadShadow(u64 *p) { in LoadShadow()
539 u64 raw = atomic_load((atomic_uint64_t*)p, memory_order_relaxed); in LoadShadow()
544 void StoreShadow(u64 *sp, u64 s) { in StoreShadow()
549 void StoreIfNotYetStored(u64 *sp, u64 *s) { in StoreIfNotYetStored()
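LoadShadow and StoreShadow above move one u64 shadow word with relaxed atomics: the detector tolerates races on its own shadow state rather than paying for synchronization on every application access. StoreIfNotYetStored additionally zeroes the staged value so the word is written at most once per access. A sketch using std::atomic in place of the runtime's atomic_uint64_t:

```cpp
#include <atomic>
#include <cstdint>

using u64 = uint64_t;

// Shadow words are plain u64 slots accessed through relaxed atomics;
// concurrent writers may interleave, which the algorithm tolerates.
// (The cast mirrors what the runtime does with its own atomic type.)
static u64 LoadShadowSketch(u64 *p) {
  return reinterpret_cast<std::atomic<u64> *>(p)->load(
      std::memory_order_relaxed);
}

static void StoreShadowSketch(u64 *sp, u64 s) {
  reinterpret_cast<std::atomic<u64> *>(sp)->store(s,
      std::memory_order_relaxed);
}

// Write the staged shadow value once, then clear it so later candidate
// slots in the same cell are not overwritten with a duplicate.
static void StoreIfNotYetStoredSketch(u64 *sp, u64 *s) {
  StoreShadowSketch(sp, *s);
  *s = 0;
}
```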
555 void HandleRace(ThreadState *thr, u64 *shadow_mem, in HandleRace()
574 u64 *shadow_mem, Shadow cur) { in MemoryAccessImpl1()
583 u64 store_word = cur.raw(); in MemoryAccessImpl1()
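MemoryAccessImpl1 packs the current access once into store_word = cur.raw() and compares it against each shadow word of the cell; a conflicting thread with an unsynchronized epoch and at least one write goes to HandleRace. A simplified, self-contained version of that comparison, with a hypothetical bit layout (the real Shadow packs tid, epoch, offset, size, and access kind differently):

```cpp
#include <cstdint>

using u64 = uint64_t;

// Hypothetical packing, for illustration only:
//   bits  0..15  tid
//   bits 16..55  epoch
//   bit  56      is_write
struct ShadowSketch {
  u64 raw;
  u64 tid() const { return raw & 0xffff; }
  u64 epoch() const { return (raw >> 16) & ((1ull << 40) - 1); }
  bool is_write() const { return (raw >> 56) & 1; }
};

// Report a race if another thread touched the same cell, at least one of
// the two accesses is a write, and the old access is not ordered before
// the current thread's acquired view of that thread (acquired_epoch).
static bool IsRaceSketch(ShadowSketch old_s, ShadowSketch cur,
                         u64 acquired_epoch_of_old_tid) {
  if (old_s.raw == 0) return false;                       // empty slot
  if (old_s.tid() == cur.tid()) return false;             // same thread
  if (!old_s.is_write() && !cur.is_write()) return false; // two reads
  return old_s.epoch() > acquired_epoch_of_old_tid;       // unsynchronized
}
```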
650 bool ContainsSameAccessSlow(u64 *s, u64 a, u64 sync_epoch, bool is_write) { in ContainsSameAccessSlow()
669 bool ContainsSameAccessFast(u64 *s, u64 a, u64 sync_epoch, bool is_write) { in ContainsSameAccessFast()
723 bool ContainsSameAccess(u64 *s, u64 a, u64 sync_epoch, bool is_write) { in ContainsSameAccess()
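The ContainsSameAccess family above is the hot-path filter: if one of the cell's shadow slots already records an equal-or-stronger access by the same thread since the last synchronization, the full shadow update can be skipped. The Fast variant vectorizes this check; the Slow variant is a plain loop, roughly like the sketch below, which reuses the same hypothetical bit layout as the previous example (kShadowCnt here is the per-cell slot count):

```cpp
#include <cstdint>

using u64 = uint64_t;

const int kShadowCnt = 4;  // shadow slots per application cell (assumed)

// Hypothetical field accessors matching the layout sketched earlier.
static u64 Tid(u64 s) { return s & 0xffff; }
static u64 Epoch(u64 s) { return (s >> 16) & ((1ull << 40) - 1); }
static bool IsWrite(u64 s) { return (s >> 56) & 1; }
static u64 AccessRange(u64 s) { return (s >> 57) & 0x7f; }  // offset+size

// An access `a` is redundant if some slot records the same thread touching
// the same bytes, at least as strongly (a write covers a read), at an epoch
// newer than the last synchronization on this thread (sync_epoch).
static bool ContainsSameAccessSketch(u64 *s, u64 a, u64 sync_epoch) {
  for (int i = 0; i < kShadowCnt; i++) {
    u64 old = s[i];
    if (old == 0) continue;
    if (Tid(old) != Tid(a)) continue;
    if (AccessRange(old) != AccessRange(a)) continue;
    if (Epoch(old) <= sync_epoch) continue;    // predates last sync: stale
    if (!IsWrite(old) && IsWrite(a)) continue; // old read can't cover a write
    return true;                               // full update skippable
  }
  return false;
}
```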
739 u64 *shadow_mem = (u64*)MemToShadow(addr); in MemoryAccess()
805 u64 *shadow_mem, Shadow cur) { in MemoryAccessImpl()
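MemoryAccess first translates the application address into its shadow block, as the MemToShadow call above shows: every kShadowCell bytes of application memory own kShadowCnt consecutive u64 shadow words. The real MemToShadow is a platform-specific mask-and-multiply; the additive linear mapping and kShadowBeg base below are only assumptions meant to show the shape of the translation:

```cpp
#include <cstdint>

using u64 = uint64_t;
using uptr = uintptr_t;

const uptr kShadowCell = 8;  // application bytes per shadow cell
const uptr kShadowCnt  = 4;  // shadow words per cell (assumed)

// Assumed base address; the runtime uses a fixed arch-specific formula
// rather than an additive base like this.
const uptr kShadowBeg = 0x008000000000ull;

static u64 *MemToShadowSketch(uptr addr) {
  uptr cell_index = addr / kShadowCell;
  return reinterpret_cast<u64 *>(kShadowBeg +
                                 cell_index * kShadowCnt * sizeof(u64));
}
```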
820 u64 val) { in MemoryRangeSet()
845 u64 *p = (u64*)MemToShadow(addr); in MemoryRangeSet()
857 u64 *begin = (u64*)MemToShadow(addr); in MemoryRangeSet()
858 u64 *end = begin + size / kShadowCell * kShadowCnt; in MemoryRangeSet()
859 u64 *p = begin; in MemoryRangeSet()
867 u64 *p1 = p; in MemoryRangeSet()
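The MemoryRangeSet lines above show the fill pattern: begin/end are derived from the cell and slot constants, slot 0 of each cell gets val, and the remaining slots are zeroed so stale history cannot alias new accesses. A simplified version over a caller-supplied shadow pointer (the parameter name stands in for the result of MemToShadow(addr)):

```cpp
#include <cstdint>

using u64 = uint64_t;
using uptr = uintptr_t;

const uptr kShadowCell = 8;  // application bytes per shadow cell
const uptr kShadowCnt  = 4;  // shadow words per cell (assumed)

// Fill the shadow for `size` application bytes starting at the shadow of
// some address: `val` in slot 0 of each cell, zeros in the other slots.
static void MemoryRangeSetSketch(u64 *shadow_for_addr, uptr size, u64 val) {
  u64 *begin = shadow_for_addr;
  u64 *end = begin + size / kShadowCell * kShadowCnt;
  for (u64 *p = begin; p < end;) {
    *p++ = val;
    for (uptr j = 1; j < kShadowCnt; j++) *p++ = 0;
  }
}
```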