/art/runtime/gc/accounting/
  bitmap.h
    47: static ALWAYS_INLINE constexpr size_t BitIndexToWordIndex(uintptr_t offset) {  in BitIndexToWordIndex()
    52: static ALWAYS_INLINE constexpr T WordIndexToBitIndex(T word_index) {  in WordIndexToBitIndex()
    56: static ALWAYS_INLINE constexpr uintptr_t BitIndexToMask(uintptr_t bit_index) {  in BitIndexToMask()
    60: ALWAYS_INLINE bool SetBit(size_t bit_index) {  in SetBit()
    64: ALWAYS_INLINE bool ClearBit(size_t bit_index) {  in ClearBit()
    68: ALWAYS_INLINE bool TestBit(size_t bit_index) const;
    71: ALWAYS_INLINE bool AtomicTestAndSetBit(size_t bit_index);
    94: ALWAYS_INLINE void CheckValidBitIndex(size_t bit_index) const {  in CheckValidBitIndex()
    110: ALWAYS_INLINE bool ModifyBit(uintptr_t bit_index);
    135: ALWAYS_INLINE uintptr_t CoverBegin() const {  in CoverBegin()
    [all …]
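Note on bitmap.h: BitIndexToWordIndex/BitIndexToMask at 47–56 are the standard power-of-two split of a bit index into a word index plus an in-word mask, and SetBit/ClearBit return the bit's previous value. A minimal sketch of the same arithmetic, assuming 64-bit words (ART sizes its words from uintptr_t; ToyBitmap and all names below are illustrative, not ART's):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Sketch of a word-backed bitmap, assuming 64-bit words.
    class ToyBitmap {
     public:
      explicit ToyBitmap(size_t num_bits)
          : words_((num_bits + kBitsPerWord - 1) / kBitsPerWord, 0u) {}

      static constexpr size_t BitIndexToWordIndex(size_t bit_index) {
        return bit_index >> kWordShift;  // e.g. bit 130 -> word 2
      }
      static constexpr uint64_t BitIndexToMask(size_t bit_index) {
        return uint64_t{1} << (bit_index % kBitsPerWord);
      }

      // Like the SetBit/ClearBit above, returns the previous value of the bit.
      bool SetBit(size_t bit_index) {
        uint64_t& word = words_[BitIndexToWordIndex(bit_index)];
        const uint64_t mask = BitIndexToMask(bit_index);
        const bool old_value = (word & mask) != 0;
        word |= mask;
        return old_value;
      }

      bool TestBit(size_t bit_index) const {
        return (words_[BitIndexToWordIndex(bit_index)] & BitIndexToMask(bit_index)) != 0;
      }

     private:
      static constexpr size_t kBitsPerWord = 64;
      static constexpr size_t kWordShift = 6;  // log2(kBitsPerWord)
      std::vector<uint64_t> words_;
    };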
/art/runtime/
  handle_scope.h
    52: ALWAYS_INLINE uint32_t NumberOfReferences() const;
    54: ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;
    57: ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);
    64: ALWAYS_INLINE VariableSizedHandleScope* AsVariableSized();
    65: ALWAYS_INLINE HandleScope* AsHandleScope();
    66: ALWAYS_INLINE const VariableSizedHandleScope* AsVariableSized() const;
    67: ALWAYS_INLINE const HandleScope* AsHandleScope() const;
    108: ALWAYS_INLINE ObjPtr<mirror::Object> GetReference(size_t i) const
    111: ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);
    113: ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
    [all …]
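Note on handle_scope.h: GetReference/GetHandle plus VisitRoots suggest a scope that is little more than a fixed array of reference slots the GC can walk and rewrite. A hedged sketch of that shape (Object and the visitor protocol are stand-ins, not ART's types):

    #include <cstddef>

    struct Object {};  // stand-in for mirror::Object

    // Sketch: a scope owning kNumReferences slots; the GC visits each slot so a
    // moving collector can update it in place.
    template <size_t kNumReferences>
    class ToyHandleScope {
     public:
      constexpr size_t NumberOfReferences() const { return kNumReferences; }

      Object* GetReference(size_t i) const { return refs_[i]; }
      void SetReference(size_t i, Object* ref) { refs_[i] = ref; }

      // Mirrors Contains(): is this slot address inside the scope's storage?
      bool Contains(Object* const* slot) const {
        return slot >= refs_ && slot < refs_ + kNumReferences;
      }

      template <typename Visitor>
      void VisitRoots(Visitor& visitor) {
        for (size_t i = 0; i < kNumReferences; ++i) {
          visitor(&refs_[i]);  // visitor may mark and/or relocate the referent
        }
      }

     private:
      Object* refs_[kNumReferences] = {};
    };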
  scoped_thread_state_change.h
    44: ALWAYS_INLINE ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
    47: ALWAYS_INLINE ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_);
    49: ALWAYS_INLINE Thread* Self() const {  in Self()
    103: ALWAYS_INLINE bool IsRunnable() const;
    106: ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
    109: ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
    120: ALWAYS_INLINE ~ScopedObjectAccessAlreadyRunnable() {}  in ~ScopedObjectAccessAlreadyRunnable()
    149: ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(JNIEnv* env)
    152: ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(Thread* self)
    155: ALWAYS_INLINE ~ScopedObjectAccessUnchecked() REQUIRES(!Locks::thread_suspend_count_lock_) {}  in ~ScopedObjectAccessUnchecked()
    [all …]
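Note on scoped_thread_state_change.h: the constructor/destructor pair at 44/47 is plain RAII: flip the thread's state on entry, restore it on exit. A simplified sketch of the idiom (ToyThread and the state values are illustrative; the real class also negotiates suspension, which is why the destructor carries REQUIRES(!Locks::thread_suspend_count_lock_)):

    // Sketch of the RAII state-change idiom.
    enum class ThreadState { kRunnable, kNative, kSuspended };

    struct ToyThread {
      ThreadState state = ThreadState::kRunnable;
    };

    class ScopedStateChange {
     public:
      ScopedStateChange(ToyThread* self, ThreadState new_state)
          : self_(self), old_state_(self->state) {
        self_->state = new_state;  // e.g. Runnable -> Native around a blocking call
      }
      ~ScopedStateChange() { self_->state = old_state_; }

      ToyThread* Self() const { return self_; }

     private:
      ToyThread* const self_;
      const ThreadState old_state_;
    };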
  reflective_handle.h
    38: ALWAYS_INLINE ReflectiveHandle(const ReflectiveHandle<T>& handle) = default;
    39: ALWAYS_INLINE ReflectiveHandle<T>& operator=(const ReflectiveHandle<T>& handle) = default;
    41: ALWAYS_INLINE explicit ReflectiveHandle(ReflectiveReference<T>* reference)  in ReflectiveHandle()
    44: ALWAYS_INLINE T& operator*() const REQUIRES_SHARED(Locks::mutator_lock_) {  in REQUIRES_SHARED()
    48: ALWAYS_INLINE T* operator->() const REQUIRES_SHARED(Locks::mutator_lock_) {
    52: ALWAYS_INLINE T* Get() const REQUIRES_SHARED(Locks::mutator_lock_) {  in Get()
    56: ALWAYS_INLINE bool IsNull() const {  in IsNull()
    61: ALWAYS_INLINE bool operator!=(std::nullptr_t) const REQUIRES_SHARED(Locks::mutator_lock_) {
    65: ALWAYS_INLINE bool operator==(std::nullptr_t) const REQUIRES_SHARED(Locks::mutator_lock_) {
    84: ALWAYS_INLINE MutableReflectiveHandle(const MutableReflectiveHandle<T>& handle)
    [all …]
  reflective_handle_scope.h
    54: ALWAYS_INLINE void VisitTargets(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {  in VisitTargets()
    59: ALWAYS_INLINE virtual ~BaseReflectiveHandleScope() {  in ~BaseReflectiveHandleScope()
    77: ALWAYS_INLINE BaseReflectiveHandleScope() : self_(nullptr), link_(nullptr) {}  in BaseReflectiveHandleScope()
    79: ALWAYS_INLINE inline void PushScope(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
    80: ALWAYS_INLINE inline void PopScope() REQUIRES_SHARED(Locks::mutator_lock_);
    99: ALWAYS_INLINE explicit StackReflectiveHandleScope(Thread* self)
    101: ALWAYS_INLINE ~StackReflectiveHandleScope() REQUIRES_SHARED(Locks::mutator_lock_);
    108: ALWAYS_INLINE MutableReflectiveHandle<T> NewHandle(T* t) REQUIRES_SHARED(Locks::mutator_lock_) {  in NewHandle()
    117: ALWAYS_INLINE ReflectiveHandleWrapper<T> NewReflectiveHandleWrapper(T** t)  in NewReflectiveHandleWrapper()
    122: ALWAYS_INLINE MutableReflectiveHandle<ArtField> NewFieldHandle(ArtField* f)  in NewFieldHandle()
    [all …]
  handle.h
    54: ALWAYS_INLINE Handle(const Handle<T>& handle) = default;
    56: ALWAYS_INLINE Handle<T>& operator=(const Handle<T>& handle) = default;
    60: ALWAYS_INLINE Handle(const Handle<Type>& other) : reference_(other.reference_) {  in Handle()
    63: ALWAYS_INLINE explicit Handle(StackReference<T>* reference) : reference_(reference) {  in Handle()
    66: ALWAYS_INLINE T& operator*() const REQUIRES_SHARED(Locks::mutator_lock_) {  in REQUIRES_SHARED()
    70: ALWAYS_INLINE T* operator->() const REQUIRES_SHARED(Locks::mutator_lock_) {
    74: ALWAYS_INLINE T* Get() const REQUIRES_SHARED(Locks::mutator_lock_) {  in Get()
    80: ALWAYS_INLINE IterationRange<mirror::ConstHandleArrayIter<Type>> ConstIterate() const  in ConstIterate()
    86: ALWAYS_INLINE IterationRange<mirror::HandleArrayIter<Type>> Iterate()  in Iterate()
    91: ALWAYS_INLINE bool IsNull() const {  in IsNull()
    [all …]
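Note on handle.h: a Handle<T> holds a pointer to a StackReference<T> slot, not to the object itself; when a moving GC relocates the object it rewrites the slot, and the next Get() observes the new address. A minimal sketch of that double indirection (ToySlot stands in for StackReference<T>):

    // Sketch: every dereference re-reads a slot the GC may rewrite.
    template <typename T>
    struct ToySlot {
      T* ptr;
    };

    template <typename T>
    class ToyHandle {
     public:
      explicit ToyHandle(ToySlot<T>* slot) : slot_(slot) {}

      T* Get() const { return slot_->ptr; }  // always re-reads the slot
      T* operator->() const { return Get(); }
      T& operator*() const { return *Get(); }
      bool IsNull() const { return slot_->ptr == nullptr; }

     private:
      ToySlot<T>* slot_;  // owned by a HandleScope and visited as a GC root
    };

Copying a handle copies only the slot pointer, which is why the copy constructor and assignment at 54/56 can be defaulted.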
  write_barrier.h
    40: ALWAYS_INLINE static void ForFieldWrite(ObjPtr<mirror::Object> dst,
    47: ALWAYS_INLINE static void ForArrayWrite(ObjPtr<mirror::Object> dst,
    53: ALWAYS_INLINE static void ForEveryFieldWrite(ObjPtr<mirror::Object> obj)
    57: ALWAYS_INLINE static gc::accounting::CardTable* GetCardTable();
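Note on write_barrier.h: ForFieldWrite/ForArrayWrite feed a card-table write barrier: after a reference is stored into dst, the card covering dst is dirtied so the next GC scans only dirty cards for cross-region pointers. A sketch of the classic scheme, assuming 128-byte cards and a pre-biased table base (both details are assumptions, simplified relative to ART's CardTable):

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kCardShift = 7;      // assumed 128-byte cards
    constexpr uint8_t kCardDirty = 0x70;  // assumed "dirty" marker value

    struct ToyCardTable {
      // Biased so MarkCard needs no heap-base subtraction.
      uint8_t* biased_begin;

      void MarkCard(const void* obj_addr) {
        biased_begin[reinterpret_cast<uintptr_t>(obj_addr) >> kCardShift] = kCardDirty;
      }
    };

    // The barrier itself: unconditional and branch-free, cheap enough to be
    // ALWAYS_INLINE at every reference store.
    inline void ForFieldWrite(ToyCardTable* table, const void* dst_object) {
      table->MarkCard(dst_object);
    }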
  art_method.h
    93: ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);
    96: ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClassUnchecked()
    180: ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);
    447: ALWAYS_INLINE uint32_t GetDexMethodIndex() const {  in GetDexMethodIndex()
    484: ALWAYS_INLINE
    495: ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(  in SetEntryPointFromQuickCompiledCodePtrSize()
    525: ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size)  in SetImtConflictTable()
    532: ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);
    534: ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl)  in SetHasSingleImplementation()
    544: ALWAYS_INLINE bool HasSingleImplementationFlag() const {  in HasSingleImplementationFlag()
    [all …]
  stack_map.h
    141: ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {  in GetNativePcOffset()
    145: ALWAYS_INLINE bool HasInlineInfo() const {  in HasInlineInfo()
    149: ALWAYS_INLINE bool HasDexRegisterMap() const {  in HasDexRegisterMap()
    227: ALWAYS_INLINE DexRegisterLocation GetLocation() const {  in BIT_TABLE_HEADER()
    258: ALWAYS_INLINE uint32_t GetMask() const {  in BIT_TABLE_HEADER()
    294: ALWAYS_INLINE CodeInfo() {}  in CodeInfo()
    295: ALWAYS_INLINE explicit CodeInfo(const uint8_t* data, size_t* num_read_bits = nullptr);
    296: ALWAYS_INLINE explicit CodeInfo(const OatQuickMethodHeader* header);
    302: ALWAYS_INLINE static uint32_t DecodeCodeSize(const uint8_t* code_info_data) {  in DecodeCodeSize()
    306: ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* code_info_data) {  in DecodeFrameInfo()
    [all …]
  read_barrier.h
    56: ALWAYS_INLINE static MirrorType* Barrier(
    63: ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root,
    70: ALWAYS_INLINE static MirrorType* BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
    76: ALWAYS_INLINE static MirrorType* IsMarked(MirrorType* ref)
    116: ALWAYS_INLINE static bool IsGray(mirror::Object* obj, uintptr_t* fake_address_dependency)
    121: ALWAYS_INLINE static bool IsGray(mirror::Object* obj)
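Note on read_barrier.h: IsGray plus the fake_address_dependency parameter point at a Baker-style read barrier for the concurrent copying collector: the fast path tests a color in the object header, and only gray objects pay for a slow path that may return the to-space copy. A heavily simplified sketch of the control flow (the state encoding and Mark are stand-ins):

    #include <cstdint>

    struct ToyObject {
      uint32_t rb_state;  // sketch encoding: 1 = gray, anything else = non-gray
    };

    constexpr uint32_t kGrayState = 1;

    // Stand-in for the collector's slow path, which would mark the object and
    // return its to-space address.
    inline ToyObject* Mark(ToyObject* ref) { return ref; }

    inline ToyObject* ReadBarrier(ToyObject* ref) {
      if (ref != nullptr && ref->rb_state == kGrayState) {
        return Mark(ref);  // slow path: may forward to the to-space copy
      }
      return ref;          // fast path: a load and a compare
    }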
  gc_root.h
    112: ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)  in VisitRoot()
    118: ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)  in VisitRootIfNonNull()
    190: ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
    208: ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() {  in AddressWithoutBarrier()
    212: ALWAYS_INLINE bool IsNull() const {  in IsNull()
    217: ALWAYS_INLINE GcRoot() {}  in GcRoot()
    218: explicit ALWAYS_INLINE GcRoot(MirrorType* ref)
    220: explicit ALWAYS_INLINE GcRoot(ObjPtr<MirrorType> ref)
    247: ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root)  in VisitRootIfNonNull()
    255: ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root)  in VisitRootIfNonNull()
    [all …]
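Note on gc_root.h: AddressWithoutBarrier returning a mirror::CompressedReference<> shows that roots are stored compressed and normally read through a barrier, and the VisitRootIfNonNull overloads are null-skipping wrappers around VisitRoot. A sketch of both ideas, assuming references compress to 32 bits because the managed heap sits in the low 4 GiB (ToyCompressedRef is illustrative):

    #include <cstdint>

    struct Mirror {};  // stand-in for mirror::Object

    class ToyCompressedRef {
     public:
      static ToyCompressedRef FromPtr(Mirror* ptr) {
        // Assumes the heap is mapped below 4 GiB, so truncation is lossless.
        return {static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ptr))};
      }
      Mirror* AsPtr() const { return reinterpret_cast<Mirror*>(uintptr_t{bits_}); }
      bool IsNull() const { return bits_ == 0; }

      uint32_t bits_;
    };

    // The VisitRootIfNonNull pattern from the listing: forward only non-null slots.
    template <typename Visitor>
    void VisitRootIfNonNull(ToyCompressedRef* root, Visitor&& visit) {
      if (!root->IsNull()) {
        visit(root);
      }
    }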
  oat_quick_method_header.h
    66: ALWAYS_INLINE bool IsOptimized() const {  in IsOptimized()
    73: ALWAYS_INLINE const uint8_t* GetOptimizedCodeInfoPtr() const {  in GetOptimizedCodeInfoPtr()
    79: ALWAYS_INLINE uint8_t* GetOptimizedCodeInfoPtr() {  in GetOptimizedCodeInfoPtr()
    85: ALWAYS_INLINE const uint8_t* GetCode() const {  in GetCode()
    89: ALWAYS_INLINE uint32_t GetCodeSize() const {  in GetCodeSize()
    95: ALWAYS_INLINE uint32_t GetCodeInfoOffset() const {  in GetCodeInfoOffset()
/art/libdexfile/dex/
  code_item_accessors.h
    43: ALWAYS_INLINE CodeItemInstructionAccessor(const DexFile& dex_file,
    46: ALWAYS_INLINE explicit CodeItemInstructionAccessor(ArtMethod* method);
    48: ALWAYS_INLINE DexInstructionIterator begin() const;
    50: ALWAYS_INLINE DexInstructionIterator end() const;
    81: ALWAYS_INLINE void Init(uint32_t insns_size_in_code_units, const uint16_t* insns);
    82: ALWAYS_INLINE void Init(const DexFile& dex_file, const dex::CodeItem* code_item);
    85: ALWAYS_INLINE void Init(const DexFileCodeItemType& code_item);
    99: ALWAYS_INLINE CodeItemDataAccessor(const DexFile& dex_file, const dex::CodeItem* code_item);
    128: ALWAYS_INLINE void Init(const DexFile& dex_file, const dex::CodeItem* code_item);
    131: ALWAYS_INLINE void Init(const DexFileCodeItemType& code_item);
    [all …]
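Note on code_item_accessors.h: begin()/end() returning DexInstructionIterator means a code item is iterated as a range of variable-length instructions laid out in 16-bit code units, with the dex pc being the code-unit offset. A self-contained sketch of that iteration shape (the size rule below is fake; real sizes come from each opcode's format):

    #include <cstddef>
    #include <cstdint>

    // Sketch: variable-length instructions over 16-bit code units.
    struct ToyInstruction {
      const uint16_t* units;
      // Stand-in for per-opcode size decoding.
      size_t SizeInCodeUnits() const { return (units[0] & 0xff00u) != 0 ? 2 : 1; }
    };

    class ToyInstructionIterator {
     public:
      ToyInstructionIterator(const uint16_t* base, uint32_t dex_pc)
          : base_(base), dex_pc_(dex_pc) {}

      ToyInstruction operator*() const { return ToyInstruction{base_ + dex_pc_}; }
      uint32_t DexPc() const { return dex_pc_; }  // offset in code units

      ToyInstructionIterator& operator++() {
        dex_pc_ += (**this).SizeInCodeUnits();  // advance by the decoded size
        return *this;
      }
      bool operator!=(const ToyInstructionIterator& other) const {
        return dex_pc_ != other.dex_pc_;
      }

     private:
      const uint16_t* base_;
      uint32_t dex_pc_;
    };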
  dex_instruction_iterator.h
    31: ALWAYS_INLINE const Instruction& Inst() const {  in Inst()
    35: ALWAYS_INLINE const Instruction* operator->() const {
    39: ALWAYS_INLINE uint32_t DexPc() const {  in DexPc()
    43: ALWAYS_INLINE const uint16_t* Instructions() const {  in Instructions()
    74: ALWAYS_INLINE uint32_t DexPc() const {  in DexPc()
    79: ALWAYS_INLINE const uint16_t* Instructions() const {  in Instructions()
    87: static ALWAYS_INLINE inline bool operator==(const DexInstructionIteratorBase& lhs,
    152: ALWAYS_INLINE uint32_t DexPc() const {  in DexPc()
    206: ALWAYS_INLINE const Instruction& Inst() const {  in Inst()
    221: ALWAYS_INLINE void AssertValid() const {  in AssertValid()
    [all …]
/art/libelffile/dwarf/
  debug_frame_opcode_writer.h
    47: void ALWAYS_INLINE AdvancePC(int absolute_pc) {  in AdvancePC()
    73: void ALWAYS_INLINE RelOffset(Reg reg, int offset) {  in RelOffset()
    78: void ALWAYS_INLINE AdjustCFAOffset(int delta) {  in AdjustCFAOffset()
    83: void ALWAYS_INLINE RelOffsetForMany(Reg reg_base,  in RelOffsetForMany()
    101: void ALWAYS_INLINE RestoreMany(Reg reg_base, uint32_t reg_mask) {  in RestoreMany()
    113: void ALWAYS_INLINE Nop() {  in Nop()
    119: void ALWAYS_INLINE Offset(Reg reg, int offset) {  in Offset()
    141: void ALWAYS_INLINE Restore(Reg reg) {  in Restore()
    153: void ALWAYS_INLINE Undefined(Reg reg) {  in Undefined()
    161: void ALWAYS_INLINE SameValue(Reg reg) {  in SameValue()
    [all …]
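Note on debug_frame_opcode_writer.h: these methods emit DWARF call-frame instructions (DW_CFA_*) describing where registers are saved relative to the CFA. A sketch of the encoding for the two most common forms, which pack a small operand into the opcode byte's low six bits; the DW_CFA_* values and the ULEB128 scheme are the standard DWARF ones, everything else is simplified:

    #include <cstdint>
    #include <vector>

    constexpr uint8_t DW_CFA_advance_loc = 0x40;  // high bits 01, delta in low 6
    constexpr uint8_t DW_CFA_offset      = 0x80;  // high bits 10, register in low 6

    void PushUleb128(std::vector<uint8_t>* out, uint32_t value) {
      do {
        uint8_t byte = value & 0x7f;
        value >>= 7;
        if (value != 0) byte |= 0x80;  // more-bytes flag
        out->push_back(byte);
      } while (value != 0);
    }

    // Sketch of AdvancePC: real writers fall back to DW_CFA_advance_loc1/2/4
    // for deltas that do not fit in six bits.
    void AdvancePC(std::vector<uint8_t>* out, uint32_t code_unit_delta) {
      if (code_unit_delta < 0x40) {
        out->push_back(DW_CFA_advance_loc | static_cast<uint8_t>(code_unit_delta));
      }
      // larger deltas elided
    }

    // Sketch of Offset(reg, offset): register saved at a factored offset from CFA.
    void Offset(std::vector<uint8_t>* out, uint8_t reg, uint32_t factored_offset) {
      if (reg < 0x40) {
        out->push_back(DW_CFA_offset | reg);
        PushUleb128(out, factored_offset);
      }
      // higher register numbers use DW_CFA_offset_extended (elided)
    }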
/art/libartbase/base/
  bit_memory_region.h
    40: ALWAYS_INLINE BitMemoryRegion(uint8_t* data, ssize_t bit_start, size_t bit_size) {  in BitMemoryRegion()
    46: ALWAYS_INLINE explicit BitMemoryRegion(MemoryRegion region)  in BitMemoryRegion()
    49: ALWAYS_INLINE BitMemoryRegion(MemoryRegion region, size_t bit_offset, size_t bit_length)  in BitMemoryRegion()
    54: ALWAYS_INLINE bool IsValid() const { return data_ != nullptr; }  in IsValid()
    69: ALWAYS_INLINE BitMemoryRegion Subregion(size_t bit_offset, size_t bit_length) const {  in Subregion()
    78: ALWAYS_INLINE BitMemoryRegion Subregion(size_t bit_offset) const {  in Subregion()
    88: ALWAYS_INLINE bool LoadBit(size_t bit_offset) const {  in LoadBit()
    95: ALWAYS_INLINE void StoreBit(size_t bit_offset, bool value) {  in StoreBit()
    109: ALWAYS_INLINE Result LoadBits(size_t bit_offset, size_t bit_length) const {  in LoadBits()
    139: ALWAYS_INLINE void StoreBits(size_t bit_offset, uint32_t value, size_t bit_length) {  in StoreBits()
    [all …]
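Note on bit_memory_region.h: LoadBits/StoreBits read and write fields at arbitrary bit offsets inside a byte buffer, which is what lets structures like stack maps pack entries without byte alignment. A sketch of the load side, done bit-at-a-time for clarity and assuming LSB-first bit order (the real code loads whole words and shifts):

    #include <cstddef>
    #include <cstdint>

    // Sketch of LoadBits: extract bit_length bits starting at bit_offset.
    inline uint32_t LoadBits(const uint8_t* data, size_t bit_offset, size_t bit_length) {
      uint32_t result = 0;
      for (size_t i = 0; i < bit_length; ++i) {
        const size_t bit = bit_offset + i;
        const uint32_t value = (data[bit / 8] >> (bit % 8)) & 1u;
        result |= value << i;
      }
      return result;
    }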
  memory_region.h
    62: ALWAYS_INLINE T Load(uintptr_t offset) const {  in Load()
    72: ALWAYS_INLINE void Store(uintptr_t offset, T value) const {  in Store()
    81: ALWAYS_INLINE T LoadUnaligned(uintptr_t offset) const {  in LoadUnaligned()
    96: ALWAYS_INLINE void StoreUnaligned(uintptr_t offset, T value) const {  in StoreUnaligned()
    108: ALWAYS_INLINE T* PointerTo(uintptr_t offset) const {  in PointerTo()
    122: ALWAYS_INLINE MemoryRegion Subregion(uintptr_t offset, uintptr_t size_in) const {  in Subregion()
    129: ALWAYS_INLINE void Extend(const MemoryRegion& region, uintptr_t extra) {  in Extend()
    136: ALWAYS_INLINE T* ComputeInternalPointer(size_t offset) const {  in ComputeInternalPointer()
    144: ALWAYS_INLINE uint8_t* ComputeBitPointer(uintptr_t bit_offset, uint8_t* bit_mask) const {  in ComputeBitPointer()
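Note on memory_region.h: LoadUnaligned/StoreUnaligned exist because dereferencing a misaligned T* is undefined behavior in C++; the portable idiom is memcpy, which compilers lower to a single load or store on architectures that permit it. A sketch:

    #include <cstdint>
    #include <cstring>

    template <typename T>
    T LoadUnaligned(const uint8_t* base, uintptr_t offset) {
      T value;
      std::memcpy(&value, base + offset, sizeof(T));  // no alignment assumption
      return value;
    }

    template <typename T>
    void StoreUnaligned(uint8_t* base, uintptr_t offset, T value) {
      std::memcpy(base + offset, &value, sizeof(T));
    }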
  macros.h
    39: NO_RETURN ALWAYS_INLINE void operator delete(void*, size_t) { UNREACHABLE(); } \
    40: ALWAYS_INLINE void* operator new(size_t, void* ptr) noexcept { return ptr; } \
    41: ALWAYS_INLINE void operator delete(void*, void*) noexcept { } \
    71: #define ALWAYS_INLINE  (macro)
    74: #define ALWAYS_INLINE __attribute__ ((always_inline))  (macro)
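Note on macros.h: the two definitions at 71 and 74 are a conditional pair: one configuration defines ALWAYS_INLINE away, the other expands it to the GCC/Clang always_inline attribute, which forces inlining regardless of optimization level. Reconstructed as a hedged sketch (the actual guard condition in macros.h is an assumption here):

    #include <cstddef>
    #include <cstdint>

    // Sketch of the conditional definition implied by lines 71/74:
    #ifndef NDEBUG
    #define ALWAYS_INLINE                                  // debug: keep out-of-line
    #else
    #define ALWAYS_INLINE __attribute__ ((always_inline))  // release: force inlining
    #endif

    // Typical use, matching the declarations throughout this listing:
    ALWAYS_INLINE inline bool IsAlignedTo(uintptr_t x, size_t n) {
      return (x & (n - 1)) == 0;
    }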
/art/runtime/mirror/
  object.h
    98: ALWAYS_INLINE Class* GetClass() REQUIRES_SHARED(Locks::mutator_lock_);
    105: ALWAYS_INLINE uint32_t GetReadBarrierState(uintptr_t* fake_address_dependency)
    108: ALWAYS_INLINE uint32_t GetReadBarrierState() REQUIRES_SHARED(Locks::mutator_lock_);
    110: ALWAYS_INLINE uint32_t GetReadBarrierStateAcquire() REQUIRES_SHARED(Locks::mutator_lock_);
    112: ALWAYS_INLINE void SetReadBarrierState(uint32_t rb_state) REQUIRES_SHARED(Locks::mutator_lock_);
    115: ALWAYS_INLINE bool AtomicSetReadBarrierState(uint32_t expected_rb_state, uint32_t rb_state)
    118: ALWAYS_INLINE uint32_t GetMarkBit() REQUIRES_SHARED(Locks::mutator_lock_);
    120: ALWAYS_INLINE bool AtomicSetMarkBit(uint32_t expected_mark_bit, uint32_t mark_bit)
    124: ALWAYS_INLINE void AssertReadBarrierState() const REQUIRES_SHARED(Locks::mutator_lock_);
    131: ALWAYS_INLINE bool InstanceOf(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
    [all …]
  dex_cache.h
    277: String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
    280: void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
    285: ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
    289: ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
    302: ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx)
    305: ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx, ArtMethod* resolved)
    308: ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx)
    311: ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field)
    330: StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {  in GetStrings()
    335: GcRoot<mirror::String>* GetPreResolvedStrings() ALWAYS_INLINE  in GetPreResolvedStrings()
    [all …]
  class.h
    215: ALWAYS_INLINE uint32_t GetAccessFlags() REQUIRES_SHARED(Locks::mutator_lock_) {  in GetAccessFlags()
    227: ALWAYS_INLINE uint32_t GetClassFlags() REQUIRES_SHARED(Locks::mutator_lock_) {  in GetClassFlags()
    239: ALWAYS_INLINE bool IsEnum() REQUIRES_SHARED(Locks::mutator_lock_) {  in IsEnum()
    245: ALWAYS_INLINE bool IsInterface() REQUIRES_SHARED(Locks::mutator_lock_) {  in IsInterface()
    250: ALWAYS_INLINE bool IsPublic() REQUIRES_SHARED(Locks::mutator_lock_) {  in IsPublic()
    255: ALWAYS_INLINE bool IsFinal() REQUIRES_SHARED(Locks::mutator_lock_) {  in IsFinal()
    259: ALWAYS_INLINE bool IsFinalizable() REQUIRES_SHARED(Locks::mutator_lock_) {  in IsFinalizable()
    263: ALWAYS_INLINE bool ShouldSkipHiddenApiChecks() REQUIRES_SHARED(Locks::mutator_lock_) {  in ShouldSkipHiddenApiChecks()
    267: ALWAYS_INLINE void SetSkipHiddenApiChecks() REQUIRES_SHARED(Locks::mutator_lock_) {  in SetSkipHiddenApiChecks()
    272: ALWAYS_INLINE void SetRecursivelyInitialized() REQUIRES_SHARED(Locks::mutator_lock_);
    [all …]
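Note on class.h: IsEnum/IsInterface/IsPublic/IsFinal at 239–255 are single-bit tests against the class's access-flag word. A sketch using the standard Java/dex access-flag constants (the flag values come from the dex format specification; ToyClass is illustrative):

    #include <cstdint>

    // Standard Java/dex access flags.
    constexpr uint32_t kAccPublic    = 0x0001;
    constexpr uint32_t kAccFinal     = 0x0010;
    constexpr uint32_t kAccInterface = 0x0200;
    constexpr uint32_t kAccEnum      = 0x4000;

    struct ToyClass {
      uint32_t access_flags;

      bool IsPublic() const    { return (access_flags & kAccPublic) != 0; }
      bool IsFinal() const     { return (access_flags & kAccFinal) != 0; }
      bool IsInterface() const { return (access_flags & kAccInterface) != 0; }
      bool IsEnum() const      { return (access_flags & kAccEnum) != 0; }
    };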
/art/runtime/jni/
  jni_internal.h
    71: ALWAYS_INLINE
    77: ALWAYS_INLINE
    83: ALWAYS_INLINE
    93: ALWAYS_INLINE static inline jfieldID EncodeArtField(ReflectiveHandle<ArtField> field)  in EncodeArtField()
    103: ALWAYS_INLINE
    113: ALWAYS_INLINE
    124: ALWAYS_INLINE
    135: ALWAYS_INLINE
/art/openjdkjvmti/
  jvmti_weak_table.h
    63: ALWAYS_INLINE bool Remove(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
    66: ALWAYS_INLINE bool RemoveLocked(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
    72: ALWAYS_INLINE virtual bool Set(art::ObjPtr<art::mirror::Object> obj, T tag)
    75: ALWAYS_INLINE virtual bool SetLocked(art::ObjPtr<art::mirror::Object> obj, T tag)
    101: ALWAYS_INLINE void Sweep(art::IsMarkedVisitor* visitor)
    106: ALWAYS_INLINE
    117: ALWAYS_INLINE void Lock() ACQUIRE(allow_disallow_lock_);
    118: ALWAYS_INLINE void Unlock() RELEASE(allow_disallow_lock_);
    119: ALWAYS_INLINE void AssertLocked() ASSERT_CAPABILITY(allow_disallow_lock_);
    121: ALWAYS_INLINE art::ObjPtr<art::mirror::Object> Find(T tag)
    [all …]
  events.h
    155: ALWAYS_INLINE static inline ArtJvmtiEvent GetArtJvmtiEvent(ArtJvmTiEnv* env, jvmtiEvent e);
    266: ALWAYS_INLINE
    277: ALWAYS_INLINE
    284: ALWAYS_INLINE
    292: ALWAYS_INLINE
    301: ALWAYS_INLINE
    328: ALWAYS_INLINE
    334: ALWAYS_INLINE
    338: ALWAYS_INLINE
    345: ALWAYS_INLINE
    [all …]
/art/runtime/base/
  mutex.h
    194: ALWAYS_INLINE bool IsExclusiveHeld(const Thread* self) const;
    197: ALWAYS_INLINE void AssertExclusiveHeld(const Thread* self) const ASSERT_CAPABILITY(this);
    198: ALWAYS_INLINE void AssertHeld(const Thread* self) const ASSERT_CAPABILITY(this);
    338: void SharedLock(Thread* self) ACQUIRE_SHARED() ALWAYS_INLINE;
    345: void SharedUnlock(Thread* self) RELEASE_SHARED() ALWAYS_INLINE;
    349: ALWAYS_INLINE bool IsExclusiveHeld(const Thread* self) const;
    352: ALWAYS_INLINE void AssertExclusiveHeld(const Thread* self) const ASSERT_CAPABILITY(this);
    353: ALWAYS_INLINE void AssertWriterHeld(const Thread* self) const ASSERT_CAPABILITY(this);
    369: ALWAYS_INLINE void AssertSharedHeld(const Thread* self) ASSERT_SHARED_CAPABILITY(this) {  in AssertSharedHeld()
    375: ALWAYS_INLINE void AssertReaderHeld(const Thread* self) ASSERT_SHARED_CAPABILITY(this) {  in AssertReaderHeld()
    [all …]
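Note on mutex.h: the SharedLock/SharedUnlock and exclusive pairs describe a reader-writer lock, and the Assert*Held methods are debug checks that the calling thread holds it in the expected mode, backed by the ACQUIRE/RELEASE/ASSERT_CAPABILITY thread-safety annotations. A sketch of the shape over std::shared_mutex (ART's ReaderWriterMutex is its own futex-based implementation; only the writer-side assertion is modeled here):

    #include <cassert>
    #include <shared_mutex>
    #include <thread>

    class ToyReaderWriterMutex {
     public:
      void ExclusiveLock()   { mu_.lock(); writer_ = std::this_thread::get_id(); }
      void ExclusiveUnlock() { writer_ = std::thread::id(); mu_.unlock(); }
      void SharedLock()      { mu_.lock_shared(); }
      void SharedUnlock()    { mu_.unlock_shared(); }

      bool IsExclusiveHeld() const { return writer_ == std::this_thread::get_id(); }
      void AssertWriterHeld() const { assert(IsExclusiveHeld()); }

     private:
      std::shared_mutex mu_;
      std::thread::id writer_;  // id of the current exclusive holder, if any
    };

    // RAII usage, the pattern the annotations above check statically.
    class ToyReaderLock {
     public:
      explicit ToyReaderLock(ToyReaderWriterMutex& mu) : mu_(mu) { mu_.SharedLock(); }
      ~ToyReaderLock() { mu_.SharedUnlock(); }
     private:
      ToyReaderWriterMutex& mu_;
    };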