/art/libdexfile/dex/

D | code_item_accessors.h
    35  ALWAYS_INLINE CodeItemInstructionAccessor(const DexFile& dex_file,
    38  ALWAYS_INLINE explicit CodeItemInstructionAccessor(ArtMethod* method);
    40  ALWAYS_INLINE DexInstructionIterator begin() const;
    42  ALWAYS_INLINE DexInstructionIterator end() const;
    68  ALWAYS_INLINE void Init(uint32_t insns_size_in_code_units, const uint16_t* insns);
    69  ALWAYS_INLINE void Init(const CompactDexFile::CodeItem& code_item);
    70  ALWAYS_INLINE void Init(const StandardDexFile::CodeItem& code_item);
    71  ALWAYS_INLINE void Init(const DexFile& dex_file, const DexFile::CodeItem* code_item);
    85  ALWAYS_INLINE CodeItemDataAccessor(const DexFile& dex_file, const DexFile::CodeItem* code_item);
   114  ALWAYS_INLINE void Init(const CompactDexFile::CodeItem& code_item);
   [all …]

D | hidden_api_access_flags.h
    67  static ALWAYS_INLINE ApiList DecodeFromDex(uint32_t dex_access_flags) {  in DecodeFromDex()
    73  static ALWAYS_INLINE uint32_t RemoveFromDex(uint32_t dex_access_flags) {  in RemoveFromDex()
    80  static ALWAYS_INLINE uint32_t EncodeForDex(uint32_t dex_access_flags, ApiList value) {  in EncodeForDex()
    88  static ALWAYS_INLINE ApiList DecodeFromRuntime(uint32_t runtime_access_flags) {  in DecodeFromRuntime()
    95  static ALWAYS_INLINE uint32_t EncodeForRuntime(uint32_t runtime_access_flags, ApiList value) {  in EncodeForRuntime()
   113  ALWAYS_INLINE uint32_t GetSecondFlag() {  in GetSecondFlag()
   117  ALWAYS_INLINE bool IsFirstBitSet() {  in IsFirstBitSet()
   122  ALWAYS_INLINE void SetFirstBit(bool value) {  in SetFirstBit()
   128  ALWAYS_INLINE bool IsSecondBitSet() {  in IsSecondBitSet()
   132  ALWAYS_INLINE void SetSecondBit(bool value) {  in SetSecondBit()
   [all …]

D | dex_instruction_iterator.h
    31  ALWAYS_INLINE const Instruction& Inst() const {  in Inst()
    35  ALWAYS_INLINE const Instruction* operator->() const {
    39  ALWAYS_INLINE uint32_t DexPc() const {  in DexPc()
    43  ALWAYS_INLINE const uint16_t* Instructions() const {  in Instructions()
    75  ALWAYS_INLINE uint32_t DexPc() const {  in DexPc()
    80  ALWAYS_INLINE const uint16_t* Instructions() const {  in Instructions()
    88  static ALWAYS_INLINE inline bool operator==(const DexInstructionIteratorBase& lhs,
   153  ALWAYS_INLINE uint32_t DexPc() const {  in DexPc()
   207  ALWAYS_INLINE const Instruction& Inst() const {  in Inst()
   222  ALWAYS_INLINE void AssertValid() const {  in AssertValid()
   [all …]
/art/runtime/

D | obj_ptr.h
    48  ALWAYS_INLINE ObjPtr() REQUIRES_SHARED(Locks::mutator_lock_) : reference_(0u) {}  in ObjPtr()
    54  ALWAYS_INLINE ObjPtr(std::nullptr_t)  in ObjPtr()
    60  ALWAYS_INLINE ObjPtr(Type* ptr)  in ObjPtr()
    67  ALWAYS_INLINE ObjPtr(const ObjPtr<Type>& other)  in ObjPtr()
    76  ALWAYS_INLINE ObjPtr& operator=(const ObjPtr<Type>& other)
    84  ALWAYS_INLINE ObjPtr& operator=(MirrorType* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {  in REQUIRES_SHARED()
    89  ALWAYS_INLINE void Assign(MirrorType* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {  in Assign()
    93  ALWAYS_INLINE MirrorType* operator->() const REQUIRES_SHARED(Locks::mutator_lock_) {
    97  ALWAYS_INLINE bool IsNull() const {  in IsNull()
   102  ALWAYS_INLINE MirrorType* Ptr() const REQUIRES_SHARED(Locks::mutator_lock_) {  in Ptr()
   [all …]

D | scoped_thread_state_change.h
    44  ALWAYS_INLINE ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
    47  ALWAYS_INLINE ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_);
    49  ALWAYS_INLINE Thread* Self() const {  in Self()
   101  ALWAYS_INLINE bool IsRunnable() const;
   104  ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
   107  ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
   118  ALWAYS_INLINE ~ScopedObjectAccessAlreadyRunnable() {}  in ~ScopedObjectAccessAlreadyRunnable()
   147  ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(JNIEnv* env)
   150  ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(Thread* self)
   153  ALWAYS_INLINE ~ScopedObjectAccessUnchecked() REQUIRES(!Locks::thread_suspend_count_lock_) {}  in ~ScopedObjectAccessUnchecked()
   [all …]

D | handle.h
    46  ALWAYS_INLINE Handle(const Handle<T>& handle) = default;
    48  ALWAYS_INLINE Handle<T>& operator=(const Handle<T>& handle) = default;
    50  ALWAYS_INLINE explicit Handle(StackReference<T>* reference) : reference_(reference) {  in Handle()
    53  ALWAYS_INLINE T& operator*() const REQUIRES_SHARED(Locks::mutator_lock_) {  in REQUIRES_SHARED()
    57  ALWAYS_INLINE T* operator->() const REQUIRES_SHARED(Locks::mutator_lock_) {
    61  ALWAYS_INLINE T* Get() const REQUIRES_SHARED(Locks::mutator_lock_) {  in Get()
    65  ALWAYS_INLINE bool IsNull() const REQUIRES_SHARED(Locks::mutator_lock_) {  in IsNull()
    69  ALWAYS_INLINE jobject ToJObject() const REQUIRES_SHARED(Locks::mutator_lock_) {  in ToJObject()
    77  ALWAYS_INLINE StackReference<mirror::Object>* GetReference() {  in GetReference()
    81  ALWAYS_INLINE const StackReference<mirror::Object>* GetReference() const {  in GetReference()
   [all …]

D | handle_scope.h
    50  ALWAYS_INLINE uint32_t NumberOfReferences() const;
    52  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;
    55  ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);
    62  ALWAYS_INLINE VariableSizedHandleScope* AsVariableSized();
    63  ALWAYS_INLINE HandleScope* AsHandleScope();
    64  ALWAYS_INLINE const VariableSizedHandleScope* AsVariableSized() const;
    65  ALWAYS_INLINE const HandleScope* AsHandleScope() const;
   106  ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
   109  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);
   111  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
   [all …]
D | bit_memory_region.h
    29  ALWAYS_INLINE BitMemoryRegion(MemoryRegion region, size_t bit_offset, size_t bit_size) {  in BitMemoryRegion()
    43  ALWAYS_INLINE BitMemoryRegion Subregion(size_t bit_offset, size_t bit_size) const {  in Subregion()
    49  ALWAYS_INLINE bool LoadBit(uintptr_t bit_offset) const {  in LoadBit()
    53  ALWAYS_INLINE void StoreBit(uintptr_t bit_offset, bool value) const {  in StoreBit()
    57  ALWAYS_INLINE uint32_t LoadBits(uintptr_t bit_offset, size_t length) const {  in LoadBits()
    62  ALWAYS_INLINE void StoreBits(uintptr_t bit_offset, uint32_t value, size_t length) {  in StoreBits()
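The entry above shows BitMemoryRegion's single-bit and multi-bit accessors (LoadBit/StoreBit, LoadBits/StoreBits), all forced inline because they are tiny and sit on hot paths. As a rough, self-contained sketch of what such bit-field accessors do, the fragment below packs and unpacks `length` bits at an arbitrary `bit_offset` in a byte buffer; the names (LoadBitsSketch/StoreBitsSketch) and the little-endian bit ordering are assumptions for illustration, not ART's actual implementation.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Illustrative only: loads `length` bits (0 < length <= 32) starting at
    // `bit_offset` from `data`, assuming little-endian bit numbering in bytes.
    static inline uint32_t LoadBitsSketch(const uint8_t* data, size_t bit_offset, size_t length) {
      assert(length > 0 && length <= 32);
      uint32_t result = 0;
      for (size_t i = 0; i < length; ++i) {
        size_t bit = bit_offset + i;
        uint32_t b = (data[bit / 8] >> (bit % 8)) & 1u;
        result |= b << i;
      }
      return result;
    }

    // Illustrative only: stores the low `length` bits of `value` at `bit_offset`.
    static inline void StoreBitsSketch(uint8_t* data, size_t bit_offset, uint32_t value, size_t length) {
      assert(length > 0 && length <= 32);
      for (size_t i = 0; i < length; ++i) {
        size_t bit = bit_offset + i;
        uint8_t mask = static_cast<uint8_t>(1u << (bit % 8));
        if ((value >> i) & 1u) {
          data[bit / 8] |= mask;
        } else {
          data[bit / 8] &= static_cast<uint8_t>(~mask);
        }
      }
    }

    int main() {
      uint8_t buffer[4] = {0, 0, 0, 0};
      StoreBitsSketch(buffer, /*bit_offset=*/5, /*value=*/11u, /*length=*/4);
      std::printf("%u\n", LoadBitsSketch(buffer, 5, 4));  // prints 11
      return 0;
    }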
D | jni_internal.h
    37  ALWAYS_INLINE
    42  ALWAYS_INLINE
    47  ALWAYS_INLINE
    52  ALWAYS_INLINE
D | memory_region.h
    62  ALWAYS_INLINE T Load(uintptr_t offset) const {  in Load()
    72  ALWAYS_INLINE void Store(uintptr_t offset, T value) const {  in Store()
    81  ALWAYS_INLINE T LoadUnaligned(uintptr_t offset) const {  in LoadUnaligned()
    96  ALWAYS_INLINE void StoreUnaligned(uintptr_t offset, T value) const {  in StoreUnaligned()
   108  ALWAYS_INLINE T* PointerTo(uintptr_t offset) const {  in PointerTo()
   114  ALWAYS_INLINE bool LoadBit(uintptr_t bit_offset) const {  in LoadBit()
   120  ALWAYS_INLINE void StoreBit(uintptr_t bit_offset, bool value) const {  in StoreBit()
   134  ALWAYS_INLINE uint32_t LoadBits(uintptr_t bit_offset, size_t length) const {  in LoadBits()
   183  ALWAYS_INLINE MemoryRegion Subregion(uintptr_t offset, uintptr_t size_in) const {  in Subregion()
   190  ALWAYS_INLINE void Extend(const MemoryRegion& region, uintptr_t extra) {  in Extend()
   [all …]
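MemoryRegion pairs aligned Load/Store accessors with LoadUnaligned/StoreUnaligned variants. A common, portable way to implement the unaligned forms is to copy through memcpy and let the compiler turn it into plain loads and stores where the target allows; the sketch below shows that idiom under assumed names, and ART's actual implementation may differ (for example, by assembling the value byte by byte).

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Illustrative only: read a T at an arbitrary byte offset with no
    // alignment assumptions by going through memcpy.
    template <typename T>
    static inline T LoadUnalignedSketch(const uint8_t* base, uintptr_t offset) {
      T value;
      std::memcpy(&value, base + offset, sizeof(T));
      return value;
    }

    // Illustrative only: the matching unaligned store.
    template <typename T>
    static inline void StoreUnalignedSketch(uint8_t* base, uintptr_t offset, T value) {
      std::memcpy(base + offset, &value, sizeof(T));
    }

    int main() {
      uint8_t buffer[16] = {};
      StoreUnalignedSketch<uint32_t>(buffer, /*offset=*/3, 0xdeadbeef);  // deliberately misaligned
      std::printf("0x%x\n", LoadUnalignedSketch<uint32_t>(buffer, 3));   // prints 0xdeadbeef
      return 0;
    }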
D | method_handles.h
    60  ALWAYS_INLINE bool ConvertArgumentValue(Handle<mirror::MethodType> callsite_type,
    71  ALWAYS_INLINE bool ConvertReturnValue(Handle<mirror::MethodType> callsite_type,
   139  ALWAYS_INLINE uint32_t Get() REQUIRES_SHARED(Locks::mutator_lock_) {  in Get()
   143  ALWAYS_INLINE int64_t GetLong() REQUIRES_SHARED(Locks::mutator_lock_) {  in GetLong()
   147  ALWAYS_INLINE ObjPtr<mirror::Object> GetReference() REQUIRES_SHARED(Locks::mutator_lock_) {  in GetReference()
   176  ALWAYS_INLINE void Set(uint32_t value) REQUIRES_SHARED(Locks::mutator_lock_) {  in Set()
   180  ALWAYS_INLINE void SetReference(ObjPtr<mirror::Object> value)  in SetReference()
   185  ALWAYS_INLINE void SetLong(int64_t value) REQUIRES_SHARED(Locks::mutator_lock_) {  in SetLong()

D | art_method.h
    90  ALWAYS_INLINE mirror::Class* GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);
    93  ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
   185  ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);
   387  ALWAYS_INLINE uint32_t GetDexMethodIndexUnchecked() {  in GetDexMethodIndexUnchecked()
   391  ALWAYS_INLINE uint32_t GetDexMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);
   426  ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) {  in GetEntryPointFromQuickCompiledCodePtrSize()
   435  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(  in SetEntryPointFromQuickCompiledCodePtrSize()
   469  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size) {  in SetImtConflictTable()
   485  ALWAYS_INLINE void SetProfilingInfo(ProfilingInfo* info) {  in SetProfilingInfo()
   489  ALWAYS_INLINE void SetProfilingInfoPtrSize(ProfilingInfo* info, PointerSize pointer_size) {  in SetProfilingInfoPtrSize()
   [all …]

D | read_barrier.h
    56  ALWAYS_INLINE static MirrorType* Barrier(
    63  ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root,
    70  ALWAYS_INLINE static MirrorType* BarrierForRoot(mirror::CompressedReference<MirrorType>* root,
    76  ALWAYS_INLINE static MirrorType* IsMarked(MirrorType* ref)
   116  ALWAYS_INLINE static bool IsGray(mirror::Object* obj, uintptr_t* fake_address_dependency)
   121  ALWAYS_INLINE static bool IsGray(mirror::Object* obj)

D | gc_root.h
   111  ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)  in VisitRoot()
   117  ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)  in VisitRootIfNonNull()
   189  ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
   207  ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() {  in AddressWithoutBarrier()
   211  ALWAYS_INLINE bool IsNull() const {  in IsNull()
   216  ALWAYS_INLINE GcRoot() {}  in GcRoot()
   217  explicit ALWAYS_INLINE GcRoot(MirrorType* ref)
   219  explicit ALWAYS_INLINE GcRoot(ObjPtr<MirrorType> ref)
   246  ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root)  in VisitRootIfNonNull()
   254  ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root)  in VisitRootIfNonNull()
   [all …]

/art/runtime/gc/accounting/
D | bitmap.h
    48  static ALWAYS_INLINE constexpr size_t BitIndexToWordIndex(uintptr_t offset) {  in BitIndexToWordIndex()
    53  static ALWAYS_INLINE constexpr T WordIndexToBitIndex(T word_index) {  in WordIndexToBitIndex()
    57  static ALWAYS_INLINE constexpr uintptr_t BitIndexToMask(uintptr_t bit_index) {  in BitIndexToMask()
    61  ALWAYS_INLINE bool SetBit(size_t bit_index) {  in SetBit()
    65  ALWAYS_INLINE bool ClearBit(size_t bit_index) {  in ClearBit()
    69  ALWAYS_INLINE bool TestBit(size_t bit_index) const;
    72  ALWAYS_INLINE bool AtomicTestAndSetBit(size_t bit_index);
    95  ALWAYS_INLINE void CheckValidBitIndex(size_t bit_index) const {  in CheckValidBitIndex()
   111  ALWAYS_INLINE bool ModifyBit(uintptr_t bit_index);
   136  ALWAYS_INLINE uintptr_t CoverBegin() const {  in CoverBegin()
   [all …]
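The Bitmap helpers above translate a bit index into a word index and a mask before setting, clearing, or testing bits. The sketch below shows the usual arithmetic behind such helpers, with an assumed word width of sizeof(uintptr_t) * 8 bits and an illustrative SetBitSketch that returns whether the bit was already set, mirroring the `bool SetBit(size_t)` shape in the listing; it is not ART's code.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Assumed word width; the actual constant in ART's accounting::Bitmap may differ.
    static constexpr size_t kBitsPerWord = sizeof(uintptr_t) * 8;

    static constexpr size_t BitIndexToWordIndexSketch(uintptr_t bit_index) {
      return bit_index / kBitsPerWord;
    }

    static constexpr uintptr_t BitIndexToMaskSketch(uintptr_t bit_index) {
      return static_cast<uintptr_t>(1) << (bit_index % kBitsPerWord);
    }

    // Sets the bit and reports whether it was already set.
    static inline bool SetBitSketch(uintptr_t* words, size_t bit_index) {
      uintptr_t* word = &words[BitIndexToWordIndexSketch(bit_index)];
      uintptr_t mask = BitIndexToMaskSketch(bit_index);
      bool was_set = (*word & mask) != 0;
      *word |= mask;
      return was_set;
    }

    int main() {
      uintptr_t words[4] = {0, 0, 0, 0};
      std::printf("%d\n", SetBitSketch(words, 70));  // 0: bit was clear
      std::printf("%d\n", SetBitSketch(words, 70));  // 1: bit was already set
      return 0;
    }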
D | card_table.h
    60  ALWAYS_INLINE void MarkCard(const void *addr) {  in MarkCard()
   128  void* AddrFromCard(const uint8_t *card_addr) const ALWAYS_INLINE;
   131  uint8_t* CardFromAddr(const void *addr) const ALWAYS_INLINE;
   139  bool IsValidCard(const uint8_t* card_addr) const ALWAYS_INLINE;
   141  void CheckCardValid(uint8_t* card) const ALWAYS_INLINE;
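CardTable's MarkCard/CardFromAddr/AddrFromCard are the classic card-marking write-barrier helpers: a heap address is shifted down by the card size to index a byte array, and marking a card is a single unconditional byte store. The sketch below illustrates that arithmetic; the card size (1 KiB), the dirty value, and the unbiased card-table base are assumptions for the example, not ART's exact constants.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    static constexpr size_t kCardShift = 10;                     // 1 KiB per card (assumed)
    static constexpr size_t kCardSize = size_t(1) << kCardShift;
    static constexpr uint8_t kCardDirty = 0x70;                  // assumed marker value

    // Illustrative card table covering a single contiguous heap range.
    struct CardTableSketch {
      uint8_t* cards;        // one byte per kCardSize bytes of heap
      uintptr_t heap_begin;  // start of the covered heap range

      uint8_t* CardFromAddr(const void* addr) const {
        uintptr_t offset = reinterpret_cast<uintptr_t>(addr) - heap_begin;
        return &cards[offset >> kCardShift];
      }

      void MarkCard(const void* addr) const {
        *CardFromAddr(addr) = kCardDirty;  // unconditional byte store: cheap write barrier
      }

      const void* AddrFromCard(const uint8_t* card) const {
        return reinterpret_cast<const void*>(heap_begin + (card - cards) * kCardSize);
      }
    };

    int main() {
      std::vector<uint8_t> heap(64 * 1024, 0);
      std::vector<uint8_t> cards(heap.size() >> kCardShift, 0);
      CardTableSketch table{cards.data(), reinterpret_cast<uintptr_t>(heap.data())};
      table.MarkCard(heap.data() + 5000);                            // dirties card 4
      std::printf("%d\n", cards[5000 >> kCardShift] == kCardDirty);  // prints 1
      return 0;
    }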
/art/runtime/arch/

D | code_offset.h
    34  ALWAYS_INLINE static CodeOffset FromOffset(uint32_t offset, InstructionSet isa = kRuntimeISA) {
    38  ALWAYS_INLINE static CodeOffset FromCompressedOffset(uint32_t offset) {  in FromCompressedOffset()
    42  ALWAYS_INLINE uint32_t Uint32Value(InstructionSet isa = kRuntimeISA) const {
    49  ALWAYS_INLINE uint32_t CompressedValue() const {  in CompressedValue()
    53  ALWAYS_INLINE CodeOffset() = default;
    54  ALWAYS_INLINE CodeOffset(const CodeOffset&) = default;
    55  ALWAYS_INLINE CodeOffset& operator=(const CodeOffset&) = default;
    56  ALWAYS_INLINE CodeOffset& operator=(CodeOffset&&) = default;
    59  ALWAYS_INLINE explicit CodeOffset(uint32_t value) : value_(value) {}  in CodeOffset()

/art/compiler/debug/dwarf/

D | debug_frame_opcode_writer.h
    47  void ALWAYS_INLINE AdvancePC(int absolute_pc) {  in AdvancePC()
    73  void ALWAYS_INLINE RelOffset(Reg reg, int offset) {  in RelOffset()
    78  void ALWAYS_INLINE AdjustCFAOffset(int delta) {  in AdjustCFAOffset()
    83  void ALWAYS_INLINE RelOffsetForMany(Reg reg_base, int offset,  in RelOffsetForMany()
    99  void ALWAYS_INLINE RestoreMany(Reg reg_base, uint32_t reg_mask) {  in RestoreMany()
   111  void ALWAYS_INLINE Nop() {  in Nop()
   117  void ALWAYS_INLINE Offset(Reg reg, int offset) {  in Offset()
   139  void ALWAYS_INLINE Restore(Reg reg) {  in Restore()
   151  void ALWAYS_INLINE Undefined(Reg reg) {  in Undefined()
   159  void ALWAYS_INLINE SameValue(Reg reg) {  in SameValue()
   [all …]
/art/openjdkjvmti/

D | jvmti_weak_table.h
    63  ALWAYS_INLINE bool Remove(art::mirror::Object* obj, /* out */ T* tag)
    66  ALWAYS_INLINE bool RemoveLocked(art::mirror::Object* obj, /* out */ T* tag)
    72  ALWAYS_INLINE virtual bool Set(art::mirror::Object* obj, T tag)
    75  ALWAYS_INLINE virtual bool SetLocked(art::mirror::Object* obj, T tag)
   101  ALWAYS_INLINE void Sweep(art::IsMarkedVisitor* visitor)
   106  ALWAYS_INLINE
   117  ALWAYS_INLINE void Lock() ACQUIRE(allow_disallow_lock_);
   118  ALWAYS_INLINE void Unlock() RELEASE(allow_disallow_lock_);
   119  ALWAYS_INLINE void AssertLocked() ASSERT_CAPABILITY(allow_disallow_lock_);
   121  ALWAYS_INLINE art::mirror::Object* Find(T tag)
   [all …]

D | events.h
   103  ALWAYS_INLINE static inline ArtJvmtiEvent GetArtJvmtiEvent(ArtJvmTiEnv* env, jvmtiEvent e);
   208  ALWAYS_INLINE
   219  ALWAYS_INLINE
   226  ALWAYS_INLINE
   234  ALWAYS_INLINE
   243  ALWAYS_INLINE
   251  ALWAYS_INLINE
   257  ALWAYS_INLINE
   261  ALWAYS_INLINE
   268  ALWAYS_INLINE
   [all …]

/art/runtime/mirror/

D | class.h
   170  ALWAYS_INLINE uint32_t GetAccessFlags() REQUIRES_SHARED(Locks::mutator_lock_) {  in GetAccessFlags()
   182  ALWAYS_INLINE uint32_t GetClassFlags() REQUIRES_SHARED(Locks::mutator_lock_) {  in GetClassFlags()
   190  ALWAYS_INLINE bool IsEnum() REQUIRES_SHARED(Locks::mutator_lock_) {  in IsEnum()
   195  ALWAYS_INLINE bool IsInterface() REQUIRES_SHARED(Locks::mutator_lock_) {  in IsInterface()
   200  ALWAYS_INLINE bool IsPublic() REQUIRES_SHARED(Locks::mutator_lock_) {  in IsPublic()
   205  ALWAYS_INLINE bool IsFinal() REQUIRES_SHARED(Locks::mutator_lock_) {  in IsFinal()
   209  ALWAYS_INLINE bool IsFinalizable() REQUIRES_SHARED(Locks::mutator_lock_) {  in IsFinalizable()
   213  ALWAYS_INLINE bool ShouldSkipHiddenApiChecks() REQUIRES_SHARED(Locks::mutator_lock_) {  in ShouldSkipHiddenApiChecks()
   217  ALWAYS_INLINE void SetSkipHiddenApiChecks() REQUIRES_SHARED(Locks::mutator_lock_) {  in SetSkipHiddenApiChecks()
   222  ALWAYS_INLINE void SetRecursivelyInitialized() REQUIRES_SHARED(Locks::mutator_lock_) {  in SetRecursivelyInitialized()
   [all …]

D | dex_cache.h
   273  String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
   276  void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
   290  ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
   293  ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx,
   297  ALWAYS_INLINE void ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
   301  ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, PointerSize ptr_size)
   305  ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, PointerSize ptr_size)
   307  ALWAYS_INLINE void ClearResolvedField(uint32_t idx, PointerSize ptr_size)
   325  StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {  in GetStrings()
   329  void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {  in SetStrings()
   [all …]

D | object_array.h
    47  ALWAYS_INLINE T* Get(int32_t i) REQUIRES_SHARED(Locks::mutator_lock_);
    55  ALWAYS_INLINE void Set(int32_t i, ObjPtr<T> object) REQUIRES_SHARED(Locks::mutator_lock_);
    59  ALWAYS_INLINE void Set(int32_t i, ObjPtr<T> object) NO_THREAD_SAFETY_ANALYSIS;
    67  ALWAYS_INLINE void SetWithoutChecks(int32_t i, ObjPtr<T> object) NO_THREAD_SAFETY_ANALYSIS;
    72  ALWAYS_INLINE void SetWithoutChecksAndWriteBarrier(int32_t i, ObjPtr<T> object)
    77  ALWAYS_INLINE T* GetWithoutChecks(int32_t i) REQUIRES_SHARED(Locks::mutator_lock_);
/art/libartbase/base/

D | macros.h
    42  NO_RETURN ALWAYS_INLINE void operator delete(void*, size_t) { UNREACHABLE(); } \
    43  ALWAYS_INLINE void* operator new(size_t, void* ptr) noexcept { return ptr; } \
    44  ALWAYS_INLINE void operator delete(void*, void*) noexcept { } \
    67  #define ALWAYS_INLINE  macro
    69  #define ALWAYS_INLINE __attribute__ ((always_inline))  macro
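Lines 67 and 69 of macros.h show the two definitions of ALWAYS_INLINE itself: an empty fallback and the GCC/Clang always_inline attribute. The exact preprocessor condition that selects between them is not visible in the truncated listing, so the guard below is an assumption; the usage on a small accessor mirrors how the macro is applied throughout the headers above.

    #include <cstdio>

    // Sketch of the usual pattern: an empty fallback and an attribute form.
    // The real condition in art/libartbase/base/macros.h may differ.
    #if !defined(__GNUC__) && !defined(__clang__)
    #define ALWAYS_INLINE
    #else
    #define ALWAYS_INLINE __attribute__((always_inline))
    #endif

    // Typical use on a small, hot accessor; in-class definitions are
    // implicitly inline, which the attribute requires.
    class Example {
     public:
      ALWAYS_INLINE bool IsNull() const { return ptr_ == nullptr; }

     private:
      void* ptr_ = nullptr;
    };

    int main() {
      Example e;
      std::printf("%d\n", e.IsNull());  // prints 1
      return 0;
    }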
/art/runtime/native/

D | scoped_fast_native_object_access.h
    30  ALWAYS_INLINE explicit ScopedFastNativeObjectAccess(JNIEnv* env)
    34  ALWAYS_INLINE ~ScopedFastNativeObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) {}  in ~ScopedFastNativeObjectAccess()