/art/runtime/ |
D | handle.h |
  41 ALWAYS_INLINE ConstHandle(const ConstHandle<T>& handle) : reference_(handle.reference_) { in ConstHandle()
  44 ALWAYS_INLINE ConstHandle<T>& operator=(const ConstHandle<T>& handle) {
  49 ALWAYS_INLINE explicit ConstHandle(StackReference<T>* reference) : reference_(reference) { in ConstHandle()
  52 ALWAYS_INLINE T& operator*() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in SHARED_LOCKS_REQUIRED()
  56 ALWAYS_INLINE T* operator->() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  60 ALWAYS_INLINE T* Get() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in Get()
  64 ALWAYS_INLINE jobject ToJObject() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in ToJObject()
  82 …ference<mirror::Object>* GetReference() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE { in GetReference()
  85 ALWAYS_INLINE const StackReference<mirror::Object>* GetReference() const in GetReference()
  107 ALWAYS_INLINE Handle(const Handle<T>& handle) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) in Handle()
  [all …]
|
D | handle_scope.h |
  81 ALWAYS_INLINE { in GetReference()
  87 ALWAYS_INLINE { in GetHandle()
  93 ALWAYS_INLINE { in SetReference()
  124 ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const { in GetReferences()
  166 explicit ALWAYS_INLINE StackHandleScope(Thread* self, mirror::Object* fill_value = nullptr);
  167 ALWAYS_INLINE ~StackHandleScope();
  170 ALWAYS_INLINE Handle<T> NewHandle(T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in NewHandle()
  178 ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object) in NewHandleWrapper()
  186 ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object) in SetReference()
  194 ALWAYS_INLINE Handle<T> GetHandle(size_t i) in GetHandle()
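The handle.h and handle_scope.h hits above are ART's rooting API: a StackHandleScope pins raw mirror:: pointers so they stay valid across points where a moving GC can run, and NewHandle() wraps them in a Handle<T>. The sketch below is illustrative only; it assumes the slot count is a template argument (as in contemporary ART sources) and uses placeholder names `self` and `klass`.

    // Sketch only: rooting a class pointer across a possible GC point.
    StackHandleScope<1> hs(self);                        // reserves one handle slot on the stack
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));  // klass is now visible to the GC as a root
    // ... code that may allocate or suspend the thread ...
    mirror::Class* k = h_klass.Get();                    // reload the (possibly moved) pointer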
|
D | scoped_thread_state_change.h |
  35 LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE in ScopedThreadStateChange()
  60 ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE { in LOCKS_EXCLUDED()
  185 LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE in ScopedObjectAccessAlreadyRunnable()
  190 LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE in ScopedObjectAccessAlreadyRunnable()
  201 ~ScopedObjectAccessAlreadyRunnable() ALWAYS_INLINE { in ~ScopedObjectAccessAlreadyRunnable()
  229 LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE in ScopedObjectAccessUnchecked()
  236 LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE in ScopedObjectAccessUnchecked()
  244 explicit ScopedObjectAccessUnchecked(JavaVM* vm) ALWAYS_INLINE in ScopedObjectAccessUnchecked()
  260 SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE in ScopedObjectAccess()
  266 SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE in ScopedObjectAccess()
  [all …]
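These are the RAII guards that switch a thread between the Native and Runnable states and take a shared hold on Locks::mutator_lock_; they are ALWAYS_INLINE because the constructor/destructor pair sits on every JNI entry path. A hedged sketch of the usual pattern inside a native method follows; the function and parameter names are placeholders, not code from the tree.

    // Sketch only: typical ScopedObjectAccess use at a JNI boundary.
    static void Example_nativeFoo(JNIEnv* env, jobject java_obj) {
      ScopedObjectAccess soa(env);  // Native -> Runnable, shares mutator_lock_
      // Within this scope it is safe to work with managed (mirror::) objects;
      // the lock is released and the state restored when `soa` is destroyed.
    }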
|
D | gc_root.h |
  77 ALWAYS_INLINE MirrorType* Read() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  92 ALWAYS_INLINE MirrorType** AddressWithoutBarrier() { in AddressWithoutBarrier()
  101 ALWAYS_INLINE explicit GcRoot<MirrorType>() : root_(nullptr) { in root_()
  104 ALWAYS_INLINE explicit GcRoot<MirrorType>(MirrorType* ref)
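gc_root.h and read_barrier.h (below) belong together: GcRoot<MirrorType> holds a root slot, and Read() is the only supported way to load it, so a read barrier can be applied for concurrent collectors. A small sketch based only on the declarations listed here; the variable names are invented.

    // Sketch only: storing and reading a GC root (`klass` is a placeholder).
    GcRoot<mirror::Class> root(klass);                    // explicit ctor taking MirrorType*
    mirror::Class* k = root.Read();                       // load; may apply a read barrier
    mirror::Class** slot = root.AddressWithoutBarrier();  // raw slot, for the GC's own use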
|
D | verify_object.h |
  55 ALWAYS_INLINE void VerifyObject(mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;
  58 ALWAYS_INLINE bool VerifyClassClass(mirror::Class* c) NO_THREAD_SAFETY_ANALYSIS;
|
D | read_barrier.h |
  39 ALWAYS_INLINE static MirrorType* Barrier(
  46 ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root)
|
D | utf.h | 58 ALWAYS_INLINE int CompareModifiedUtf8ToModifiedUtf8AsUtf16CodePointValues(const char* utf8_1,
|
/art/runtime/mirror/ |
D | dex_cache.h |
  94 void SetResolvedString(uint32_t string_idx, String* resolved) ALWAYS_INLINE in SetResolvedString()
  100 Class* GetResolvedType(uint32_t type_idx) ALWAYS_INLINE in GetResolvedType()
  110 void SetResolvedMethod(uint32_t method_idx, ArtMethod* resolved) ALWAYS_INLINE in SetResolvedMethod()
  115 ArtField* GetResolvedField(uint32_t field_idx) ALWAYS_INLINE in GetResolvedField()
  125 void SetResolvedField(uint32_t field_idx, ArtField* resolved) ALWAYS_INLINE in SetResolvedField()
  130 ObjectArray<String>* GetStrings() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in GetStrings()
  134 ObjectArray<Class>* GetResolvedTypes() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in GetResolvedTypes()
  139 ObjectArray<ArtMethod>* GetResolvedMethods() ALWAYS_INLINE in GetResolvedMethods()
  144 ObjectArray<ArtField>* GetResolvedFields() ALWAYS_INLINE in GetResolvedFields()
  149 const DexFile* GetDexFile() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in GetDexFile()
  [all …]
|
D | array.h |
  53 ALWAYS_INLINE int32_t GetLength() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in GetLength()
  93 ALWAYS_INLINE bool CheckIsValidIndex(int32_t index) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  118 const T* GetData() const ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in GetData()
  122 T* GetData() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in GetData()
  126 T Get(int32_t i) ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  128 T GetWithoutChecks(int32_t i) ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in GetWithoutChecks()
  133 void Set(int32_t i, T value) ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  138 void Set(int32_t i, T value) ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSIS;
  143 void SetWithoutChecks(int32_t i, T value) ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSIS;
|
D | object_array.h |
  40 T* Get(int32_t i) ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  48 ALWAYS_INLINE void Set(int32_t i, T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  52 ALWAYS_INLINE void Set(int32_t i, T* object) NO_THREAD_SAFETY_ANALYSIS;
  60 ALWAYS_INLINE void SetWithoutChecks(int32_t i, T* object) NO_THREAD_SAFETY_ANALYSIS;
  65 ALWAYS_INLINE void SetWithoutChecksAndWriteBarrier(int32_t i, T* object)
  68 ALWAYS_INLINE T* GetWithoutChecks(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
|
D | object.h |
  83 ALWAYS_INLINE Class* GetClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  211 ALWAYS_INLINE T* GetFieldObject(MemberOffset field_offset)
  216 ALWAYS_INLINE T* GetFieldObjectVolatile(MemberOffset field_offset)
  221 ALWAYS_INLINE void SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset, Object* new_value)
  226 ALWAYS_INLINE void SetFieldObject(MemberOffset field_offset, Object* new_value)
  231 ALWAYS_INLINE void SetFieldObjectVolatile(MemberOffset field_offset, Object* new_value)
  250 ALWAYS_INLINE int32_t GetField32(MemberOffset field_offset)
  254 ALWAYS_INLINE int32_t GetField32Volatile(MemberOffset field_offset)
  259 ALWAYS_INLINE void SetField32(MemberOffset field_offset, int32_t new_value)
  264 ALWAYS_INLINE void SetField32Volatile(MemberOffset field_offset, int32_t new_value)
  [all …]
|
D | art_method.h |
  57 Class* GetDeclaringClass() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  65 ALWAYS_INLINE uint32_t GetAccessFlags() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  197 ALWAYS_INLINE uint32_t GetDexMethodIndex() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  212 ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_idx)
  214 ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_idx, ArtMethod* new_method)
  216 ALWAYS_INLINE void SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods)
  266 ALWAYS_INLINE static MemberOffset EntryPointFromPortableCompiledCodeOffset(size_t pointer_size) { in EntryPointFromPortableCompiledCodeOffset()
  279 ALWAYS_INLINE const void* GetEntryPointFromPortableCompiledCodePtrSize(size_t pointer_size) in GetEntryPointFromPortableCompiledCodePtrSize()
  309 ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(size_t pointer_size) in GetEntryPointFromQuickCompiledCodePtrSize()
  323 ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize( in SetEntryPointFromQuickCompiledCodePtrSize()
  [all …]
|
D | class.h |
  223 ALWAYS_INLINE uint32_t GetAccessFlags() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  228 ALWAYS_INLINE bool IsInterface() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in IsInterface()
  233 ALWAYS_INLINE bool IsPublic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in IsPublic()
  238 ALWAYS_INLINE bool IsFinal() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in IsFinal()
  242 ALWAYS_INLINE bool IsFinalizable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in IsFinalizable()
  246 ALWAYS_INLINE void SetFinalizable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in SetFinalizable()
  252 ALWAYS_INLINE bool IsAbstract() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in IsAbstract()
  257 ALWAYS_INLINE bool IsAnnotation() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in IsAnnotation()
  262 ALWAYS_INLINE bool IsSynthetic() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in IsSynthetic()
  331 Primitive::Type GetPrimitiveType() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  [all …]
|
D | iftable.h |
  28 ALWAYS_INLINE Class* GetInterface(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { in GetInterface()
  34 ALWAYS_INLINE void SetInterface(int32_t i, Class* interface)
|
/art/runtime/entrypoints/ |
D | entrypoint_utils.h |
  44 ALWAYS_INLINE static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
  50 ALWAYS_INLINE static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
  59 ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
  67 ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
  75 ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
  83 ALWAYS_INLINE static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
  94 ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
  102 ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
|
/art/runtime/gc/accounting/ |
D | card_table.h |
  57 ALWAYS_INLINE void MarkCard(const void *addr) { in MarkCard()
  121 void* AddrFromCard(const byte *card_addr) const ALWAYS_INLINE;
  124 byte* CardFromAddr(const void *addr) const ALWAYS_INLINE;
  138 void CheckCardValid(byte* card) const ALWAYS_INLINE;
|
D | heap_bitmap.h |
  38 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) ALWAYS_INLINE;
  41 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) ALWAYS_INLINE;
|
D | mod_union_table-inl.h | 35 bool ShouldAddReference(const mirror::Object* ref) const OVERRIDE ALWAYS_INLINE { in ShouldAddReference()
|
/art/runtime/native/ |
D | scoped_fast_native_object_access.h |
  31 SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE in ScopedFastNativeObjectAccess()
  39 ~ScopedFastNativeObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE { in UNLOCK_FUNCTION()
|
/art/compiler/dex/ |
D | dataflow_iterator.h |
  84 virtual BasicBlock* ForwardSingleNext() ALWAYS_INLINE;
  90 virtual BasicBlock* ReverseSingleNext() ALWAYS_INLINE;
  96 virtual BasicBlock* ForwardRepeatNext() ALWAYS_INLINE;
  102 virtual BasicBlock* ReverseRepeatNext() ALWAYS_INLINE;
  323 virtual BasicBlock* Next(bool had_change = false) ALWAYS_INLINE;
|
/art/runtime/base/ |
D | macros.h |
  158 #define ALWAYS_INLINE macro
  160 #define ALWAYS_INLINE __attribute__ ((always_inline)) macro
  167 #define ALWAYS_INLINE_LAMBDA ALWAYS_INLINE macro
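macros.h is where ALWAYS_INLINE itself is defined: one empty definition, one that expands to the GCC/Clang always_inline attribute, and a separate ALWAYS_INLINE_LAMBDA spelling (line 167 shows the branch where it simply forwards to ALWAYS_INLINE; presumably the other branch blanks it out for compilers that reject the attribute on lambdas). The listing does not show the surrounding preprocessor condition, so the guard in the sketch below is an assumption; check it against the file.

    // Reconstructed sketch of the two definitions at lines 158 and 160.
    // The NDEBUG guard is assumed: debug builds typically leave the macro empty
    // so functions stay out-of-line and remain easy to step through.
    #ifndef NDEBUG
    #define ALWAYS_INLINE
    #else
    #define ALWAYS_INLINE  __attribute__ ((always_inline))
    #endif

    // Line 167: the lambda-specific alias, forwarding to ALWAYS_INLINE here.
    #define ALWAYS_INLINE_LAMBDA ALWAYS_INLINE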
|
/art/compiler/utils/ |
D | scoped_arena_allocator.h |
  66 void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE { in Alloc()
  118 void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE { in Alloc()
|
/art/runtime/gc/ |
D | heap.h |
  193 ALWAYS_INLINE mirror::Object* AllocObjectWithAllocator(
  375 ALWAYS_INLINE void WriteBarrierField(const mirror::Object* dst, MemberOffset /*offset*/, in WriteBarrierField()
  381 ALWAYS_INLINE void WriteBarrierArray(const mirror::Object* dst, int /*start_offset*/, in WriteBarrierArray()
  386 ALWAYS_INLINE void WriteBarrierEveryFieldOf(const mirror::Object* obj) { in WriteBarrierEveryFieldOf()
  621 static ALWAYS_INLINE bool AllocatorHasAllocationStack(AllocatorType allocator_type) { in AllocatorHasAllocationStack()
  626 static ALWAYS_INLINE bool AllocatorMayHaveConcurrentGC(AllocatorType allocator_type) { in AllocatorMayHaveConcurrentGC()
  636 ALWAYS_INLINE void CheckConcurrentGC(Thread* self, size_t new_num_bytes_allocated,
  670 ALWAYS_INLINE mirror::Object* TryToAllocate(Thread* self, AllocatorType allocator_type,
  772 bool IsGcConcurrent() const ALWAYS_INLINE { in IsGcConcurrent()
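The heap.h write-barrier entries tie in with object.h and card_table.h above: a reference store that includes a write barrier presumably decomposes into a plain store plus a WriteBarrierField call, which records `dst` as dirty (via something like CardTable::MarkCard) so the GC can later find the cross-space pointer. The sketch below is an assumption drawn from those declarations, not code from the tree; `heap`, `dst`, `offset`, and `new_ref` are placeholders.

    // Sketch only: assumed decomposition of a barrier-including reference store.
    dst->SetFieldObjectWithoutWriteBarrier(offset, new_ref);  // raw store, declared in object.h
    heap->WriteBarrierField(dst, offset, new_ref);            // mark dst dirty for the GC,
                                                              // presumably via CardTable::MarkCard(dst)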
|
/art/runtime/gc/allocator/ |
D | rosalloc-inl.h | 27 inline ALWAYS_INLINE void* RosAlloc::Alloc(Thread* self, size_t size, size_t* bytes_allocated) { in Alloc()
|
/art/runtime/gc/collector/ |
D | immune_region.h | 47 bool ContainsObject(const mirror::Object* obj) const ALWAYS_INLINE { in ContainsObject()
|