/art/runtime/base/ |
D | bit_utils.h |
      29  template<typename T>
      30  static constexpr int CLZ(T x) {  in CLZ()
      31    static_assert(std::is_integral<T>::value, "T must be integral");  in CLZ()
      33    static_assert(sizeof(T) <= sizeof(long long),  // NOLINT [runtime/int] [4]  in CLZ()
      35    return (sizeof(T) == sizeof(uint32_t))  in CLZ()
      40  template<typename T>
      41  static constexpr int CTZ(T x) {  in CTZ()
      42    static_assert(std::is_integral<T>::value, "T must be integral");  in CTZ()
      44    return (sizeof(T) == sizeof(uint32_t))  in CTZ()
      49  template<typename T>
      [all …]
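The excerpt above shows a type-generic count-leading-zeros that dispatches on sizeof(T) to pick the 32-bit or 64-bit compiler builtin. A minimal sketch of that dispatch (GCC/Clang builtins; the name Clz and the final sanity check are mine, not ART's):

    #include <cstdint>
    #include <type_traits>

    template <typename T>
    static constexpr int Clz(T x) {
      static_assert(std::is_integral<T>::value, "T must be integral");
      static_assert(sizeof(T) <= sizeof(unsigned long long), "T too large for the builtins");
      return (sizeof(T) == sizeof(uint32_t))
          ? __builtin_clz(x)      // 32-bit builtin; undefined for x == 0, like the builtin itself
          : __builtin_clzll(x);   // 64-bit builtin for everything else
    }

    static_assert(Clz(1u) == 31, "bit 0 set in a 32-bit value leaves 31 leading zeros");

CTZ follows the same shape with __builtin_ctz / __builtin_ctzll.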
|
D | arena_containers.h |
      41  template <typename T>
      44  template <typename T>
      45  using ArenaDeque = std::deque<T, ArenaAllocatorAdapter<T>>;
      47  template <typename T>
      48  using ArenaQueue = std::queue<T, ArenaDeque<T>>;
      50  template <typename T>
      51  using ArenaVector = std::vector<T, ArenaAllocatorAdapter<T>>;
      53  template <typename T, typename Comparator = std::less<T>>
      54  using ArenaSet = std::set<T, Comparator, ArenaAllocatorAdapter<T>>;
      121 template <typename T>
      [all …]
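The pattern here is alias templates that bake a custom allocator into standard containers so call sites only name the element type. A self-contained sketch, with std::allocator standing in for ART's ArenaAllocatorAdapter:

    #include <deque>
    #include <functional>
    #include <memory>
    #include <queue>
    #include <set>
    #include <vector>

    template <typename T>
    using MyAlloc = std::allocator<T>;  // stand-in for an arena-backed allocator adapter

    template <typename T>
    using MyVector = std::vector<T, MyAlloc<T>>;

    template <typename T>
    using MyDeque = std::deque<T, MyAlloc<T>>;

    template <typename T>
    using MyQueue = std::queue<T, MyDeque<T>>;  // std::queue wraps a container, not an allocator

    template <typename T, typename Comparator = std::less<T>>
    using MySet = std::set<T, Comparator, MyAlloc<T>>;

    // Usage: MyVector<int> v; v.push_back(42);  -- the allocator type never appears at call sites.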
|
D | scoped_arena_containers.h |
      40  template <typename T>
      43  template <typename T>
      44  using ScopedArenaDeque = std::deque<T, ScopedArenaAllocatorAdapter<T>>;
      46  template <typename T>
      47  using ScopedArenaQueue = std::queue<T, ScopedArenaDeque<T>>;
      49  template <typename T>
      50  using ScopedArenaVector = std::vector<T, ScopedArenaAllocatorAdapter<T>>;
      52  template <typename T, typename Comparator = std::less<T>>
      53  using ScopedArenaSet = std::set<T, Comparator, ScopedArenaAllocatorAdapter<T>>;
      105 template <typename T>
      [all …]
|
D | allocator.h |
      104 template<class T, AllocatorTag kTag>
      105 class TrackingAllocatorImpl : public std::allocator<T> {
      107   typedef typename std::allocator<T>::value_type value_type;
      108   typedef typename std::allocator<T>::size_type size_type;
      109   typedef typename std::allocator<T>::difference_type difference_type;
      110   typedef typename std::allocator<T>::pointer pointer;
      111   typedef typename std::allocator<T>::const_pointer const_pointer;
      112   typedef typename std::allocator<T>::reference reference;
      113   typedef typename std::allocator<T>::const_reference const_reference;
      135   const size_t size = n * sizeof(T);
      [all …]
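A minimal sketch of a tracking allocator in the spirit of TrackingAllocatorImpl above: it forwards to operator new/delete and keeps a running byte count. The single global counter and the kTag parameter are illustrative only; ART's version keeps per-tag statistics and inherits the typedefs from std::allocator as shown in the excerpt:

    #include <atomic>
    #include <cstddef>
    #include <new>

    inline std::atomic<size_t>& TrackedBytes() {
      static std::atomic<size_t> bytes{0};
      return bytes;
    }

    template <class T, int kTag>
    class TrackingAllocatorSketch {
     public:
      using value_type = T;

      TrackingAllocatorSketch() = default;
      template <class U>
      TrackingAllocatorSketch(const TrackingAllocatorSketch<U, kTag>&) {}  // rebind support

      T* allocate(size_t n) {
        TrackedBytes() += n * sizeof(T);
        return static_cast<T*>(::operator new(n * sizeof(T)));
      }
      void deallocate(T* p, size_t n) {
        TrackedBytes() -= n * sizeof(T);
        ::operator delete(p);
      }
    };

    template <class T, class U, int kTag>
    bool operator==(const TrackingAllocatorSketch<T, kTag>&,
                    const TrackingAllocatorSketch<U, kTag>&) { return true; }
    template <class T, class U, int kTag>
    bool operator!=(const TrackingAllocatorSketch<T, kTag>&,
                    const TrackingAllocatorSketch<U, kTag>&) { return false; }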
|
D | stl_util.h |
      26  template<class T>
      27  void STLSortAndRemoveDuplicates(T* v) {  in STLSortAndRemoveDuplicates()
      62  template <class T>
      63  void STLDeleteElements(T *container) {  in STLDeleteElements()
      73  template <class T>
      74  void STLDeleteValues(T *v) {  in STLDeleteValues()
      76    for (typename T::iterator i = v->begin(); i != v->end(); ++i) {  in STLDeleteValues()
      83  template <class T>
      84  std::string ToString(const T& v) {  in ToString()
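Two small sketches of the helpers named above, written against standard containers for illustration (the function bodies are the well-known idioms, not copied from stl_util.h):

    #include <algorithm>
    #include <vector>

    // Sort-then-unique, the idiom behind STLSortAndRemoveDuplicates().
    template <class T>
    void SortAndRemoveDuplicates(std::vector<T>* v) {
      std::sort(v->begin(), v->end());
      v->erase(std::unique(v->begin(), v->end()), v->end());
    }

    // Delete every owned pointer, then clear, in the spirit of STLDeleteElements().
    template <class Container>
    void DeleteElements(Container* c) {
      for (auto* ptr : *c) {
        delete ptr;
      }
      c->clear();
    }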
|
D | bit_field.h |
      29  template<typename T, int position, int size>
      33    static bool IsValid(T value) {  in IsValid()
      60    static uintptr_t Encode(T value) {  in Encode()
      66    static T Decode(uintptr_t value) {  in Decode()
      67      return static_cast<T>((value >> position) & ((kUintPtrTOne << size) - 1));  in Decode()
      73    static uintptr_t Update(T value, uintptr_t original) {  in Update()
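A minimal sketch of the BitField<T, position, size> idea: pack a value into a bit range of a word and pull it back out. The mask arithmetic mirrors the Decode() line shown; Update() here is my reconstruction of the usual read-modify-write, not the verbatim ART body:

    #include <cstdint>

    template <typename T, int position, int size>
    struct BitFieldSketch {
      static constexpr uintptr_t kOne = static_cast<uintptr_t>(1);
      static constexpr uintptr_t kMask = (kOne << size) - 1;

      static uintptr_t Encode(T value) {
        return (static_cast<uintptr_t>(value) & kMask) << position;
      }
      static T Decode(uintptr_t word) {
        return static_cast<T>((word >> position) & kMask);
      }
      static uintptr_t Update(T value, uintptr_t original) {
        return (original & ~(kMask << position)) | Encode(value);
      }
    };

    // e.g. BitFieldSketch<int, 4, 3> stores a 3-bit value in bits [4..6] of a word.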
|
D | hash_set.h |
      31  template <class T>
      34    void MakeEmpty(T& item) const {  in MakeEmpty()
      35      item = T();  in MakeEmpty()
      37    bool IsEmpty(const T& item) const {  in IsEmpty()
      38      return item == T();  in IsEmpty()
      42  template <class T>
      43  class DefaultEmptyFn<T*> {
      45    void MakeEmpty(T*& item) const {  in MakeEmpty()
      48    bool IsEmpty(const T*& item) const {  in IsEmpty()
      58  template <class T, class EmptyFn = DefaultEmptyFn<T>, class HashFn = std::hash<T>,
      [all …]
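The excerpt shows the empty-slot policy an open-addressing hash set can use: free slots hold a sentinel, and a partial specialization lets pointer keys use nullptr instead of a default-constructed value. A compact sketch of that policy pair (names are mine):

    template <class T>
    struct EmptyFnSketch {
      void MakeEmpty(T& item) const { item = T(); }
      bool IsEmpty(const T& item) const { return item == T(); }
    };

    template <class T>
    struct EmptyFnSketch<T*> {
      void MakeEmpty(T*& item) const { item = nullptr; }
      bool IsEmpty(T* const& item) const { return item == nullptr; }
    };

The hash set then takes the policy as a defaulted template parameter, as on line 58 above, so callers can swap in a different notion of "empty" for types whose default value is a legitimate key.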
|
D | dumpable.h |
      31  template<typename T>
      34    explicit Dumpable(const T& value) : value_(value) {  in Dumpable()
      42    const T& value_;
      47  template<typename T>
      48  std::ostream& operator<<(std::ostream& os, const Dumpable<T>& rhs) {
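A sketch of the Dumpable wrapper pattern: adapt a class that exposes a Dump(std::ostream&) member to plain stream insertion. The Dump() signature on the wrapped type is an assumption about the convention, not taken from the excerpt:

    #include <ostream>

    template <typename T>
    class DumpableSketch {
     public:
      explicit DumpableSketch(const T& value) : value_(value) {}
      void DumpTo(std::ostream& os) const { value_.Dump(os); }
     private:
      const T& value_;  // non-owning; the wrapper must not outlive the value
    };

    template <typename T>
    std::ostream& operator<<(std::ostream& os, const DumpableSketch<T>& rhs) {
      rhs.DumpTo(os);
      return os;
    }

Usage is `os << DumpableSketch<Foo>(foo)`, which keeps operator<< out of every dumpable class.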
|
/art/cmdline/ |
D | cmdline_parse_result.h |
      29  template <typename T>
      39    static CmdlineParseResult<T> Failure() {  in Failure()
      44    static CmdlineParseResult<T> Failure(const std::string& message) {  in Failure()
      49    static CmdlineParseResult<T> Success(const T& value) {  in Success()
      54    static CmdlineParseResult<T> Success(T&& value) {  in Success()
      55      return CmdlineParseResult(std::forward<T>(value));  in Success()
      60    static CmdlineParseResult<T> SuccessNoValue() {  in SuccessNoValue()
      61      return CmdlineParseResult(T {});  in SuccessNoValue()
      65    static CmdlineParseResult<T> OutOfRange(const std::string& message) {  in OutOfRange()
      72    static CmdlineParseResult<T> OutOfRange(const T& value,  in OutOfRange()
      [all …]
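The shape here is a result object built through named static factories instead of exceptions. A reduced sketch with just a success and a failure state (the real class also distinguishes out-of-range and value-less successes; T is assumed default-constructible and movable in this sketch):

    #include <string>
    #include <utility>

    template <typename T>
    class ParseResultSketch {
     public:
      static ParseResultSketch Success(T value) {
        return ParseResultSketch(std::move(value));
      }
      static ParseResultSketch Failure(std::string message) {
        ParseResultSketch result;
        result.error_ = std::move(message);
        return result;
      }

      bool IsSuccess() const { return error_.empty(); }
      const T& GetValue() const { return value_; }
      const std::string& GetError() const { return error_; }

     private:
      ParseResultSketch() : value_() {}
      explicit ParseResultSketch(T value) : value_(std::move(value)) {}

      T value_;
      std::string error_;  // empty means success
    };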
|
/art/runtime/ |
D | atomic.h |
      190 template<typename T>
      191 class PACKED(sizeof(T)) Atomic : public std::atomic<T> {  in PACKED() argument
      193   Atomic<T>() : std::atomic<T>(0) { }  in PACKED()
      195   explicit Atomic<T>(T value) : std::atomic<T>(value) { }  in PACKED()
      198   T LoadRelaxed() const {  in PACKED()
      206   T LoadJavaData() const {  in PACKED()
      212   T LoadSequentiallyConsistent() const {  in PACKED()
      217   void StoreRelaxed(T desired) {  in PACKED()
      222   void StoreJavaData(T desired) {  in PACKED()
      227   void StoreRelease(T desired) {  in PACKED()
      [all …]
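Atomic<T> is a thin wrapper over std::atomic<T> whose method names pin the memory order at the call site instead of passing std::memory_order flags around. A sketch of that wrapping (method bodies are my reading of the names; the PACKED alignment attribute and the Java-data accessors are omitted):

    #include <atomic>

    template <typename T>
    class AtomicSketch : public std::atomic<T> {
     public:
      AtomicSketch() : std::atomic<T>(T()) {}
      explicit AtomicSketch(T value) : std::atomic<T>(value) {}

      T LoadRelaxed() const { return this->load(std::memory_order_relaxed); }
      T LoadSequentiallyConsistent() const { return this->load(std::memory_order_seq_cst); }

      void StoreRelaxed(T desired) { this->store(desired, std::memory_order_relaxed); }
      void StoreRelease(T desired) { this->store(desired, std::memory_order_release); }
    };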
|
D | handle.h |
      30  template<class T> class Handle;
      36  template<class T>
      42    ALWAYS_INLINE Handle(const Handle<T>& handle) : reference_(handle.reference_) {  in Handle()
      45    ALWAYS_INLINE Handle<T>& operator=(const Handle<T>& handle) {
      50    ALWAYS_INLINE explicit Handle(StackReference<T>* reference) : reference_(reference) {  in Handle()
      53    ALWAYS_INLINE T& operator*() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {  in SHARED_LOCKS_REQUIRED()
      57    ALWAYS_INLINE T* operator->() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      61    ALWAYS_INLINE T* Get() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {  in Get()
      62      return down_cast<T*>(reference_->AsMirrorPtr());  in Get()
      104 template<class T>
      [all …]
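The key point in Handle<T> is the extra level of indirection: the handle stores a pointer to a slot rather than to the object, so a moving collector can rewrite the slot and every handle observes the new address. A stripped-down sketch of that indirection, with Slot<T> standing in for ART's StackReference<T> and no locking annotations:

    template <class T>
    struct Slot {
      T* ptr;  // rewritten by the (hypothetical) collector when the object moves
    };

    template <class T>
    class HandleSketch {
     public:
      explicit HandleSketch(Slot<T>* slot) : slot_(slot) {}
      T* Get() const { return slot_->ptr; }
      T& operator*() const { return *Get(); }
      T* operator->() const { return Get(); }
     private:
      Slot<T>* slot_;
    };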
|
D | memory_region.h |
      54  template<typename T>
      55  ALWAYS_INLINE T Load(uintptr_t offset) const {  in Load()
      56    T* address = ComputeInternalPointer<T>(offset);  in Load()
      64  template<typename T>
      65  ALWAYS_INLINE void Store(uintptr_t offset, T value) const {  in Store()
      66    T* address = ComputeInternalPointer<T>(offset);  in Store()
      73  template<typename T>
      74  ALWAYS_INLINE T LoadUnaligned(uintptr_t offset) const {  in LoadUnaligned()
      76    typedef typename std::make_unsigned<T>::type U;  in LoadUnaligned()
      83    return bit_cast<T, U>(equivalent_unsigned_integer_value);  in LoadUnaligned()
      [all …]
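LoadUnaligned() reads a T from a possibly unaligned offset without performing a misaligned access; the excerpt assembles the value byte by byte through an unsigned intermediate and bit_casts it back. A portable sketch of the same goal using memcpy (this is the standard technique, not ART's exact byte-assembly loop):

    #include <cstdint>
    #include <cstring>

    template <typename T>
    T LoadUnalignedSketch(const void* base, uintptr_t offset) {
      T value;
      std::memcpy(&value, static_cast<const uint8_t*>(base) + offset, sizeof(T));
      return value;  // compilers lower this to a single unaligned load where that is legal
    }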
|
D | object_lock.cc |
      24  template <typename T>
      25  ObjectLock<T>::ObjectLock(Thread* self, Handle<T> object) : self_(self), obj_(object) {  in ObjectLock()
      30  template <typename T>
      31  ObjectLock<T>::~ObjectLock() {  in ~ObjectLock()
      35  template <typename T>
      36  void ObjectLock<T>::WaitIgnoringInterrupts() {  in WaitIgnoringInterrupts()
      40  template <typename T>
      41  ObjectLock<T>::Notify() is declared as: void ObjectLock<T>::Notify() {  in Notify()
      45  template <typename T>
      46  void ObjectLock<T>::NotifyAll() {  in NotifyAll()
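Two things are visible here: ObjectLock is an RAII lock (acquire in the constructor, release in the destructor), and its template member functions live in a .cc file, which normally means the file ends with explicit instantiations for the mirror types it is used with. A generic sketch of the RAII pairing only:

    template <typename MutexT>
    class LockGuardSketch {
     public:
      explicit LockGuardSketch(MutexT& mu) : mu_(mu) { mu_.lock(); }
      ~LockGuardSketch() { mu_.unlock(); }
      LockGuardSketch(const LockGuardSketch&) = delete;
      LockGuardSketch& operator=(const LockGuardSketch&) = delete;
     private:
      MutexT& mu_;
    };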
|
D | handle_scope.h |
      128 template<class T>
      129 class HandleWrapper : public MutableHandle<T> {
      131   HandleWrapper(T** obj, const MutableHandle<T>& handle)  in HandleWrapper()
      132       : MutableHandle<T>(handle), obj_(obj) {  in HandleWrapper()
      138     *obj_ = MutableHandle<T>::Get();  in ~HandleWrapper()
      142   T** const obj_;
      152 template<class T>
      153 ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
      155 template<class T>
      156 ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      [all …]
|
D | handle_scope-inl.h |
      89  template<size_t kNumReferences> template<class T>
      90  inline MutableHandle<T> StackHandleScope<kNumReferences>::NewHandle(T* object) {  in NewHandle()
      92    MutableHandle<T> h(GetHandle<T>(pos_));  in NewHandle()
      97  template<size_t kNumReferences> template<class T>
      98  inline HandleWrapper<T> StackHandleScope<kNumReferences>::NewHandleWrapper(T** object) {  in NewHandleWrapper()
      100   MutableHandle<T> h(GetHandle<T>(pos_));  in NewHandleWrapper()
      102   return HandleWrapper<T>(object, h);  in NewHandleWrapper()
|
D | stride_iterator.h |
      24  template<typename T>
      25  class StrideIterator : public std::iterator<std::forward_iterator_tag, T> {
      55    T& operator*() const {
      56      return *reinterpret_cast<T*>(ptr_);
      59    T* operator->() const {
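A stride iterator walks elements of type T that are laid out every `stride` bytes, advancing a raw byte-sized pointer rather than a T*. A deliberately minimal sketch (the real class derives from std::iterator and implements the full forward-iterator interface):

    #include <cstddef>
    #include <cstdint>

    template <typename T>
    class StrideIteratorSketch {
     public:
      StrideIteratorSketch(void* start, size_t stride)
          : ptr_(reinterpret_cast<uintptr_t>(start)), stride_(stride) {}

      StrideIteratorSketch& operator++() { ptr_ += stride_; return *this; }
      T& operator*() const { return *reinterpret_cast<T*>(ptr_); }
      T* operator->() const { return reinterpret_cast<T*>(ptr_); }
      bool operator!=(const StrideIteratorSketch& other) const { return ptr_ != other.ptr_; }

     private:
      uintptr_t ptr_;   // current element address, in bytes
      size_t stride_;   // distance between consecutive elements, in bytes
    };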
|
/art/runtime/mirror/ |
D | object_array-inl.h |
      36  template<class T>
      37  inline ObjectArray<T>* ObjectArray<T>::Alloc(Thread* self, Class* object_array_class,  in Alloc()
      47    return array->AsObjectArray<T>();  in Alloc()
      51  template<class T>
      52  inline ObjectArray<T>* ObjectArray<T>::Alloc(Thread* self, Class* object_array_class,  in Alloc()
      58  template<class T>
      59  inline T* ObjectArray<T>::Get(int32_t i) {  in Get()
      64    return GetFieldObject<T>(OffsetOfElement(i));  in Get()
      67  template<class T> template<VerifyObjectFlags kVerifyFlags>
      68  inline bool ObjectArray<T>::CheckAssignable(T* object) {  in CheckAssignable()
      [all …]
|
D | array-inl.h |
      200 template<class T>
      201 inline void PrimitiveArray<T>::VisitRoots(RootVisitor* visitor) {  in VisitRoots()
      205 template<typename T>
      206 inline PrimitiveArray<T>* PrimitiveArray<T>::Alloc(Thread* self, size_t length) {  in Alloc()
      208     ComponentSizeShiftWidth(sizeof(T)),  in Alloc()
      210   return down_cast<PrimitiveArray<T>*>(raw_array);  in Alloc()
      213 template<typename T>
      214 inline T PrimitiveArray<T>::Get(int32_t i) {  in Get()
      217     return T(0);  in Get()
      222 template<typename T>
      [all …]
|
D | array.h |
      27  template<class T> class Handle;
      104 template<typename T>
      107   typedef T ElementType;
      109   static PrimitiveArray<T>* Alloc(Thread* self, size_t length)
      112   const T* GetData() const ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {  in GetData()
      113     return reinterpret_cast<const T*>(GetRawData(sizeof(T), 0));  in GetData()
      116   T* GetData() ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {  in GetData()
      117     return reinterpret_cast<T*>(GetRawData(sizeof(T), 0));  in GetData()
      120   T Get(int32_t i) ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
      122   T GetWithoutChecks(int32_t i) ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {  in GetWithoutChecks()
      [all …]
|
D | object_array.h |
      25  template<class T>
      33    static ObjectArray<T>* Alloc(Thread* self, Class* object_array_class, int32_t length,
      37    static ObjectArray<T>* Alloc(Thread* self, Class* object_array_class, int32_t length)
      40    T* Get(int32_t i) ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
      46    bool CheckAssignable(T* object) NO_THREAD_SAFETY_ANALYSIS;
      48    ALWAYS_INLINE void Set(int32_t i, T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
      52    ALWAYS_INLINE void Set(int32_t i, T* object) NO_THREAD_SAFETY_ANALYSIS;
      60    ALWAYS_INLINE void SetWithoutChecks(int32_t i, T* object) NO_THREAD_SAFETY_ANALYSIS;
      65    ALWAYS_INLINE void SetWithoutChecksAndWriteBarrier(int32_t i, T* object)
      68    ALWAYS_INLINE T* GetWithoutChecks(int32_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
      [all …]
|
/art/compiler/utils/ |
D | growable_array.h |
      29  template<typename T>
      36      elem_list_ = arena_->AllocArray<T>(init_length, kArenaAllocGrowableArray);  in GrowableArray()
      39    GrowableArray(ArenaAllocator* arena, size_t init_length, T initial_data)  in GrowableArray()
      43      elem_list_ = arena_->AllocArray<T>(init_length, kArenaAllocGrowableArray);  in GrowableArray()
      49    bool Contains(T value) const {  in Contains()
      67      T* new_array = arena_->AllocArray<T>(target_length, kArenaAllocGrowableArray);  in Resize()
      68      memcpy(new_array, elem_list_, sizeof(T) * num_allocated_);  in Resize()
      79    void Insert(T elem) {  in Insert()
      86    void InsertAt(size_t index, T elem) {  in InsertAt()
      95    void Add(T elem) {  in Add()
      [all …]
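The Resize() lines show the arena-friendly growth strategy: allocate a larger backing array, memcpy the old elements across, and abandon the old block, since arena memory is reclaimed in bulk when the arena is destroyed. A sketch of just that step, with a caller-supplied allocation callback standing in for the arena's AllocArray<T>:

    #include <cstddef>
    #include <cstring>

    template <typename T>
    void GrowSketch(T*& elems, size_t& capacity, size_t target_capacity,
                    T* (*alloc_array)(size_t count)) {
      if (target_capacity <= capacity) {
        return;
      }
      T* new_elems = alloc_array(target_capacity);
      std::memcpy(new_elems, elems, sizeof(T) * capacity);  // valid for trivially copyable T
      elems = new_elems;            // the old block is intentionally not freed
      capacity = target_capacity;
    }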
|
D | array_ref.h |
      40  template <typename T>
      46    typedef T value_type;
      47    typedef T& reference;
      48    typedef const T& const_reference;
      49    typedef T* pointer;
      50    typedef const T* const_pointer;
      51    typedef T* iterator;
      52    typedef const T* const_iterator;
      65    constexpr ArrayRef(T (&array)[size])  in ArrayRef()
      71    typename std::enable_if<std::is_same<T, const U>::value, tag>::type
      [all …]
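ArrayRef is a non-owning pointer-plus-length view over a contiguous array, with a constructor that deduces the size of a C array; modern code would reach for std::span. A sketch of the shape of the hand-rolled version (the enable_if constructor for const conversions is omitted):

    #include <cstddef>

    template <typename T>
    class ArrayViewSketch {
     public:
      ArrayViewSketch() : data_(nullptr), size_(0) {}
      ArrayViewSketch(T* data, size_t size) : data_(data), size_(size) {}

      template <size_t kSize>
      constexpr ArrayViewSketch(T (&array)[kSize]) : data_(array), size_(kSize) {}

      T* begin() const { return data_; }
      T* end() const { return data_ + size_; }
      T& operator[](size_t i) const { return data_[i]; }
      size_t size() const { return size_; }

     private:
      T* data_;
      size_t size_;
    };

    // Usage: int buf[4] = {1, 2, 3, 4}; ArrayViewSketch<int> view(buf); // size deduced as 4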
|
D | swap_space.h |
      101 template <typename T> class SwapAllocator;
      131 template <typename T>
      134   typedef T value_type;
      135   typedef T* pointer;
      136   typedef T& reference;
      137   typedef const T* const_pointer;
      138   typedef const T& const_reference;
      157     return static_cast<size_type>(-1) / sizeof(T);  in max_size()
      166     return reinterpret_cast<T*>(malloc(n * sizeof(T)));
      168     return reinterpret_cast<T*>(swap_space_->Alloc(n * sizeof(T)));
      [all …]
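Lines 166/168 show the two-backend idea: with no swap space configured the allocator falls through to malloc, otherwise it hands the request to the swap-space object. A sketch of that shape, with Backend as a placeholder interface (equality operators and the rest of the allocator requirements are omitted for brevity):

    #include <cstddef>
    #include <cstdlib>

    struct Backend {                  // stand-in for ART's SwapSpace
      virtual void* Alloc(size_t bytes) = 0;
      virtual void Free(void* p, size_t bytes) = 0;
      virtual ~Backend() = default;
    };

    template <typename T>
    class FallbackAllocatorSketch {
     public:
      using value_type = T;

      explicit FallbackAllocatorSketch(Backend* backend = nullptr) : backend_(backend) {}
      template <typename U>
      FallbackAllocatorSketch(const FallbackAllocatorSketch<U>& other) : backend_(other.backend_) {}

      T* allocate(size_t n) {
        const size_t bytes = n * sizeof(T);
        return static_cast<T*>(backend_ == nullptr ? std::malloc(bytes) : backend_->Alloc(bytes));
      }
      void deallocate(T* p, size_t n) {
        if (backend_ == nullptr) { std::free(p); } else { backend_->Free(p, n * sizeof(T)); }
      }

      Backend* backend_;  // public so the rebinding constructor above can read it
    };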
|
/art/cmdline/detail/ |
D | cmdline_parser_detail.h |
      27  template <typename T>
      28  typename std::remove_reference<T>::type& FakeReference();
      36  template <typename TStream, typename T>
      37  static std::true_type InsertionOperatorTest(TStream& os, const T& value,
      40  template <typename TStream, typename ... T>
      41  static std::false_type InsertionOperatorTest(TStream& os, const T& ... args);
      58  template <typename TL, typename ... T>
      59  static std::false_type EqualityOperatorTest(const TL& left, const T& ... args);
      90  template <typename T>
      91  std::string ToStringAny(const T& value,
      [all …]
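These declarations are a detection idiom: overload resolution picks the true_type overload only when `os << value` is a well-formed expression, letting other code branch on "is this type streamable?". A self-contained sketch of the same idea written with decltype (the excerpt's version achieves it with an extra defaulted argument instead):

    #include <ostream>
    #include <type_traits>
    #include <utility>

    template <typename TStream, typename T>
    static auto InsertionOperatorTestSketch(int)
        -> decltype(std::declval<TStream&>() << std::declval<const T&>(), std::true_type{});

    template <typename TStream, typename T>
    static std::false_type InsertionOperatorTestSketch(...);

    template <typename T>
    struct SupportsInsertion
        : decltype(InsertionOperatorTestSketch<std::ostream, T>(0)) {};

    struct NotStreamable {};

    static_assert(SupportsInsertion<int>::value, "int has operator<<");
    static_assert(!SupportsInsertion<NotStreamable>::value, "no operator<< declared");

ToStringAny() can then use such a trait to stream types that support it and fall back to a placeholder string for those that do not.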
|
/art/runtime/gc/accounting/ |
D | atomic_stack.h |
      37  template <typename T>
      43    bool operator()(const T* a, const StackReference<T>& b) const NO_THREAD_SAFETY_ANALYSIS {  in operator()
      46    bool operator()(const StackReference<T>& a, const T* b) const NO_THREAD_SAFETY_ANALYSIS {  in operator()
      50    bool operator()(const StackReference<T>& a, const StackReference<T>& b) const  in operator()
      77    bool AtomicPushBackIgnoreGrowthLimit(T* value) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {  in AtomicPushBackIgnoreGrowthLimit()
      82    bool AtomicPushBack(T* value) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {  in AtomicPushBack()
      88    bool AtomicBumpBack(size_t num_slots, StackReference<T>** start_address,  in AtomicBumpBack()
      89                        StackReference<T>** end_address)  in AtomicBumpBack()
      109     DCHECK_EQ(begin_[i].AsMirrorPtr(), static_cast<T*>(nullptr))  in AtomicBumpBack()
      119     DCHECK_EQ(begin_[i].AsMirrorPtr(), static_cast<T*>(nullptr)) << "i=" << i;  in AssertAllZero()
      [all …]
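AtomicPushBack/AtomicBumpBack claim slots at the tail without taking a lock and report failure (rather than blocking) when the stack is full. A reduced sketch of that lock-free bump-push using a compare-and-swap on the back index; growth limits, StackReference, and the GC debug checks are omitted, and this is one standard way to implement the idea rather than ART's exact code:

    #include <atomic>
    #include <cstddef>

    template <typename T>
    class AtomicStackSketch {
     public:
      AtomicStackSketch(T** storage, size_t capacity)
          : slots_(storage), capacity_(capacity), back_(0) {}

      bool AtomicPushBack(T* value) {
        size_t index = back_.load(std::memory_order_relaxed);
        do {
          if (index >= capacity_) {
            return false;                      // full: caller handles overflow
          }
        } while (!back_.compare_exchange_weak(index, index + 1, std::memory_order_relaxed));
        slots_[index] = value;                 // the claimed slot is now exclusively ours
        return true;
      }

     private:
      T** slots_;
      size_t capacity_;
      std::atomic<size_t> back_;
    };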
|