1 /* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #ifndef ART_RUNTIME_OAT_QUICK_METHOD_HEADER_H_ 18 #define ART_RUNTIME_OAT_QUICK_METHOD_HEADER_H_ 19 20 #include "arch/instruction_set.h" 21 #include "base/locks.h" 22 #include "base/macros.h" 23 #include "base/utils.h" 24 #include "quick/quick_method_frame_info.h" 25 #include "stack_map.h" 26 27 namespace art { 28 29 class ArtMethod; 30 31 // OatQuickMethodHeader precedes the raw code chunk generated by the compiler. 
class PACKED(4) OatQuickMethodHeader {
 public:
  OatQuickMethodHeader() = default;
  // Constructs a header describing a compiled-code chunk: `vmap_table_offset`
  // is the distance (in bytes, counted back from `code_`) to the vmap
  // table / CodeInfo, and `code_size` is the size of the machine code.
  OatQuickMethodHeader(uint32_t vmap_table_offset,
                       uint32_t code_size)
      : vmap_table_offset_(vmap_table_offset),
        code_size_(code_size) {
  }

  // Singleton header shared by all nterp (interpreter entry) methods.
  // Defined out-of-line; see IsNterpMethodHeader().
  static OatQuickMethodHeader* NterpMethodHeader;

  // Returns whether this header is the shared nterp method header above.
  bool IsNterpMethodHeader() const;

  // Recovers the header from a pointer to the first code byte. The header
  // immediately precedes the code, so we simply step back by the offset of
  // the `code_` member. Either the code or the header must be ISA-aligned.
  static OatQuickMethodHeader* FromCodePointer(const void* code_ptr) {
    uintptr_t code = reinterpret_cast<uintptr_t>(code_ptr);
    uintptr_t header = code - OFFSETOF_MEMBER(OatQuickMethodHeader, code_);
    DCHECK(IsAlignedParam(code, GetInstructionSetAlignment(kRuntimeISA)) ||
           IsAlignedParam(header, GetInstructionSetAlignment(kRuntimeISA)))
        << std::hex << code << " " << std::hex << header;
    return reinterpret_cast<OatQuickMethodHeader*>(header);
  }

  // Like FromCodePointer(), but first converts an entry point to a raw code
  // pointer (e.g. stripping the Thumb-2 bit on ARM — see GetEntryPoint()).
  static OatQuickMethodHeader* FromEntryPoint(const void* entry_point) {
    return FromCodePointer(EntryPointToCodePointer(entry_point));
  }

  // Size of the header rounded up to the runtime ISA's instruction
  // alignment, i.e. the distance from an aligned header to its code.
  static size_t InstructionAlignedSize() {
    return RoundUp(sizeof(OatQuickMethodHeader), GetInstructionSetAlignment(kRuntimeISA));
  }

  OatQuickMethodHeader(const OatQuickMethodHeader&) = default;
  OatQuickMethodHeader& operator=(const OatQuickMethodHeader&) = default;

  // Offset of `pc` from this method's entry point (Thumb bit included on ARM).
  uintptr_t NativeQuickPcOffset(const uintptr_t pc) const {
    return pc - reinterpret_cast<uintptr_t>(GetEntryPoint());
  }

  // True when this header fronts optimizing-compiler output: it has actual
  // code and a vmap table (CodeInfo) preceding the header.
  bool IsOptimized() const {
    return GetCodeSize() != 0 && vmap_table_offset_ != 0;
  }

  // Pointer to the CodeInfo for optimized code; it lives `vmap_table_offset_`
  // bytes before the start of the code.
  const uint8_t* GetOptimizedCodeInfoPtr() const {
    DCHECK(IsOptimized());
    return code_ - vmap_table_offset_;
  }

  // Mutable variant of the above.
  uint8_t* GetOptimizedCodeInfoPtr() {
    DCHECK(IsOptimized());
    return code_ - vmap_table_offset_;
  }

  // First byte of the machine code that directly follows this header.
  const uint8_t* GetCode() const {
    return code_;
  }

  // Code size in bytes, with the should-deoptimize flag bit masked off.
  uint32_t GetCodeSize() const {
    // ART compiled method are prefixed with header, but we can also easily
    // accidentally use a function pointer to one of the stubs/trampolines.
    // We prefix those with 0xFF in the assembly so that we can do DCHECKs.
    CHECK_NE(code_size_, 0xFFFFFFFF) << code_size_;
    return code_size_ & kCodeSizeMask;
  }

  // Address of the raw code-size word (includes the flag bit; see
  // kShouldDeoptimizeMask). Used by code that patches/reads the field directly.
  const uint32_t* GetCodeSizeAddr() const {
    return &code_size_;
  }

  uint32_t GetVmapTableOffset() const {
    return vmap_table_offset_;
  }

  void SetVmapTableOffset(uint32_t offset) {
    vmap_table_offset_ = offset;
  }

  const uint32_t* GetVmapTableOffsetAddr() const {
    return &vmap_table_offset_;
  }

  // Vmap table for non-optimized code only (optimized code stores a CodeInfo
  // instead — use GetOptimizedCodeInfoPtr()). Returns null when absent.
  const uint8_t* GetVmapTable() const {
    CHECK(!IsOptimized()) << "Unimplemented vmap table for optimizing compiler";
    return (vmap_table_offset_ == 0) ? nullptr : code_ - vmap_table_offset_;
  }

  // Whether `pc` falls within this method's code. Note the upper bound is
  // inclusive of `code_start + size` (a frame's return PC may point just past
  // the last instruction).
  bool Contains(uintptr_t pc) const {
    // Remove hwasan tag to make comparison below valid. The PC from the stack does not have it.
    uintptr_t code_start = reinterpret_cast<uintptr_t>(HWASanUntag(code_));
    static_assert(kRuntimeISA != InstructionSet::kThumb2, "kThumb2 cannot be a runtime ISA");
    if (kRuntimeISA == InstructionSet::kArm) {
      // On Thumb-2, the pc is offset by one.
      code_start++;
    }
    return code_start <= pc && pc <= (code_start + GetCodeSize());
  }

  // Entry point callers should branch to. On ARM this is the code address
  // with bit 0 set to select the Thumb-2 instruction set.
  const uint8_t* GetEntryPoint() const {
    // When the runtime architecture is ARM, `kRuntimeISA` is set to `kArm`
    // (not `kThumb2`), *but* we always generate code for the Thumb-2
    // instruction set anyway. Thumb-2 requires the entrypoint to be of
    // offset 1.
    static_assert(kRuntimeISA != InstructionSet::kThumb2, "kThumb2 cannot be a runtime ISA");
    return (kRuntimeISA == InstructionSet::kArm)
        ? reinterpret_cast<uint8_t*>(reinterpret_cast<uintptr_t>(code_) | 1)
        : code_;
  }

  // Frame size decoded from the CodeInfo. `kCheckFrameSize` may be disabled
  // for callers that read the frame info before it is stack-aligned.
  template <bool kCheckFrameSize = true>
  uint32_t GetFrameSizeInBytes() const {
    uint32_t result = GetFrameInfo().FrameSizeInBytes();
    if (kCheckFrameSize) {
      DCHECK_ALIGNED(result, kStackAlignment);
    }
    return result;
  }

  // Decodes the quick frame info (frame size, spill masks) from the CodeInfo.
  // Only valid for optimized code.
  QuickMethodFrameInfo GetFrameInfo() const {
    DCHECK(IsOptimized());
    return CodeInfo::DecodeFrameInfo(GetOptimizedCodeInfoPtr());
  }

  // Maps a dex pc to a native pc within this method's code (out-of-line).
  uintptr_t ToNativeQuickPc(ArtMethod* method,
                            const uint32_t dex_pc,
                            bool is_for_catch_handler,
                            bool abort_on_failure = true) const;

  // Maps a native pc back to a dex pc for the given frame (out-of-line).
  uint32_t ToDexPc(ArtMethod** frame,
                   const uintptr_t pc,
                   bool abort_on_failure = true) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Marks the compiled code as carrying a should_deoptimize flag by setting
  // the high bit of `code_size_`. Must not already be set.
  void SetHasShouldDeoptimizeFlag() {
    DCHECK_EQ(code_size_ & kShouldDeoptimizeMask, 0u);
    code_size_ |= kShouldDeoptimizeMask;
  }

  bool HasShouldDeoptimizeFlag() const {
    return (code_size_ & kShouldDeoptimizeMask) != 0;
  }

 private:
  // High bit of `code_size_` flags code compiled with a should_deoptimize
  // slot; the remaining 31 bits are the actual code size.
  static constexpr uint32_t kShouldDeoptimizeMask = 0x80000000;
  static constexpr uint32_t kCodeSizeMask = ~kShouldDeoptimizeMask;

  // The offset in bytes from the start of the vmap table to the end of the header.
  uint32_t vmap_table_offset_ = 0u;
  // The code size in bytes. The highest bit is used to signify if the compiled
  // code with the method header has should_deoptimize flag.
  uint32_t code_size_ = 0u;
  // The actual code. Zero-length member: the compiled machine code is laid
  // out immediately after this header in memory.
  uint8_t code_[0];
};

}  // namespace art

#endif  // ART_RUNTIME_OAT_QUICK_METHOD_HEADER_H_