/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// The interpreter function takes considerable time to compile and link.
// We compile the explicit definitions separately to speed up the build.

#include "interpreter_switch_impl-inl.h"

namespace art HIDDEN {
namespace interpreter {

// Define the helper class that does not do any transaction checks.
class InactiveTransactionChecker {
 public:
  ALWAYS_INLINE static bool WriteConstraint([[maybe_unused]] Thread* self,
                                            [[maybe_unused]] ObjPtr<mirror::Object> obj)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return false;
  }

  ALWAYS_INLINE static bool WriteValueConstraint([[maybe_unused]] Thread* self,
                                                 [[maybe_unused]] ObjPtr<mirror::Object> value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return false;
  }

  ALWAYS_INLINE static bool ReadConstraint([[maybe_unused]] Thread* self,
                                           [[maybe_unused]] ObjPtr<mirror::Object> value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return false;
  }

  ALWAYS_INLINE static bool AllocationConstraint([[maybe_unused]] Thread* self,
                                                 [[maybe_unused]] ObjPtr<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return false;
  }

  ALWAYS_INLINE static bool IsTransactionAborted() {
    return false;
  }

  static void RecordArrayElementsInTransaction([[maybe_unused]] ObjPtr<mirror::Object> array,
                                               [[maybe_unused]] int32_t count)
      REQUIRES_SHARED(Locks::mutator_lock_) {}

  ALWAYS_INLINE static void RecordNewObject([[maybe_unused]] ObjPtr<mirror::Object> new_object)
      REQUIRES_SHARED(Locks::mutator_lock_) {}

  ALWAYS_INLINE static void RecordNewArray([[maybe_unused]] ObjPtr<mirror::Array> new_array)
      REQUIRES_SHARED(Locks::mutator_lock_) {}
};

// Define the helper class that forwards queries and events to the active instrumentation.
class ActiveInstrumentationHandler {
 public:
  ALWAYS_INLINE WARN_UNUSED
  static bool HasFieldReadListeners(const instrumentation::Instrumentation* instrumentation)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return instrumentation->HasFieldReadListeners();
  }

  ALWAYS_INLINE WARN_UNUSED
  static bool HasFieldWriteListeners(const instrumentation::Instrumentation* instrumentation)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return instrumentation->HasFieldWriteListeners();
  }

  ALWAYS_INLINE WARN_UNUSED
  static bool HasBranchListeners(const instrumentation::Instrumentation* instrumentation)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return instrumentation->HasBranchListeners();
  }

  ALWAYS_INLINE WARN_UNUSED
  static bool NeedsDexPcEvents(ShadowFrame& shadow_frame)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_IMPLIES(shadow_frame.GetNotifyDexPcMoveEvents(),
                   Runtime::Current()->GetInstrumentation()->HasDexPcListeners());
    return shadow_frame.GetNotifyDexPcMoveEvents();
  }

  ALWAYS_INLINE WARN_UNUSED
  static bool NeedsMethodExitEvent(const instrumentation::Instrumentation* instrumentation)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return interpreter::NeedsMethodExitEvent(instrumentation);
  }

  ALWAYS_INLINE WARN_UNUSED
  static bool GetForcePopFrame(ShadowFrame& shadow_frame) {
    DCHECK_IMPLIES(shadow_frame.GetForcePopFrame(),
                   Runtime::Current()->AreNonStandardExitsEnabled());
    return shadow_frame.GetForcePopFrame();
  }

  ALWAYS_INLINE
  static void Branch(Thread* self,
                     ArtMethod* method,
                     uint32_t dex_pc,
                     int32_t dex_pc_offset,
                     const instrumentation::Instrumentation* instrumentation)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    instrumentation->Branch(self, method, dex_pc, dex_pc_offset);
  }

  // Unlike most other events the DexPcMovedEvent can be sent when there is a pending exception
  // (if the next instruction is MOVE_EXCEPTION). This means it needs to be handled carefully to
  // be able to detect exceptions thrown by the DexPcMovedEvent itself. These exceptions could be
  // thrown by jvmti-agents while handling breakpoint or single step events. We had to move this
  // into its own function because it was making ExecuteSwitchImpl have too large a stack.
  NO_INLINE static bool DoDexPcMoveEvent(Thread* self,
                                         const CodeItemDataAccessor& accessor,
                                         const ShadowFrame& shadow_frame,
                                         uint32_t dex_pc,
                                         const instrumentation::Instrumentation* instrumentation,
                                         JValue* save_ref)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(instrumentation->HasDexPcListeners());
    StackHandleScope<2> hs(self);
    Handle<mirror::Throwable> thr(hs.NewHandle(self->GetException()));
    mirror::Object* null_obj = nullptr;
    HandleWrapper<mirror::Object> h(
        hs.NewHandleWrapper(LIKELY(save_ref == nullptr) ? &null_obj : save_ref->GetGCRoot()));
    self->ClearException();
    instrumentation->DexPcMovedEvent(self,
                                     shadow_frame.GetThisObject(accessor.InsSize()),
                                     shadow_frame.GetMethod(),
                                     dex_pc);
    if (UNLIKELY(self->IsExceptionPending())) {
      // We got a new exception in the dex-pc-moved event.
      // We just let this exception replace the old one.
      // TODO It would be good to add the old exception to the
      // suppressed exceptions of the new one if possible.
      return false;  // Pending exception.
    }
    if (UNLIKELY(!thr.IsNull())) {
      self->SetException(thr.Get());
    }
    return true;
  }

  template <typename T>
  ALWAYS_INLINE WARN_UNUSED
  static bool SendMethodExitEvents(
      Thread* self,
      const instrumentation::Instrumentation* instrumentation,
      ShadowFrame& frame,
      ArtMethod* method,
      T& result) REQUIRES_SHARED(Locks::mutator_lock_) {
    return interpreter::SendMethodExitEvents(self, instrumentation, frame, method, result);
  }
};

// Explicit definition of ExecuteSwitchImplCpp.
template HOT_ATTR
void ExecuteSwitchImplCpp<false>(SwitchImplContext* ctx);

}  // namespace interpreter
}  // namespace art
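
// Note on the build-speed comment at the top of this file: only the <false> (non-transactional)
// case of ExecuteSwitchImplCpp is explicitly instantiated here. The other explicit definition,
// presumably ExecuteSwitchImplCpp<true> with transaction checks enabled, is expected to live in a
// sibling translation unit compiled separately; that file's name and its helper classes are not
// part of this file and are assumptions, not shown here.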