/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MONITOR_OBJECTS_STACK_VISITOR_H_
#define ART_RUNTIME_MONITOR_OBJECTS_STACK_VISITOR_H_

#include <android-base/logging.h>

#include "art_method.h"
#include "base/mutex.h"
#include "monitor.h"
#include "stack.h"
#include "thread.h"
#include "thread_state.h"

namespace art {

namespace mirror {
class Object;
}

class Context;

class MonitorObjectsStackVisitor : public StackVisitor {
 public:
  MonitorObjectsStackVisitor(Thread* thread_in,
                             Context* context,
                             bool check_suspended = true,
                             bool dump_locks_in = true)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(thread_in,
                     context,
                     StackVisitor::StackWalkKind::kIncludeInlinedFrames,
                     check_suspended),
        frame_count(0u),
        dump_locks(dump_locks_in) {}

  enum class VisitMethodResult {
    kContinueMethod,
    kSkipMethod,
    kEndStackWalk,
  };

  bool VisitFrame() FINAL REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* m = GetMethod();
    if (m->IsRuntimeMethod()) {
      return true;
    }

    VisitMethodResult vmrEntry = StartMethod(m, frame_count);
    switch (vmrEntry) {
      case VisitMethodResult::kContinueMethod:
        break;
      case VisitMethodResult::kSkipMethod:
        return true;
      case VisitMethodResult::kEndStackWalk:
        return false;
    }

    if (frame_count == 0) {
      // Top frame, check for blocked state.

      mirror::Object* monitor_object;
      uint32_t lock_owner_tid;
      ThreadState state = Monitor::FetchState(GetThread(),
                                              &monitor_object,
                                              &lock_owner_tid);
      switch (state) {
        case kWaiting:
        case kTimedWaiting:
          VisitWaitingObject(monitor_object, state);
          break;
        case kSleeping:
          VisitSleepingObject(monitor_object);
          break;

        case kBlocked:
        case kWaitingForLockInflation:
          VisitBlockedOnObject(monitor_object, state, lock_owner_tid);
          break;

        default:
          break;
      }
    }

    if (dump_locks) {
      // Visit locks, but do not abort on errors. This could trigger a nested abort.
      // Skip visiting locks if dump_locks is false as it would cause a bad_mutexes_held in
      // RegTypeCache::RegTypeCache due to thread_list_lock.
      Monitor::VisitLocks(this, VisitLockedObject, this, false);
    }

    ++frame_count;

    VisitMethodResult vmrExit = EndMethod(m);
    switch (vmrExit) {
      case VisitMethodResult::kContinueMethod:
      case VisitMethodResult::kSkipMethod:
        return true;

      case VisitMethodResult::kEndStackWalk:
        return false;
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 protected:
  virtual VisitMethodResult StartMethod(ArtMethod* m, size_t frame_nr)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  virtual VisitMethodResult EndMethod(ArtMethod* m)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  virtual void VisitWaitingObject(mirror::Object* obj, ThreadState state)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  virtual void VisitSleepingObject(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  virtual void VisitBlockedOnObject(mirror::Object* obj, ThreadState state, uint32_t owner_tid)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  virtual void VisitLockedObject(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  size_t frame_count;

 private:
  static void VisitLockedObject(mirror::Object* o, void* context)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    MonitorObjectsStackVisitor* self = reinterpret_cast<MonitorObjectsStackVisitor*>(context);
    if (o != nullptr) {
      if (kUseReadBarrier && Thread::Current()->GetIsGcMarking()) {
        // We may call Thread::Dump() in the middle of the CC thread flip and this thread's stack
        // may have not been flipped yet and "o" may be a from-space (stale) ref, in which case the
        // IdentityHashCode call below will crash. So explicitly mark/forward it here.
        o = ReadBarrier::Mark(o);
      }
    }
    self->VisitLockedObject(o);
  }

  const bool dump_locks;
};

}  // namespace art

#endif  // ART_RUNTIME_MONITOR_OBJECTS_STACK_VISITOR_H_
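// A minimal usage sketch, kept as a comment so the header itself is unchanged:
// a hypothetical subclass (ExampleLockDumpVisitor) that prints each frame and
// the monitors associated with it. Only the hook signatures and the
// REQUIRES_SHARED annotations come from the declarations above; the class
// name, the os_ member, the "thread" and "os" variables at the call site, and
// the extra includes (<memory>, <ostream>, the Context header) are
// illustrative assumptions. The caller is assumed to hold the mutator lock
// (shared) and to have suspended "thread", as Thread::Dump does.
//
//   class ExampleLockDumpVisitor FINAL : public MonitorObjectsStackVisitor {
//    public:
//     ExampleLockDumpVisitor(Thread* thread, Context* context, std::ostream& os)
//         REQUIRES_SHARED(Locks::mutator_lock_)
//         : MonitorObjectsStackVisitor(thread, context),
//           os_(os) {}
//
//    protected:
//     VisitMethodResult StartMethod(ArtMethod* m, size_t frame_nr)
//         REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
//       os_ << "  #" << frame_nr << " " << m->PrettyMethod() << "\n";
//       return VisitMethodResult::kContinueMethod;
//     }
//     VisitMethodResult EndMethod(ArtMethod* m ATTRIBUTE_UNUSED)
//         REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
//       return VisitMethodResult::kContinueMethod;
//     }
//     void VisitWaitingObject(mirror::Object* obj, ThreadState state ATTRIBUTE_UNUSED)
//         REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
//       os_ << "    - waiting on " << obj << "\n";
//     }
//     void VisitSleepingObject(mirror::Object* obj)
//         REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
//       os_ << "    - sleeping on " << obj << "\n";
//     }
//     void VisitBlockedOnObject(mirror::Object* obj,
//                               ThreadState state ATTRIBUTE_UNUSED,
//                               uint32_t owner_tid)
//         REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
//       os_ << "    - blocked on " << obj << " held by tid=" << owner_tid << "\n";
//     }
//     void VisitLockedObject(mirror::Object* obj)
//         REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
//       os_ << "    - locked " << obj << "\n";
//     }
//
//    private:
//     std::ostream& os_;
//   };
//
//   // Walking the stack of the suspended thread:
//   std::unique_ptr<Context> context(Context::Create());
//   ExampleLockDumpVisitor visitor(thread, context.get(), os);
//   visitor.WalkStack();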