/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Dalvik-specific side of debugger support.  (The JDWP code is intended to
 * be relatively generic.)
 */
#ifndef ART_RUNTIME_DEBUGGER_H_
#define ART_RUNTIME_DEBUGGER_H_

#include <pthread.h>

#include <set>
#include <string>
#include <vector>

#include "base/array_ref.h"
#include "class_linker.h"
#include "gc_root.h"
#include "handle.h"
#include "jdwp/jdwp.h"
#include "jni.h"
#include "jvalue.h"
#include "obj_ptr.h"
#include "runtime_callbacks.h"
#include "thread.h"
#include "thread_state.h"

namespace art {
namespace mirror {
class Class;
class Object;
class Throwable;
}  // namespace mirror
class ArtField;
class ArtMethod;
class ObjectRegistry;
class ScopedObjectAccess;
class ScopedObjectAccessUnchecked;
class StackVisitor;
class Thread;

struct DebuggerActiveMethodInspectionCallback : public MethodInspectionCallback {
  bool IsMethodBeingInspected(ArtMethod* method) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
  bool IsMethodSafeToJit(ArtMethod* method) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
  bool MethodNeedsDebugVersion(ArtMethod* method) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
};

struct DebuggerDdmCallback : public DdmCallback {
  void DdmPublishChunk(uint32_t type, const ArrayRef<const uint8_t>& data)
      OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
};

struct InternalDebuggerControlCallback : public DebuggerControlCallback {
  void StartDebugger() OVERRIDE;
  void StopDebugger() OVERRIDE;
  bool IsDebuggerConfigured() OVERRIDE;
};

/*
 * Invoke-during-breakpoint support.
 */
struct DebugInvokeReq {
  DebugInvokeReq(uint32_t invoke_request_id, JDWP::ObjectId invoke_thread_id,
                 mirror::Object* invoke_receiver, mirror::Class* invoke_class,
                 ArtMethod* invoke_method, uint32_t invoke_options,
                 uint64_t args[], uint32_t args_count)
      : request_id(invoke_request_id), thread_id(invoke_thread_id), receiver(invoke_receiver),
        klass(invoke_class), method(invoke_method), arg_count(args_count), arg_values(args),
        options(invoke_options), reply(JDWP::expandBufAlloc()) {
  }

  ~DebugInvokeReq() {
    JDWP::expandBufFree(reply);
  }

  // Request
  const uint32_t request_id;
  const JDWP::ObjectId thread_id;
  GcRoot<mirror::Object> receiver;      // not used for ClassType.InvokeMethod.
  GcRoot<mirror::Class> klass;
  ArtMethod* const method;
  const uint32_t arg_count;
  std::unique_ptr<uint64_t[]> arg_values;  // will be null if arg_count_ == 0. We take ownership
                                           // of this array so we must delete it upon destruction.
  const uint32_t options;

  // Reply
  JDWP::ExpandBuf* const reply;

  void VisitRoots(RootVisitor* visitor, const RootInfo& root_info)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  DISALLOW_COPY_AND_ASSIGN(DebugInvokeReq);
};

// Thread-local data structure that holds fields for controlling single-stepping.
class SingleStepControl {
 public:
  SingleStepControl(JDWP::JdwpStepSize step_size, JDWP::JdwpStepDepth step_depth,
                    int stack_depth, ArtMethod* method)
      : step_size_(step_size), step_depth_(step_depth),
        stack_depth_(stack_depth), method_(method) {
  }

  JDWP::JdwpStepSize GetStepSize() const {
    return step_size_;
  }

  JDWP::JdwpStepDepth GetStepDepth() const {
    return step_depth_;
  }

  int GetStackDepth() const {
    return stack_depth_;
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

  const std::set<uint32_t>& GetDexPcs() const {
    return dex_pcs_;
  }

  void AddDexPc(uint32_t dex_pc);

  bool ContainsDexPc(uint32_t dex_pc) const;

 private:
  // See JdwpStepSize and JdwpStepDepth for details.
  const JDWP::JdwpStepSize step_size_;
  const JDWP::JdwpStepDepth step_depth_;

  // The stack depth when this single-step was initiated. This is used to support the SD_OVER and
  // SD_OUT single-step depths.
  const int stack_depth_;

  // The location this single-step was initiated from.
  // A single-step is initiated in a suspended thread. We save here the current method and the
  // set of DEX pcs associated with the source line number where the suspension occurred.
  // This is used to support SD_INTO and SD_OVER single-step depths so we detect when a single-step
  // causes the execution of an instruction in a different method or at a different line number.
  ArtMethod* method_;

  std::set<uint32_t> dex_pcs_;

  DISALLOW_COPY_AND_ASSIGN(SingleStepControl);
};

// TODO rename to InstrumentationRequest.
class DeoptimizationRequest {
 public:
  enum Kind {
    kNothing,                   // no action.
    kRegisterForEvent,          // start listening for instrumentation event.
    kUnregisterForEvent,        // stop listening for instrumentation event.
    kFullDeoptimization,        // deoptimize everything.
    kFullUndeoptimization,      // undeoptimize everything.
    kSelectiveDeoptimization,   // deoptimize one method.
    kSelectiveUndeoptimization  // undeoptimize one method.
  };

  DeoptimizationRequest() : kind_(kNothing), instrumentation_event_(0), method_(nullptr) {}

  DeoptimizationRequest(const DeoptimizationRequest& other)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : kind_(other.kind_), instrumentation_event_(other.instrumentation_event_) {
    // Create a new JNI global reference for the method.
    SetMethod(other.Method());
  }

  ArtMethod* Method() const REQUIRES_SHARED(Locks::mutator_lock_);

  void SetMethod(ArtMethod* m) REQUIRES_SHARED(Locks::mutator_lock_);

  // Name 'Kind()' would collide with the above enum name.
  Kind GetKind() const {
    return kind_;
  }

  void SetKind(Kind kind) {
    kind_ = kind;
  }

  uint32_t InstrumentationEvent() const {
    return instrumentation_event_;
  }

  void SetInstrumentationEvent(uint32_t instrumentation_event) {
    instrumentation_event_ = instrumentation_event;
  }

 private:
  Kind kind_;

  // TODO we could use a union to hold the instrumentation_event and the method since they are
  // only meaningful for kRegisterForEvent/kUnregisterForEvent and
  // kSelectiveDeoptimization/kSelectiveUndeoptimization, respectively.

  // Event to start or stop listening to. Only for kRegisterForEvent and kUnregisterForEvent.
  uint32_t instrumentation_event_;

  // Method for selective deoptimization.
  jmethodID method_;
};
std::ostream& operator<<(std::ostream& os, const DeoptimizationRequest::Kind& rhs);

class Dbg {
 public:
  static void SetJdwpAllowed(bool allowed);
  static bool IsJdwpAllowed();

  static void StartJdwp();
  static void StopJdwp();

  // Invoked by the GC in case we need to keep DDMS informed.
  static void GcDidFinish() REQUIRES(!Locks::mutator_lock_);

  // Return the DebugInvokeReq for the current thread.
  static DebugInvokeReq* GetInvokeReq();

  static Thread* GetDebugThread();
  static void ClearWaitForEventThread();

  /*
   * Enable/disable breakpoints and step modes. Used to provide a heads-up
   * when the debugger attaches.
   */
  static void Connected();
  static void GoActive()
      REQUIRES(!Locks::breakpoint_lock_, !Locks::deoptimization_lock_, !Locks::mutator_lock_);
  static void Disconnected() REQUIRES(!Locks::deoptimization_lock_, !Locks::mutator_lock_);
  static void Dispose() {
    gDisposed = true;
  }

  // Returns true if we're actually debugging with a real debugger, false if it's
  // just DDMS (or nothing at all).
  static bool IsDebuggerActive() {
    return gDebuggerActive;
  }

  // Configures JDWP with parsed command-line options.
  static void ConfigureJdwp(const JDWP::JdwpOptions& jdwp_options)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if we had -Xrunjdwp or -agentlib:jdwp= on the command line.
  static bool IsJdwpConfigured();

  // Returns true if a method has any breakpoints.
  static bool MethodHasAnyBreakpoints(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::breakpoint_lock_);

  static bool IsDisposed() {
    return gDisposed;
  }

  /*
   * Time, in milliseconds, since the last debugger activity. Does not
   * include DDMS activity. Returns -1 if there has been no activity.
   * Returns 0 if we're in the middle of handling a debugger request.
   */
  static int64_t LastDebuggerActivity();

  static void UndoDebuggerSuspensions()
      REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_);

  /*
   * Class, Object, Array
   */
  static std::string GetClassName(JDWP::RefTypeId id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static std::string GetClassName(mirror::Class* klass)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetClassObject(JDWP::RefTypeId id, JDWP::ObjectId* class_object_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetSuperclass(JDWP::RefTypeId id, JDWP::RefTypeId* superclass_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetClassLoader(JDWP::RefTypeId id, JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetModifiers(JDWP::RefTypeId id, JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetReflectedType(JDWP::RefTypeId class_id, JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void GetClassList(std::vector<JDWP::RefTypeId>* classes)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetClassInfo(JDWP::RefTypeId class_id, JDWP::JdwpTypeTag* pTypeTag,
                                      uint32_t* pStatus, std::string* pDescriptor)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void FindLoadedClassBySignature(const char* descriptor, std::vector<JDWP::RefTypeId>* ids)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetReferenceType(JDWP::ObjectId object_id, JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetSignature(JDWP::RefTypeId ref_type_id, std::string* signature)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetSourceDebugExtension(JDWP::RefTypeId ref_type_id,
                                                 std::string* extension_data)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetSourceFile(JDWP::RefTypeId ref_type_id, std::string* source_file)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetObjectTag(JDWP::ObjectId object_id, uint8_t* tag)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static size_t GetTagWidth(JDWP::JdwpTag tag);

  static JDWP::JdwpError GetArrayLength(JDWP::ObjectId array_id, int32_t* length)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError OutputArray(JDWP::ObjectId array_id, int offset, int count,
                                     JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError SetArrayElements(JDWP::ObjectId array_id, int offset, int count,
                                          JDWP::Request* request)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static JDWP::JdwpError CreateString(const std::string& str, JDWP::ObjectId* new_string_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError CreateObject(JDWP::RefTypeId class_id, JDWP::ObjectId* new_object_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError CreateArrayObject(JDWP::RefTypeId array_class_id, uint32_t length,
                                           JDWP::ObjectId* new_array_id)
      REQUIRES_SHARED(Locks::mutator_lock_);

  //
  // Event filtering.
  //
  static bool MatchThread(JDWP::ObjectId expected_thread_id, Thread* event_thread)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static bool MatchLocation(const JDWP::JdwpLocation& expected_location,
                            const JDWP::EventLocation& event_location)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static bool MatchType(ObjPtr<mirror::Class> event_class, JDWP::RefTypeId class_id)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static bool MatchField(JDWP::RefTypeId expected_type_id, JDWP::FieldId expected_field_id,
                         ArtField* event_field)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static bool MatchInstance(JDWP::ObjectId expected_instance_id, mirror::Object* event_instance)
      REQUIRES_SHARED(Locks::mutator_lock_);

  //
  // Monitors.
  //
  static JDWP::JdwpError GetMonitorInfo(JDWP::ObjectId object_id, JDWP::ExpandBuf* reply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetOwnedMonitors(JDWP::ObjectId thread_id,
                                          std::vector<JDWP::ObjectId>* monitors,
                                          std::vector<uint32_t>* stack_depths)
      REQUIRES(!Locks::thread_list_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetContendedMonitor(JDWP::ObjectId thread_id,
                                             JDWP::ObjectId* contended_monitor)
      REQUIRES(!Locks::thread_list_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  //
  // Heap.
  //
  static JDWP::JdwpError GetInstanceCounts(const std::vector<JDWP::RefTypeId>& class_ids,
                                           std::vector<uint64_t>* counts)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetInstances(JDWP::RefTypeId class_id, int32_t max_count,
                                      std::vector<JDWP::ObjectId>* instances)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetReferringObjects(JDWP::ObjectId object_id, int32_t max_count,
                                             std::vector<JDWP::ObjectId>* referring_objects)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError DisableCollection(JDWP::ObjectId object_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError EnableCollection(JDWP::ObjectId object_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError IsCollected(JDWP::ObjectId object_id, bool* is_collected)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void DisposeObject(JDWP::ObjectId object_id, uint32_t reference_count)
      REQUIRES_SHARED(Locks::mutator_lock_);

  //
  // Methods and fields.
  //
  static std::string GetMethodName(JDWP::MethodId method_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static bool IsMethodObsolete(JDWP::MethodId method_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError OutputDeclaredFields(JDWP::RefTypeId ref_type_id, bool with_generic,
                                              JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError OutputDeclaredMethods(JDWP::RefTypeId ref_type_id, bool with_generic,
                                               JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError OutputDeclaredInterfaces(JDWP::RefTypeId ref_type_id,
                                                  JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void OutputLineTable(JDWP::RefTypeId ref_type_id, JDWP::MethodId method_id,
                              JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void OutputVariableTable(JDWP::RefTypeId ref_type_id, JDWP::MethodId id, bool with_generic,
                                  JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void OutputMethodReturnValue(JDWP::MethodId method_id, const JValue* return_value,
                                      JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void OutputFieldValue(JDWP::FieldId field_id, const JValue* field_value,
                               JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetBytecodes(JDWP::RefTypeId class_id, JDWP::MethodId method_id,
                                      std::vector<uint8_t>* bytecodes)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static std::string GetFieldName(JDWP::FieldId field_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpTag GetFieldBasicTag(JDWP::FieldId field_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpTag GetStaticFieldBasicTag(JDWP::FieldId field_id)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetFieldValue(JDWP::ObjectId object_id, JDWP::FieldId field_id,
                                       JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError SetFieldValue(JDWP::ObjectId object_id, JDWP::FieldId field_id,
                                       uint64_t value, int width)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetStaticFieldValue(JDWP::RefTypeId ref_type_id, JDWP::FieldId field_id,
                                             JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError SetStaticFieldValue(JDWP::FieldId field_id, uint64_t value, int width)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static JDWP::JdwpError StringToUtf8(JDWP::ObjectId string_id, std::string* str)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void OutputJValue(JDWP::JdwpTag tag, const JValue* return_value, JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);

  /*
   * Thread, ThreadGroup, Frame
   */
  static JDWP::JdwpError GetThreadName(JDWP::ObjectId thread_id, std::string* name)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::thread_list_lock_);
  static JDWP::JdwpError GetThreadGroup(JDWP::ObjectId thread_id, JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::thread_list_lock_);
  static JDWP::JdwpError GetThreadGroupName(JDWP::ObjectId thread_group_id,
                                            JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetThreadGroupParent(JDWP::ObjectId thread_group_id,
                                              JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetThreadGroupChildren(JDWP::ObjectId thread_group_id,
                                                JDWP::ExpandBuf* pReply)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::ObjectId GetSystemThreadGroupId()
      REQUIRES_SHARED(Locks::mutator_lock_);

  static JDWP::JdwpThreadStatus ToJdwpThreadStatus(ThreadState state);
  static JDWP::JdwpError GetThreadStatus(JDWP::ObjectId thread_id,
                                         JDWP::JdwpThreadStatus* pThreadStatus,
                                         JDWP::JdwpSuspendStatus* pSuspendStatus)
      REQUIRES(!Locks::thread_list_lock_);
  static JDWP::JdwpError GetThreadDebugSuspendCount(JDWP::ObjectId thread_id,
                                                    JDWP::ExpandBuf* pReply)
      REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_);
  // static void WaitForSuspend(JDWP::ObjectId thread_id);

  // Fills 'thread_ids' with the threads in the given thread group. If thread_group_id == 0,
  // returns all threads.
  static void GetThreads(mirror::Object* thread_group, std::vector<JDWP::ObjectId>* thread_ids)
      REQUIRES(!Locks::thread_list_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  static JDWP::JdwpError GetThreadFrameCount(JDWP::ObjectId thread_id, size_t* result)
      REQUIRES(!Locks::thread_list_lock_);
  static JDWP::JdwpError GetThreadFrames(JDWP::ObjectId thread_id, size_t start_frame,
                                         size_t frame_count, JDWP::ExpandBuf* buf)
      REQUIRES(!Locks::thread_list_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  static JDWP::ObjectId GetThreadSelfId() REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::ObjectId GetThreadId(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_);

  static void SuspendVM()
      REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_);
  static void ResumeVM()
      REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_);
  static JDWP::JdwpError SuspendThread(JDWP::ObjectId thread_id, bool request_suspension = true)
      REQUIRES(!Locks::mutator_lock_, !Locks::thread_list_lock_,
               !Locks::thread_suspend_count_lock_);

  static void ResumeThread(JDWP::ObjectId thread_id)
      REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void SuspendSelf();

  static JDWP::JdwpError GetThisObject(JDWP::ObjectId thread_id, JDWP::FrameId frame_id,
                                       JDWP::ObjectId* result)
      REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError GetLocalValues(JDWP::Request* request, JDWP::ExpandBuf* pReply)
      REQUIRES(!Locks::thread_list_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError SetLocalValues(JDWP::Request* request)
      REQUIRES(!Locks::thread_list_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  static JDWP::JdwpError Interrupt(JDWP::ObjectId thread_id)
      REQUIRES(!Locks::thread_list_lock_);

  /*
   * Debugger notification
   */
  enum EventFlag {
    kBreakpoint  = 0x01,
    kSingleStep  = 0x02,
    kMethodEntry = 0x04,
    kMethodExit  = 0x08,
  };
  static void PostFieldAccessEvent(ArtMethod* m, int dex_pc, mirror::Object* this_object,
                                   ArtField* f)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void PostFieldModificationEvent(ArtMethod* m, int dex_pc,
                                         mirror::Object* this_object, ArtField* f,
                                         const JValue* field_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void PostException(mirror::Throwable* exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void UpdateDebugger(Thread* thread, mirror::Object* this_object,
                             ArtMethod* method, uint32_t new_dex_pc,
                             int event_flags, const JValue* return_value)
      REQUIRES(!Locks::breakpoint_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  // Indicates whether we need deoptimization for debugging.
  static bool RequiresDeoptimization();

  // Records a deoptimization request in the queue.
  static void RequestDeoptimization(const DeoptimizationRequest& req)
      REQUIRES(!Locks::deoptimization_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  // Manage deoptimization after updating the JDWP event list. Suspends all threads, processes each
  // request and finally resumes all threads.
  static void ManageDeoptimization()
      REQUIRES(!Locks::deoptimization_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  // Breakpoints.
  static void WatchLocation(const JDWP::JdwpLocation* pLoc, DeoptimizationRequest* req)
      REQUIRES(!Locks::breakpoint_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  static void UnwatchLocation(const JDWP::JdwpLocation* pLoc, DeoptimizationRequest* req)
      REQUIRES(!Locks::breakpoint_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  /*
   * Forced interpreter checkers for single-step and continue support.
   */

  // Indicates whether we need to force the use of the interpreter to invoke a method.
  // This allows single-stepping or continuing into the called method.
  static bool IsForcedInterpreterNeededForCalling(Thread* thread, ArtMethod* m)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsDebuggerActive()) {
      return false;
    }
    return IsForcedInterpreterNeededForCallingImpl(thread, m);
  }

  // Indicates whether we need to force the use of the interpreter entrypoint when calling a
  // method through the resolution trampoline. This allows single-stepping or continuing into
  // the called method.
  static bool IsForcedInterpreterNeededForResolution(Thread* thread, ArtMethod* m)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsDebuggerActive()) {
      return false;
    }
    return IsForcedInterpreterNeededForResolutionImpl(thread, m);
  }

  // Indicates whether we need to force the use of the instrumentation entrypoint when calling
  // a method through the resolution trampoline. This allows the stack to be deoptimized for
  // debugging when we return from the called method.
  static bool IsForcedInstrumentationNeededForResolution(Thread* thread, ArtMethod* m)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsDebuggerActive()) {
      return false;
    }
    return IsForcedInstrumentationNeededForResolutionImpl(thread, m);
  }

  // Indicates whether we need to force the use of the interpreter when returning from the
  // interpreter into the runtime. This allows us to deoptimize the stack and continue
  // execution with the interpreter for debugging.
  static bool IsForcedInterpreterNeededForUpcall(Thread* thread, ArtMethod* m)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsDebuggerActive() && !thread->HasDebuggerShadowFrames()) {
      return false;
    }
    return IsForcedInterpreterNeededForUpcallImpl(thread, m);
  }

  // Indicates whether we need to force the use of the interpreter when handling an
  // exception. This allows us to deoptimize the stack and continue execution with
  // the interpreter.
  // Note: the interpreter will start by handling the exception when executing
  // the deoptimized frames.
  static bool IsForcedInterpreterNeededForException(Thread* thread)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsDebuggerActive() && !thread->HasDebuggerShadowFrames()) {
      return false;
    }
    return IsForcedInterpreterNeededForExceptionImpl(thread);
  }

  // Single-stepping.
  static JDWP::JdwpError ConfigureStep(JDWP::ObjectId thread_id, JDWP::JdwpStepSize size,
                                       JDWP::JdwpStepDepth depth)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void UnconfigureStep(JDWP::ObjectId thread_id)
      REQUIRES(!Locks::thread_list_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  /*
   * Invoke support
   */

  // Called by the JDWP thread to prepare invocation in the event thread (suspended on an event).
  // If the information sent by the debugger is incorrect, it will send a reply with the
  // appropriate error code. Otherwise, it will attach a DebugInvokeReq object to the event thread
  // and resume it (and possibly other threads depending on the invoke options).
  // Unlike other commands, the JDWP thread will not send the reply to the debugger (see
  // JdwpState::ProcessRequest). The reply will be sent by the event thread itself after method
  // invocation completes (see FinishInvokeMethod). This is required to allow the JDWP thread to
  // process incoming commands from the debugger while the invocation is still in progress in the
  // event thread, especially if it gets suspended by a debug event occurring in another thread.
  static JDWP::JdwpError PrepareInvokeMethod(uint32_t request_id, JDWP::ObjectId thread_id,
                                             JDWP::ObjectId object_id, JDWP::RefTypeId class_id,
                                             JDWP::MethodId method_id, uint32_t arg_count,
                                             uint64_t arg_values[], JDWP::JdwpTag* arg_types,
                                             uint32_t options)
      REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called by the event thread to execute a method prepared by the JDWP thread in the given
  // DebugInvokeReq object. Once the invocation completes, the event thread attaches a reply
  // to that DebugInvokeReq object so it can be sent to the debugger only when the event thread
  // is ready to suspend (see FinishInvokeMethod).
  static void ExecuteMethod(DebugInvokeReq* pReq);

  // Called by the event thread to send the reply of the invoke (created in ExecuteMethod)
  // before suspending itself. This is to ensure the thread is ready to suspend before the
  // debugger receives the reply.
  static void FinishInvokeMethod(DebugInvokeReq* pReq);

  /*
   * DDM support.
   */
  static void DdmSendThreadNotification(Thread* t, uint32_t type)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void DdmSetThreadNotification(bool enable)
      REQUIRES(!Locks::thread_list_lock_);
  static bool DdmHandleChunk(
      JNIEnv* env,
      uint32_t type,
      const ArrayRef<const jbyte>& data,
      /*out*/uint32_t* out_type,
      /*out*/std::vector<uint8_t>* out_data);
  static bool DdmHandlePacket(JDWP::Request* request, uint8_t** pReplyBuf, int* pReplyLen);
  static void DdmConnected() REQUIRES_SHARED(Locks::mutator_lock_);
  static void DdmDisconnected() REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit breakpoint roots, used to prevent unloading of methods with breakpoints.
  static void VisitRoots(RootVisitor* visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  /*
   * Allocation tracking support.
   */
  static void SetAllocTrackingEnabled(bool enabled) REQUIRES(!Locks::alloc_tracker_lock_);
  static jbyteArray GetRecentAllocations()
      REQUIRES(!Locks::alloc_tracker_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  static void DumpRecentAllocations() REQUIRES(!Locks::alloc_tracker_lock_);

  enum HpifWhen {
    HPIF_WHEN_NEVER = 0,
    HPIF_WHEN_NOW = 1,
    HPIF_WHEN_NEXT_GC = 2,
    HPIF_WHEN_EVERY_GC = 3
  };
  static int DdmHandleHpifChunk(HpifWhen when)
      REQUIRES_SHARED(Locks::mutator_lock_);

  enum HpsgWhen {
    HPSG_WHEN_NEVER = 0,
    HPSG_WHEN_EVERY_GC = 1,
  };
  enum HpsgWhat {
    HPSG_WHAT_MERGED_OBJECTS = 0,
    HPSG_WHAT_DISTINCT_OBJECTS = 1,
  };
  static bool DdmHandleHpsgNhsgChunk(HpsgWhen when, HpsgWhat what, bool native);

  static void DdmSendHeapInfo(HpifWhen reason)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void DdmSendHeapSegments(bool native)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static ObjectRegistry* GetObjectRegistry() {
    return gRegistry;
  }

  static JDWP::JdwpTag TagFromObject(const ScopedObjectAccessUnchecked& soa, mirror::Object* o)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static JDWP::JdwpTypeTag GetTypeTag(ObjPtr<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static JDWP::FieldId ToFieldId(const ArtField* f)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void SetJdwpLocation(JDWP::JdwpLocation* location, ArtMethod* m, uint32_t dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_);

  static JDWP::JdwpState* GetJdwpState();

  static uint32_t GetInstrumentationEvents() REQUIRES_SHARED(Locks::mutator_lock_) {
    return instrumentation_events_;
  }

  static ThreadLifecycleCallback* GetThreadLifecycleCallback() {
    return &thread_lifecycle_callback_;
  }
  static ClassLoadCallback* GetClassLoadCallback() {
    return &class_load_callback_;
  }

 private:
  static void ExecuteMethodWithoutPendingException(ScopedObjectAccess& soa, DebugInvokeReq* pReq)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void BuildInvokeReply(JDWP::ExpandBuf* pReply, uint32_t request_id,
                               JDWP::JdwpTag result_tag, uint64_t result_value,
                               JDWP::ObjectId exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static JDWP::JdwpError GetLocalValue(const StackVisitor& visitor,
                                       ScopedObjectAccessUnchecked& soa, int slot,
                                       JDWP::JdwpTag tag, uint8_t* buf, size_t width)
      REQUIRES(!Locks::thread_list_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  static JDWP::JdwpError SetLocalValue(Thread* thread, StackVisitor& visitor, int slot,
                                       JDWP::JdwpTag tag, uint64_t value, size_t width)
      REQUIRES(!Locks::thread_list_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  static void DdmBroadcast(bool connect) REQUIRES_SHARED(Locks::mutator_lock_);

  static void PostThreadStart(Thread* t)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void PostThreadDeath(Thread* t)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void PostThreadStartOrStop(Thread*, uint32_t)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void PostClassPrepare(mirror::Class* c)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void PostLocationEvent(ArtMethod* method, int pcOffset,
                                mirror::Object* thisPtr, int eventFlags,
                                const JValue* return_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void ProcessDeoptimizationRequest(const DeoptimizationRequest& request)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_);

  static void RequestDeoptimizationLocked(const DeoptimizationRequest& req)
      REQUIRES(Locks::deoptimization_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  static bool IsForcedInterpreterNeededForCallingImpl(Thread* thread, ArtMethod* m)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static bool IsForcedInterpreterNeededForResolutionImpl(Thread* thread, ArtMethod* m)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static bool IsForcedInstrumentationNeededForResolutionImpl(Thread* thread, ArtMethod* m)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static bool IsForcedInterpreterNeededForUpcallImpl(Thread* thread, ArtMethod* m)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static bool IsForcedInterpreterNeededForExceptionImpl(Thread* thread)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Indicates whether the debugger is making requests.
  static bool gDebuggerActive;

  static DebuggerActiveMethodInspectionCallback gDebugActiveCallback;
  static DebuggerDdmCallback gDebugDdmCallback;
  static InternalDebuggerControlCallback gDebuggerControlCallback;

  // Indicates whether we should drop the JDWP connection because the runtime stops or the
  // debugger called VirtualMachine.Dispose.
  static bool gDisposed;

  // The registry mapping objects to JDWP ids.
  static ObjectRegistry* gRegistry;

  // Deoptimization requests to be processed each time the event list is updated. This is used when
  // registering and unregistering events so we do not deoptimize while holding the event list
  // lock.
  // TODO rename to instrumentation_requests.
  static std::vector<DeoptimizationRequest> deoptimization_requests_
      GUARDED_BY(Locks::deoptimization_lock_);

  // Count the number of events requiring full deoptimization. When the counter is > 0, everything
  // is deoptimized, otherwise everything is undeoptimized.
  // Note: we fully deoptimize on the first event only (when the counter is set to 1). We fully
  // undeoptimize when the last event is unregistered (when the counter is set to 0).
  static size_t full_deoptimization_event_count_ GUARDED_BY(Locks::deoptimization_lock_);

  static size_t* GetReferenceCounterForEvent(uint32_t instrumentation_event);

  // Instrumentation event reference counters.
  // TODO we could use an array instead of having all these dedicated counters. Instrumentation
  // events are bits of a mask so we could convert them to an array index.
  static size_t dex_pc_change_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static size_t method_enter_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static size_t method_exit_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static size_t field_read_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static size_t field_write_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static size_t exception_catch_event_ref_count_ GUARDED_BY(Locks::deoptimization_lock_);
  static uint32_t instrumentation_events_ GUARDED_BY(Locks::mutator_lock_);

  class DbgThreadLifecycleCallback : public ThreadLifecycleCallback {
   public:
    void ThreadStart(Thread* self) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
    void ThreadDeath(Thread* self) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
  };

  class DbgClassLoadCallback : public ClassLoadCallback {
   public:
    void ClassLoad(Handle<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
    void ClassPrepare(Handle<mirror::Class> temp_klass,
                      Handle<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
  };

  static DbgThreadLifecycleCallback thread_lifecycle_callback_;
  static DbgClassLoadCallback class_load_callback_;

  DISALLOW_COPY_AND_ASSIGN(Dbg);
};

#define CHUNK_TYPE(_name) \
    static_cast<uint32_t>((_name)[0] << 24 | (_name)[1] << 16 | (_name)[2] << 8 | (_name)[3])

}  // namespace art

#endif  // ART_RUNTIME_DEBUGGER_H_
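
// Illustrative sketch: how the deoptimization API declared above could be driven from the
// JDWP event-registration path. The surrounding context (a caller on the JDWP thread that
// holds neither Locks::deoptimization_lock_ nor the event list lock) is assumed here for
// illustration only; it is not prescribed by this header.
//
//   DeoptimizationRequest req;
//   req.SetKind(DeoptimizationRequest::kFullDeoptimization);
//   Dbg::RequestDeoptimization(req);  // Queue the request; nothing is deoptimized yet.
//   Dbg::ManageDeoptimization();      // Suspend all threads, process the queued requests,
//                                     // then resume all threads.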