instrumentation.cc revision e246f33605a2d6b96f2c7221fa0165dc067c3079
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "arch/context.h"
#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

// Flip to true for chatty LOG(INFO) tracing from the stack-walking visitors
// below (frame-by-frame install/remove of exit stubs).
const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

// ClassLinker::VisitClasses callback: installs/uninstalls stubs for every
// method of |klass|. |arg| is the Instrumentation instance driving the visit.
static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

// Installs the appropriate entrypoint stubs on every direct and virtual method
// of |klass|. Always returns true so class visitation continues.
bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

// Writes the quick and portable entrypoints into |method| and keeps the
// method's "portable compiled" flag and interpreter-bridge entrypoint
// consistent with the code being installed.
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    // Installed code is not portable anymore; drop the stale flag.
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    // If the method will execute in the interpreter (directly, via the
    // trampoline, or because interpret-only is forced), wire the
    // interpreter-to-interpreter bridge; otherwise use the
    // interpreter-to-compiled-code bridge.
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
        (quick_code == class_linker->GetQuickResolutionTrampoline() &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        // Quick and portable entrypoints must be switched as a matched pair.
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) {
          DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline());
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

// Chooses and installs the entrypoints for a single method according to the
// current instrumentation level (none / entry-exit stubs / full interpreter).
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  // "Uninstall" means no instrumentation stubs are required at all.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      // Uninitialized-class static method: keep the resolution trampoline.
      new_portable_code = class_linker->GetPortableResolutionTrampoline();
      new_quick_code = class_linker->GetQuickResolutionTrampoline();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline());
        }
      } else {
        new_portable_code = class_linker->GetPortableResolutionTrampoline();
        new_quick_code = class_linker->GetQuickResolutionTrampoline();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        // Shadow (interpreter/portable) frame: record it so method-enter
        // events can be generated later, but there is no return PC to patch.
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so method enter event must have been reported. However we
          // need to push a DEX pc into the dex_pcs_ list to match size of instrumentation stack.
          // Since we won't report method entry here, we can safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << " Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  // One recorded dex pc per instrumentation frame, or the walk went wrong.
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    // Walk both stacks from the outermost frame inward, interleaving shadow
    // frames by frame id so events come out in call order.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          // Restore the original return address that the exit stub replaced.
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit the result is 0.
            // We only do this if no debugger is attached to prevent from posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    // Every recorded frame must have been found and un-patched.
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

// Registers |listener| for each event bit set in |events|. Requires the
// mutator lock held exclusively (all mutators suspended), which is what makes
// the plain flag/list updates below safe.
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  // The remaining listener lists are updated copy-on-write: build a new list,
  // then swap it into the unique_ptr, rather than mutating in place.
  if ((events & kDexPcMoved) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_dex_pc_listeners_) {
      modified = new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    dex_pc_listeners_.reset(modified);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_read_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_read_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_read_listeners_.reset(modified);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_write_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_write_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_write_listeners_.reset(modified);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_exception_caught_listeners_) {
      modified = new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    exception_caught_listeners_.reset(modified);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

// Unregisters |listener| from each event bit set in |events|, clearing the
// corresponding have_*_listeners_ flag when a list becomes empty. Same
// locking contract and copy-on-write scheme as AddListener.
void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    if (have_method_entry_listeners_) {
      method_entry_listeners_.remove(listener);
      have_method_entry_listeners_ = !method_entry_listeners_.empty();
    }
  }
  if ((events & kMethodExited) != 0) {
    if (have_method_exit_listeners_) {
      method_exit_listeners_.remove(listener);
      have_method_exit_listeners_ = !method_exit_listeners_.empty();
    }
  }
  if ((events & kMethodUnwind) != 0) {
    if (have_method_unwind_listeners_) {
      method_unwind_listeners_.remove(listener);
      have_method_unwind_listeners_ = !method_unwind_listeners_.empty();
    }
  }
  if ((events & kDexPcMoved) != 0) {
    if (have_dex_pc_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
      modified->remove(listener);
      have_dex_pc_listeners_ = !modified->empty();
      if (have_dex_pc_listeners_) {
        dex_pc_listeners_.reset(modified);
      } else {
        dex_pc_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldRead) != 0) {
    if (have_field_read_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_read_listeners_.get());
      modified->remove(listener);
      have_field_read_listeners_ = !modified->empty();
      if (have_field_read_listeners_) {
        field_read_listeners_.reset(modified);
      } else {
        field_read_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldWritten) != 0) {
    if (have_field_write_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_write_listeners_.get());
      modified->remove(listener);
      have_field_write_listeners_ = !modified->empty();
      if (have_field_write_listeners_) {
        field_write_listeners_.reset(modified);
      } else {
        field_write_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kExceptionCaught) != 0) {
    if (have_exception_caught_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
      modified->remove(listener);
      have_exception_caught_listeners_ = !modified->empty();
      if (have_exception_caught_listeners_) {
        exception_caught_listeners_.reset(modified);
      } else {
        exception_caught_listeners_.reset();
        delete modified;
      }
    }
  }
  UpdateInterpreterHandlerTable();
}

// Moves the runtime between instrumentation levels:
//   0 = no stubs, 1 = entry/exit stubs, 2 = full interpreter.
// Installs or removes stubs on all classes and patches/restores every
// thread's stack as needed. No-op when the desired level is already current.
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

// ThreadList::ForEach callback; |arg| is unused.
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

// Switches every thread's quick allocation entrypoints between the
// instrumented and uninstrumented variants, under suspend-all when the
// runtime is running.
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

// Reference-counted enable of instrumented alloc entrypoints: only the
// 0 -> 1 transition actually switches the entrypoints.
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true);
  }
}

// Reference-counted disable: only the 1 -> 0 transition switches back.
void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false);
  }
}

// Re-derives each thread's quick alloc entrypoints from the current
// instrumented/uninstrumented setting.
void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

// Installs new compiled code for |method|, substituting instrumentation or
// interpreter entrypoints instead when the current instrumentation state
// requires them.
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (quick_code == class_linker->GetQuickResolutionTrampoline() ||
          quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
          quick_code == GetQuickToInterpreterBridge()) {
        // Keep trampolines/bridges as-is; they are instrumentation-aware.
        DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) ||
               (portable_code == GetPortableToInterpreterBridge()));
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
        new_portable_code = GetPortableToInterpreterBridge();
        new_have_portable_code = false;
      } else {
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

// Adds |method| to the deoptimized-methods multimap (keyed by identity hash).
// Returns false if it was already present. Caller holds
// deoptimized_methods_lock_ for writing.
bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
  // Note that the insert() below isn't read barrier-aware. So, this
  // FindDeoptimizedMethod() call is necessary or else we would end up
  // storing the same method twice in the map (the from-space and the
  // to-space ones).
  if (FindDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  int32_t hash_code = method->IdentityHashCode();
  deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
  return true;
}

// Returns true if |method| is in the deoptimized-methods map. Scans the
// hash bucket because identity hash codes can collide.
bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found.
      return true;
    }
  }
  // Not found.
  return false;
}

// Returns an arbitrary deoptimized method (the map's first entry), or
// nullptr when none remain. Used to drain the map in DisableDeoptimization.
mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  auto it = deoptimized_methods_.begin();
  if (it == deoptimized_methods_.end()) {
    // Empty.
    return nullptr;
  }
  return it->second.Read();
}

// Removes |method| from the deoptimized-methods map; returns whether it was
// found. Caller holds deoptimized_methods_lock_ for writing.
bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found. Erase and return.
      deoptimized_methods_.erase(it);
      return true;
    }
  }
  // Not found.
  return false;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

// Forces |method| to run in the interpreter by recording it as deoptimized
// and installing the instrumentation entrypoint plus exit stubs on all
// threads' stacks.
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
                                    << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

// Reverses Deoptimize() for |method|: restores its compiled code (or the
// resolution trampoline) and, when no deoptimized methods remain, restores
// every thread's stack.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
                            << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline.
      UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(),
                        class_linker->GetPortableResolutionTrampoline(), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

// Returns whether |method| is currently deoptimized (thread-safe; takes the
// deoptimized-methods lock for reading).
bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return FindDeoptimizedMethod(method);
}

// Marks deoptimization support active. Must be called with no methods
// currently deoptimized and deoptimization not already enabled.
void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

// Tears down deoptimization: undoes full deoptimization if active, then
// undeoptimizes each remaining method one at a time (re-taking the lock per
// iteration because Undeoptimize acquires it itself).
void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimized selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

// Forces everything to run in the interpreter (level 2 in ConfigureStubs).
void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

// Drops full-interpreter mode back to no instrumentation.
void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

// Enables method tracing via either full deoptimization (accurate events) or
// entry/exit stubs, per kDeoptimizeForAccurateMethodEntryExitListeners.
void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

// Returns the quick code |method| would run if it were not instrumented:
// the installed entrypoint when it is real code, otherwise the oat code
// looked up through the class linker.
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCode();
    DCHECK(code != nullptr);
    ClassLinker* class_linker = runtime->GetClassLinker();
    if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) &&
        LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) &&
        LIKELY(code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it
== method_entry_listeners_.end()); 884 // Implemented this way to prevent problems caused by modification of the list while iterating. 885 while (!is_end) { 886 InstrumentationListener* cur = *it; 887 ++it; 888 is_end = (it == method_entry_listeners_.end()); 889 cur->MethodEntered(thread, this_object, method, dex_pc); 890 } 891} 892 893void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object, 894 mirror::ArtMethod* method, 895 uint32_t dex_pc, const JValue& return_value) const { 896 auto it = method_exit_listeners_.begin(); 897 bool is_end = (it == method_exit_listeners_.end()); 898 // Implemented this way to prevent problems caused by modification of the list while iterating. 899 while (!is_end) { 900 InstrumentationListener* cur = *it; 901 ++it; 902 is_end = (it == method_exit_listeners_.end()); 903 cur->MethodExited(thread, this_object, method, dex_pc, return_value); 904 } 905} 906 907void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object, 908 mirror::ArtMethod* method, 909 uint32_t dex_pc) const { 910 if (have_method_unwind_listeners_) { 911 for (InstrumentationListener* listener : method_unwind_listeners_) { 912 listener->MethodUnwind(thread, this_object, method, dex_pc); 913 } 914 } 915} 916 917void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object, 918 mirror::ArtMethod* method, 919 uint32_t dex_pc) const { 920 if (HasDexPcListeners()) { 921 std::shared_ptr<std::list<InstrumentationListener*>> original(dex_pc_listeners_); 922 for (InstrumentationListener* listener : *original.get()) { 923 listener->DexPcMoved(thread, this_object, method, dex_pc); 924 } 925 } 926} 927 928void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object, 929 mirror::ArtMethod* method, uint32_t dex_pc, 930 mirror::ArtField* field) const { 931 if (HasFieldReadListeners()) { 932 std::shared_ptr<std::list<InstrumentationListener*>> original(field_read_listeners_); 933 for 
(InstrumentationListener* listener : *original.get()) { 934 listener->FieldRead(thread, this_object, method, dex_pc, field); 935 } 936 } 937} 938 939void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object, 940 mirror::ArtMethod* method, uint32_t dex_pc, 941 mirror::ArtField* field, const JValue& field_value) const { 942 if (HasFieldWriteListeners()) { 943 std::shared_ptr<std::list<InstrumentationListener*>> original(field_write_listeners_); 944 for (InstrumentationListener* listener : *original.get()) { 945 listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value); 946 } 947 } 948} 949 950void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location, 951 mirror::ArtMethod* catch_method, 952 uint32_t catch_dex_pc, 953 mirror::Throwable* exception_object) const { 954 if (HasExceptionCaughtListeners()) { 955 DCHECK_EQ(thread->GetException(nullptr), exception_object); 956 bool is_exception_reported = thread->IsExceptionReportedToInstrumentation(); 957 thread->ClearException(); 958 std::shared_ptr<std::list<InstrumentationListener*>> original(exception_caught_listeners_); 959 for (InstrumentationListener* listener : *original.get()) { 960 listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, 961 exception_object); 962 } 963 thread->SetException(throw_location, exception_object); 964 thread->SetExceptionReportedToInstrumentation(is_exception_reported); 965 } 966} 967 968static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame, 969 int delta) 970 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 971 size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta; 972 if (frame_id != instrumentation_frame.frame_id_) { 973 LOG(ERROR) << "Expected frame_id=" << frame_id << " but found " 974 << instrumentation_frame.frame_id_; 975 StackVisitor::DescribeStack(self); 976 CHECK_EQ(frame_id, instrumentation_frame.frame_id_); 977 } 978} 
979 980void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object, 981 mirror::ArtMethod* method, 982 uintptr_t lr, bool interpreter_entry) { 983 // We have a callee-save frame meaning this value is guaranteed to never be 0. 984 size_t frame_id = StackVisitor::ComputeNumFrames(self); 985 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack(); 986 if (kVerboseInstrumentation) { 987 LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr); 988 } 989 instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr, 990 frame_id, interpreter_entry); 991 stack->push_front(instrumentation_frame); 992 993 if (!interpreter_entry) { 994 MethodEnterEvent(self, this_object, method, 0); 995 } 996} 997 998TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc, 999 uint64_t gpr_result, 1000 uint64_t fpr_result) { 1001 // Do the pop. 1002 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack(); 1003 CHECK_GT(stack->size(), 0U); 1004 InstrumentationStackFrame instrumentation_frame = stack->front(); 1005 stack->pop_front(); 1006 1007 // Set return PC and check the sanity of the stack. 1008 *return_pc = instrumentation_frame.return_pc_; 1009 CheckStackDepth(self, instrumentation_frame, 0); 1010 1011 mirror::ArtMethod* method = instrumentation_frame.method_; 1012 uint32_t length; 1013 char return_shorty = method->GetShorty(&length)[0]; 1014 JValue return_value; 1015 if (return_shorty == 'V') { 1016 return_value.SetJ(0); 1017 } else if (return_shorty == 'F' || return_shorty == 'D') { 1018 return_value.SetJ(fpr_result); 1019 } else { 1020 return_value.SetJ(gpr_result); 1021 } 1022 // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to 1023 // return_pc. 
1024 uint32_t dex_pc = DexFile::kDexNoIndex; 1025 mirror::Object* this_object = instrumentation_frame.this_object_; 1026 if (!instrumentation_frame.interpreter_entry_) { 1027 MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value); 1028 } 1029 1030 // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get 1031 // back to an upcall. 1032 NthCallerVisitor visitor(self, 1, true); 1033 visitor.WalkStack(true); 1034 bool deoptimize = (visitor.caller != NULL) && 1035 (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller)); 1036 if (deoptimize && kVerboseInstrumentation) { 1037 LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller); 1038 } 1039 if (deoptimize) { 1040 if (kVerboseInstrumentation) { 1041 LOG(INFO) << "Deoptimizing from " << PrettyMethod(method) 1042 << " result is " << std::hex << return_value.GetJ(); 1043 } 1044 self->SetDeoptimizationReturnValue(return_value); 1045 return GetTwoWordSuccessValue(*return_pc, 1046 reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint())); 1047 } else { 1048 if (kVerboseInstrumentation) { 1049 LOG(INFO) << "Returning from " << PrettyMethod(method) 1050 << " to PC " << reinterpret_cast<void*>(*return_pc); 1051 } 1052 return GetTwoWordSuccessValue(0, *return_pc); 1053 } 1054} 1055 1056void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const { 1057 // Do the pop. 
1058 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack(); 1059 CHECK_GT(stack->size(), 0U); 1060 InstrumentationStackFrame instrumentation_frame = stack->front(); 1061 // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2); 1062 stack->pop_front(); 1063 1064 mirror::ArtMethod* method = instrumentation_frame.method_; 1065 if (is_deoptimization) { 1066 if (kVerboseInstrumentation) { 1067 LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method); 1068 } 1069 } else { 1070 if (kVerboseInstrumentation) { 1071 LOG(INFO) << "Popping for unwind " << PrettyMethod(method); 1072 } 1073 1074 // Notify listeners of method unwind. 1075 // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to 1076 // return_pc. 1077 uint32_t dex_pc = DexFile::kDexNoIndex; 1078 MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc); 1079 } 1080} 1081 1082void Instrumentation::VisitRoots(RootCallback* callback, void* arg) { 1083 WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_); 1084 if (IsDeoptimizedMethodsEmpty()) { 1085 return; 1086 } 1087 for (auto pair : deoptimized_methods_) { 1088 pair.second.VisitRoot(callback, arg, 0, kRootVMInternal); 1089 } 1090} 1091 1092std::string InstrumentationStackFrame::Dump() const { 1093 std::ostringstream os; 1094 os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":" 1095 << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_); 1096 return os.str(); 1097} 1098 1099} // namespace instrumentation 1100} // namespace art 1101