instrumentation.cc revision 63bc11efaac0c041e849ab401f9fc368631a00f5
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "instrumentation.h"
18
19#include <sys/uio.h>
20
21#include "arch/context.h"
22#include "atomic.h"
23#include "base/unix_file/fd_file.h"
24#include "class_linker.h"
25#include "debugger.h"
26#include "dex_file-inl.h"
27#include "entrypoints/quick/quick_alloc_entrypoints.h"
28#include "gc_root-inl.h"
29#include "interpreter/interpreter.h"
30#include "mirror/art_method-inl.h"
31#include "mirror/class-inl.h"
32#include "mirror/dex_cache.h"
33#include "mirror/object_array-inl.h"
34#include "mirror/object-inl.h"
35#include "nth_caller_visitor.h"
36#if !defined(ART_USE_PORTABLE_COMPILER)
37#include "entrypoints/quick/quick_entrypoints.h"
38#endif
39#include "os.h"
40#include "scoped_thread_state_change.h"
41#include "thread.h"
42#include "thread_list.h"
43
44namespace art {
45
46namespace instrumentation {
47
48const bool kVerboseInstrumentation = false;
49
50// Do we want to deoptimize for method entry and exit listeners or just try to intercept
51// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
52// application's performance.
53static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;
54
55static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
56    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
57  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
58  return instrumentation->InstallStubsForClass(klass);
59}
60
// Everything starts disabled: no stubs installed, no listeners registered,
// no methods deoptimized, and the interpreter on the main (uninstrumented)
// handler table. Listeners/stubs are enabled later via AddListener and
// ConfigureStubs.
Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}
74
75bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
76  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
77    InstallStubsForMethod(klass->GetDirectMethod(i));
78  }
79  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
80    InstallStubsForMethod(klass->GetVirtualMethod(i));
81  }
82  return true;
83}
84
// Installs a method's quick (and, when built with the portable compiler,
// portable) entrypoints, then keeps the interpreter-bridge entrypoint
// consistent with them: methods whose quick code routes into the interpreter
// get the interpreter-to-interpreter bridge, everything else gets the
// interpreter-to-compiled-code bridge.
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(ART_USE_PORTABLE_COMPILER)
  method->SetEntryPointFromPortableCompiledCode(portable_code);
#endif
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  // Keep the is-portable-compiled flag in sync with the code being installed.
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    // The method will execute in the interpreter when its quick code is (or
    // leads to) the interpreter bridge, or when interpret-only is forced and
    // the method still sits on the resolution trampoline (native and proxy
    // methods excluded — they cannot be interpreted).
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
        (quick_code == class_linker->GetQuickResolutionTrampoline() &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        // Debug-only: the portable entrypoint must agree with the quick one.
        if (quick_code == GetQuickToInterpreterBridge()) {
#if defined(ART_USE_PORTABLE_COMPILER)
          DCHECK(portable_code == GetPortableToInterpreterBridge());
#endif
        } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) {
#if defined(ART_USE_PORTABLE_COMPILER)
          DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline());
#endif
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}
124
// Chooses and installs the entrypoints one method should use for the current
// instrumentation level: original code (uninstall), entry/exit stubs, or the
// interpreter bridge. Abstract/proxy methods and Proxy.<init> are left alone.
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  // "uninstall" means we are restoring original, uninstrumented code.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
#if !defined(ART_USE_PORTABLE_COMPILER)
  new_portable_code = nullptr;
#endif
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Still interpreted even while uninstalling: forced interpret-only mode
      // or a per-method deoptimization remains in effect.
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = GetPortableToInterpreterBridge();
#endif
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      // Class initialization is not a concern here: restore the compiled code.
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
#endif
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      // Static method of an uninitialized class: point back at the resolution
      // trampoline, which triggers class initialization on first call.
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = class_linker->GetPortableResolutionTrampoline();
#endif
      new_quick_code = class_linker->GetQuickResolutionTrampoline();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      // Full interpretation requested (globally or for this method).
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = GetPortableToInterpreterBridge();
#endif
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
#if defined(ART_USE_PORTABLE_COMPILER)
          new_portable_code = GetPortableToInterpreterBridge();
#endif
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
#if defined(ART_USE_PORTABLE_COMPILER)
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
#endif
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline());
        }
      } else {
#if defined(ART_USE_PORTABLE_COMPILER)
        new_portable_code = class_linker->GetPortableResolutionTrampoline();
#endif
        new_quick_code = class_linker->GetQuickResolutionTrampoline();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}
195
196// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
197// deoptimization of quick frames to interpreter frames.
198// Since we may already have done this previously, we need to push new instrumentation frame before
199// existing instrumentation frames.
// Walks |thread|'s stack, rewriting every quick frame's return PC to the
// instrumentation exit stub and recording the original PCs on the thread's
// instrumentation stack; afterwards, optionally reports method-enter events
// for every frame found. |arg| is the Instrumentation instance.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context),  instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        // Shadow frame (no quick frame): there is no return PC to hijack.
        // Record it so method-enter events can be generated for it later.
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          // Runtime frame already returning to the exit stub: an existing
          // quick-to-interpreter transition frame. Keep the bookkeeping lists
          // in lock-step without re-reporting it.
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so method enter event must have been reported. However we
          // need to push a DEX pc into the dex_pcs_ list to match size of instrumentation stack.
          // Since we won't report method entry here, we can safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      // The DEX pc of the caller frame is derived from the return PC recorded
      // for the previous (callee-side) frame visit.
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    // Events are emitted outermost-first; shadow frames are interleaved with
    // instrumentation frames by frame id.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}
337
338// Removes the instrumentation exit pc as the return PC for every quick frame.
// Walks |thread|'s stack, restoring each hijacked return PC from the thread's
// instrumentation stack, optionally reporting synthetic method-exit events,
// then clears the instrumentation stack. |arg| is the Instrumentation instance.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        // Nothing left to restore.
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
              << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      // Find the instrumentation frame recorded for this stack frame (matched
      // by frame id) and restore its saved return PC.
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            // Interpreter-entry frames are recorded against the callee-save
            // runtime method rather than the Java method.
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit the result is 0.
            // We only do this if no debugger is attached to prevent from posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    // Every recorded instrumentation frame must have been matched and removed.
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}
423
424void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
425  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
426  if ((events & kMethodEntered) != 0) {
427    method_entry_listeners_.push_back(listener);
428    have_method_entry_listeners_ = true;
429  }
430  if ((events & kMethodExited) != 0) {
431    method_exit_listeners_.push_back(listener);
432    have_method_exit_listeners_ = true;
433  }
434  if ((events & kMethodUnwind) != 0) {
435    method_unwind_listeners_.push_back(listener);
436    have_method_unwind_listeners_ = true;
437  }
438  if ((events & kDexPcMoved) != 0) {
439    std::list<InstrumentationListener*>* modified;
440    if (have_dex_pc_listeners_) {
441      modified = new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
442    } else {
443      modified = new std::list<InstrumentationListener*>();
444    }
445    modified->push_back(listener);
446    dex_pc_listeners_.reset(modified);
447    have_dex_pc_listeners_ = true;
448  }
449  if ((events & kFieldRead) != 0) {
450    std::list<InstrumentationListener*>* modified;
451    if (have_field_read_listeners_) {
452      modified = new std::list<InstrumentationListener*>(*field_read_listeners_.get());
453    } else {
454      modified = new std::list<InstrumentationListener*>();
455    }
456    modified->push_back(listener);
457    field_read_listeners_.reset(modified);
458    have_field_read_listeners_ = true;
459  }
460  if ((events & kFieldWritten) != 0) {
461    std::list<InstrumentationListener*>* modified;
462    if (have_field_write_listeners_) {
463      modified = new std::list<InstrumentationListener*>(*field_write_listeners_.get());
464    } else {
465      modified = new std::list<InstrumentationListener*>();
466    }
467    modified->push_back(listener);
468    field_write_listeners_.reset(modified);
469    have_field_write_listeners_ = true;
470  }
471  if ((events & kExceptionCaught) != 0) {
472    std::list<InstrumentationListener*>* modified;
473    if (have_exception_caught_listeners_) {
474      modified = new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
475    } else {
476      modified = new std::list<InstrumentationListener*>();
477    }
478    modified->push_back(listener);
479    exception_caught_listeners_.reset(modified);
480    have_exception_caught_listeners_ = true;
481  }
482  UpdateInterpreterHandlerTable();
483}
484
485void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
486  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
487
488  if ((events & kMethodEntered) != 0) {
489    if (have_method_entry_listeners_) {
490      method_entry_listeners_.remove(listener);
491      have_method_entry_listeners_ = !method_entry_listeners_.empty();
492    }
493  }
494  if ((events & kMethodExited) != 0) {
495    if (have_method_exit_listeners_) {
496      method_exit_listeners_.remove(listener);
497      have_method_exit_listeners_ = !method_exit_listeners_.empty();
498    }
499  }
500  if ((events & kMethodUnwind) != 0) {
501    if (have_method_unwind_listeners_) {
502      method_unwind_listeners_.remove(listener);
503      have_method_unwind_listeners_ = !method_unwind_listeners_.empty();
504    }
505  }
506  if ((events & kDexPcMoved) != 0) {
507    if (have_dex_pc_listeners_) {
508      std::list<InstrumentationListener*>* modified =
509          new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
510      modified->remove(listener);
511      have_dex_pc_listeners_ = !modified->empty();
512      if (have_dex_pc_listeners_) {
513        dex_pc_listeners_.reset(modified);
514      } else {
515        dex_pc_listeners_.reset();
516        delete modified;
517      }
518    }
519  }
520  if ((events & kFieldRead) != 0) {
521    if (have_field_read_listeners_) {
522      std::list<InstrumentationListener*>* modified =
523          new std::list<InstrumentationListener*>(*field_read_listeners_.get());
524      modified->remove(listener);
525      have_field_read_listeners_ = !modified->empty();
526      if (have_field_read_listeners_) {
527        field_read_listeners_.reset(modified);
528      } else {
529        field_read_listeners_.reset();
530        delete modified;
531      }
532    }
533  }
534  if ((events & kFieldWritten) != 0) {
535    if (have_field_write_listeners_) {
536      std::list<InstrumentationListener*>* modified =
537          new std::list<InstrumentationListener*>(*field_write_listeners_.get());
538      modified->remove(listener);
539      have_field_write_listeners_ = !modified->empty();
540      if (have_field_write_listeners_) {
541        field_write_listeners_.reset(modified);
542      } else {
543        field_write_listeners_.reset();
544        delete modified;
545      }
546    }
547  }
548  if ((events & kExceptionCaught) != 0) {
549    if (have_exception_caught_listeners_) {
550      std::list<InstrumentationListener*>* modified =
551          new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
552      modified->remove(listener);
553      have_exception_caught_listeners_ = !modified->empty();
554      if (have_exception_caught_listeners_) {
555        exception_caught_listeners_.reset(modified);
556      } else {
557        exception_caught_listeners_.reset();
558        delete modified;
559      }
560    }
561  }
562  UpdateInterpreterHandlerTable();
563}
564
// Moves the runtime between instrumentation levels:
//   level 0 = no stubs, level 1 = entry/exit stubs, level 2 = full interpreter.
// When raising the level, stubs are installed on all classes and every
// thread's stack is instrumented; when dropping to 0, original code is
// restored and stacks are un-instrumented (unless methods remain deoptimized).
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  // The thread list lock is taken below; it must not already be held.
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}
618
// ThreadList::ForEach callback: re-installs the (possibly instrumented)
// quick allocation entrypoints on a single thread. |arg| is unused.
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}
622
623void Instrumentation::SetEntrypointsInstrumented(bool instrumented, bool suspended) {
624  Runtime* runtime = Runtime::Current();
625  ThreadList* tl = runtime->GetThreadList();
626  if (suspended) {
627    Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
628  }
629  if (runtime->IsStarted() && !suspended) {
630    tl->SuspendAll();
631  }
632  {
633    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
634    SetQuickAllocEntryPointsInstrumented(instrumented);
635    ResetQuickAllocEntryPoints();
636  }
637  if (runtime->IsStarted() && !suspended) {
638    tl->ResumeAll();
639  }
640}
641
// Reference-counted request to instrument allocation entrypoints: only the
// 0 -> 1 transition of the counter actually swaps the entrypoints in.
void Instrumentation::InstrumentQuickAllocEntryPoints(bool suspended) {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true, suspended);
  }
}
652
// Counterpart of InstrumentQuickAllocEntryPoints: only the 1 -> 0 transition
// of the counter restores the uninstrumented entrypoints.
void Instrumentation::UninstrumentQuickAllocEntryPoints(bool suspended) {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false, suspended);
  }
}
663
664void Instrumentation::ResetQuickAllocEntryPoints() {
665  Runtime* runtime = Runtime::Current();
666  if (runtime->IsStarted()) {
667    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
668    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
669  }
670}
671
// Called when a method's code changes (e.g. after JIT/compilation). Installs
// the new code directly when no instrumentation is active; otherwise installs
// whatever stub the current instrumentation level demands instead.
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // Fast path: no instrumentation, take the new code as-is.
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Method must run in the interpreter: ignore the new compiled code.
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = GetPortableToInterpreterBridge();
#else
      new_portable_code = portable_code;
#endif
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (quick_code == class_linker->GetQuickResolutionTrampoline() ||
          quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
          quick_code == GetQuickToInterpreterBridge()) {
        // The "new code" is itself a trampoline/bridge: install it unchanged.
#if defined(ART_USE_PORTABLE_COMPILER)
        DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) ||
               (portable_code == GetPortableToInterpreterBridge()));
#endif
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      } else if (entry_exit_stubs_installed_) {
        // Keep the entry/exit stub in place instead of the new code.
        new_quick_code = GetQuickInstrumentationEntryPoint();
#if defined(ART_USE_PORTABLE_COMPILER)
        new_portable_code = GetPortableToInterpreterBridge();
#else
        new_portable_code = portable_code;
#endif
        new_have_portable_code = false;
      } else {
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}
719
720bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
721  // Note that the insert() below isn't read barrier-aware. So, this
722  // FindDeoptimizedMethod() call is necessary or else we would end up
723  // storing the same method twice in the map (the from-space and the
724  // to-space ones).
725  if (FindDeoptimizedMethod(method)) {
726    // Already in the map. Return.
727    return false;
728  }
729  // Not found. Add it.
730  int32_t hash_code = method->IdentityHashCode();
731  deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
732  return true;
733}
734
735bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
736  int32_t hash_code = method->IdentityHashCode();
737  auto range = deoptimized_methods_.equal_range(hash_code);
738  for (auto it = range.first; it != range.second; ++it) {
739    mirror::ArtMethod* m = it->second.Read();
740    if (m == method) {
741      // Found.
742      return true;
743    }
744  }
745  // Not found.
746  return false;
747}
748
749mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
750  auto it = deoptimized_methods_.begin();
751  if (it == deoptimized_methods_.end()) {
752    // Empty.
753    return nullptr;
754  }
755  return it->second.Read();
756}
757
758bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
759  int32_t hash_code = method->IdentityHashCode();
760  auto range = deoptimized_methods_.equal_range(hash_code);
761  for (auto it = range.first; it != range.second; ++it) {
762    mirror::ArtMethod* m = it->second.Read();
763    if (m == method) {
764      // Found. Erase and return.
765      deoptimized_methods_.erase(it);
766      return true;
767    }
768  }
769  // Not found.
770  return false;
771}
772
773bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
774  return deoptimized_methods_.empty();
775}
776
// Forces |method| to run under instrumentation by swapping its quick
// entrypoint for the instrumentation entry point, and installs the
// instrumentation exit stub on every thread's stack. Native, proxy, and
// abstract methods cannot be deoptimized (they have no interpretable code).
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    // Record the method in the map under the writer lock; a second Deoptimize
    // of the same method without an intervening Undeoptimize is a bug.
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
        << " is already deoptimized";
  }
  // When interpreter stubs are installed globally, everything is already
  // interpreted, so the per-method entrypoint swap is unnecessary.
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(),
#if defined(ART_USE_PORTABLE_COMPILER)
                      GetPortableToInterpreterBridge(),
#else
                      nullptr,
#endif
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
805
// Reverses a previous Deoptimize(): removes |method| from the map and
// restores its original quick code (or the appropriate trampoline). When the
// map becomes empty, also removes the instrumentation frames from every
// thread's stack.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    // Undeoptimizing a method that was never deoptimized is a caller bug.
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    // Remember emptiness while still holding the lock; used below to decide
    // whether the per-thread instrumentation stacks can be restored.
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // Uninitialized-class static methods must go back through the
      // resolution trampoline so class initialization still happens.
      // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline.
      UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(),
#if defined(ART_USE_PORTABLE_COMPILER)
                        class_linker->GetPortableResolutionTrampoline(),
#else
                        nullptr,
#endif
                        false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
#if defined(ART_USE_PORTABLE_COMPILER)
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
#else
      const void* portable_code = nullptr;
#endif
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
854
// Returns whether |method| was individually deoptimized via Deoptimize().
// Note: this does not reflect global deoptimization
// (interpreter_stubs_installed_); it only consults the per-method map.
bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return FindDeoptimizedMethod(method);
}
860
// Enables deoptimization support (e.g. for the debugger). Requires that no
// method is currently deoptimized and that support was previously disabled.
void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}
867
// Disables deoptimization support: undoes global deoptimization if active,
// then undeoptimizes every individually-deoptimized method.
void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimized selected methods.
  // The lock is intentionally released before each Undeoptimize() call:
  // Undeoptimize() re-acquires deoptimized_methods_lock_ (as a writer), so
  // holding it across the call would self-deadlock.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
889
890// Indicates if instrumentation should notify method enter/exit events to the listeners.
891bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
892  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
893}
894
// Forces every method to run in the interpreter by installing interpreter
// stubs globally. Must not already be fully deoptimized.
void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}
899
// Undoes DeoptimizeEverything(): removes the globally-installed interpreter
// stubs. Requires that they are currently installed.
void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}
904
905void Instrumentation::EnableMethodTracing() {
906  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
907  ConfigureStubs(!require_interpreter, require_interpreter);
908}
909
// Removes the stubs installed by EnableMethodTracing.
void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}
913
914const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
915  Runtime* runtime = Runtime::Current();
916  if (LIKELY(!instrumentation_stubs_installed_)) {
917    const void* code = method->GetEntryPointFromQuickCompiledCode();
918    DCHECK(code != nullptr);
919    ClassLinker* class_linker = runtime->GetClassLinker();
920    if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) &&
921        LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) &&
922        LIKELY(code != GetQuickToInterpreterBridge())) {
923      return code;
924    }
925  }
926  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
927}
928
// Dispatches a method-entry event to every registered method-entry listener.
void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  // The iterator is advanced (and the end re-checked) BEFORE invoking the
  // listener, so a listener that removes itself does not invalidate our
  // current position.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}
942
// Dispatches a method-exit event (with the return value) to every registered
// method-exit listener.
void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  // Advance the iterator before the callback so a self-removing listener
  // cannot invalidate it.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}
956
957void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
958                                        mirror::ArtMethod* method,
959                                        uint32_t dex_pc) const {
960  if (have_method_unwind_listeners_) {
961    for (InstrumentationListener* listener : method_unwind_listeners_) {
962      listener->MethodUnwind(thread, this_object, method, dex_pc);
963    }
964  }
965}
966
967void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
968                                          mirror::ArtMethod* method,
969                                          uint32_t dex_pc) const {
970  if (HasDexPcListeners()) {
971    std::shared_ptr<std::list<InstrumentationListener*>> original(dex_pc_listeners_);
972    for (InstrumentationListener* listener : *original.get()) {
973      listener->DexPcMoved(thread, this_object, method, dex_pc);
974    }
975  }
976}
977
978void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
979                                         mirror::ArtMethod* method, uint32_t dex_pc,
980                                         mirror::ArtField* field) const {
981  if (HasFieldReadListeners()) {
982    std::shared_ptr<std::list<InstrumentationListener*>> original(field_read_listeners_);
983    for (InstrumentationListener* listener : *original.get()) {
984      listener->FieldRead(thread, this_object, method, dex_pc, field);
985    }
986  }
987}
988
989void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
990                                         mirror::ArtMethod* method, uint32_t dex_pc,
991                                         mirror::ArtField* field, const JValue& field_value) const {
992  if (HasFieldWriteListeners()) {
993    std::shared_ptr<std::list<InstrumentationListener*>> original(field_write_listeners_);
994    for (InstrumentationListener* listener : *original.get()) {
995      listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
996    }
997  }
998}
999
// Notifies exception-caught listeners. The thread's pending exception is
// temporarily cleared around the callbacks (listeners may run code that
// requires no pending exception) and restored afterwards, along with its
// reported-to-instrumentation flag.
void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    // Copy the shared_ptr so the listener list stays alive while iterating.
    std::shared_ptr<std::list<InstrumentationListener*>> original(exception_caught_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc,
                                exception_object);
    }
    // Re-install the exception exactly as it was before the callbacks ran.
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}
1017
1018static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
1019                            int delta)
1020    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1021  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
1022  if (frame_id != instrumentation_frame.frame_id_) {
1023    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
1024        << instrumentation_frame.frame_id_;
1025    StackVisitor::DescribeStack(self);
1026    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
1027  }
1028}
1029
1030void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
1031                                                    mirror::ArtMethod* method,
1032                                                    uintptr_t lr, bool interpreter_entry) {
1033  // We have a callee-save frame meaning this value is guaranteed to never be 0.
1034  size_t frame_id = StackVisitor::ComputeNumFrames(self);
1035  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
1036  if (kVerboseInstrumentation) {
1037    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
1038  }
1039  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
1040                                                                   frame_id, interpreter_entry);
1041  stack->push_front(instrumentation_frame);
1042
1043  if (!interpreter_entry) {
1044    MethodEnterEvent(self, this_object, method, 0);
1045  }
1046}
1047
// Called by the instrumentation exit stub when an instrumented method
// returns. Pops the matching instrumentation frame, reports the method-exit
// event, and decides whether to return normally to the caller or deoptimize
// into the interpreter. |gpr_result|/|fpr_result| hold the raw return value
// in the integer/floating-point register respectively.
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  // Pick the register holding the return value based on the method's return
  // type (first char of the shorty): F/D come from the FP register,
  // everything else (including void, zero-filled) from the GPR.
  mirror::ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    // Stash the return value for the interpreter, then resume at the
    // deoptimization entry point instead of the original return address.
    self->SetDeoptimizationReturnValue(return_value);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}
1105
// Pops the top instrumentation frame when the managed frame is being removed
// abnormally: either because of deoptimization or because an exception is
// unwinding the stack. Only the unwind case reports a MethodUnwind event.
void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}
1131
1132void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
1133  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
1134  if (IsDeoptimizedMethodsEmpty()) {
1135    return;
1136  }
1137  for (auto pair : deoptimized_methods_) {
1138    pair.second.VisitRoot(callback, arg, 0, kRootVMInternal);
1139  }
1140}
1141
1142std::string InstrumentationStackFrame::Dump() const {
1143  std::ostringstream os;
1144  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
1145      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
1146  return os.str();
1147}
1148
1149}  // namespace instrumentation
1150}  // namespace art
1151