instrumentation.cc revision c934e483ceabbd589422beea1fa35f5182ecfa99
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "arch/context.h"
#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

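// Shared helper for swapping a method's entry points. It writes the quick (and, when built with
// the portable compiler, portable) code pointers and then selects the matching interpreter bridge
// for non-resolution methods: methods routed to the interpreter get
// artInterpreterToInterpreterBridge, everything else gets artInterpreterToCompiledCodeBridge.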
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(ART_USE_PORTABLE_COMPILER)
  method->SetEntryPointFromPortableCompiledCode(portable_code);
#endif
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
        (quick_code == class_linker->GetQuickResolutionTrampoline() &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        if (quick_code == GetQuickToInterpreterBridge()) {
#if defined(ART_USE_PORTABLE_COMPILER)
          DCHECK(portable_code == GetPortableToInterpreterBridge());
#endif
        } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) {
#if defined(ART_USE_PORTABLE_COMPILER)
          DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline());
#endif
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

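// Chooses the entry points for a single method based on the current instrumentation level:
// compiled code when uninstalling, the instrumentation entry stub when only entry/exit events are
// needed, and the interpreter bridge when full deoptimization (or forced interpretation) is in
// effect. Resolution trampolines are preserved for static methods of uninitialized classes.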
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
#if !defined(ART_USE_PORTABLE_COMPILER)
  new_portable_code = nullptr;
#endif
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = GetPortableToInterpreterBridge();
#endif
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
#endif
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = class_linker->GetPortableResolutionTrampoline();
#endif
      new_quick_code = class_linker->GetQuickResolutionTrampoline();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = GetPortableToInterpreterBridge();
#endif
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, the code of all its static methods will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
#if defined(ART_USE_PORTABLE_COMPILER)
          new_portable_code = GetPortableToInterpreterBridge();
#endif
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
#if defined(ART_USE_PORTABLE_COMPILER)
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
#endif
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline());
        }
      } else {
#if defined(ART_USE_PORTABLE_COMPILER)
        new_portable_code = class_linker->GetPortableResolutionTrampoline();
#endif
        new_quick_code = class_linker->GetQuickResolutionTrampoline();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to insert any new instrumentation
// frames before the existing ones.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame, so the method enter event must already have been
          // reported. However, we still need to push a DEX pc into the dex_pcs_ list to match the
          // size of the instrumentation stack. Since we won't report method entry here, we can
          // safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame that already has the instrumentation exit stub installed.
        // We should have already installed instrumentation on the previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert the frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to avoid posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
              << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to avoid posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

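// Registers a listener for the event kinds selected in the bitmask. A hypothetical caller (not
// taken from this file) would build a mask from the instrumentation event flags and register
// while holding the mutator lock exclusively, e.g.:
//   uint32_t events = Instrumentation::kMethodEntered | Instrumentation::kMethodExited;
//   Runtime::Current()->GetInstrumentation()->AddListener(&my_listener, events);
// where my_listener is an InstrumentationListener implementation supplied by the caller.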
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_dex_pc_listeners_) {
      modified = new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    dex_pc_listeners_.reset(modified);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_read_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_read_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_read_listeners_.reset(modified);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_write_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_write_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_write_listeners_.reset(modified);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_exception_caught_listeners_) {
      modified = new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    exception_caught_listeners_.reset(modified);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    if (have_method_entry_listeners_) {
      method_entry_listeners_.remove(listener);
      have_method_entry_listeners_ = !method_entry_listeners_.empty();
    }
  }
  if ((events & kMethodExited) != 0) {
    if (have_method_exit_listeners_) {
      method_exit_listeners_.remove(listener);
      have_method_exit_listeners_ = !method_exit_listeners_.empty();
    }
  }
  if ((events & kMethodUnwind) != 0) {
    if (have_method_unwind_listeners_) {
      method_unwind_listeners_.remove(listener);
      have_method_unwind_listeners_ = !method_unwind_listeners_.empty();
    }
  }
  if ((events & kDexPcMoved) != 0) {
    if (have_dex_pc_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
      modified->remove(listener);
      have_dex_pc_listeners_ = !modified->empty();
      if (have_dex_pc_listeners_) {
        dex_pc_listeners_.reset(modified);
      } else {
        dex_pc_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldRead) != 0) {
    if (have_field_read_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_read_listeners_.get());
      modified->remove(listener);
      have_field_read_listeners_ = !modified->empty();
      if (have_field_read_listeners_) {
        field_read_listeners_.reset(modified);
      } else {
        field_read_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldWritten) != 0) {
    if (have_field_write_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_write_listeners_.get());
      modified->remove(listener);
      have_field_write_listeners_ = !modified->empty();
      if (have_field_write_listeners_) {
        field_write_listeners_.reset(modified);
      } else {
        field_write_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kExceptionCaught) != 0) {
    if (have_exception_caught_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
      modified->remove(listener);
      have_exception_caught_listeners_ = !modified->empty();
      if (have_exception_caught_listeners_) {
        exception_caught_listeners_.reset(modified);
      } else {
        exception_caught_listeners_.reset();
        delete modified;
      }
    }
  }
  UpdateInterpreterHandlerTable();
}

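// Reconciles the stub configuration with what the listeners require. Level 0 leaves compiled code
// untouched, level 1 installs the method entry/exit stubs, and level 2 forces the interpreter.
// Any non-zero desired level installs stubs and instrumentation stack frames on every thread;
// level 0 restores the stacks only once no individual method remains deoptimized.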
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

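// Switches every thread between the instrumented and uninstrumented quick allocation entry
// points. Requires Locks::instrument_entrypoints_lock_ to be held and the mutator lock to be
// released; suspends all threads (once the runtime is started) while the entry points are swapped.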
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
  LOG(INFO) << "Counter: " << quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
  LOG(INFO) << "Counter: " << quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

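// Installs newly available code for a method while respecting the current instrumentation: with
// interpreter stubs installed (or the method deoptimized) the interpreter bridge is kept, with
// only entry/exit stubs installed the instrumentation entry point is kept, and trampolines or
// bridges passed in are installed unchanged.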
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = GetPortableToInterpreterBridge();
#else
      new_portable_code = portable_code;
#endif
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (quick_code == class_linker->GetQuickResolutionTrampoline() ||
          quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
          quick_code == GetQuickToInterpreterBridge()) {
#if defined(ART_USE_PORTABLE_COMPILER)
        DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) ||
               (portable_code == GetPortableToInterpreterBridge()));
#endif
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
#if defined(ART_USE_PORTABLE_COMPILER)
        new_portable_code = GetPortableToInterpreterBridge();
#else
        new_portable_code = portable_code;
#endif
        new_have_portable_code = false;
      } else {
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
  // Note that the insert() below isn't read barrier-aware. So, this
  // FindDeoptimizedMethod() call is necessary or else we would end up
  // storing the same method twice in the map (the from-space and the
  // to-space ones).
  if (FindDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  int32_t hash_code = method->IdentityHashCode();
  deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
  return true;
}

bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found.
      return true;
    }
  }
  // Not found.
  return false;
}

mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  auto it = deoptimized_methods_.begin();
  if (it == deoptimized_methods_.end()) {
    // Empty.
    return nullptr;
  }
  return it->second.Read();
}

bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found. Erase and return.
      deoptimized_methods_.erase(it);
      return true;
    }
  }
  // Not found.
  return false;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

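// Forces a single method to run in the interpreter by rerouting its entry points and installing
// instrumentation frames on every thread. A hypothetical debugger-style caller pairs this with
// Undeoptimize() around the lifetime of a breakpoint:
//   instrumentation->Deoptimize(method);    // breakpoint set: interpret this method
//   ...
//   instrumentation->Undeoptimize(method);  // breakpoint cleared: restore its code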
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
        << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(),
#if defined(ART_USE_PORTABLE_COMPILER)
                      GetPortableToInterpreterBridge(),
#else
                      nullptr,
#endif
                      false);

    // Install the instrumentation exit stub and instrumentation frames. We may already have
    // installed these previously, so this will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline.
      UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(),
#if defined(ART_USE_PORTABLE_COMPILER)
                        class_linker->GetPortableResolutionTrampoline(),
#else
                        nullptr,
#endif
                        false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
#if defined(ART_USE_PORTABLE_COMPILER)
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
#else
      const void* portable_code = nullptr;
#endif
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return FindDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates whether instrumentation should notify the listeners of method enter/exit events.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

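// Returns the quick code the method would run without instrumentation. If instrumentation stubs
// are installed, or the stored entry point is only a resolution trampoline or interpreter bridge,
// the code is looked up in the oat file via the class linker instead.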
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method, size_t pointer_size) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
    DCHECK(code != nullptr);
    ClassLinker* class_linker = runtime->GetClassLinker();
    if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) &&
        LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) &&
        LIKELY(code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  if (HasDexPcListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(dex_pc_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->DexPcMoved(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         mirror::ArtMethod* method, uint32_t dex_pc,
                                         mirror::ArtField* field) const {
  if (HasFieldReadListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(field_read_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->FieldRead(thread, this_object, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method, uint32_t dex_pc,
                                          mirror::ArtField* field, const JValue& field_value) const {
  if (HasFieldWriteListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(field_write_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
    }
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    std::shared_ptr<std::list<InstrumentationListener*>> original(exception_caught_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc,
                                exception_object);
    }
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

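// Used on the instrumentation entry path: records the caller's return address (lr) in a new
// instrumentation frame and reports the method entry event unless this is an interpreter entry.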
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame, so this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

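// Used on the instrumentation exit path when an instrumented method returns: restores the
// original return pc, reports the method exit event, and returns a deoptimization target instead
// of the normal return address when the caller must now run in the interpreter.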
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}

void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

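// Reports the deoptimized methods as GC roots so they are visited during garbage collection and
// the stored references can be updated by a moving collector.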
void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (IsDeoptimizedMethodsEmpty()) {
    return;
  }
  for (auto pair : deoptimized_methods_) {
    pair.second.VisitRoot(callback, arg, 0, kRootVMInternal);
  }
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art