instrumentation.cc revision 138dbfc3336e379d74d157086f69a0fbe830089b
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "atomic_integer.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "object_utils.h"
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

extern void SetQuickAllocEntryPointsInstrumented(bool instrumented);

namespace instrumentation {

// When true, every stub install/remove decision is logged (very noisy; debug only).
const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;
54
55static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
56    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
57  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
58  return instrumentation->InstallStubsForClass(klass);
59}
60
61bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
62  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
63    InstallStubsForMethod(klass->GetDirectMethod(i));
64  }
65  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
66    InstallStubsForMethod(klass->GetVirtualMethod(i));
67  }
68  return true;
69}
70
71static void UpdateEntrypoints(mirror::ArtMethod* method, const void* code) {
72  method->SetEntryPointFromCompiledCode(code);
73  if (!method->IsResolutionMethod()) {
74    if (code == GetCompiledCodeToInterpreterBridge()) {
75      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
76    } else {
77      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
78    }
79  }
80}
81
// Chooses and installs the correct entry point for |method| given the current
// instrumentation state: no stubs (uninstall), entry/exit stubs, or interpreter stubs.
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_code;
  // "uninstall" means neither entry/exit stubs nor interpreter stubs are required.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Interpret-only mode or a deoptimized method: route through the interpreter bridge.
      new_code = GetCompiledCodeToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_code = class_linker->GetOatCodeFor(method);
    } else {
      // Static method of an uninitialized class: keep the resolution trampoline so the
      // class is initialized on first call.
      new_code = GetResolutionTrampoline(class_linker);
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_code = GetCompiledCodeToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        // Do not overwrite interpreter to prevent from posting method entry/exit events twice.
        new_code = class_linker->GetOatCodeFor(method);
        if (entry_exit_stubs_installed_ && new_code != GetCompiledCodeToInterpreterBridge()) {
          new_code = GetQuickInstrumentationEntryPoint();
        }
      } else {
        new_code = GetResolutionTrampoline(class_linker);
      }
    }
  }
  UpdateEntrypoints(method, new_code);
}
119
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc,
                        bool is_deoptimization_enabled)
        : StackVisitor(thread, context),  instrumentation_stack_(thread->GetInstrumentationStack()),
          existing_instrumentation_frames_count_(instrumentation_stack_->size()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          is_deoptimization_enabled_(is_deoptimization_enabled),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (m->IsRuntimeMethod()) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
        }
        last_return_pc_ = GetReturnPc();
        return true;  // Ignore unresolved methods since they will be instrumented after resolution.
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      uintptr_t return_pc = GetReturnPc();
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        // Recover the real return pc from the existing instrumentation frame.
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame before old ones so we do not corrupt the instrumentation stack.
        auto it = instrumentation_stack_->end() - existing_instrumentation_frames_count_;
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      // Record the dex pc derived from the previous (callee) frame's return pc; consumed by
      // the deferred MethodEnterEvent pass after the walk completes.
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;  // Thread's stack of instrumentation frames.
    const size_t existing_instrumentation_frames_count_;  // Frames present before this install pass.
    std::vector<uint32_t> dex_pcs_;  // Dex pcs collected per visited frame (innermost first).
    const uintptr_t instrumentation_exit_pc_;
    const bool is_deoptimization_enabled_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;  // Return pc of the previously visited (callee) frame.
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  UniquePtr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc,
                              instrumentation->IsDeoptimizationEnabled());
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (!instrumentation->IsDeoptimizationEnabled()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    typedef std::deque<InstrumentationStackFrame>::const_reverse_iterator It;
    for (It it = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); it != end; ++it) {
      mirror::Object* this_object = (*it).this_object_;
      mirror::ArtMethod* method = (*it).method_;
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      instrumentation->MethodEnterEvent(thread, this_object, method, dex_pc);
    }
  }
  thread->VerifyStack();
}
234
235// Removes the instrumentation exit pc as the return PC for every quick frame.
236static void InstrumentationRestoreStack(Thread* thread, void* arg)
237    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
238  struct RestoreStackVisitor : public StackVisitor {
239    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
240                        Instrumentation* instrumentation)
241        : StackVisitor(thread, NULL), thread_(thread),
242          instrumentation_exit_pc_(instrumentation_exit_pc),
243          instrumentation_(instrumentation),
244          instrumentation_stack_(thread->GetInstrumentationStack()),
245          frames_removed_(0) {}
246
247    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
248      if (instrumentation_stack_->size() == 0) {
249        return false;  // Stop.
250      }
251      mirror::ArtMethod* m = GetMethod();
252      if (GetCurrentQuickFrame() == NULL) {
253        if (kVerboseInstrumentation) {
254          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
255        }
256        return true;  // Ignore shadow frames.
257      }
258      if (m == NULL) {
259        if (kVerboseInstrumentation) {
260          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
261        }
262        return true;  // Ignore upcalls.
263      }
264      bool removed_stub = false;
265      // TODO: make this search more efficient?
266      for (InstrumentationStackFrame instrumentation_frame : *instrumentation_stack_) {
267        if (instrumentation_frame.frame_id_ == GetFrameId()) {
268          if (kVerboseInstrumentation) {
269            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
270          }
271          if (instrumentation_frame.interpreter_entry_) {
272            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
273          } else {
274            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
275          }
276          SetReturnPc(instrumentation_frame.return_pc_);
277          if (!instrumentation_->IsDeoptimizationEnabled()) {
278            // Create the method exit events. As the methods didn't really exit the result is 0.
279            // We only do this if no debugger is attached to prevent from posting events twice.
280            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
281                                              GetDexPc(), JValue());
282          }
283          frames_removed_++;
284          removed_stub = true;
285          break;
286        }
287      }
288      if (!removed_stub) {
289        if (kVerboseInstrumentation) {
290          LOG(INFO) << "  No exit stub in " << DescribeLocation();
291        }
292      }
293      return true;  // Continue.
294    }
295    Thread* const thread_;
296    const uintptr_t instrumentation_exit_pc_;
297    Instrumentation* const instrumentation_;
298    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
299    size_t frames_removed_;
300  };
301  if (kVerboseInstrumentation) {
302    std::string thread_name;
303    thread->GetThreadName(thread_name);
304    LOG(INFO) << "Removing exit stubs in " << thread_name;
305  }
306  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
307  if (stack->size() > 0) {
308    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
309    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
310    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
311    visitor.WalkStack(true);
312    CHECK_EQ(visitor.frames_removed_, stack->size());
313    while (stack->size() > 0) {
314      stack->pop_front();
315    }
316  }
317}
318
319void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
320  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
321  if ((events & kMethodEntered) != 0) {
322    method_entry_listeners_.push_back(listener);
323    have_method_entry_listeners_ = true;
324  }
325  if ((events & kMethodExited) != 0) {
326    method_exit_listeners_.push_back(listener);
327    have_method_exit_listeners_ = true;
328  }
329  if ((events & kMethodUnwind) != 0) {
330    method_unwind_listeners_.push_back(listener);
331    have_method_unwind_listeners_ = true;
332  }
333  if ((events & kDexPcMoved) != 0) {
334    dex_pc_listeners_.push_back(listener);
335    have_dex_pc_listeners_ = true;
336  }
337  if ((events & kExceptionCaught) != 0) {
338    exception_caught_listeners_.push_back(listener);
339    have_exception_caught_listeners_ = true;
340  }
341  UpdateInterpreterHandlerTable();
342}
343
344void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
345  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
346
347  if ((events & kMethodEntered) != 0) {
348    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
349                              listener) != method_entry_listeners_.end();
350    if (contains) {
351      method_entry_listeners_.remove(listener);
352    }
353    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
354  }
355  if ((events & kMethodExited) != 0) {
356    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
357                              listener) != method_exit_listeners_.end();
358    if (contains) {
359      method_exit_listeners_.remove(listener);
360    }
361    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
362  }
363  if ((events & kMethodUnwind) != 0) {
364    method_unwind_listeners_.remove(listener);
365  }
366  if ((events & kDexPcMoved) != 0) {
367    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
368                              listener) != dex_pc_listeners_.end();
369    if (contains) {
370      dex_pc_listeners_.remove(listener);
371    }
372    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
373  }
374  if ((events & kExceptionCaught) != 0) {
375    exception_caught_listeners_.remove(listener);
376    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
377  }
378  UpdateInterpreterHandlerTable();
379}
380
// Moves the runtime between instrumentation levels:
//   2 = interpreter stubs (everything interpreted), 1 = entry/exit stubs, 0 = none.
// Installs or removes method entry points and per-thread stack exit stubs as needed.
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  // The thread list lock is taken below; it must not already be held.
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    // First rewrite every method's entry points, then place exit stubs on all thread stacks.
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    if (deoptimized_methods_.empty()) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}
429
// ThreadList::ForEach callback: re-selects the quick allocation entry points for one
// thread to match the current global instrumentation state. |arg| is unused.
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}
433
// Increments the allocation-instrumentation refcount; the first increment (0 -> 1)
// switches all threads to the instrumented allocation entry points under suspend-all.
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAdd(1) == 0;
  if (enable_instrumentation) {
    // Instrumentation wasn't enabled so enable it.
    SetQuickAllocEntryPointsInstrumented(true);
    // Suspend all threads so entry points can be swapped without racing running code.
    ThreadList* tl = Runtime::Current()->GetThreadList();
    tl->SuspendAll();
    ResetQuickAllocEntryPoints();
    tl->ResumeAll();
  }
}
449
// Decrements the allocation-instrumentation refcount; the last decrement (1 -> 0)
// switches all threads back to the uninstrumented allocation entry points.
void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSub(1) == 1;
  if (disable_instrumentation) {
    SetQuickAllocEntryPointsInstrumented(false);
    // Suspend all threads so entry points can be swapped without racing running code.
    ThreadList* tl = Runtime::Current()->GetThreadList();
    tl->SuspendAll();
    ResetQuickAllocEntryPoints();
    tl->ResumeAll();
  }
}
464
465void Instrumentation::ResetQuickAllocEntryPoints() {
466  Runtime* runtime = Runtime::Current();
467  if (runtime->IsStarted()) {
468    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
469    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
470  }
471}
472
473void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* code) const {
474  const void* new_code;
475  if (LIKELY(!instrumentation_stubs_installed_)) {
476    new_code = code;
477  } else {
478    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
479      new_code = GetCompiledCodeToInterpreterBridge();
480    } else if (code == GetResolutionTrampoline(Runtime::Current()->GetClassLinker()) ||
481               code == GetCompiledCodeToInterpreterBridge()) {
482      new_code = code;
483    } else if (entry_exit_stubs_installed_) {
484      new_code = GetQuickInstrumentationEntryPoint();
485    } else {
486      new_code = code;
487    }
488  }
489  UpdateEntrypoints(method, new_code);
490}
491
// Forces |method| to run in the interpreter: records it in deoptimized_methods_, points its
// entry point at the interpreter bridge, and installs exit stubs on all thread stacks.
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  // insert() reports via pair.second whether the method was newly added.
  std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair = deoptimized_methods_.insert(method);
  bool already_deoptimized = !pair.second;
  CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";

  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetCompiledCodeToInterpreterBridge());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
511
// Reverses Deoptimize() for |method|: removes it from deoptimized_methods_, restores its
// original entry point, and — when no deoptimized methods remain — restores thread stacks.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  auto it = deoptimized_methods_.find(method);
  CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method) << " is not deoptimized";
  deoptimized_methods_.erase(it);

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() && !method->GetDeclaringClass()->IsInitialized()) {
      // Static method of an uninitialized class: reinstate the resolution trampoline.
      UpdateEntrypoints(method, GetResolutionTrampoline(class_linker));
    } else {
      UpdateEntrypoints(method, class_linker->GetOatCodeFor(method));
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (deoptimized_methods_.empty()) {
      MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
539
540bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
541  DCHECK(method != nullptr);
542  return deoptimized_methods_.count(method);
543}
544
// Marks the start of a deoptimization session; no methods may already be deoptimized.
void Instrumentation::EnableDeoptimization() {
  CHECK(deoptimized_methods_.empty());
}
548
549void Instrumentation::DisableDeoptimization() {
550  // If we deoptimized everything, undo it.
551  if (interpreter_stubs_installed_) {
552    UndeoptimizeEverything();
553  }
554  // Undeoptimized selected methods.
555  while (!deoptimized_methods_.empty()) {
556    auto it_begin = deoptimized_methods_.begin();
557    Undeoptimize(*it_begin);
558  }
559  CHECK(deoptimized_methods_.empty());
560}
561
562bool Instrumentation::IsDeoptimizationEnabled() const {
563  return interpreter_stubs_installed_ || !deoptimized_methods_.empty();
564}
565
// Switches the whole runtime to interpreter-only execution (interpreter stubs).
// Must not already be in that state.
void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}
570
// Reverses DeoptimizeEverything(): removes all instrumentation stubs.
// Interpreter stubs must currently be installed.
void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}
575
576void Instrumentation::EnableMethodTracing() {
577  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
578  ConfigureStubs(!require_interpreter, require_interpreter);
579}
580
// Disables method tracing by removing all instrumentation stubs.
void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}
584
// Returns the real quick code for |method|, bypassing instrumentation entry points.
// Falls back to the oat code when stubs are installed or when the method's current entry
// point is the resolution trampoline / quick-to-interpreter bridge.
const void* Instrumentation::GetQuickCodeFor(const mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromCompiledCode();
    DCHECK(code != NULL);
    if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker()) &&
               code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetOatCodeFor(method);
}
597
598void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
599                                           const mirror::ArtMethod* method,
600                                           uint32_t dex_pc) const {
601  auto it = method_entry_listeners_.begin();
602  bool is_end = (it == method_entry_listeners_.end());
603  // Implemented this way to prevent problems caused by modification of the list while iterating.
604  while (!is_end) {
605    InstrumentationListener* cur = *it;
606    ++it;
607    is_end = (it == method_entry_listeners_.end());
608    cur->MethodEntered(thread, this_object, method, dex_pc);
609  }
610}
611
612void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
613                                          const mirror::ArtMethod* method,
614                                          uint32_t dex_pc, const JValue& return_value) const {
615  auto it = method_exit_listeners_.begin();
616  bool is_end = (it == method_exit_listeners_.end());
617  // Implemented this way to prevent problems caused by modification of the list while iterating.
618  while (!is_end) {
619    InstrumentationListener* cur = *it;
620    ++it;
621    is_end = (it == method_exit_listeners_.end());
622    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
623  }
624}
625
// Dispatches a method-unwind event (exception is unwinding through |method|) to listeners.
// NOTE(review): unlike DexPcMovedEventImpl, this iterates the live list; a listener that
// removes itself here would invalidate the range-for iterator — confirm none do on this path.
void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        const mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}
635
636void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
637                                          const mirror::ArtMethod* method,
638                                          uint32_t dex_pc) const {
639  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
640  // action where it can remove itself as a listener and break the iterator. The copy only works
641  // around the problem and in general we may have to move to something like reference counting to
642  // ensure listeners are deleted correctly.
643  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
644  for (InstrumentationListener* listener : copy) {
645    listener->DexPcMoved(thread, this_object, method, dex_pc);
646  }
647}
648
// Dispatches an exception-caught event to all listeners. The pending exception is cleared
// for the duration of the callbacks and restored afterwards — presumably so listener code
// can run without a pending exception on the thread; confirm with the listener contract.
void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (have_exception_caught_listeners_) {
    DCHECK_EQ(thread->GetException(NULL), exception_object);
    thread->ClearException();
    for (InstrumentationListener* listener : exception_caught_listeners_) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    // Reinstate the exception so unwinding continues normally after the listeners ran.
    thread->SetException(throw_location, exception_object);
  }
}
662
663static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
664                            int delta)
665    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
666  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
667  if (frame_id != instrumentation_frame.frame_id_) {
668    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
669        << instrumentation_frame.frame_id_;
670    StackVisitor::DescribeStack(self);
671    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
672  }
673}
674
// Called from the instrumentation entry stub: records an instrumentation frame for the
// method being entered (saving the real return address |lr|) and fires MethodEnterEvent.
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  MethodEnterEvent(self, this_object, method, 0);
}
690
// Called from the instrumentation exit stub: pops the top instrumentation frame, posts the
// MethodExitEvent with the method's result, and decides whether to return normally or to
// deoptimize the caller into the interpreter. Returns the pc(s) to resume at (see below).
uint64_t Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                       uint64_t gpr_result, uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  // Reconstruct the method's return value from the raw GPR/FPR results using the first
  // character of its shorty ('V' = void, 'F'/'D' = floating point, otherwise integral/ref).
  mirror::ArtMethod* method = instrumentation_frame.method_;
  char return_shorty = MethodHelper(method).GetShorty()[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    // Pack the deoptimization entry point into the low 32 bits and the original return pc
    // into the high 32 bits. NOTE(review): this packing assumes 32-bit pcs — confirm
    // behavior on 64-bit targets.
    return static_cast<uint64_t>(GetQuickDeoptimizationEntryPoint()) |
        (static_cast<uint64_t>(*return_pc) << 32);
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return *return_pc;
  }
}
744
// Pops the top instrumentation frame while an exception unwinds (or during deoptimization).
// For a plain unwind, also notifies method-unwind listeners; deoptimization pops silently.
void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}
770
771std::string InstrumentationStackFrame::Dump() const {
772  std::ostringstream os;
773  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
774      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
775  return os.str();
776}
777
}  // namespace instrumentation
}  // namespace art
780