instrumentation.cc revision 3f52eafe5577b8489f90dc8ed5981b3455206147
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "instrumentation.h"
18
19#include <sys/uio.h>
20
21#include "atomic.h"
22#include "base/unix_file/fd_file.h"
23#include "class_linker.h"
24#include "debugger.h"
25#include "dex_file-inl.h"
26#include "entrypoints/quick/quick_alloc_entrypoints.h"
27#include "interpreter/interpreter.h"
28#include "mirror/art_method-inl.h"
29#include "mirror/class-inl.h"
30#include "mirror/dex_cache.h"
31#include "mirror/object_array-inl.h"
32#include "mirror/object-inl.h"
33#include "nth_caller_visitor.h"
34#if !defined(ART_USE_PORTABLE_COMPILER)
35#include "entrypoints/quick/quick_entrypoints.h"
36#endif
37#include "object_utils.h"
38#include "os.h"
39#include "scoped_thread_state_change.h"
40#include "thread.h"
41#include "thread_list.h"
42
43namespace art {
44
45namespace instrumentation {
46
// When true, every install/removal of instrumentation exit stubs is logged.
const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;
53
54static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
55    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
56  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
57  return instrumentation->InstallStubsForClass(klass);
58}
59
// Default state: no stubs installed, no listeners registered. All
// have_*_listeners_ flags start false so event dispatch takes the fast path.
Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}
73
74bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
75  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
76    InstallStubsForMethod(klass->GetDirectMethod(i));
77  }
78  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
79    InstallStubsForMethod(klass->GetVirtualMethod(i));
80  }
81  return true;
82}
83
// Writes the new quick and portable entry points into |method| and keeps the
// "portable compiled" flag and the interpreter entry point consistent with
// them. |have_portable_code| indicates whether |portable_code| is real
// compiled portable code (as opposed to a bridge/trampoline).
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    // The method no longer has real portable code: clear the stale flag.
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    // Select the interpreter entry point. Use the interpreter-to-interpreter
    // bridge when the method will execute in the interpreter anyway: either
    // its quick code is the interpreter bridge, or it still sits on the
    // resolution trampoline while the runtime is forced interpret-only (and
    // the method is neither native nor a proxy, which cannot be interpreted).
    if (quick_code == GetQuickToInterpreterBridge() ||
        (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        // Quick and portable entry points are expected to be switched in
        // tandem: bridge with bridge, trampoline with trampoline.
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker())) {
          DCHECK(portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker()));
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      // The method has (or will resolve to) compiled code: calls coming from
      // the interpreter must go through the to-compiled-code bridge.
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}
115
// Chooses and installs the entry points for a single method based on the
// current instrumentation level (uninstalled / entry-exit stubs /
// interpreter) and the method's own state (native, static, deoptimized,
// class initialized).
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  // "Uninstall" means no instrumentation stubs are required at all.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Even without instrumentation, forced-interpret-only and individually
      // deoptimized methods must run in the interpreter.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      // Static method of an uninitialized class: keep the resolution
      // trampoline so class initialization is triggered on first call.
      new_portable_code = GetPortableResolutionTrampoline(class_linker);
      new_quick_code = GetQuickResolutionTrampoline(class_linker);
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        // Do not overwrite interpreter to prevent from posting method entry/exit events twice.
        new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
        new_quick_code = class_linker->GetQuickOatCodeFor(method);
        if (entry_exit_stubs_installed_ && new_quick_code != GetQuickToInterpreterBridge()) {
          DCHECK(new_portable_code != GetPortableToInterpreterBridge());
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        }
      } else {
        new_portable_code = GetPortableResolutionTrampoline(class_linker);
        new_quick_code = GetQuickResolutionTrampoline(class_linker);
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}
163
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Walks |thread|'s stack; for each quick frame not yet instrumented it
  // records an InstrumentationStackFrame and patches the frame's return PC
  // to the instrumentation exit stub.
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context),  instrumentation_stack_(thread->GetInstrumentationStack()),
          existing_instrumentation_frames_count_(instrumentation_stack_->size()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (m->IsRuntimeMethod()) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
        }
        last_return_pc_ = GetReturnPc();
        return true;  // Ignore unresolved methods since they will be instrumented after resolution.
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      uintptr_t return_pc = GetReturnPc();
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        // Recover the original return PC from the previously recorded frame.
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        // Once we hit existing instrumentation frames we must not find any
        // uninstrumented frame above them.
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame before old ones so we do not corrupt the instrumentation stack.
        auto it = instrumentation_stack_->end() - existing_instrumentation_frames_count_;
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      // Record the dex pc of the caller's call site (derived from the return
      // PC of the previous frame) for later MethodEnterEvent dispatch.
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    const size_t existing_instrumentation_frames_count_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  UniquePtr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  // Every instrumentation frame must have a matching dex pc recorded.
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (!instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    typedef std::deque<InstrumentationStackFrame>::const_reverse_iterator It;
    for (It it = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); it != end; ++it) {
      mirror::Object* this_object = (*it).this_object_;
      mirror::ArtMethod* method = (*it).method_;
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      instrumentation->MethodEnterEvent(thread, this_object, method, dex_pc);
    }
  }
  thread->VerifyStack();
}
274
// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Walks |thread|'s stack and, for each frame that has an instrumentation
  // frame recorded, restores the original return PC and (if appropriate)
  // posts a synthetic method-exit event.
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      // NOTE(review): the loop iterates by value; this looks deliberate so the
      // frame data stays valid even if a MethodExitEvent listener touches the
      // instrumentation stack — confirm before changing to a reference.
      for (InstrumentationStackFrame instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == GetFrameId()) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            // Frames entered for interpretation use the callee-save method.
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (!instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit the result is 0.
            // We only do this if no debugger is attached to prevent from posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    // Every recorded instrumentation frame must have been matched to a stack
    // frame and restored.
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}
358
359void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
360  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
361  if ((events & kMethodEntered) != 0) {
362    method_entry_listeners_.push_back(listener);
363    have_method_entry_listeners_ = true;
364  }
365  if ((events & kMethodExited) != 0) {
366    method_exit_listeners_.push_back(listener);
367    have_method_exit_listeners_ = true;
368  }
369  if ((events & kMethodUnwind) != 0) {
370    method_unwind_listeners_.push_back(listener);
371    have_method_unwind_listeners_ = true;
372  }
373  if ((events & kDexPcMoved) != 0) {
374    dex_pc_listeners_.push_back(listener);
375    have_dex_pc_listeners_ = true;
376  }
377  if ((events & kFieldRead) != 0) {
378    field_read_listeners_.push_back(listener);
379    have_field_read_listeners_ = true;
380  }
381  if ((events & kFieldWritten) != 0) {
382    field_write_listeners_.push_back(listener);
383    have_field_write_listeners_ = true;
384  }
385  if ((events & kExceptionCaught) != 0) {
386    exception_caught_listeners_.push_back(listener);
387    have_exception_caught_listeners_ = true;
388  }
389  UpdateInterpreterHandlerTable();
390}
391
392void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
393  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
394
395  if ((events & kMethodEntered) != 0) {
396    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
397                              listener) != method_entry_listeners_.end();
398    if (contains) {
399      method_entry_listeners_.remove(listener);
400    }
401    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
402  }
403  if ((events & kMethodExited) != 0) {
404    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
405                              listener) != method_exit_listeners_.end();
406    if (contains) {
407      method_exit_listeners_.remove(listener);
408    }
409    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
410  }
411  if ((events & kMethodUnwind) != 0) {
412    method_unwind_listeners_.remove(listener);
413  }
414  if ((events & kDexPcMoved) != 0) {
415    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
416                              listener) != dex_pc_listeners_.end();
417    if (contains) {
418      dex_pc_listeners_.remove(listener);
419    }
420    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
421  }
422  if ((events & kFieldRead) != 0) {
423    bool contains = std::find(field_read_listeners_.begin(), field_read_listeners_.end(),
424                              listener) != field_read_listeners_.end();
425    if (contains) {
426      field_read_listeners_.remove(listener);
427    }
428    have_field_read_listeners_ = field_read_listeners_.size() > 0;
429  }
430  if ((events & kFieldWritten) != 0) {
431    bool contains = std::find(field_write_listeners_.begin(), field_write_listeners_.end(),
432                              listener) != field_write_listeners_.end();
433    if (contains) {
434      field_write_listeners_.remove(listener);
435    }
436    have_field_write_listeners_ = field_write_listeners_.size() > 0;
437  }
438  if ((events & kExceptionCaught) != 0) {
439    exception_caught_listeners_.remove(listener);
440    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
441  }
442  UpdateInterpreterHandlerTable();
443}
444
// Transitions the runtime between instrumentation levels:
//   0 = no stubs, 1 = method entry/exit stubs, 2 = full interpreter.
// Installs or removes method stubs on all classes and pushes/pops
// instrumentation frames on every thread's stack as needed.
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  // The thread list lock is taken below; holding it here would deadlock.
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = deoptimized_methods_.empty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}
498
499static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
500  thread->ResetQuickAllocEntryPointsForThread();
501}
502
503void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
504  Runtime* runtime = Runtime::Current();
505  ThreadList* tl = runtime->GetThreadList();
506  if (runtime->IsStarted()) {
507    tl->SuspendAll();
508  }
509  {
510    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
511    SetQuickAllocEntryPointsInstrumented(instrumented);
512    ResetQuickAllocEntryPoints();
513  }
514  if (runtime->IsStarted()) {
515    tl->ResumeAll();
516  }
517}
518
519void Instrumentation::InstrumentQuickAllocEntryPoints() {
520  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
521  //       should be guarded by a lock.
522  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
523  const bool enable_instrumentation =
524      quick_alloc_entry_points_instrumentation_counter_.FetchAndAdd(1) == 0;
525  if (enable_instrumentation) {
526    SetEntrypointsInstrumented(true);
527  }
528}
529
530void Instrumentation::UninstrumentQuickAllocEntryPoints() {
531  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
532  //       should be guarded by a lock.
533  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
534  const bool disable_instrumentation =
535      quick_alloc_entry_points_instrumentation_counter_.FetchAndSub(1) == 1;
536  if (disable_instrumentation) {
537    SetEntrypointsInstrumented(false);
538  }
539}
540
541void Instrumentation::ResetQuickAllocEntryPoints() {
542  Runtime* runtime = Runtime::Current();
543  if (runtime->IsStarted()) {
544    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
545    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
546  }
547}
548
// Called when new code becomes available for a method (e.g. after JIT/oat
// resolution). Installs the given code unless the current instrumentation
// level requires the method to stay routed through interpreter or
// entry/exit stubs.
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) const {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // Fast path: no instrumentation, just take the provided code.
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Method must execute in the interpreter.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) ||
               quick_code == GetQuickToInterpreterBridge()) {
      // Trampolines and bridges are installed as-is; their portable
      // counterpart must match.
      DCHECK((portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker())) ||
             (portable_code == GetPortableToInterpreterBridge()));
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    } else if (entry_exit_stubs_installed_) {
      // Route through the instrumentation entry stub instead of real code.
      new_quick_code = GetQuickInstrumentationEntryPoint();
      new_portable_code = GetPortableToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}
582
// Forces |method| to execute in the interpreter by switching its entry
// points to the interpreter bridges, and records it in the deoptimized set.
// CHECK-fails if the method is already deoptimized, or is native/proxy/
// abstract (those cannot be interpreted).
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair;
  {
    // Hold the lock only for the set insertion; the CHECK below reads the
    // result outside the critical section.
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    pair = deoptimized_methods_.insert(method);
  }
  bool already_deoptimized = !pair.second;
  CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";

  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickToInterpreterBridge(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
608
// Reverses Deoptimize for |method|: removes it from the deoptimized set and,
// unless everything is running in the interpreter anyway, restores its real
// code (or the resolution trampoline for static methods of uninitialized
// classes). CHECK-fails if the method was not deoptimized.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    auto it = deoptimized_methods_.find(method);
    CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    deoptimized_methods_.erase(it);
    // Capture emptiness under the lock; used below without it.
    empty = deoptimized_methods_.empty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // Class not initialized yet: keep the resolution trampoline so first
      // use still triggers initialization.
      UpdateEntrypoints(method, GetQuickResolutionTrampoline(class_linker),
                        GetPortableResolutionTrampoline(class_linker), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
648
649bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
650  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
651  DCHECK(method != nullptr);
652  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
653}
654
655void Instrumentation::EnableDeoptimization() {
656  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
657  CHECK(deoptimized_methods_.empty());
658  CHECK_EQ(deoptimization_enabled_, false);
659  deoptimization_enabled_ = true;
660}
661
// Turns off deoptimization support: undoes full deoptimization if active,
// then undeoptimizes every individually deoptimized method. The set is read
// under the lock but Undeoptimize() is called outside it (it takes the lock
// itself), hence the re-lock-per-iteration loop.
void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimized selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (deoptimized_methods_.empty()) {
        break;
      }
      method = *deoptimized_methods_.begin();
    }
    // Undeoptimize acquires deoptimized_methods_lock_ itself, so call it
    // after releasing the reader lock.
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
682
683// Indicates if instrumentation should notify method enter/exit events to the listeners.
684bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
685  return deoptimization_enabled_ || interpreter_stubs_installed_;
686}
687
688void Instrumentation::DeoptimizeEverything() {
689  CHECK(!interpreter_stubs_installed_);
690  ConfigureStubs(false, true);
691}
692
693void Instrumentation::UndeoptimizeEverything() {
694  CHECK(interpreter_stubs_installed_);
695  ConfigureStubs(false, false);
696}
697
698void Instrumentation::EnableMethodTracing() {
699  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
700  ConfigureStubs(!require_interpreter, require_interpreter);
701}
702
703void Instrumentation::DisableMethodTracing() {
704  ConfigureStubs(false, false);
705}
706
707const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
708  Runtime* runtime = Runtime::Current();
709  if (LIKELY(!instrumentation_stubs_installed_)) {
710    const void* code = method->GetEntryPointFromQuickCompiledCode();
711    DCHECK(code != NULL);
712    if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker()) &&
713               code != GetQuickToInterpreterBridge())) {
714      return code;
715    }
716  }
717  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
718}
719
// Dispatches a method-entered event to every registered listener. The
// iterator is advanced before invoking the listener so that a listener
// removing itself from the list during the callback does not invalidate the
// iterator we hold.
void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}
733
// Notifies all method-exit listeners that `method` is returning `return_value` at `dex_pc`.
void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  // Advancing before the callback tolerates a listener removing itself; see MethodEnterEventImpl.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}
747
748void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
749                                        mirror::ArtMethod* method,
750                                        uint32_t dex_pc) const {
751  if (have_method_unwind_listeners_) {
752    for (InstrumentationListener* listener : method_unwind_listeners_) {
753      listener->MethodUnwind(thread, this_object, method, dex_pc);
754    }
755  }
756}
757
758void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
759                                          mirror::ArtMethod* method,
760                                          uint32_t dex_pc) const {
761  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
762  // action where it can remove itself as a listener and break the iterator. The copy only works
763  // around the problem and in general we may have to move to something like reference counting to
764  // ensure listeners are deleted correctly.
765  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
766  for (InstrumentationListener* listener : copy) {
767    listener->DexPcMoved(thread, this_object, method, dex_pc);
768  }
769}
770
771void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
772                                         mirror::ArtMethod* method, uint32_t dex_pc,
773                                         mirror::ArtField* field) const {
774  if (have_field_read_listeners_) {
775    // TODO: same comment than DexPcMovedEventImpl.
776    std::list<InstrumentationListener*> copy(field_read_listeners_);
777    for (InstrumentationListener* listener : copy) {
778      listener->FieldRead(thread, this_object, method, dex_pc, field);
779    }
780  }
781}
782
783void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
784                                         mirror::ArtMethod* method, uint32_t dex_pc,
785                                         mirror::ArtField* field, const JValue& field_value) const {
786  if (have_field_write_listeners_) {
787    // TODO: same comment than DexPcMovedEventImpl.
788    std::list<InstrumentationListener*> copy(field_write_listeners_);
789    for (InstrumentationListener* listener : copy) {
790      listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
791    }
792  }
793}
794
// Notifies exception-caught listeners that `exception_object` will be handled by `catch_method`
// at `catch_dex_pc`. The pending exception is temporarily cleared on the thread for the
// duration of the callbacks and restored afterwards.
void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (have_exception_caught_listeners_) {
    // The thread's pending exception must be the one being reported.
    DCHECK_EQ(thread->GetException(NULL), exception_object);
    // Clear it so listeners run without a pending exception.
    thread->ClearException();
    for (InstrumentationListener* listener : exception_caught_listeners_) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    // Re-install the exception so normal catch handling proceeds.
    thread->SetException(throw_location, exception_object);
  }
}
808
809static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
810                            int delta)
811    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
812  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
813  if (frame_id != instrumentation_frame.frame_id_) {
814    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
815        << instrumentation_frame.frame_id_;
816    StackVisitor::DescribeStack(self);
817    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
818  }
819}
820
821void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
822                                                    mirror::ArtMethod* method,
823                                                    uintptr_t lr, bool interpreter_entry) {
824  // We have a callee-save frame meaning this value is guaranteed to never be 0.
825  size_t frame_id = StackVisitor::ComputeNumFrames(self);
826  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
827  if (kVerboseInstrumentation) {
828    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
829  }
830  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
831                                                                   frame_id, interpreter_entry);
832  stack->push_front(instrumentation_frame);
833
834  MethodEnterEvent(self, this_object, method, 0);
835}
836
// Pops the top instrumentation frame on method return, fires the method-exit event and decides
// where execution continues. Returns the PC to resume at; when deoptimizing, the low 32 bits
// hold the deoptimization entry point and the high 32 bits the original return PC (assumes
// 32-bit code pointers).
uint64_t Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                       uint64_t gpr_result, uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  // The shorty's first char encodes the return type; pick the register holding the result.
  char return_shorty = MethodHelper(method).GetShorty()[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    // Stash the result for the interpreter, then pack (deopt entry point, original return PC)
    // into the 64-bit return for the assembly stub to unpack.
    self->SetDeoptimizationReturnValue(return_value);
    return static_cast<uint64_t>(GetQuickDeoptimizationEntryPoint()) |
        (static_cast<uint64_t>(*return_pc) << 32);
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return *return_pc;
  }
}
890
891void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
892  // Do the pop.
893  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
894  CHECK_GT(stack->size(), 0U);
895  InstrumentationStackFrame instrumentation_frame = stack->front();
896  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
897  stack->pop_front();
898
899  mirror::ArtMethod* method = instrumentation_frame.method_;
900  if (is_deoptimization) {
901    if (kVerboseInstrumentation) {
902      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
903    }
904  } else {
905    if (kVerboseInstrumentation) {
906      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
907    }
908
909    // Notify listeners of method unwind.
910    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
911    //       return_pc.
912    uint32_t dex_pc = DexFile::kDexNoIndex;
913    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
914  }
915}
916
// Reports each deoptimized method to the GC root visitor. The callback may rewrite the pointer
// (the Object** it receives); since std::set orders by pointer value, a fresh set is built from
// the possibly-updated pointers and swapped in.
void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (deoptimized_methods_.empty()) {
    return;
  }
  std::set<mirror::ArtMethod*> new_deoptimized_methods;
  for (mirror::ArtMethod* method : deoptimized_methods_) {
    DCHECK(method != nullptr);
    // Visit via a local so the callback can update the pointer without mutating the set key
    // in place.
    callback(reinterpret_cast<mirror::Object**>(&method), arg, 0, kRootVMInternal);
    new_deoptimized_methods.insert(method);
  }
  deoptimized_methods_ = new_deoptimized_methods;
}
930
931std::string InstrumentationStackFrame::Dump() const {
932  std::ostringstream os;
933  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
934      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
935  return os.str();
936}
937
938}  // namespace instrumentation
939}  // namespace art
940