// instrumentation.cc revision 799eb3a5555254427db269921042419bc30d4d86
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "instrumentation.h"
18
19#include <sys/uio.h>
20
21#include "arch/context.h"
22#include "atomic.h"
23#include "base/unix_file/fd_file.h"
24#include "class_linker.h"
25#include "debugger.h"
26#include "dex_file-inl.h"
27#include "entrypoints/quick/quick_alloc_entrypoints.h"
28#include "interpreter/interpreter.h"
29#include "mirror/art_method-inl.h"
30#include "mirror/class-inl.h"
31#include "mirror/dex_cache.h"
32#include "mirror/object_array-inl.h"
33#include "mirror/object-inl.h"
34#include "nth_caller_visitor.h"
35#if !defined(ART_USE_PORTABLE_COMPILER)
36#include "entrypoints/quick/quick_entrypoints.h"
37#endif
38#include "os.h"
39#include "scoped_thread_state_change.h"
40#include "thread.h"
41#include "thread_list.h"
42
43namespace art {
44
45namespace instrumentation {
46
// When true, every stub (un)installation and stack walk below logs its decisions. Kept as a
// plain const so it can be flipped for local debugging without touching call sites.
const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;
53
54static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
55    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
56  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
57  return instrumentation->InstallStubsForClass(klass);
58}
59
// Constructs an Instrumentation with no stubs installed, no listeners registered and
// deoptimization support disabled. The interpreter handler table starts as the main
// (uninstrumented) table and the alloc-entrypoint instrumentation counter at zero.
Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}
73
74bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
75  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
76    InstallStubsForMethod(klass->GetDirectMethod(i));
77  }
78  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
79    InstallStubsForMethod(klass->GetVirtualMethod(i));
80  }
81  return true;
82}
83
// Updates all three of a method's entrypoints (portable, quick and interpreter-bridge) so they
// stay mutually consistent, and maintains the method's "portable compiled" flag. The interpreter
// entrypoint is chosen so interpreter-to-interpreter calls avoid a pointless transition to
// compiled code when the quick entrypoint is itself an interpreter bridge.
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    // No real portable code is being installed; clear the stale flag.
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
        (quick_code == class_linker->GetQuickResolutionTrampoline() &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        // The quick and portable entrypoints must agree: interpreter bridge pairs with
        // interpreter bridge, resolution trampoline with resolution trampoline.
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) {
          DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline());
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      // The method will execute in the interpreter, so interpreter callers can stay there.
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      // Compiled code is installed; interpreter callers must transition to compiled code.
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}
117
// Selects and installs the entrypoints for |method| according to the current instrumentation
// level: none (restore real code), entry/exit stubs, or full interpreter. Static methods of
// uninitialized classes keep the resolution trampoline so class initialization still runs
// (see ClassLinker::FixupStaticTrampolines).
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Every branch below assigns both pointers before they are consumed.
  const void* new_portable_code;
  const void* new_quick_code;
  // "Uninstall" means no instrumentation stubs are wanted at all.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Even without stubs this method must keep running in the interpreter.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      // Static method of an uninitialized class: route through the resolution trampoline so
      // the first call triggers class initialization.
      new_portable_code = class_linker->GetPortableResolutionTrampoline();
      new_quick_code = class_linker->GetQuickResolutionTrampoline();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline());
        }
      } else {
        new_portable_code = class_linker->GetPortableResolutionTrampoline();
        new_quick_code = class_linker->GetQuickResolutionTrampoline();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}
166
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Walks |thread|'s stack, hijacking each quick frame's return PC with the instrumentation
  // exit stub and recording the original PC in the thread's instrumentation stack. Shadow
  // (interpreter) frames are collected separately so method-entry events can be synthesized.
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context),  instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        // Shadow frame: there is no return PC to hijack. Record it so the caller can emit
        // method-entry events for interleaved interpreter frames.
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so method enter event must have been reported. However we
          // need to push a DEX pc into the dex_pcs_ list to match size of instrumentation stack.
          // Since we won't report method entry here, we can safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      // The PC this frame will return to (the caller's resume point) maps to the caller's DEX pc.
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  // Every instrumentation frame must have a recorded DEX pc.
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    // Walk both stacks from the outermost frame inwards, merging shadow frames (larger frame ids
    // are outer frames) in between the hijacked quick frames.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}
306
307// Removes the instrumentation exit pc as the return PC for every quick frame.
308static void InstrumentationRestoreStack(Thread* thread, void* arg)
309    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
310  struct RestoreStackVisitor : public StackVisitor {
311    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
312                        Instrumentation* instrumentation)
313        : StackVisitor(thread, NULL), thread_(thread),
314          instrumentation_exit_pc_(instrumentation_exit_pc),
315          instrumentation_(instrumentation),
316          instrumentation_stack_(thread->GetInstrumentationStack()),
317          frames_removed_(0) {}
318
319    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
320      if (instrumentation_stack_->size() == 0) {
321        return false;  // Stop.
322      }
323      mirror::ArtMethod* m = GetMethod();
324      if (GetCurrentQuickFrame() == NULL) {
325        if (kVerboseInstrumentation) {
326          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
327        }
328        return true;  // Ignore shadow frames.
329      }
330      if (m == NULL) {
331        if (kVerboseInstrumentation) {
332          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
333        }
334        return true;  // Ignore upcalls.
335      }
336      bool removed_stub = false;
337      // TODO: make this search more efficient?
338      const size_t frameId = GetFrameId();
339      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
340        if (instrumentation_frame.frame_id_ == frameId) {
341          if (kVerboseInstrumentation) {
342            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
343          }
344          if (instrumentation_frame.interpreter_entry_) {
345            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
346          } else {
347            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
348          }
349          SetReturnPc(instrumentation_frame.return_pc_);
350          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
351            // Create the method exit events. As the methods didn't really exit the result is 0.
352            // We only do this if no debugger is attached to prevent from posting events twice.
353            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
354                                              GetDexPc(), JValue());
355          }
356          frames_removed_++;
357          removed_stub = true;
358          break;
359        }
360      }
361      if (!removed_stub) {
362        if (kVerboseInstrumentation) {
363          LOG(INFO) << "  No exit stub in " << DescribeLocation();
364        }
365      }
366      return true;  // Continue.
367    }
368    Thread* const thread_;
369    const uintptr_t instrumentation_exit_pc_;
370    Instrumentation* const instrumentation_;
371    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
372    size_t frames_removed_;
373  };
374  if (kVerboseInstrumentation) {
375    std::string thread_name;
376    thread->GetThreadName(thread_name);
377    LOG(INFO) << "Removing exit stubs in " << thread_name;
378  }
379  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
380  if (stack->size() > 0) {
381    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
382    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
383    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
384    visitor.WalkStack(true);
385    CHECK_EQ(visitor.frames_removed_, stack->size());
386    while (stack->size() > 0) {
387      stack->pop_front();
388    }
389  }
390}
391
392void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
393  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
394  if ((events & kMethodEntered) != 0) {
395    method_entry_listeners_.push_back(listener);
396    have_method_entry_listeners_ = true;
397  }
398  if ((events & kMethodExited) != 0) {
399    method_exit_listeners_.push_back(listener);
400    have_method_exit_listeners_ = true;
401  }
402  if ((events & kMethodUnwind) != 0) {
403    method_unwind_listeners_.push_back(listener);
404    have_method_unwind_listeners_ = true;
405  }
406  if ((events & kDexPcMoved) != 0) {
407    dex_pc_listeners_.push_back(listener);
408    have_dex_pc_listeners_ = true;
409  }
410  if ((events & kFieldRead) != 0) {
411    field_read_listeners_.push_back(listener);
412    have_field_read_listeners_ = true;
413  }
414  if ((events & kFieldWritten) != 0) {
415    field_write_listeners_.push_back(listener);
416    have_field_write_listeners_ = true;
417  }
418  if ((events & kExceptionCaught) != 0) {
419    exception_caught_listeners_.push_back(listener);
420    have_exception_caught_listeners_ = true;
421  }
422  UpdateInterpreterHandlerTable();
423}
424
425void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
426  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
427
428  if ((events & kMethodEntered) != 0) {
429    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
430                              listener) != method_entry_listeners_.end();
431    if (contains) {
432      method_entry_listeners_.remove(listener);
433    }
434    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
435  }
436  if ((events & kMethodExited) != 0) {
437    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
438                              listener) != method_exit_listeners_.end();
439    if (contains) {
440      method_exit_listeners_.remove(listener);
441    }
442    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
443  }
444  if ((events & kMethodUnwind) != 0) {
445    method_unwind_listeners_.remove(listener);
446  }
447  if ((events & kDexPcMoved) != 0) {
448    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
449                              listener) != dex_pc_listeners_.end();
450    if (contains) {
451      dex_pc_listeners_.remove(listener);
452    }
453    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
454  }
455  if ((events & kFieldRead) != 0) {
456    bool contains = std::find(field_read_listeners_.begin(), field_read_listeners_.end(),
457                              listener) != field_read_listeners_.end();
458    if (contains) {
459      field_read_listeners_.remove(listener);
460    }
461    have_field_read_listeners_ = field_read_listeners_.size() > 0;
462  }
463  if ((events & kFieldWritten) != 0) {
464    bool contains = std::find(field_write_listeners_.begin(), field_write_listeners_.end(),
465                              listener) != field_write_listeners_.end();
466    if (contains) {
467      field_write_listeners_.remove(listener);
468    }
469    have_field_write_listeners_ = field_write_listeners_.size() > 0;
470  }
471  if ((events & kExceptionCaught) != 0) {
472    exception_caught_listeners_.remove(listener);
473    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
474  }
475  UpdateInterpreterHandlerTable();
476}
477
// Computes the required instrumentation level (0 = none, 1 = entry/exit stubs, 2 = full
// interpreter), and installs or removes stubs and stack hijacking to reach it. A no-op when the
// desired level already matches the current one.
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  // thread_list_lock_ is taken below; holding it already would deadlock.
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    // Re-point every method's entrypoints, then hijack return PCs on every thread's stack.
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}
531
// ThreadList::ForEach() callback: re-derives |thread|'s quick allocation entrypoints from the
// current global instrumentation state. |arg| is unused.
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}
535
// Swaps the quick allocation entrypoints between instrumented and uninstrumented versions for
// all threads. Threads are suspended during the swap (once the runtime has started) so no thread
// executes an allocation entrypoint while it is being replaced.
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    // Hold the shutdown lock so the runtime cannot tear down mid-swap.
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}
551
552void Instrumentation::InstrumentQuickAllocEntryPoints() {
553  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
554  //       should be guarded by a lock.
555  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
556  const bool enable_instrumentation =
557      quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0;
558  if (enable_instrumentation) {
559    SetEntrypointsInstrumented(true);
560  }
561}
562
563void Instrumentation::UninstrumentQuickAllocEntryPoints() {
564  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
565  //       should be guarded by a lock.
566  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
567  const bool disable_instrumentation =
568      quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1;
569  if (disable_instrumentation) {
570    SetEntrypointsInstrumented(false);
571  }
572}
573
574void Instrumentation::ResetQuickAllocEntryPoints() {
575  Runtime* runtime = Runtime::Current();
576  if (runtime->IsStarted()) {
577    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
578    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
579  }
580}
581
// Installs new code for |method|, but filtered through the current instrumentation state: if
// stubs are installed the requested code may be overridden with the interpreter bridge or the
// instrumentation entry stub so instrumentation is not silently bypassed.
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // Fast path: no instrumentation, install the code as requested.
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Method must run in the interpreter; ignore the requested compiled code.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (quick_code == class_linker->GetQuickResolutionTrampoline() ||
          quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
          quick_code == GetQuickToInterpreterBridge()) {
        // Trampolines and bridges are kept as-is; they handle instrumentation themselves.
        DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) ||
               (portable_code == GetPortableToInterpreterBridge()));
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
        new_portable_code = GetPortableToInterpreterBridge();
        new_have_portable_code = false;
      } else {
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}
619
620bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
621  // Note that the insert() below isn't read barrier-aware. So, this
622  // FindDeoptimizedMethod() call is necessary or else we would end up
623  // storing the same method twice in the map (the from-space and the
624  // to-space ones).
625  if (FindDeoptimizedMethod(method)) {
626    // Already in the map. Return.
627    return false;
628  }
629  // Not found. Add it.
630  int32_t hash_code = method->IdentityHashCode();
631  deoptimized_methods_.insert(std::make_pair(hash_code, method));
632  return true;
633}
634
635bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
636  int32_t hash_code = method->IdentityHashCode();
637  auto range = deoptimized_methods_.equal_range(hash_code);
638  for (auto it = range.first; it != range.second; ++it) {
639    mirror::ArtMethod** root = &it->second;
640    mirror::ArtMethod* m = ReadBarrier::BarrierForRoot<mirror::ArtMethod>(root);
641    if (m == method) {
642      // Found.
643      return true;
644    }
645  }
646  // Not found.
647  return false;
648}
649
650mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
651  auto it = deoptimized_methods_.begin();
652  if (it == deoptimized_methods_.end()) {
653    // Empty.
654    return nullptr;
655  }
656  mirror::ArtMethod** root = &it->second;
657  return ReadBarrier::BarrierForRoot<mirror::ArtMethod>(root);
658}
659
660bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
661  int32_t hash_code = method->IdentityHashCode();
662  auto range = deoptimized_methods_.equal_range(hash_code);
663  for (auto it = range.first; it != range.second; ++it) {
664    mirror::ArtMethod** root = &it->second;
665    mirror::ArtMethod* m = ReadBarrier::BarrierForRoot<mirror::ArtMethod>(root);
666    if (m == method) {
667      // Found. Erase and return.
668      deoptimized_methods_.erase(it);
669      return true;
670    }
671  }
672  // Not found.
673  return false;
674}
675
// Returns true when no method is individually deoptimized. Callers in this file hold
// deoptimized_methods_lock_ when calling this — presumably required; confirm against header.
bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}
679
// Forces |method| to run in the interpreter by recording it as deoptimized and installing the
// instrumentation entry stub. Aborts if the method is native, proxy, abstract, or already
// deoptimized. Also hijacks existing stack frames so in-flight invocations are covered.
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";
  }
  // If interpreter stubs are installed everywhere, this method is already covered.
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
702
// Reverses Deoptimize() for |method|: removes it from the deoptimized set, restores its real
// code (or the resolution trampoline for statics of uninitialized classes), and, if it was the
// last deoptimized method, restores every thread's hijacked stack frames.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    // Remember whether this was the last deoptimized method; checked outside the lock below.
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline.
      UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(),
                        class_linker->GetPortableResolutionTrampoline(), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
742
743bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
744  DCHECK(method != nullptr);
745  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
746  return FindDeoptimizedMethod(method);
747}
748
// Turns on deoptimization support. Must be called while no method is deoptimized and support is
// not already enabled.
void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}
755
// Turns off deoptimization support: first undoes a global deoptimization if one is active, then
// undeoptimizes every individually-deoptimized method.
void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimized selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      // Take the reader lock only to pick the next method; it must be released before calling
      // Undeoptimize(), which acquires the writer lock itself.
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
777
778// Indicates if instrumentation should notify method enter/exit events to the listeners.
779bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
780  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
781}
782
// Forces all code to run under the interpreter by installing interpreter stubs globally.
// Must not already be in that state.
void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}
787
// Undoes DeoptimizeEverything() by removing the globally-installed interpreter stubs.
// Requires that a global deoptimization is currently active.
void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}
792
793void Instrumentation::EnableMethodTracing() {
794  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
795  ConfigureStubs(!require_interpreter, require_interpreter);
796}
797
// Disables method tracing by removing both instrumentation and interpreter stubs.
void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}
801
802const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
803  Runtime* runtime = Runtime::Current();
804  if (LIKELY(!instrumentation_stubs_installed_)) {
805    const void* code = method->GetEntryPointFromQuickCompiledCode();
806    DCHECK(code != nullptr);
807    ClassLinker* class_linker = runtime->GetClassLinker();
808    if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) &&
809        LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) &&
810        LIKELY(code != GetQuickToInterpreterBridge())) {
811      return code;
812    }
813  }
814  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
815}
816
// Dispatches a method-entry event to every registered method-entry listener.
void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  // The iterator is advanced (and the end condition captured) *before* invoking the listener, so
  // a listener that removes itself during the callback does not invalidate 'it'.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}
830
// Dispatches a method-exit event (including the method's return value) to every registered
// method-exit listener.
void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  // The iterator is advanced before the callback so a listener removing itself is safe.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}
844
845void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
846                                        mirror::ArtMethod* method,
847                                        uint32_t dex_pc) const {
848  if (have_method_unwind_listeners_) {
849    for (InstrumentationListener* listener : method_unwind_listeners_) {
850      listener->MethodUnwind(thread, this_object, method, dex_pc);
851    }
852  }
853}
854
855void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
856                                          mirror::ArtMethod* method,
857                                          uint32_t dex_pc) const {
858  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
859  // action where it can remove itself as a listener and break the iterator. The copy only works
860  // around the problem and in general we may have to move to something like reference counting to
861  // ensure listeners are deleted correctly.
862  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
863  for (InstrumentationListener* listener : copy) {
864    listener->DexPcMoved(thread, this_object, method, dex_pc);
865  }
866}
867
868void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
869                                         mirror::ArtMethod* method, uint32_t dex_pc,
870                                         mirror::ArtField* field) const {
871  // TODO: same comment than DexPcMovedEventImpl.
872  std::list<InstrumentationListener*> copy(field_read_listeners_);
873  for (InstrumentationListener* listener : copy) {
874    listener->FieldRead(thread, this_object, method, dex_pc, field);
875  }
876}
877
878void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
879                                         mirror::ArtMethod* method, uint32_t dex_pc,
880                                         mirror::ArtField* field, const JValue& field_value) const {
881  // TODO: same comment than DexPcMovedEventImpl.
882  std::list<InstrumentationListener*> copy(field_write_listeners_);
883  for (InstrumentationListener* listener : copy) {
884    listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
885  }
886}
887
// Dispatches an exception-caught event to every registered exception-caught listener. The
// thread's pending exception is cleared around the callbacks and restored afterwards, along
// with its reported-to-instrumentation flag.
void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    // Save the reported flag and clear the exception so listeners run without a pending one.
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    // TODO: The copy below is due to the debug listener having an action where it can remove
    // itself as a listener and break the iterator. The copy only works around the problem.
    std::list<InstrumentationListener*> copy(exception_caught_listeners_);
    for (InstrumentationListener* listener : copy) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    // Re-install the exception exactly as it was before the callbacks.
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}
906
907static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
908                            int delta)
909    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
910  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
911  if (frame_id != instrumentation_frame.frame_id_) {
912    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
913        << instrumentation_frame.frame_id_;
914    StackVisitor::DescribeStack(self);
915    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
916  }
917}
918
919void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
920                                                    mirror::ArtMethod* method,
921                                                    uintptr_t lr, bool interpreter_entry) {
922  // We have a callee-save frame meaning this value is guaranteed to never be 0.
923  size_t frame_id = StackVisitor::ComputeNumFrames(self);
924  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
925  if (kVerboseInstrumentation) {
926    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
927  }
928  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
929                                                                   frame_id, interpreter_entry);
930  stack->push_front(instrumentation_frame);
931
932  if (!interpreter_entry) {
933    MethodEnterEvent(self, this_object, method, 0);
934  }
935}
936
937TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
938                                                            uint64_t gpr_result,
939                                                            uint64_t fpr_result) {
940  // Do the pop.
941  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
942  CHECK_GT(stack->size(), 0U);
943  InstrumentationStackFrame instrumentation_frame = stack->front();
944  stack->pop_front();
945
946  // Set return PC and check the sanity of the stack.
947  *return_pc = instrumentation_frame.return_pc_;
948  CheckStackDepth(self, instrumentation_frame, 0);
949
950  mirror::ArtMethod* method = instrumentation_frame.method_;
951  uint32_t length;
952  char return_shorty = method->GetShorty(&length)[0];
953  JValue return_value;
954  if (return_shorty == 'V') {
955    return_value.SetJ(0);
956  } else if (return_shorty == 'F' || return_shorty == 'D') {
957    return_value.SetJ(fpr_result);
958  } else {
959    return_value.SetJ(gpr_result);
960  }
961  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
962  //       return_pc.
963  uint32_t dex_pc = DexFile::kDexNoIndex;
964  mirror::Object* this_object = instrumentation_frame.this_object_;
965  if (!instrumentation_frame.interpreter_entry_) {
966    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
967  }
968
969  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
970  // back to an upcall.
971  NthCallerVisitor visitor(self, 1, true);
972  visitor.WalkStack(true);
973  bool deoptimize = (visitor.caller != NULL) &&
974                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
975  if (deoptimize && kVerboseInstrumentation) {
976    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
977  }
978  if (deoptimize) {
979    if (kVerboseInstrumentation) {
980      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
981                << " result is " << std::hex << return_value.GetJ();
982    }
983    self->SetDeoptimizationReturnValue(return_value);
984    return GetTwoWordSuccessValue(*return_pc,
985                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
986  } else {
987    if (kVerboseInstrumentation) {
988      LOG(INFO) << "Returning from " << PrettyMethod(method)
989                << " to PC " << reinterpret_cast<void*>(*return_pc);
990    }
991    return GetTwoWordSuccessValue(0, *return_pc);
992  }
993}
994
995void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
996  // Do the pop.
997  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
998  CHECK_GT(stack->size(), 0U);
999  InstrumentationStackFrame instrumentation_frame = stack->front();
1000  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
1001  stack->pop_front();
1002
1003  mirror::ArtMethod* method = instrumentation_frame.method_;
1004  if (is_deoptimization) {
1005    if (kVerboseInstrumentation) {
1006      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
1007    }
1008  } else {
1009    if (kVerboseInstrumentation) {
1010      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
1011    }
1012
1013    // Notify listeners of method unwind.
1014    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
1015    //       return_pc.
1016    uint32_t dex_pc = DexFile::kDexNoIndex;
1017    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
1018  }
1019}
1020
1021void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
1022  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
1023  if (IsDeoptimizedMethodsEmpty()) {
1024    return;
1025  }
1026  for (auto pair : deoptimized_methods_) {
1027    mirror::ArtMethod** root = &pair.second;
1028    callback(reinterpret_cast<mirror::Object**>(root), arg, 0, kRootVMInternal);
1029  }
1030}
1031
1032std::string InstrumentationStackFrame::Dump() const {
1033  std::ostringstream os;
1034  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
1035      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
1036  return os.str();
1037}
1038
1039}  // namespace instrumentation
1040}  // namespace art
1041