instrumentation.cc revision 95b4c65da93500cdbdcaa3e01010771ef3f466f1
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "arch/context.h"
#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

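// Sets a method's portable and quick entrypoints, keeps its "portable compiled" flag in sync,
// and selects the matching interpreter entrypoint: the interpreter-to-interpreter bridge when
// the quick code routes into the interpreter, the interpreter-to-compiled-code bridge otherwise.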
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
        (quick_code == class_linker->GetQuickResolutionTrampoline() &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly() &&
         !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) {
          DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline());
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

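// Installs the entrypoints a single method should use at the current instrumentation level:
// compiled code when uninstrumented, the instrumentation entry stub for entry/exit events, or
// the interpreter bridge when interpretation is required. Abstract methods, proxy methods and
// Proxy.<init> are left untouched.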
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  std::string temp;
  // Note that the Proxy class itself is not a proxy class.
  if (strcmp(method->GetDeclaringClass()->GetDescriptor(&temp), "Ljava/lang/reflect/Proxy;") == 0 &&
      method->IsConstructor()) {
    // Do not stub Proxy.<init>.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_portable_code = class_linker->GetPortableResolutionTrampoline();
      new_quick_code = class_linker->GetQuickResolutionTrampoline();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, the code of all its static methods will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline());
        }
      } else {
        new_portable_code = class_linker->GetPortableResolutionTrampoline();
        new_quick_code = class_linker->GetQuickResolutionTrampoline();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// in front of the existing ones.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame, so the method enter event must already have been
          // reported. However, we need to push a dex pc into the dex_pcs_ list to match the size
          // of the instrumentation stack. Since we won't report method entry here, we can safely
          // push any dex pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          // Ignore unresolved methods since they will be instrumented after resolution.
          return true;
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame on which the instrumentation exit stub is already installed.
        // Instrumentation should then already be installed on all previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert the frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to avoid posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frame_id = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frame_id) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to avoid posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    stack->clear();
  }
}

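// Listener registration and removal require the mutator lock to be exclusively held, i.e. they
// must happen at a suspend-all point. A minimal (hypothetical) registration sketch, where
// MyListener is some InstrumentationListener implementation:
//
//   MyListener listener;
//   instrumentation->AddListener(&listener,
//                                Instrumentation::kMethodEntered | Instrumentation::kMethodExited);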
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    dex_pc_listeners_.push_back(listener);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    field_read_listeners_.push_back(listener);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    field_write_listeners_.push_back(listener);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.push_back(listener);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
                              listener) != method_entry_listeners_.end();
    if (contains) {
      method_entry_listeners_.remove(listener);
    }
    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
  }
  if ((events & kMethodExited) != 0) {
    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
                              listener) != method_exit_listeners_.end();
    if (contains) {
      method_exit_listeners_.remove(listener);
    }
    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.remove(listener);
    // Keep the flag in sync with the listener list, as done for the other events.
    have_method_unwind_listeners_ = method_unwind_listeners_.size() > 0;
  }
  if ((events & kDexPcMoved) != 0) {
    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
                              listener) != dex_pc_listeners_.end();
    if (contains) {
      dex_pc_listeners_.remove(listener);
    }
    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
  }
  if ((events & kFieldRead) != 0) {
    bool contains = std::find(field_read_listeners_.begin(), field_read_listeners_.end(),
                              listener) != field_read_listeners_.end();
    if (contains) {
      field_read_listeners_.remove(listener);
    }
    have_field_read_listeners_ = field_read_listeners_.size() > 0;
  }
  if ((events & kFieldWritten) != 0) {
    bool contains = std::find(field_write_listeners_.begin(), field_write_listeners_.end(),
                              listener) != field_write_listeners_.end();
    if (contains) {
      field_write_listeners_.remove(listener);
    }
    have_field_write_listeners_ = field_write_listeners_.size() > 0;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.remove(listener);
    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
  }
  UpdateInterpreterHandlerTable();
}

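// Instrumentation levels used below:
//   0 - no instrumentation: methods run their compiled code directly.
//   1 - entry/exit stubs: compiled code is entered through the instrumentation entry stub.
//   2 - interpreter: all non-native code runs through the interpreter bridge.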
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

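// Swaps the quick allocation entrypoints on every thread. All threads are suspended while the
// entrypoints are updated, so no thread can allocate through a half-updated entrypoint table.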
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

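// The two methods below are reference-counted: the first InstrumentQuickAllocEntryPoints() call
// installs the instrumented entrypoints and the last matching UninstrumentQuickAllocEntryPoints()
// call removes them, so nested users compose. A minimal (hypothetical) usage sketch:
//
//   Instrumentation* inst = Runtime::Current()->GetInstrumentation();
//   inst->InstrumentQuickAllocEntryPoints();
//   // ... allocations are now routed through the instrumented entrypoints ...
//   inst->UninstrumentQuickAllocEntryPoints();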
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  //       should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true);
  }
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  //       should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

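// Called when a method's code changes (e.g. after compilation or class initialization) so the
// new code can be redirected through the appropriate instrumentation stub while stubs are
// installed.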
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (quick_code == class_linker->GetQuickResolutionTrampoline() ||
          quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
          quick_code == GetQuickToInterpreterBridge()) {
        DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) ||
               (portable_code == GetPortableToInterpreterBridge()));
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
        new_portable_code = GetPortableToInterpreterBridge();
        new_have_portable_code = false;
      } else {
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

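// deoptimized_methods_ is a multimap keyed by the method's identity hash code, guarded by
// deoptimized_methods_lock_. Hash codes can collide, hence the equal_range() walks below, and
// the GcRoot values may be moved by the GC, hence the read-barrier-aware Read() calls.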
bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
  // Note that the insert() below isn't read barrier-aware. So, this
  // FindDeoptimizedMethod() call is necessary or else we would end up
  // storing the same method twice in the map (the from-space and the
  // to-space ones).
  if (FindDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  int32_t hash_code = method->IdentityHashCode();
  deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
  return true;
}

bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found.
      return true;
    }
  }
  // Not found.
  return false;
}

mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  auto it = deoptimized_methods_.begin();
  if (it == deoptimized_methods_.end()) {
    // Empty.
    return nullptr;
  }
  return it->second.Read();
}

bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found. Erase and return.
      deoptimized_methods_.erase(it);
      return true;
    }
  }
  // Not found.
  return false;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

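// Deoptimizes a single method: registers it as deoptimized and, unless everything already runs
// in the interpreter, points its entry at the instrumentation stub and installs the exit stubs
// on every thread's stack.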
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
        << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(), GetPortableToInterpreterBridge(),
                      false);

    // Install the instrumentation exit stub and instrumentation frames. We may already have
    // installed these previously, in which case this only covers the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline.
      UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(),
                        class_linker->GetPortableResolutionTrampoline(), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return FindDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates whether instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

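// Returns the quick code the method would run if it were not instrumented: the current
// entrypoint when it is real code, otherwise the original oat code looked up via the class
// linker.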
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCode();
    DCHECK(code != nullptr);
    ClassLinker* class_linker = runtime->GetClassLinker();
    if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) &&
        LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) &&
        LIKELY(code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
  // action where it can remove itself as a listener and break the iterator. The copy only works
  // around the problem and in general we may have to move to something like reference counting to
  // ensure listeners are deleted correctly.
  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->DexPcMoved(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         mirror::ArtMethod* method, uint32_t dex_pc,
                                         mirror::ArtField* field) const {
  // TODO: same comment as DexPcMovedEventImpl.
  std::list<InstrumentationListener*> copy(field_read_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->FieldRead(thread, this_object, method, dex_pc, field);
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method, uint32_t dex_pc,
                                          mirror::ArtField* field, const JValue& field_value) const {
  // TODO: same comment as DexPcMovedEventImpl.
  std::list<InstrumentationListener*> copy(field_write_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    // TODO: The copy below is due to the debug listener having an action where it can remove
    // itself as a listener and break the iterator. The copy only works around the problem.
    std::list<InstrumentationListener*> copy(exception_caught_listeners_);
    for (InstrumentationListener* listener : copy) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc,
                                exception_object);
    }
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

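// Called on entry to an instrumented method (typically from the quick instrumentation entry
// stub): records the real return pc (lr) in a new instrumentation frame and reports the method
// enter event.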
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

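// Called when an instrumented method returns (the counterpart of the push above): restores the
// original return pc, reports the method exit event, and requests deoptimization when the caller
// must continue in the interpreter. The two returned words hold either (return pc,
// deoptimization entrypoint) or (0, return pc).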
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}

void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (IsDeoptimizedMethodsEmpty()) {
    return;
  }
  for (auto& pair : deoptimized_methods_) {
    // Visit the root in place so that a moving collector can update the stored reference.
    pair.second.VisitRoot(callback, arg, 0, kRootVMInternal);
  }
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art