instrumentation.cc revision 94f7b49578b6aaa80de8ffed230648d601393905
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "instrumentation.h"
18
19#include <sys/uio.h>
20
21#include "arch/context.h"
22#include "atomic.h"
23#include "base/unix_file/fd_file.h"
24#include "class_linker.h"
25#include "debugger.h"
26#include "dex_file-inl.h"
27#include "entrypoints/quick/quick_alloc_entrypoints.h"
28#include "gc_root-inl.h"
29#include "interpreter/interpreter.h"
30#include "mirror/art_method-inl.h"
31#include "mirror/class-inl.h"
32#include "mirror/dex_cache.h"
33#include "mirror/object_array-inl.h"
34#include "mirror/object-inl.h"
35#include "nth_caller_visitor.h"
36#if !defined(ART_USE_PORTABLE_COMPILER)
37#include "entrypoints/quick/quick_entrypoints.h"
38#endif
39#include "os.h"
40#include "scoped_thread_state_change.h"
41#include "thread.h"
42#include "thread_list.h"
43
44namespace art {
45
46namespace instrumentation {
47
// Compile-time switch for very chatty logging of stub installation/removal and
// instrumentation stack walking; useful when debugging the instrumentation itself.
const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;
54
55static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
56    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
57  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
58  return instrumentation->InstallStubsForClass(klass);
59}
60
// All instrumentation state starts disabled: no stubs installed, no listeners
// registered, no deoptimized methods. Listeners are added later via AddListener()
// and stubs are installed on demand by ConfigureStubs()/Deoptimize().
Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}
74
75bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
76  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
77    InstallStubsForMethod(klass->GetDirectMethod(i));
78  }
79  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
80    InstallStubsForMethod(klass->GetVirtualMethod(i));
81  }
82  return true;
83}
84
// Installs the given quick/portable entrypoints on |method| and keeps two pieces
// of derived state consistent with them: the IsPortableCompiled flag and the
// interpreter-entry bridge used when the interpreter calls into this method.
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  // Sync the portable-compiled flag with whether real portable code was installed.
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    // If the method will execute in the interpreter (directly via the bridge, via
    // the bridge trampoline, or via the resolution trampoline under forced
    // interpret-only), route interpreter-to-interpreter calls directly; otherwise
    // interpreter callers must go through the to-compiled-code bridge.
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
        (quick_code == class_linker->GetQuickResolutionTrampoline() &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        // Quick and portable entrypoints are expected to be paired consistently.
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) {
          DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline());
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}
118
// Chooses and installs the entrypoints |method| should use for the current
// instrumentation level: normal oat code when uninstrumented, the
// instrumentation entry stub for entry/exit events, or the to-interpreter
// bridges when the method must run interpreted (deoptimized or interpret-only).
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  // "Uninstall" means no stubs are required: restore the method's regular code.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Even when uninstalling, interpreter-only methods keep the bridges.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      // Static method of an uninitialized class: keep the resolution trampoline
      // so class initialization runs before the first invocation.
      new_portable_code = class_linker->GetPortableResolutionTrampoline();
      new_quick_code = class_linker->GetQuickResolutionTrampoline();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline());
        }
      } else {
        new_portable_code = class_linker->GetPortableResolutionTrampoline();
        new_quick_code = class_linker->GetQuickResolutionTrampoline();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}
167
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context),  instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    // Visits one frame: records shadow (interpreter) frames, hijacks the return
    // PC of quick frames with the instrumentation exit pc, and tolerates frames
    // that were instrumented by a previous pass.
    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        // Shadow frame: no return PC to hijack; remember it so method-enter
        // events can be generated for it after the walk.
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so method enter event must have been reported. However we
          // need to push a DEX pc into the dex_pcs_ list to match size of instrumentation stack.
          // Since we won't report method entry here, we can safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  // One recorded DEX pc per instrumentation frame is the walk's invariant.
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      // Interleave shadow-frame enter events in frame-id order with the quick
      // instrumentation frames (both are in descending frame-id order).
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}
307
// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    // Visits one frame: if it has a matching instrumentation frame (by frame id),
    // restores the original return PC and posts a method-exit event if required.
    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            // Interpreter-entry frames carry the callee-save method, not the
            // actual target method.
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit the result is 0.
            // We only do this if no debugger is attached to prevent from posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    // NOTE(review): this member is stored but never read by VisitFrame above —
    // matching is done by frame id instead; looks vestigial, confirm before removing.
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    // Every instrumentation frame must have been matched to a quick frame.
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}
392
393void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
394  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
395  if ((events & kMethodEntered) != 0) {
396    method_entry_listeners_.push_back(listener);
397    have_method_entry_listeners_ = true;
398  }
399  if ((events & kMethodExited) != 0) {
400    method_exit_listeners_.push_back(listener);
401    have_method_exit_listeners_ = true;
402  }
403  if ((events & kMethodUnwind) != 0) {
404    method_unwind_listeners_.push_back(listener);
405    have_method_unwind_listeners_ = true;
406  }
407  if ((events & kDexPcMoved) != 0) {
408    dex_pc_listeners_.push_back(listener);
409    have_dex_pc_listeners_ = true;
410  }
411  if ((events & kFieldRead) != 0) {
412    field_read_listeners_.push_back(listener);
413    have_field_read_listeners_ = true;
414  }
415  if ((events & kFieldWritten) != 0) {
416    field_write_listeners_.push_back(listener);
417    have_field_write_listeners_ = true;
418  }
419  if ((events & kExceptionCaught) != 0) {
420    exception_caught_listeners_.push_back(listener);
421    have_exception_caught_listeners_ = true;
422  }
423  UpdateInterpreterHandlerTable();
424}
425
426void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
427  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
428
429  if ((events & kMethodEntered) != 0) {
430    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
431                              listener) != method_entry_listeners_.end();
432    if (contains) {
433      method_entry_listeners_.remove(listener);
434    }
435    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
436  }
437  if ((events & kMethodExited) != 0) {
438    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
439                              listener) != method_exit_listeners_.end();
440    if (contains) {
441      method_exit_listeners_.remove(listener);
442    }
443    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
444  }
445  if ((events & kMethodUnwind) != 0) {
446    method_unwind_listeners_.remove(listener);
447  }
448  if ((events & kDexPcMoved) != 0) {
449    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
450                              listener) != dex_pc_listeners_.end();
451    if (contains) {
452      dex_pc_listeners_.remove(listener);
453    }
454    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
455  }
456  if ((events & kFieldRead) != 0) {
457    bool contains = std::find(field_read_listeners_.begin(), field_read_listeners_.end(),
458                              listener) != field_read_listeners_.end();
459    if (contains) {
460      field_read_listeners_.remove(listener);
461    }
462    have_field_read_listeners_ = field_read_listeners_.size() > 0;
463  }
464  if ((events & kFieldWritten) != 0) {
465    bool contains = std::find(field_write_listeners_.begin(), field_write_listeners_.end(),
466                              listener) != field_write_listeners_.end();
467    if (contains) {
468      field_write_listeners_.remove(listener);
469    }
470    have_field_write_listeners_ = field_write_listeners_.size() > 0;
471  }
472  if ((events & kExceptionCaught) != 0) {
473    exception_caught_listeners_.remove(listener);
474    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
475  }
476  UpdateInterpreterHandlerTable();
477}
478
// Transitions the runtime between instrumentation levels:
//   0 = no stubs, 1 = entry/exit stubs, 2 = full interpreter.
// Installs or removes method entrypoints for all classes and hijacks/restores
// the stacks of all threads as needed. No-op when already at the desired level.
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  // The thread list lock is taken below; holding it here would deadlock.
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}
532
533static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
534  thread->ResetQuickAllocEntryPointsForThread();
535}
536
537void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
538  Runtime* runtime = Runtime::Current();
539  ThreadList* tl = runtime->GetThreadList();
540  if (runtime->IsStarted()) {
541    tl->SuspendAll();
542  }
543  {
544    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
545    SetQuickAllocEntryPointsInstrumented(instrumented);
546    ResetQuickAllocEntryPoints();
547  }
548  if (runtime->IsStarted()) {
549    tl->ResumeAll();
550  }
551}
552
553void Instrumentation::InstrumentQuickAllocEntryPoints() {
554  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
555  //       should be guarded by a lock.
556  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
557  const bool enable_instrumentation =
558      quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0;
559  if (enable_instrumentation) {
560    SetEntrypointsInstrumented(true);
561  }
562}
563
564void Instrumentation::UninstrumentQuickAllocEntryPoints() {
565  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
566  //       should be guarded by a lock.
567  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
568  const bool disable_instrumentation =
569      quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1;
570  if (disable_instrumentation) {
571    SetEntrypointsInstrumented(false);
572  }
573}
574
575void Instrumentation::ResetQuickAllocEntryPoints() {
576  Runtime* runtime = Runtime::Current();
577  if (runtime->IsStarted()) {
578    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
579    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
580  }
581}
582
// Called when a method's code changes (e.g. after compilation). Installs the new
// code unless the current instrumentation level requires redirecting the method
// through the interpreter bridge or the instrumentation entry stub instead.
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // Fast path: no instrumentation active, take the new code as-is.
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Method must stay interpreted.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      // Trampolines and the interpreter bridge are left untouched; they already
      // route through instrumentation-aware paths.
      if (quick_code == class_linker->GetQuickResolutionTrampoline() ||
          quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
          quick_code == GetQuickToInterpreterBridge()) {
        DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) ||
               (portable_code == GetPortableToInterpreterBridge()));
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
        new_portable_code = GetPortableToInterpreterBridge();
        new_have_portable_code = false;
      } else {
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}
620
621bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
622  // Note that the insert() below isn't read barrier-aware. So, this
623  // FindDeoptimizedMethod() call is necessary or else we would end up
624  // storing the same method twice in the map (the from-space and the
625  // to-space ones).
626  if (FindDeoptimizedMethod(method)) {
627    // Already in the map. Return.
628    return false;
629  }
630  // Not found. Add it.
631  int32_t hash_code = method->IdentityHashCode();
632  deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
633  return true;
634}
635
636bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
637  int32_t hash_code = method->IdentityHashCode();
638  auto range = deoptimized_methods_.equal_range(hash_code);
639  for (auto it = range.first; it != range.second; ++it) {
640    mirror::ArtMethod* m = it->second.Read();
641    if (m == method) {
642      // Found.
643      return true;
644    }
645  }
646  // Not found.
647  return false;
648}
649
650mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
651  auto it = deoptimized_methods_.begin();
652  if (it == deoptimized_methods_.end()) {
653    // Empty.
654    return nullptr;
655  }
656  return it->second.Read();
657}
658
659bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
660  int32_t hash_code = method->IdentityHashCode();
661  auto range = deoptimized_methods_.equal_range(hash_code);
662  for (auto it = range.first; it != range.second; ++it) {
663    mirror::ArtMethod* m = it->second.Read();
664    if (m == method) {
665      // Found. Erase and return.
666      deoptimized_methods_.erase(it);
667      return true;
668    }
669  }
670  // Not found.
671  return false;
672}
673
674bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
675  return deoptimized_methods_.empty();
676}
677
// Forces |method| to run in the interpreter: records it as deoptimized, points
// it at the instrumentation entry stub, and installs exit stubs on all thread
// stacks. No-op on the entrypoints if interpreter stubs are globally installed.
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    // deoptimized_methods_lock_ is released before taking thread_list_lock_ below.
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
700
// Reverses Deoptimize() for |method|: removes it from the deoptimized set,
// restores its code (or the resolution trampoline for statics of uninitialized
// classes), and restores thread stacks once no deoptimized method remains.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline.
      UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(),
                        class_linker->GetPortableResolutionTrampoline(), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
740
741bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
742  DCHECK(method != nullptr);
743  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
744  return FindDeoptimizedMethod(method);
745}
746
747void Instrumentation::EnableDeoptimization() {
748  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
749  CHECK(IsDeoptimizedMethodsEmpty());
750  CHECK_EQ(deoptimization_enabled_, false);
751  deoptimization_enabled_ = true;
752}
753
// Turns off deoptimization support: undoes global deoptimization if active,
// then undeoptimizes every individually deoptimized method.
void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimized selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      // Only peek at the next method while holding the lock: Undeoptimize()
      // below acquires deoptimized_methods_lock_ itself, so it must be
      // released before the call.
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
775
776// Indicates if instrumentation should notify method enter/exit events to the listeners.
777bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
778  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
779}
780
// Forces all code through the interpreter by installing interpreter stubs
// (the second ConfigureStubs argument; see EnableMethodTracing). Must not
// already be fully deoptimized.
void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}
785
// Reverts DeoptimizeEverything() by uninstalling the interpreter stubs.
// Requires that global deoptimization is currently active.
void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}
790
791void Instrumentation::EnableMethodTracing() {
792  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
793  ConfigureStubs(!require_interpreter, require_interpreter);
794}
795
// Stops method tracing: uninstalls both entry/exit stubs and interpreter stubs.
void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}
799
800const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
801  Runtime* runtime = Runtime::Current();
802  if (LIKELY(!instrumentation_stubs_installed_)) {
803    const void* code = method->GetEntryPointFromQuickCompiledCode();
804    DCHECK(code != nullptr);
805    ClassLinker* class_linker = runtime->GetClassLinker();
806    if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) &&
807        LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) &&
808        LIKELY(code != GetQuickToInterpreterBridge())) {
809      return code;
810    }
811  }
812  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
813}
814
815void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
816                                           mirror::ArtMethod* method,
817                                           uint32_t dex_pc) const {
818  auto it = method_entry_listeners_.begin();
819  bool is_end = (it == method_entry_listeners_.end());
820  // Implemented this way to prevent problems caused by modification of the list while iterating.
821  while (!is_end) {
822    InstrumentationListener* cur = *it;
823    ++it;
824    is_end = (it == method_entry_listeners_.end());
825    cur->MethodEntered(thread, this_object, method, dex_pc);
826  }
827}
828
829void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
830                                          mirror::ArtMethod* method,
831                                          uint32_t dex_pc, const JValue& return_value) const {
832  auto it = method_exit_listeners_.begin();
833  bool is_end = (it == method_exit_listeners_.end());
834  // Implemented this way to prevent problems caused by modification of the list while iterating.
835  while (!is_end) {
836    InstrumentationListener* cur = *it;
837    ++it;
838    is_end = (it == method_exit_listeners_.end());
839    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
840  }
841}
842
843void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
844                                        mirror::ArtMethod* method,
845                                        uint32_t dex_pc) const {
846  if (have_method_unwind_listeners_) {
847    for (InstrumentationListener* listener : method_unwind_listeners_) {
848      listener->MethodUnwind(thread, this_object, method, dex_pc);
849    }
850  }
851}
852
853void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
854                                          mirror::ArtMethod* method,
855                                          uint32_t dex_pc) const {
856  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
857  // action where it can remove itself as a listener and break the iterator. The copy only works
858  // around the problem and in general we may have to move to something like reference counting to
859  // ensure listeners are deleted correctly.
860  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
861  for (InstrumentationListener* listener : copy) {
862    listener->DexPcMoved(thread, this_object, method, dex_pc);
863  }
864}
865
866void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
867                                         mirror::ArtMethod* method, uint32_t dex_pc,
868                                         mirror::ArtField* field) const {
869  // TODO: same comment than DexPcMovedEventImpl.
870  std::list<InstrumentationListener*> copy(field_read_listeners_);
871  for (InstrumentationListener* listener : copy) {
872    listener->FieldRead(thread, this_object, method, dex_pc, field);
873  }
874}
875
876void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
877                                         mirror::ArtMethod* method, uint32_t dex_pc,
878                                         mirror::ArtField* field, const JValue& field_value) const {
879  // TODO: same comment than DexPcMovedEventImpl.
880  std::list<InstrumentationListener*> copy(field_write_listeners_);
881  for (InstrumentationListener* listener : copy) {
882    listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
883  }
884}
885
// Dispatches an exception-caught event to all listeners. The pending exception
// is temporarily cleared so listeners run without one, then restored exactly
// as it was found.
void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    // Save the reported-to-instrumentation flag so it can be restored along
    // with the exception below.
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    // TODO: The copy below is due to the debug listener having an action where it can remove
    // itself as a listener and break the iterator. The copy only works around the problem.
    std::list<InstrumentationListener*> copy(exception_caught_listeners_);
    for (InstrumentationListener* listener : copy) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    // Re-install the exception and its reported state exactly as found.
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}
904
905static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
906                            int delta)
907    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
908  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
909  if (frame_id != instrumentation_frame.frame_id_) {
910    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
911        << instrumentation_frame.frame_id_;
912    StackVisitor::DescribeStack(self);
913    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
914  }
915}
916
917void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
918                                                    mirror::ArtMethod* method,
919                                                    uintptr_t lr, bool interpreter_entry) {
920  // We have a callee-save frame meaning this value is guaranteed to never be 0.
921  size_t frame_id = StackVisitor::ComputeNumFrames(self);
922  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
923  if (kVerboseInstrumentation) {
924    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
925  }
926  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
927                                                                   frame_id, interpreter_entry);
928  stack->push_front(instrumentation_frame);
929
930  if (!interpreter_entry) {
931    MethodEnterEvent(self, this_object, method, 0);
932  }
933}
934
935TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
936                                                            uint64_t gpr_result,
937                                                            uint64_t fpr_result) {
938  // Do the pop.
939  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
940  CHECK_GT(stack->size(), 0U);
941  InstrumentationStackFrame instrumentation_frame = stack->front();
942  stack->pop_front();
943
944  // Set return PC and check the sanity of the stack.
945  *return_pc = instrumentation_frame.return_pc_;
946  CheckStackDepth(self, instrumentation_frame, 0);
947
948  mirror::ArtMethod* method = instrumentation_frame.method_;
949  uint32_t length;
950  char return_shorty = method->GetShorty(&length)[0];
951  JValue return_value;
952  if (return_shorty == 'V') {
953    return_value.SetJ(0);
954  } else if (return_shorty == 'F' || return_shorty == 'D') {
955    return_value.SetJ(fpr_result);
956  } else {
957    return_value.SetJ(gpr_result);
958  }
959  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
960  //       return_pc.
961  uint32_t dex_pc = DexFile::kDexNoIndex;
962  mirror::Object* this_object = instrumentation_frame.this_object_;
963  if (!instrumentation_frame.interpreter_entry_) {
964    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
965  }
966
967  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
968  // back to an upcall.
969  NthCallerVisitor visitor(self, 1, true);
970  visitor.WalkStack(true);
971  bool deoptimize = (visitor.caller != NULL) &&
972                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
973  if (deoptimize && kVerboseInstrumentation) {
974    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
975  }
976  if (deoptimize) {
977    if (kVerboseInstrumentation) {
978      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
979                << " result is " << std::hex << return_value.GetJ();
980    }
981    self->SetDeoptimizationReturnValue(return_value);
982    return GetTwoWordSuccessValue(*return_pc,
983                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
984  } else {
985    if (kVerboseInstrumentation) {
986      LOG(INFO) << "Returning from " << PrettyMethod(method)
987                << " to PC " << reinterpret_cast<void*>(*return_pc);
988    }
989    return GetTwoWordSuccessValue(0, *return_pc);
990  }
991}
992
993void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
994  // Do the pop.
995  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
996  CHECK_GT(stack->size(), 0U);
997  InstrumentationStackFrame instrumentation_frame = stack->front();
998  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
999  stack->pop_front();
1000
1001  mirror::ArtMethod* method = instrumentation_frame.method_;
1002  if (is_deoptimization) {
1003    if (kVerboseInstrumentation) {
1004      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
1005    }
1006  } else {
1007    if (kVerboseInstrumentation) {
1008      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
1009    }
1010
1011    // Notify listeners of method unwind.
1012    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
1013    //       return_pc.
1014    uint32_t dex_pc = DexFile::kDexNoIndex;
1015    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
1016  }
1017}
1018
1019void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
1020  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
1021  if (IsDeoptimizedMethodsEmpty()) {
1022    return;
1023  }
1024  for (auto pair : deoptimized_methods_) {
1025    pair.second.VisitRoot(callback, arg, 0, kRootVMInternal);
1026  }
1027}
1028
1029std::string InstrumentationStackFrame::Dump() const {
1030  std::ostringstream os;
1031  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
1032      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
1033  return os.str();
1034}
1035
1036}  // namespace instrumentation
1037}  // namespace art
1038