instrumentation.cc revision 661974a5561e5ccdfbac8cb5d8df8b7e6f3483b8
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "instrumentation.h"
18
19#include <sys/uio.h>
20
21#include "atomic.h"
22#include "base/unix_file/fd_file.h"
23#include "class_linker.h"
24#include "debugger.h"
25#include "dex_file-inl.h"
26#include "entrypoints/quick/quick_alloc_entrypoints.h"
27#include "interpreter/interpreter.h"
28#include "mirror/art_method-inl.h"
29#include "mirror/class-inl.h"
30#include "mirror/dex_cache.h"
31#include "mirror/object_array-inl.h"
32#include "mirror/object-inl.h"
33#include "nth_caller_visitor.h"
34#if !defined(ART_USE_PORTABLE_COMPILER)
35#include "entrypoints/quick/quick_entrypoints.h"
36#endif
37#include "object_utils.h"
38#include "os.h"
39#include "scoped_thread_state_change.h"
40#include "thread.h"
41#include "thread_list.h"
42
43namespace art {
44
45namespace instrumentation {
46
47const bool kVerboseInstrumentation = false;
48
49// Do we want to deoptimize for method entry and exit listeners or just try to intercept
50// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
51// application's performance.
52static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;
53
54static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
55    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
56  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
57  return instrumentation->InstallStubsForClass(klass);
58}
59
60bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
61  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
62    InstallStubsForMethod(klass->GetDirectMethod(i));
63  }
64  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
65    InstallStubsForMethod(klass->GetVirtualMethod(i));
66  }
67  return true;
68}
69
// Atomically (from the caller's point of view) switches |method|'s portable, quick and
// interpreter entrypoints so the three stay consistent. When the quick code is the
// quick-to-interpreter bridge — or the resolution trampoline while the runtime forces
// interpret-only for a non-native, non-proxy method — the interpreter entrypoint must be the
// interpreter-to-interpreter bridge; otherwise it is the interpreter-to-compiled-code bridge.
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  // Keep the is-portable-compiled flag in sync with whether real portable code was installed.
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    if (quick_code == GetQuickToInterpreterBridge() ||
        (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        // The quick and portable entrypoints are expected to be switched in lock-step.
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker())) {
          DCHECK(portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker()));
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}
101
// Chooses and installs the correct entrypoints for |method| given the current instrumentation
// level (none, entry/exit stubs, or full interpreter). Abstract and proxy methods are left
// untouched.
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  // "Uninstall" means no instrumentation stubs are active: restore normal entrypoints.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Method must keep running in the interpreter even with instrumentation removed.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      // Static method of an uninitialized class: route through the resolution trampoline so
      // class initialization still happens on first call.
      new_portable_code = GetPortableResolutionTrampoline(class_linker);
      new_quick_code = GetQuickResolutionTrampoline(class_linker);
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        // Do not overwrite interpreter to prevent from posting method entry/exit events twice.
        new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
        new_quick_code = class_linker->GetQuickOatCodeFor(method);
        if (entry_exit_stubs_installed_ && new_quick_code != GetQuickToInterpreterBridge()) {
          DCHECK(new_portable_code != GetPortableToInterpreterBridge());
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        }
      } else {
        new_portable_code = GetPortableResolutionTrampoline(class_linker);
        new_quick_code = GetQuickResolutionTrampoline(class_linker);
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}
149
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context),  instrumentation_stack_(thread->GetInstrumentationStack()),
          existing_instrumentation_frames_count_(instrumentation_stack_->size()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (m->IsRuntimeMethod()) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
        }
        last_return_pc_ = GetReturnPc();
        return true;  // Ignore unresolved methods since they will be instrumented after resolution.
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      uintptr_t return_pc = GetReturnPc();
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        // Recover the real return pc from the previously recorded instrumentation frame.
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame before old ones so we do not corrupt the instrumentation stack.
        auto it = instrumentation_stack_->end() - existing_instrumentation_frames_count_;
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      // Map the return address recorded from the previously visited frame back to a dex pc in
      // |m| (the point where execution will resume), for the MethodEnterEvents posted below.
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    // Number of frames already on the instrumentation stack before this walk started.
    const size_t existing_instrumentation_frames_count_;
    // One caller dex pc per visited frame, consumed in reverse order after the walk.
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  UniquePtr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  // Every instrumentation frame must have had a dex pc recorded for it.
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (!instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    typedef std::deque<InstrumentationStackFrame>::const_reverse_iterator It;
    for (It it = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); it != end; ++it) {
      mirror::Object* this_object = (*it).this_object_;
      mirror::ArtMethod* method = (*it).method_;
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      instrumentation->MethodEnterEvent(thread, this_object, method, dex_pc);
    }
  }
  thread->VerifyStack();
}
260
261// Removes the instrumentation exit pc as the return PC for every quick frame.
262static void InstrumentationRestoreStack(Thread* thread, void* arg)
263    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
264  struct RestoreStackVisitor : public StackVisitor {
265    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
266                        Instrumentation* instrumentation)
267        : StackVisitor(thread, NULL), thread_(thread),
268          instrumentation_exit_pc_(instrumentation_exit_pc),
269          instrumentation_(instrumentation),
270          instrumentation_stack_(thread->GetInstrumentationStack()),
271          frames_removed_(0) {}
272
273    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
274      if (instrumentation_stack_->size() == 0) {
275        return false;  // Stop.
276      }
277      mirror::ArtMethod* m = GetMethod();
278      if (GetCurrentQuickFrame() == NULL) {
279        if (kVerboseInstrumentation) {
280          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
281        }
282        return true;  // Ignore shadow frames.
283      }
284      if (m == NULL) {
285        if (kVerboseInstrumentation) {
286          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
287        }
288        return true;  // Ignore upcalls.
289      }
290      bool removed_stub = false;
291      // TODO: make this search more efficient?
292      for (InstrumentationStackFrame instrumentation_frame : *instrumentation_stack_) {
293        if (instrumentation_frame.frame_id_ == GetFrameId()) {
294          if (kVerboseInstrumentation) {
295            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
296          }
297          if (instrumentation_frame.interpreter_entry_) {
298            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
299          } else {
300            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
301          }
302          SetReturnPc(instrumentation_frame.return_pc_);
303          if (!instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
304            // Create the method exit events. As the methods didn't really exit the result is 0.
305            // We only do this if no debugger is attached to prevent from posting events twice.
306            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
307                                              GetDexPc(), JValue());
308          }
309          frames_removed_++;
310          removed_stub = true;
311          break;
312        }
313      }
314      if (!removed_stub) {
315        if (kVerboseInstrumentation) {
316          LOG(INFO) << "  No exit stub in " << DescribeLocation();
317        }
318      }
319      return true;  // Continue.
320    }
321    Thread* const thread_;
322    const uintptr_t instrumentation_exit_pc_;
323    Instrumentation* const instrumentation_;
324    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
325    size_t frames_removed_;
326  };
327  if (kVerboseInstrumentation) {
328    std::string thread_name;
329    thread->GetThreadName(thread_name);
330    LOG(INFO) << "Removing exit stubs in " << thread_name;
331  }
332  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
333  if (stack->size() > 0) {
334    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
335    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
336    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
337    visitor.WalkStack(true);
338    CHECK_EQ(visitor.frames_removed_, stack->size());
339    while (stack->size() > 0) {
340      stack->pop_front();
341    }
342  }
343}
344
345void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
346  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
347  if ((events & kMethodEntered) != 0) {
348    method_entry_listeners_.push_back(listener);
349    have_method_entry_listeners_ = true;
350  }
351  if ((events & kMethodExited) != 0) {
352    method_exit_listeners_.push_back(listener);
353    have_method_exit_listeners_ = true;
354  }
355  if ((events & kMethodUnwind) != 0) {
356    method_unwind_listeners_.push_back(listener);
357    have_method_unwind_listeners_ = true;
358  }
359  if ((events & kDexPcMoved) != 0) {
360    dex_pc_listeners_.push_back(listener);
361    have_dex_pc_listeners_ = true;
362  }
363  if ((events & kExceptionCaught) != 0) {
364    exception_caught_listeners_.push_back(listener);
365    have_exception_caught_listeners_ = true;
366  }
367  UpdateInterpreterHandlerTable();
368}
369
370void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
371  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
372
373  if ((events & kMethodEntered) != 0) {
374    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
375                              listener) != method_entry_listeners_.end();
376    if (contains) {
377      method_entry_listeners_.remove(listener);
378    }
379    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
380  }
381  if ((events & kMethodExited) != 0) {
382    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
383                              listener) != method_exit_listeners_.end();
384    if (contains) {
385      method_exit_listeners_.remove(listener);
386    }
387    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
388  }
389  if ((events & kMethodUnwind) != 0) {
390    method_unwind_listeners_.remove(listener);
391  }
392  if ((events & kDexPcMoved) != 0) {
393    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
394                              listener) != dex_pc_listeners_.end();
395    if (contains) {
396      dex_pc_listeners_.remove(listener);
397    }
398    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
399  }
400  if ((events & kExceptionCaught) != 0) {
401    exception_caught_listeners_.remove(listener);
402    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
403  }
404  UpdateInterpreterHandlerTable();
405}
406
407void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
408  interpret_only_ = require_interpreter || forced_interpret_only_;
409  // Compute what level of instrumentation is required and compare to current.
410  int desired_level, current_level;
411  if (require_interpreter) {
412    desired_level = 2;
413  } else if (require_entry_exit_stubs) {
414    desired_level = 1;
415  } else {
416    desired_level = 0;
417  }
418  if (interpreter_stubs_installed_) {
419    current_level = 2;
420  } else if (entry_exit_stubs_installed_) {
421    current_level = 1;
422  } else {
423    current_level = 0;
424  }
425  if (desired_level == current_level) {
426    // We're already set.
427    return;
428  }
429  Thread* self = Thread::Current();
430  Runtime* runtime = Runtime::Current();
431  Locks::thread_list_lock_->AssertNotHeld(self);
432  if (desired_level > 0) {
433    if (require_interpreter) {
434      interpreter_stubs_installed_ = true;
435    } else {
436      CHECK(require_entry_exit_stubs);
437      entry_exit_stubs_installed_ = true;
438    }
439    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
440    instrumentation_stubs_installed_ = true;
441    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
442    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
443  } else {
444    interpreter_stubs_installed_ = false;
445    entry_exit_stubs_installed_ = false;
446    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
447    // Restore stack only if there is no method currently deoptimized.
448    if (deoptimized_methods_.empty()) {
449      instrumentation_stubs_installed_ = false;
450      MutexLock mu(self, *Locks::thread_list_lock_);
451      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
452    }
453  }
454}
455
// Thread-visitor callback: resets |thread|'s quick allocation entrypoints. |arg| is unused.
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}
459
// Swaps every thread's quick allocation entrypoints between the instrumented and the plain
// versions. All threads are suspended during the swap (once the runtime is started) so no
// thread allocates through a half-updated entrypoint.
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    // The shutdown lock guards the entrypoint switch against concurrent runtime teardown.
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}
475
476void Instrumentation::InstrumentQuickAllocEntryPoints() {
477  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
478  //       should be guarded by a lock.
479  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
480  const bool enable_instrumentation =
481      quick_alloc_entry_points_instrumentation_counter_.FetchAndAdd(1) == 0;
482  if (enable_instrumentation) {
483    SetEntrypointsInstrumented(true);
484  }
485}
486
487void Instrumentation::UninstrumentQuickAllocEntryPoints() {
488  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
489  //       should be guarded by a lock.
490  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
491  const bool disable_instrumentation =
492      quick_alloc_entry_points_instrumentation_counter_.FetchAndSub(1) == 1;
493  if (disable_instrumentation) {
494    SetEntrypointsInstrumented(false);
495  }
496}
497
498void Instrumentation::ResetQuickAllocEntryPoints() {
499  Runtime* runtime = Runtime::Current();
500  if (runtime->IsStarted()) {
501    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
502    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
503  }
504}
505
// Installs new code for |method|, adjusting it for the current instrumentation level so that
// stubs, trampolines and interpreter bridges are not clobbered by the caller-provided code.
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) const {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // Fast path: no instrumentation active, install the code as given.
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Method must run in the interpreter: ignore the provided code.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) ||
               quick_code == GetQuickToInterpreterBridge()) {
      // Trampolines/bridges pass through unchanged; quick and portable sides must agree.
      DCHECK((portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker())) ||
             (portable_code == GetPortableToInterpreterBridge()));
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    } else if (entry_exit_stubs_installed_) {
      // Route calls through the instrumentation entry stub instead of the real code.
      new_quick_code = GetQuickInstrumentationEntryPoint();
      new_portable_code = GetPortableToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}
539
// Forces |method| to execute in the interpreter: records it in deoptimized_methods_, points
// its entrypoints at the interpreter bridges, and installs instrumentation exit stubs on all
// thread stacks so already-active frames deoptimize on return. Only valid for concrete,
// non-native, non-proxy methods that are not yet deoptimized.
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair = deoptimized_methods_.insert(method);
  bool already_deoptimized = !pair.second;
  CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";

  // If everything already runs interpreted, the entrypoints and stacks are set up already.
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickToInterpreterBridge(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
560
// Reverses Deoptimize() for |method|: removes it from deoptimized_methods_, restores its real
// code (or the resolution trampoline for uninitialized statics), and restores the thread stacks
// once no deoptimized methods remain. |method| must currently be deoptimized.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  auto it = deoptimized_methods_.find(method);
  CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method) << " is not deoptimized";
  deoptimized_methods_.erase(it);

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // Class not yet initialized: the resolution trampoline must run first.
      UpdateEntrypoints(method, GetQuickResolutionTrampoline(class_linker),
                        GetPortableResolutionTrampoline(class_linker), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (deoptimized_methods_.empty()) {
      MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
593
594bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
595  DCHECK(method != nullptr);
596  return deoptimized_methods_.count(method);
597}
598
// Turns deoptimization support on. Requires that it is currently off and that no method is
// registered as deoptimized.
void Instrumentation::EnableDeoptimization() {
  CHECK(deoptimized_methods_.empty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}
604
// Turns deoptimization support off, first undoing any full or per-method deoptimization that
// is still in effect.
void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods. Undeoptimize() erases from deoptimized_methods_, so loop
  // until the set drains rather than iterating over it.
  while (!deoptimized_methods_.empty()) {
    auto it_begin = deoptimized_methods_.begin();
    Undeoptimize(*it_begin);
  }
  CHECK(deoptimized_methods_.empty());
  deoptimization_enabled_ = false;
}
619
// Indicates if instrumentation should notify method enter/exit events to the listeners.
// The stack install/restore walkers use the negation of this to avoid posting duplicate events
// (see InstrumentationInstallStack and InstrumentationRestoreStack).
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return deoptimization_enabled_ || interpreter_stubs_installed_;
}
624
// Forces all methods to run through the interpreter (instrumentation level 2). Must not
// already be in interpreter-only mode.
void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}
629
// Undoes DeoptimizeEverything(), dropping back to no instrumentation stubs. Must currently be
// in interpreter-only mode.
void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}
634
635void Instrumentation::EnableMethodTracing() {
636  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
637  ConfigureStubs(!require_interpreter, require_interpreter);
638}
639
// Disables method tracing, removing whichever stubs EnableMethodTracing() installed.
void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}
643
// Returns quick code for |method| suitable for direct invocation, bypassing instrumentation
// stubs: falls back to the oat code whenever stubs are installed or the stored entrypoint is a
// trampoline or interpreter bridge.
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCode();
    DCHECK(code != NULL);
    if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker()) &&
               code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}
656
// Dispatches a method-enter event to every registered method entry listener.
void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    // Advance and re-test for end before invoking the callback, which may mutate the list.
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}
670
// Dispatches a method-exit event (with the method's return value) to every registered
// method exit listener.
void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    // Advance and re-test for end before invoking the callback, which may mutate the list.
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}
684
685void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
686                                        mirror::ArtMethod* method,
687                                        uint32_t dex_pc) const {
688  if (have_method_unwind_listeners_) {
689    for (InstrumentationListener* listener : method_unwind_listeners_) {
690      listener->MethodUnwind(thread, this_object, method, dex_pc);
691    }
692  }
693}
694
// Dispatches a dex-pc-moved event to every registered listener, iterating over a snapshot so a
// listener may remove itself during the callback.
void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
  // action where it can remove itself as a listener and break the iterator. The copy only works
  // around the problem and in general we may have to move to something like reference counting to
  // ensure listeners are deleted correctly.
  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->DexPcMoved(thread, this_object, method, dex_pc);
  }
}
707
// Dispatches an exception-caught event to every registered listener. The thread's pending
// exception is cleared for the duration of the callbacks (so listeners run without a pending
// exception) and restored afterwards.
void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (have_exception_caught_listeners_) {
    DCHECK_EQ(thread->GetException(NULL), exception_object);
    thread->ClearException();
    for (InstrumentationListener* listener : exception_caught_listeners_) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    thread->SetException(throw_location, exception_object);
  }
}
721
722static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
723                            int delta)
724    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
725  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
726  if (frame_id != instrumentation_frame.frame_id_) {
727    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
728        << instrumentation_frame.frame_id_;
729    StackVisitor::DescribeStack(self);
730    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
731  }
732}
733
734void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
735                                                    mirror::ArtMethod* method,
736                                                    uintptr_t lr, bool interpreter_entry) {
737  // We have a callee-save frame meaning this value is guaranteed to never be 0.
738  size_t frame_id = StackVisitor::ComputeNumFrames(self);
739  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
740  if (kVerboseInstrumentation) {
741    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
742  }
743  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
744                                                                   frame_id, interpreter_entry);
745  stack->push_front(instrumentation_frame);
746
747  MethodEnterEvent(self, this_object, method, 0);
748}
749
// Called by the instrumentation exit stub when an instrumented method returns. Pops the
// top instrumentation frame, fires the method-exit event, and returns either the caller's
// saved return PC (continue natively) or a packed value directing a deoptimization into
// the interpreter. |gpr_result|/|fpr_result| hold the raw return value from the integer
// and floating-point result registers respectively; |return_pc| is an out-parameter set
// to the PC saved when the frame was pushed.
uint64_t Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                       uint64_t gpr_result, uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  // Select the register bank holding the return value based on the method's return type:
  // 'F'/'D' come back in the FP result register, everything else (including references)
  // in the GPR; 'V' (void) yields a zeroed JValue.
  mirror::ArtMethod* method = instrumentation_frame.method_;
  char return_shorty = MethodHelper(method).GetShorty()[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  // Fire the exit event BEFORE deciding on deoptimization: listeners may themselves
  // request deoptimization of the caller.
  MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    // Stash the return value so the interpreter can pick it up after deoptimizing.
    self->SetDeoptimizationReturnValue(return_value);
    // Pack the deoptimization entry point into the low 32 bits and the original return PC
    // into the high 32 bits of the result. NOTE(review): this assumes pointers fit in
    // 32 bits (32-bit target) — confirm against the exit-stub assembly that unpacks this.
    return static_cast<uint64_t>(GetQuickDeoptimizationEntryPoint()) |
        (static_cast<uint64_t>(*return_pc) << 32);
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    // Normal path: resume the caller at its saved return PC.
    return *return_pc;
  }
}
803
804void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
805  // Do the pop.
806  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
807  CHECK_GT(stack->size(), 0U);
808  InstrumentationStackFrame instrumentation_frame = stack->front();
809  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
810  stack->pop_front();
811
812  mirror::ArtMethod* method = instrumentation_frame.method_;
813  if (is_deoptimization) {
814    if (kVerboseInstrumentation) {
815      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
816    }
817  } else {
818    if (kVerboseInstrumentation) {
819      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
820    }
821
822    // Notify listeners of method unwind.
823    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
824    //       return_pc.
825    uint32_t dex_pc = DexFile::kDexNoIndex;
826    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
827  }
828}
829
830std::string InstrumentationStackFrame::Dump() const {
831  std::ostringstream os;
832  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
833      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
834  return os.str();
835}
836
837}  // namespace instrumentation
838}  // namespace art
839