instrumentation.cc revision d889178ec78930538d9d6a66c3df9ee9afaffbb4
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "instrumentation.h"
18
19#include <sys/uio.h>
20
21#include "atomic.h"
22#include "base/unix_file/fd_file.h"
23#include "class_linker.h"
24#include "debugger.h"
25#include "dex_file-inl.h"
26#include "entrypoints/quick/quick_alloc_entrypoints.h"
27#include "interpreter/interpreter.h"
28#include "mirror/art_method-inl.h"
29#include "mirror/class-inl.h"
30#include "mirror/dex_cache.h"
31#include "mirror/object_array-inl.h"
32#include "mirror/object-inl.h"
33#include "nth_caller_visitor.h"
34#if !defined(ART_USE_PORTABLE_COMPILER)
35#include "entrypoints/quick/quick_entrypoints.h"
36#endif
37#include "object_utils.h"
38#include "os.h"
39#include "scoped_thread_state_change.h"
40#include "thread.h"
41#include "thread_list.h"
42
43namespace art {
44
45namespace instrumentation {
46
// When true, instrumentation installation/removal logs every frame and method it touches.
const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;
53
54static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
55    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
56  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
57  return instrumentation->InstallStubsForClass(klass);
58}
59
60bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
61  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
62    InstallStubsForMethod(klass->GetDirectMethod(i));
63  }
64  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
65    InstallStubsForMethod(klass->GetVirtualMethod(i));
66  }
67  return true;
68}
69
// Installs |quick_code| and |portable_code| as the entry points of |method|, keeps the
// is-portable-compiled flag in sync, and selects the matching interpreter entry point.
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    // NOTE(review): this branch also runs when have_portable_code and the flag are both true,
    // clearing the flag even though portable code was just installed — confirm that is intended
    // (it looks like it should only clear when !have_portable_code).
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    // Pick the interpreter entry point. If the method will execute in the interpreter (the
    // interpreter bridge is installed, or the resolution trampoline is installed while the
    // runtime forces interpret-only), stay inside the interpreter; otherwise bridge out to
    // compiled code.
    if (quick_code == GetQuickToInterpreterBridge() ||
        (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        // Quick and portable entry points must be the matching pair.
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker())) {
          DCHECK(portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker()));
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}
101
// Selects and installs the entry points for |method| according to the current instrumentation
// level (none, entry/exit stubs, or full interpreter) and the method's resolution state.
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  // "Uninstall" means no stubs are required: restore original code (or the interpreter bridge /
  // resolution trampoline where those still apply).
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      // The method must keep running in the interpreter even with instrumentation removed.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      // Static method of an uninitialized class: keep the resolution trampoline so the class
      // gets initialized on first call.
      new_portable_code = GetPortableResolutionTrampoline(class_linker);
      new_quick_code = GetQuickResolutionTrampoline(class_linker);
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        // Do not overwrite interpreter to prevent from posting method entry/exit events twice.
        new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
        new_quick_code = class_linker->GetQuickOatCodeFor(method);
        if (entry_exit_stubs_installed_ && new_quick_code != GetQuickToInterpreterBridge()) {
          DCHECK(new_portable_code != GetPortableToInterpreterBridge());
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        }
      } else {
        new_portable_code = GetPortableResolutionTrampoline(class_linker);
        new_quick_code = GetQuickResolutionTrampoline(class_linker);
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}
149
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// before existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Walks |thread|'s stack, replacing each instrumentable quick frame's return PC with the
  // instrumentation exit stub and recording the real return PC on the thread's
  // instrumentation stack.
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context),  instrumentation_stack_(thread->GetInstrumentationStack()),
          existing_instrumentation_frames_count_(instrumentation_stack_->size()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (m->IsRuntimeMethod()) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
        }
        last_return_pc_ = GetReturnPc();
        return true;  // Ignore unresolved methods since they will be instrumented after resolution.
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      uintptr_t return_pc = GetReturnPc();
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        // Recover the real return PC from the previously-pushed instrumentation frame.
        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        // Frames not yet instrumented must all come before the already-instrumented ones.
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame before old ones so we do not corrupt the instrumentation stack.
        auto it = instrumentation_stack_->end() - existing_instrumentation_frames_count_;
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      // Record the caller's dex pc (derived from the previous frame's return pc) so the
      // method-enter events posted below can report the call site.
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    const size_t existing_instrumentation_frames_count_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  UniquePtr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  // One recorded dex pc per instrumentation frame.
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (!instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    typedef std::deque<InstrumentationStackFrame>::const_reverse_iterator It;
    for (It it = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); it != end; ++it) {
      mirror::Object* this_object = (*it).this_object_;
      mirror::ArtMethod* method = (*it).method_;
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      instrumentation->MethodEnterEvent(thread, this_object, method, dex_pc);
    }
  }
  thread->VerifyStack();
}
260
261// Removes the instrumentation exit pc as the return PC for every quick frame.
262static void InstrumentationRestoreStack(Thread* thread, void* arg)
263    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
264  struct RestoreStackVisitor : public StackVisitor {
265    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
266                        Instrumentation* instrumentation)
267        : StackVisitor(thread, NULL), thread_(thread),
268          instrumentation_exit_pc_(instrumentation_exit_pc),
269          instrumentation_(instrumentation),
270          instrumentation_stack_(thread->GetInstrumentationStack()),
271          frames_removed_(0) {}
272
273    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
274      if (instrumentation_stack_->size() == 0) {
275        return false;  // Stop.
276      }
277      mirror::ArtMethod* m = GetMethod();
278      if (GetCurrentQuickFrame() == NULL) {
279        if (kVerboseInstrumentation) {
280          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
281        }
282        return true;  // Ignore shadow frames.
283      }
284      if (m == NULL) {
285        if (kVerboseInstrumentation) {
286          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
287        }
288        return true;  // Ignore upcalls.
289      }
290      bool removed_stub = false;
291      // TODO: make this search more efficient?
292      for (InstrumentationStackFrame instrumentation_frame : *instrumentation_stack_) {
293        if (instrumentation_frame.frame_id_ == GetFrameId()) {
294          if (kVerboseInstrumentation) {
295            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
296          }
297          if (instrumentation_frame.interpreter_entry_) {
298            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
299          } else {
300            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
301          }
302          SetReturnPc(instrumentation_frame.return_pc_);
303          if (!instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
304            // Create the method exit events. As the methods didn't really exit the result is 0.
305            // We only do this if no debugger is attached to prevent from posting events twice.
306            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
307                                              GetDexPc(), JValue());
308          }
309          frames_removed_++;
310          removed_stub = true;
311          break;
312        }
313      }
314      if (!removed_stub) {
315        if (kVerboseInstrumentation) {
316          LOG(INFO) << "  No exit stub in " << DescribeLocation();
317        }
318      }
319      return true;  // Continue.
320    }
321    Thread* const thread_;
322    const uintptr_t instrumentation_exit_pc_;
323    Instrumentation* const instrumentation_;
324    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
325    size_t frames_removed_;
326  };
327  if (kVerboseInstrumentation) {
328    std::string thread_name;
329    thread->GetThreadName(thread_name);
330    LOG(INFO) << "Removing exit stubs in " << thread_name;
331  }
332  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
333  if (stack->size() > 0) {
334    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
335    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
336    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
337    visitor.WalkStack(true);
338    CHECK_EQ(visitor.frames_removed_, stack->size());
339    while (stack->size() > 0) {
340      stack->pop_front();
341    }
342  }
343}
344
345void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
346  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
347  if ((events & kMethodEntered) != 0) {
348    method_entry_listeners_.push_back(listener);
349    have_method_entry_listeners_ = true;
350  }
351  if ((events & kMethodExited) != 0) {
352    method_exit_listeners_.push_back(listener);
353    have_method_exit_listeners_ = true;
354  }
355  if ((events & kMethodUnwind) != 0) {
356    method_unwind_listeners_.push_back(listener);
357    have_method_unwind_listeners_ = true;
358  }
359  if ((events & kDexPcMoved) != 0) {
360    dex_pc_listeners_.push_back(listener);
361    have_dex_pc_listeners_ = true;
362  }
363  if ((events & kExceptionCaught) != 0) {
364    exception_caught_listeners_.push_back(listener);
365    have_exception_caught_listeners_ = true;
366  }
367  UpdateInterpreterHandlerTable();
368}
369
370void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
371  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
372
373  if ((events & kMethodEntered) != 0) {
374    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
375                              listener) != method_entry_listeners_.end();
376    if (contains) {
377      method_entry_listeners_.remove(listener);
378    }
379    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
380  }
381  if ((events & kMethodExited) != 0) {
382    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
383                              listener) != method_exit_listeners_.end();
384    if (contains) {
385      method_exit_listeners_.remove(listener);
386    }
387    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
388  }
389  if ((events & kMethodUnwind) != 0) {
390    method_unwind_listeners_.remove(listener);
391  }
392  if ((events & kDexPcMoved) != 0) {
393    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
394                              listener) != dex_pc_listeners_.end();
395    if (contains) {
396      dex_pc_listeners_.remove(listener);
397    }
398    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
399  }
400  if ((events & kExceptionCaught) != 0) {
401    exception_caught_listeners_.remove(listener);
402    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
403  }
404  UpdateInterpreterHandlerTable();
405}
406
// Moves the runtime to the requested instrumentation level: 0 = none, 1 = entry/exit stubs,
// 2 = full interpreter. Does nothing when the current level already matches. Installing walks
// every class to swap entry points and pushes exit stubs on every thread's stack; removing
// restores them (stack restore is skipped while any method remains individually deoptimized).
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  // The thread list lock is taken below; it must not already be held.
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    // Set the flags before installing stacks so new frames see a consistent state.
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    if (deoptimized_methods_.empty()) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}
455
// ThreadList::ForEach callback: resets one thread's quick allocation entry points. |arg| unused.
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}
459
// Increments the allocation-instrumentation counter; on the 0 -> 1 transition, suspends all
// threads and switches them to the instrumented allocation entry points.
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAdd(1) == 0;
  if (enable_instrumentation) {
    // Instrumentation wasn't enabled so enable it.
    ThreadList* tl = Runtime::Current()->GetThreadList();
    tl->SuspendAll();
    {
      // Entry-point swapping is done under the shutdown lock with the world stopped so no
      // thread allocates mid-switch.
      MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
      SetQuickAllocEntryPointsInstrumented(true);
      ResetQuickAllocEntryPoints();
    }
    tl->ResumeAll();
  }
}
478
// Decrements the allocation-instrumentation counter; on the 1 -> 0 transition, suspends all
// threads and restores the uninstrumented allocation entry points.
void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSub(1) == 1;
  if (disable_instrumentation) {
    ThreadList* tl = Runtime::Current()->GetThreadList();
    tl->SuspendAll();
    {
      MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
      SetQuickAllocEntryPointsInstrumented(false);
      ResetQuickAllocEntryPoints();
    }
    tl->ResumeAll();
  }
}
496
497void Instrumentation::ResetQuickAllocEntryPoints() {
498  Runtime* runtime = Runtime::Current();
499  if (runtime->IsStarted()) {
500    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
501    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
502  }
503}
504
// Called when new code becomes available for |method|. Installs either the given code or an
// instrumentation-aware substitute, depending on the current instrumentation state.
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) const {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // No instrumentation: install the provided code as-is.
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      // The method must run in the interpreter.
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) ||
               quick_code == GetQuickToInterpreterBridge()) {
      // Leave the resolution trampoline / interpreter bridge in place; they already cooperate
      // with instrumentation.
      DCHECK((portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker())) ||
             (portable_code == GetPortableToInterpreterBridge()));
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    } else if (entry_exit_stubs_installed_) {
      // Route quick calls through the instrumentation entry stub.
      new_quick_code = GetQuickInstrumentationEntryPoint();
      new_portable_code = GetPortableToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}
538
539void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
540  CHECK(!method->IsNative());
541  CHECK(!method->IsProxyMethod());
542  CHECK(!method->IsAbstract());
543
544  std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair = deoptimized_methods_.insert(method);
545  bool already_deoptimized = !pair.second;
546  CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";
547
548  if (!interpreter_stubs_installed_) {
549    UpdateEntrypoints(method, GetQuickToInterpreterBridge(), GetPortableToInterpreterBridge(),
550                      false);
551
552    // Install instrumentation exit stub and instrumentation frames. We may already have installed
553    // these previously so it will only cover the newly created frames.
554    instrumentation_stubs_installed_ = true;
555    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
556    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
557  }
558}
559
// Reverses Deoptimize() for |method|: removes it from the deoptimized set, restores its real
// code (or the resolution trampoline), and, once no deoptimized methods remain, restores every
// thread's stack. The method must currently be deoptimized.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  auto it = deoptimized_methods_.find(method);
  CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method) << " is not deoptimized";
  deoptimized_methods_.erase(it);

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // Static method of an uninitialized class: go back through the resolution trampoline so
      // first use still initializes the class.
      UpdateEntrypoints(method, GetQuickResolutionTrampoline(class_linker),
                        GetPortableResolutionTrampoline(class_linker), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (deoptimized_methods_.empty()) {
      MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
592
593bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
594  DCHECK(method != nullptr);
595  return deoptimized_methods_.count(method);
596}
597
// Turns on deoptimization support. Must not be called while enabled or with methods still
// recorded as deoptimized.
void Instrumentation::EnableDeoptimization() {
  CHECK(deoptimized_methods_.empty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}
603
// Turns off deoptimization support: undoes a full deoptimization if one is active, then
// undeoptimizes every individually deoptimized method.
void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (!deoptimized_methods_.empty()) {
    // Undeoptimize() erases the method from deoptimized_methods_, so refetch begin() each pass.
    auto it_begin = deoptimized_methods_.begin();
    Undeoptimize(*it_begin);
  }
  CHECK(deoptimized_methods_.empty());
  deoptimization_enabled_ = false;
}
618
// Indicates if instrumentation should notify method enter/exit events to the listeners.
// True when the debugger enabled deoptimization or everything runs in the interpreter; in those
// modes the interpreter posts the events itself, so the stub/stack paths must not double-post.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return deoptimization_enabled_ || interpreter_stubs_installed_;
}
623
// Switches the whole runtime to interpreter execution. Must not already be fully deoptimized.
void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}
628
// Leaves full-interpreter mode. Must currently be fully deoptimized.
void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}
633
634void Instrumentation::EnableMethodTracing() {
635  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
636  ConfigureStubs(!require_interpreter, require_interpreter);
637}
638
// Disables method tracing by removing both entry/exit stubs and interpreter stubs.
void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}
642
643const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
644  Runtime* runtime = Runtime::Current();
645  if (LIKELY(!instrumentation_stubs_installed_)) {
646    const void* code = method->GetEntryPointFromQuickCompiledCode();
647    DCHECK(code != NULL);
648    if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker()) &&
649               code != GetQuickToInterpreterBridge())) {
650      return code;
651    }
652  }
653  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
654}
655
// Dispatches a method-entered event to every registered method-entry listener.
void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating:
  // the iterator is advanced past the current listener before its callback runs, so a listener
  // that removes itself does not invalidate |it|.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}
669
// Dispatches a method-exited event (with |return_value|) to every registered exit listener.
void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating:
  // the iterator is advanced past the current listener before its callback runs, so a listener
  // that removes itself does not invalidate |it|.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}
683
// Notifies all unwind listeners that |method| is being popped due to an exception.
void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    // NOTE(review): unlike DexPcMovedEventImpl (which iterates a copy) and the enter/exit impls
    // (which advance the iterator before the callback), this iterates the live list directly;
    // a listener removing itself here would invalidate the iterator — confirm unwind listeners
    // never do that.
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}
693
694void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
695                                          mirror::ArtMethod* method,
696                                          uint32_t dex_pc) const {
697  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
698  // action where it can remove itself as a listener and break the iterator. The copy only works
699  // around the problem and in general we may have to move to something like reference counting to
700  // ensure listeners are deleted correctly.
701  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
702  for (InstrumentationListener* listener : copy) {
703    listener->DexPcMoved(thread, this_object, method, dex_pc);
704  }
705}
706
// Notifies exception-caught listeners. The thread's pending exception is cleared for the
// duration of the callbacks (listeners must run without a pending exception) and restored
// afterwards.
void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (have_exception_caught_listeners_) {
    DCHECK_EQ(thread->GetException(NULL), exception_object);
    thread->ClearException();
    for (InstrumentationListener* listener : exception_caught_listeners_) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    thread->SetException(throw_location, exception_object);
  }
}
720
721static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
722                            int delta)
723    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
724  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
725  if (frame_id != instrumentation_frame.frame_id_) {
726    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
727        << instrumentation_frame.frame_id_;
728    StackVisitor::DescribeStack(self);
729    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
730  }
731}
732
733void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
734                                                    mirror::ArtMethod* method,
735                                                    uintptr_t lr, bool interpreter_entry) {
736  // We have a callee-save frame meaning this value is guaranteed to never be 0.
737  size_t frame_id = StackVisitor::ComputeNumFrames(self);
738  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
739  if (kVerboseInstrumentation) {
740    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
741  }
742  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
743                                                                   frame_id, interpreter_entry);
744  stack->push_front(instrumentation_frame);
745
746  MethodEnterEvent(self, this_object, method, 0);
747}
748
uint64_t Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                       uint64_t gpr_result, uint64_t fpr_result) {
  // Called from the instrumentation exit stub when instrumented code returns. Pops the
  // frame pushed by PushInstrumentationStackFrame, fires the MethodExited event, and
  // decides whether the caller must continue in the interpreter (deoptimization).
  // 'gpr_result'/'fpr_result' hold the raw return value from the integer and
  // floating-point return registers respectively; 'return_pc' receives the saved pc.
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  // Pick the return register whose contents are meaningful based on the method's return
  // type (first character of the shorty): 'F'/'D' come back in the FP register, everything
  // else (including references) in the GPR; 'V' carries no value so zero it.
  mirror::ArtMethod* method = instrumentation_frame.method_;
  char return_shorty = MethodHelper(method).GetShorty()[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    // Stash the return value for the interpreter to pick up after deoptimization, then
    // hand back the deopt entry point in the low half and the original return pc in the
    // high half of the result.
    // NOTE(review): this packs two pointers into one uint64_t via a 32-bit shift, which
    // assumes 32-bit code pointers — confirm for 64-bit targets.
    self->SetDeoptimizationReturnValue(return_value);
    return static_cast<uint64_t>(GetQuickDeoptimizationEntryPoint()) |
        (static_cast<uint64_t>(*return_pc) << 32);
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    // Normal case: resume at the caller's saved return pc.
    return *return_pc;
  }
}
802
803void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
804  // Do the pop.
805  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
806  CHECK_GT(stack->size(), 0U);
807  InstrumentationStackFrame instrumentation_frame = stack->front();
808  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
809  stack->pop_front();
810
811  mirror::ArtMethod* method = instrumentation_frame.method_;
812  if (is_deoptimization) {
813    if (kVerboseInstrumentation) {
814      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
815    }
816  } else {
817    if (kVerboseInstrumentation) {
818      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
819    }
820
821    // Notify listeners of method unwind.
822    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
823    //       return_pc.
824    uint32_t dex_pc = DexFile::kDexNoIndex;
825    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
826  }
827}
828
829std::string InstrumentationStackFrame::Dump() const {
830  std::ostringstream os;
831  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
832      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
833  return os.str();
834}
835
836}  // namespace instrumentation
837}  // namespace art
838