instrumentation.h revision 1d011d9306fd4ff57d72411775d415a86f5ed398
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_INSTRUMENTATION_H_
18#define ART_RUNTIME_INSTRUMENTATION_H_
19
20#include <stdint.h>
21#include <list>
22#include <unordered_set>
23
24#include "arch/instruction_set.h"
25#include "base/macros.h"
26#include "base/mutex.h"
27#include "gc_root.h"
28#include "safe_map.h"
29
30namespace art {
31namespace mirror {
32  class Class;
33  class Object;
34  class Throwable;
35}  // namespace mirror
36class ArtField;
37class ArtMethod;
38union JValue;
39class Thread;
40
41namespace instrumentation {
42
// Interpreter handler tables. The interpreter selects one of these depending on whether any
// suspend checks or instrumentation callbacks must be honored on each bytecode dispatch.
enum InterpreterHandlerTable {
  kMainHandlerTable = 0,          // Main handler table: no suspend check, no instrumentation.
  kAlternativeHandlerTable = 1,   // Alternative handler table: suspend check and/or instrumentation
                                  // enabled.
  kNumHandlerTables               // Number of handler tables; must remain last.
};
50
// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance. When true, EnableMethodTracing() defaults to interpreter-based
// tracing (accurate events); when false, it defaults to entry/exit stubs (faster, less precise).
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;
55
56// Instrumentation event listener API. Registered listeners will get the appropriate call back for
57// the events they are listening for. The call backs supply the thread, method and dex_pc the event
58// occurred upon. The thread may or may not be Thread::Current().
59struct InstrumentationListener {
60  InstrumentationListener() {}
61  virtual ~InstrumentationListener() {}
62
63  // Call-back for when a method is entered.
64  virtual void MethodEntered(Thread* thread, mirror::Object* this_object,
65                             ArtMethod* method,
66                             uint32_t dex_pc) SHARED_REQUIRES(Locks::mutator_lock_) = 0;
67
68  // Call-back for when a method is exited.
69  virtual void MethodExited(Thread* thread, mirror::Object* this_object,
70                            ArtMethod* method, uint32_t dex_pc,
71                            const JValue& return_value)
72      SHARED_REQUIRES(Locks::mutator_lock_) = 0;
73
74  // Call-back for when a method is popped due to an exception throw. A method will either cause a
75  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
76  virtual void MethodUnwind(Thread* thread, mirror::Object* this_object,
77                            ArtMethod* method, uint32_t dex_pc)
78      SHARED_REQUIRES(Locks::mutator_lock_) = 0;
79
80  // Call-back for when the dex pc moves in a method.
81  virtual void DexPcMoved(Thread* thread, mirror::Object* this_object,
82                          ArtMethod* method, uint32_t new_dex_pc)
83      SHARED_REQUIRES(Locks::mutator_lock_) = 0;
84
85  // Call-back for when we read from a field.
86  virtual void FieldRead(Thread* thread, mirror::Object* this_object, ArtMethod* method,
87                         uint32_t dex_pc, ArtField* field) = 0;
88
89  // Call-back for when we write into a field.
90  virtual void FieldWritten(Thread* thread, mirror::Object* this_object, ArtMethod* method,
91                            uint32_t dex_pc, ArtField* field, const JValue& field_value) = 0;
92
93  // Call-back when an exception is caught.
94  virtual void ExceptionCaught(Thread* thread, mirror::Throwable* exception_object)
95      SHARED_REQUIRES(Locks::mutator_lock_) = 0;
96
97  // Call-back for when we execute a branch.
98  virtual void Branch(Thread* thread,
99                      ArtMethod* method,
100                      uint32_t dex_pc,
101                      int32_t dex_pc_offset)
102      SHARED_REQUIRES(Locks::mutator_lock_) = 0;
103
104  // Call-back for when we get an invokevirtual or an invokeinterface.
105  virtual void InvokeVirtualOrInterface(Thread* thread,
106                                        mirror::Object* this_object,
107                                        ArtMethod* caller,
108                                        uint32_t dex_pc,
109                                        ArtMethod* callee)
110      REQUIRES(Roles::uninterruptible_)
111      SHARED_REQUIRES(Locks::mutator_lock_) = 0;
112};
113
// Instrumentation is a catch-all for when extra information is required from the runtime. The
// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
// to method entry and exit, it may also force execution to be switched to the interpreter and
// trigger deoptimization.
class Instrumentation {
 public:
  // Bit flags naming the events a listener can subscribe to. Combined into the 'events' mask
  // passed to AddListener()/RemoveListener().
  enum InstrumentationEvent {
    kMethodEntered = 0x1,
    kMethodExited = 0x2,
    kMethodUnwind = 0x4,
    kDexPcMoved = 0x8,
    kFieldRead = 0x10,
    kFieldWritten = 0x20,
    kExceptionCaught = 0x40,
    kBranch = 0x80,
    kInvokeVirtualOrInterface = 0x100,
  };

  // How intrusively execution is instrumented, ordered from least to most intrusive.
  enum class InstrumentationLevel {
    kInstrumentNothing,                   // execute without instrumentation
    kInstrumentWithInstrumentationStubs,  // execute with instrumentation entry/exit stubs
    kInstrumentWithInterpreter            // execute with interpreter
  };

  Instrumentation();

  // Add a listener to be notified of the masked-together set of instrumentation events. This
  // may suspend the runtime to install stubs. You are expected to hold the mutator lock as a
  // proxy for saying you should have suspended all threads (installing stubs while threads are
  // running will break).
  void AddListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

  // Removes a listener possibly removing instrumentation stubs.
  void RemoveListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

  // Deoptimization.
  void EnableDeoptimization()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!deoptimized_methods_lock_);
  // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
  void DisableDeoptimization(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!deoptimized_methods_lock_);

  // True when interpreter stubs are installed everywhere, i.e. all execution is interpreted.
  bool AreAllMethodsDeoptimized() const {
    return interpreter_stubs_installed_;
  }
  bool ShouldNotifyMethodEnterExitEvents() const SHARED_REQUIRES(Locks::mutator_lock_);

  // Executes everything with interpreter.
  void DeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Executes everything with compiled code (or interpreter if there is no code). May visit class
  // linker classes through ConfigureStubs.
  void UndeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static
  // method (except a class initializer) set to the resolution trampoline will be deoptimized only
  // once its declaring class is initialized.
  void Deoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);

  // Undeoptimize the method by restoring its entrypoints. Nevertheless, a static method
  // (except a class initializer) set to the resolution trampoline will be updated only once its
  // declaring class is initialized.
  void Undeoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);

  // Indicates whether the method has been deoptimized so it is executed with the interpreter.
  bool IsDeoptimized(ArtMethod* method)
      REQUIRES(!deoptimized_methods_lock_) SHARED_REQUIRES(Locks::mutator_lock_);

  // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
  void EnableMethodTracing(const char* key,
                           bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
  void DisableMethodTracing(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  InterpreterHandlerTable GetInterpreterHandlerTable() const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    return interpreter_handler_table_;
  }

  // (Un)instrument the quick allocation entry points; the *Locked variants assume the caller
  // already holds instrument_entrypoints_lock_.
  void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void InstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void UninstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);

  // Update the code of a method respecting any installed stubs.
  void UpdateMethodsCode(ArtMethod* method, const void* quick_code)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Get the quick code for the given method. More efficient than asking the class linker as it
  // will short-cut to GetCode if instrumentation and static method resolution stubs aren't
  // installed.
  const void* GetQuickCodeFor(ArtMethod* method, size_t pointer_size) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Force all execution into the interpreter (e.g. for -Xint); not reversible via this class.
  void ForceInterpretOnly() {
    interpret_only_ = true;
    forced_interpret_only_ = true;
  }

  // Called by ArtMethod::Invoke to determine dispatch mechanism.
  bool InterpretOnly() const {
    return interpret_only_;
  }

  bool IsForcedInterpretOnly() const {
    return forced_interpret_only_;
  }

  // Code is in boot image oat file which isn't compiled as debuggable.
  // Need debug version (interpreter or jitted) if that's the case.
  bool NeedDebugVersionForBootImageCode(ArtMethod* method, const void* code) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  bool AreExitStubsInstalled() const {
    return instrumentation_stubs_installed_;
  }

  // The Has*Listeners() accessors below are fast-path checks backed by cached booleans so the
  // hot event-dispatch paths can avoid walking the listener lists.
  bool HasMethodEntryListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_method_entry_listeners_;
  }

  bool HasMethodExitListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_method_exit_listeners_;
  }

  bool HasMethodUnwindListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_method_unwind_listeners_;
  }

  bool HasDexPcListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_dex_pc_listeners_;
  }

  bool HasFieldReadListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_field_read_listeners_;
  }

  bool HasFieldWriteListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_field_write_listeners_;
  }

  bool HasExceptionCaughtListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_exception_caught_listeners_;
  }

  bool HasBranchListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_branch_listeners_;
  }

  bool HasInvokeVirtualOrInterfaceListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_invoke_virtual_or_interface_listeners_;
  }

  // True if any listener of any kind is registered.
  bool IsActive() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_ || have_invoke_virtual_or_interface_listeners_;
  }

  // Any instrumentation *other* than what is needed for Jit profiling active?
  // (Method-entry and invoke-virtual/interface listeners are used by JIT profiling and are
  // therefore deliberately excluded here.)
  bool NonJitProfilingActive() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_;
  }

  // Inform listeners that a method has been entered. A dex PC is provided as we may install
  // listeners into executing code and get method enter events for methods already on the stack.
  void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
                        ArtMethod* method, uint32_t dex_pc) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodEntryListeners())) {
      MethodEnterEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a method has been exited.
  void MethodExitEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       const JValue& return_value) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodExitListeners())) {
      MethodExitEventImpl(thread, this_object, method, dex_pc, return_value);
    }
  }

  // Inform listeners that a method has been exited due to an exception.
  void MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                         ArtMethod* method, uint32_t dex_pc) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Inform listeners that the dex pc has moved (only supported by the interpreter).
  void DexPcMovedEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasDexPcListeners())) {
      DexPcMovedEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a branch has been taken (only supported by the interpreter).
  void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasBranchListeners())) {
      BranchImpl(thread, method, dex_pc, offset);
    }
  }

  // Inform listeners that we read a field (only supported by the interpreter).
  void FieldReadEvent(Thread* thread, mirror::Object* this_object,
                      ArtMethod* method, uint32_t dex_pc,
                      ArtField* field) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldReadListeners())) {
      FieldReadEventImpl(thread, this_object, method, dex_pc, field);
    }
  }

  // Inform listeners that we write a field (only supported by the interpreter).
  void FieldWriteEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       ArtField* field, const JValue& field_value) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldWriteListeners())) {
      FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
    }
  }

  // Inform listeners of an invokevirtual/invokeinterface from caller to the resolved callee.
  void InvokeVirtualOrInterface(Thread* thread,
                                mirror::Object* this_object,
                                ArtMethod* caller,
                                uint32_t dex_pc,
                                ArtMethod* callee) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasInvokeVirtualOrInterfaceListeners())) {
      InvokeVirtualOrInterfaceImpl(thread, this_object, caller, dex_pc, callee);
    }
  }

  // Inform listeners that an exception was caught.
  void ExceptionCaughtEvent(Thread* thread, mirror::Throwable* exception_object) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Called when an instrumented method is entered. The intended link register (lr) is saved so
  // that returning causes a branch to the method exit stub. Generates method enter events.
  void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                     ArtMethod* method, uintptr_t lr,
                                     bool interpreter_entry)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Called when an instrumented method is exited. Removes the pushed instrumentation frame
  // returning the intended link register. Generates method exit events.
  TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                             uint64_t gpr_result, uint64_t fpr_result)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Pops an instrumentation frame from the current thread and generate an unwind event.
  void PopMethodForUnwind(Thread* self, bool is_deoptimization) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Call back for configure stubs.
  void InstallStubsForClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!deoptimized_methods_lock_);

  void InstallStubsForMethod(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Install instrumentation exit stub on every method of the stack of the given thread.
  // This is used by the debugger to cause a deoptimization of the thread's stack after updating
  // local variable(s).
  void InstrumentThreadStack(Thread* thread)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::thread_list_lock_);

  static size_t ComputeFrameId(Thread* self,
                               size_t frame_depth,
                               size_t inlined_frames_before_frame)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Does not hold lock, used to check if someone changed from not instrumented to instrumented
  // during a GC suspend point.
  bool AllocEntrypointsInstrumented() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return alloc_entrypoints_instrumented_;
  }

 private:
  InstrumentationLevel GetCurrentInstrumentationLevel() const;

  // Does the job of installing or removing instrumentation code within methods.
  // In order to support multiple clients using instrumentation at the same time,
  // the caller must pass a unique key (a string) identifying it so we remember which
  // instrumentation level it needs. Therefore the current instrumentation level
  // becomes the highest instrumentation level required by a client.
  void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!deoptimized_methods_lock_,
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);

  void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) {
    /*
     * TUNING: Dalvik's mterp stashes the actual current handler table base in a
     * tls field.  For Arm, this enables all suspend, debug & tracing checks to be
     * collapsed into a single conditionally-executed ldw instruction.
     * Move to Dalvik-style handler-table management for both the goto interpreter and
     * mterp.
     */
    interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
  }

  // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
  // exclusive access to mutator lock which you can't get if the runtime isn't started.
  void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;

  // Slow-path event dispatchers; called only after the corresponding Has*Listeners() fast-path
  // check succeeds.
  void MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                            ArtMethod* method, uint32_t dex_pc) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method,
                           uint32_t dex_pc, const JValue& return_value) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method, uint32_t dex_pc) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void InvokeVirtualOrInterfaceImpl(Thread* thread,
                                    mirror::Object* this_object,
                                    ArtMethod* caller,
                                    uint32_t dex_pc,
                                    ArtMethod* callee) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method, uint32_t dex_pc,
                           ArtField* field) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method, uint32_t dex_pc,
                           ArtField* field, const JValue& field_value) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Read barrier-aware utility functions for accessing deoptimized_methods_
  bool AddDeoptimizedMethod(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
  bool IsDeoptimizedMethod(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool RemoveDeoptimizedMethod(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
  ArtMethod* BeginDeoptimizedMethod()
      SHARED_REQUIRES(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool IsDeoptimizedMethodsEmpty() const
      SHARED_REQUIRES(Locks::mutator_lock_, deoptimized_methods_lock_);

  // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
  bool instrumentation_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs?
  bool entry_exit_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub?
  bool interpreter_stubs_installed_;

  // Do we need the fidelity of events that we only get from running within the interpreter?
  bool interpret_only_;

  // Did the runtime request we only run in the interpreter? ie -Xint mode.
  bool forced_interpret_only_;

  // Do we have any listeners for method entry events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method exit events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method unwind events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for dex move events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field read events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field write events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception caught listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any invoke listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_invoke_virtual_or_interface_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Contains the instrumentation level required by each client of the instrumentation identified
  // by a string key.
  typedef SafeMap<const char*, InstrumentationLevel> InstrumentationLevelTable;
  InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);

  // The event listeners, written to with the mutator_lock_ exclusively held.
  // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
  // added or removed while iterating. The modifying thread holds exclusive lock,
  // so other threads cannot iterate (i.e. read the data of the list) at the same time but they
  // do keep iterators that need to remain valid. This is the reason these listeners are std::list
  // and not for example std::vector: the existing storage for a std::list does not move.
  // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
  // listeners can also be deleted concurrently.
  // As a result, these lists are never trimmed. That's acceptable given the low number of
  // listeners we have.
  std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> invoke_virtual_or_interface_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
  // only.
  mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
  bool deoptimization_enabled_;

  // Current interpreter handler table. This is updated each time the thread state flags are
  // modified.
  InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_);

  // Greater than 0 if quick alloc entry points instrumented.
  size_t quick_alloc_entry_points_instrumentation_counter_;

  // alloc_entrypoints_instrumented_ is only updated with all the threads suspended, this is done
  // to prevent races with the GC where the GC relies on thread suspension to only see
  // alloc_entrypoints_instrumented_ change during suspend points.
  bool alloc_entrypoints_instrumented_;

  friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.

  DISALLOW_COPY_AND_ASSIGN(Instrumentation);
};
// Stream output for the enums above, used by LOG/CHECK diagnostics.
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationEvent& rhs);
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationLevel& rhs);
595
596// An element in the instrumentation side stack maintained in art::Thread.
597struct InstrumentationStackFrame {
598  InstrumentationStackFrame(mirror::Object* this_object, ArtMethod* method,
599                            uintptr_t return_pc, size_t frame_id, bool interpreter_entry)
600      : this_object_(this_object), method_(method), return_pc_(return_pc), frame_id_(frame_id),
601        interpreter_entry_(interpreter_entry) {
602  }
603
604  std::string Dump() const SHARED_REQUIRES(Locks::mutator_lock_);
605
606  mirror::Object* this_object_;
607  ArtMethod* method_;
608  uintptr_t return_pc_;
609  size_t frame_id_;
610  bool interpreter_entry_;
611};
612
613}  // namespace instrumentation
614}  // namespace art
615
616#endif  // ART_RUNTIME_INSTRUMENTATION_H_
617