/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INSTRUMENTATION_H_
#define ART_RUNTIME_INSTRUMENTATION_H_

#include <stdint.h>
#include <list>
#include <unordered_set>

#include "arch/instruction_set.h"
#include "base/enums.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "safe_map.h"

namespace art {
namespace mirror {
  class Class;
  class Object;
  class Throwable;
}  // namespace mirror
class ArtField;
class ArtMethod;
union JValue;
class Thread;

namespace instrumentation {

// Interpreter handler tables.
enum InterpreterHandlerTable {
  kMainHandlerTable = 0,          // Main handler table: no suspend check, no instrumentation.
  kAlternativeHandlerTable = 1,   // Alternative handler table: suspend check and/or instrumentation
                                  // enabled.
  kNumHandlerTables
};

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

// Instrumentation event listener API. Registered listeners receive the appropriate call-back for
// the events they are listening for. The call-backs supply the thread, method and dex_pc at which
// the event occurred. The thread may or may not be Thread::Current().
struct InstrumentationListener {
  InstrumentationListener() {}
  virtual ~InstrumentationListener() {}

  // Call-back for when a method is entered.
  virtual void MethodEntered(Thread* thread, mirror::Object* this_object,
                             ArtMethod* method,
                             uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when a method is exited.
  virtual void MethodExited(Thread* thread, mirror::Object* this_object,
                            ArtMethod* method, uint32_t dex_pc,
                            const JValue& return_value)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  virtual void MethodUnwind(Thread* thread, mirror::Object* this_object,
                            ArtMethod* method, uint32_t dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when the dex pc moves in a method.
  virtual void DexPcMoved(Thread* thread, mirror::Object* this_object,
                          ArtMethod* method, uint32_t new_dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when we read from a field.
  virtual void FieldRead(Thread* thread, mirror::Object* this_object, ArtMethod* method,
                         uint32_t dex_pc, ArtField* field) = 0;

  // Call-back for when we write into a field.
  virtual void FieldWritten(Thread* thread, mirror::Object* this_object, ArtMethod* method,
                            uint32_t dex_pc, ArtField* field, const JValue& field_value) = 0;

  // Call-back when an exception is caught.
  virtual void ExceptionCaught(Thread* thread, mirror::Throwable* exception_object)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when we execute a branch.
  virtual void Branch(Thread* thread,
                      ArtMethod* method,
                      uint32_t dex_pc,
                      int32_t dex_pc_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when we get an invokevirtual or an invokeinterface.
  virtual void InvokeVirtualOrInterface(Thread* thread,
                                        mirror::Object* this_object,
                                        ArtMethod* caller,
                                        uint32_t dex_pc,
                                        ArtMethod* callee)
      REQUIRES(Roles::uninterruptible_)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
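
// The following is an illustrative sketch, not part of the runtime: a minimal listener that only
// counts method entries and ignores every other event. The class name and counter are
// hypothetical; a real client (e.g. the tracer or the debugger) would register such a listener
// with Instrumentation::AddListener below, passing the kMethodEntered event mask.
class ExampleMethodEntryCounter : public InstrumentationListener {
 public:
  void MethodEntered(Thread*, mirror::Object*, ArtMethod*, uint32_t)
      override REQUIRES_SHARED(Locks::mutator_lock_) {
    // Count every method entry reported to this listener. Note: events arrive from many threads,
    // so a real listener would need an atomic counter or other synchronization.
    ++entry_count_;
  }
  // The remaining call-backs are not interesting to this listener and are left empty.
  void MethodExited(Thread*, mirror::Object*, ArtMethod*, uint32_t, const JValue&)
      override REQUIRES_SHARED(Locks::mutator_lock_) {}
  void MethodUnwind(Thread*, mirror::Object*, ArtMethod*, uint32_t)
      override REQUIRES_SHARED(Locks::mutator_lock_) {}
  void DexPcMoved(Thread*, mirror::Object*, ArtMethod*, uint32_t)
      override REQUIRES_SHARED(Locks::mutator_lock_) {}
  void FieldRead(Thread*, mirror::Object*, ArtMethod*, uint32_t, ArtField*) override {}
  void FieldWritten(Thread*, mirror::Object*, ArtMethod*, uint32_t, ArtField*, const JValue&)
      override {}
  void ExceptionCaught(Thread*, mirror::Throwable*)
      override REQUIRES_SHARED(Locks::mutator_lock_) {}
  void Branch(Thread*, ArtMethod*, uint32_t, int32_t)
      override REQUIRES_SHARED(Locks::mutator_lock_) {}
  void InvokeVirtualOrInterface(Thread*, mirror::Object*, ArtMethod*, uint32_t, ArtMethod*)
      override REQUIRES(Roles::uninterruptible_) REQUIRES_SHARED(Locks::mutator_lock_) {}

  uint64_t entry_count() const { return entry_count_; }

 private:
  uint64_t entry_count_ = 0;
};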

// Instrumentation is a catch-all for when extra information is required from the runtime. The
// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
// to method entry and exit; it may also force execution to be switched to the interpreter and
// trigger deoptimization.
class Instrumentation {
 public:
  enum InstrumentationEvent {
    kMethodEntered = 0x1,
    kMethodExited = 0x2,
    kMethodUnwind = 0x4,
    kDexPcMoved = 0x8,
    kFieldRead = 0x10,
    kFieldWritten = 0x20,
    kExceptionCaught = 0x40,
    kBranch = 0x80,
    kInvokeVirtualOrInterface = 0x100,
  };

  enum class InstrumentationLevel {
    kInstrumentNothing,                   // execute without instrumentation
    kInstrumentWithInstrumentationStubs,  // execute with instrumentation entry/exit stubs
    kInstrumentWithInterpreter            // execute with interpreter
  };

  Instrumentation();

  // Add a listener to be notified of the masked-together set of instrumentation events. This may
  // suspend the runtime to install stubs. You are expected to hold the mutator lock as a proxy
  // for having suspended all threads (installing stubs while threads are running will break).
  // An illustrative registration sketch follows this class definition.
  void AddListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

  // Removes a listener, possibly removing instrumentation stubs.
  void RemoveListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

  // Deoptimization.
  void EnableDeoptimization()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!deoptimized_methods_lock_);
  // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
  void DisableDeoptimization(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!deoptimized_methods_lock_);

  bool AreAllMethodsDeoptimized() const {
    return interpreter_stubs_installed_;
  }
  bool ShouldNotifyMethodEnterExitEvents() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Executes everything with interpreter.
  void DeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Executes everything with compiled code (or interpreter if there is no code). May visit class
  // linker classes through ConfigureStubs.
  void UndeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Deoptimize a method by forcing its execution with the interpreter. Note that a static method
  // (other than a class initializer) whose entrypoint is the resolution trampoline will be
  // deoptimized only once its declaring class is initialized.
  void Deoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);

  // Undeoptimize the method by restoring its entrypoints. Note that a static method (other than a
  // class initializer) whose entrypoint is the resolution trampoline will be updated only once its
  // declaring class is initialized.
  void Undeoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);

  // Indicates whether the method has been deoptimized so it is executed with the interpreter.
  bool IsDeoptimized(ArtMethod* method)
      REQUIRES(!deoptimized_methods_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
  void EnableMethodTracing(const char* key,
                           bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
  void DisableMethodTracing(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  InterpreterHandlerTable GetInterpreterHandlerTable() const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return interpreter_handler_table_;
  }

  void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void InstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void UninstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);

  // Update the code of a method respecting any installed stubs.
  void UpdateMethodsCode(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Update the code of a method respecting any installed stubs from the debugger.
  void UpdateMethodsCodeForJavaDebuggable(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Get the quick code for the given method. More efficient than asking the class linker as it
  // will short-cut to GetCode if instrumentation and static method resolution stubs aren't
  // installed.
  const void* GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ForceInterpretOnly() {
    interpret_only_ = true;
    forced_interpret_only_ = true;
  }

  // Called by ArtMethod::Invoke to determine dispatch mechanism.
  bool InterpretOnly() const {
    return interpret_only_;
  }

  bool IsForcedInterpretOnly() const {
    return forced_interpret_only_;
  }

  // Returns true if the method's code is in a boot image oat file that isn't compiled as
  // debuggable, in which case the debug version (interpreter or JITted code) is needed.
  bool NeedDebugVersionFor(ArtMethod* method) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool AreExitStubsInstalled() const {
    return instrumentation_stubs_installed_;
  }

  bool HasMethodEntryListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_entry_listeners_;
  }

  bool HasMethodExitListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_exit_listeners_;
  }

  bool HasMethodUnwindListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_unwind_listeners_;
  }

  bool HasDexPcListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_;
  }

  bool HasFieldReadListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_read_listeners_;
  }

  bool HasFieldWriteListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_write_listeners_;
  }

  bool HasExceptionCaughtListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_exception_caught_listeners_;
  }

  bool HasBranchListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_branch_listeners_;
  }

  bool HasInvokeVirtualOrInterfaceListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_invoke_virtual_or_interface_listeners_;
  }

  bool IsActive() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_ || have_invoke_virtual_or_interface_listeners_;
  }

  // Any instrumentation *other* than what is needed for Jit profiling active?
  bool NonJitProfilingActive() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_;
  }

  // Inform listeners that a method has been entered. A dex PC is provided as we may install
  // listeners into executing code and get method enter events for methods already on the stack.
  void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
                        ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodEntryListeners())) {
      MethodEnterEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a method has been exited.
  void MethodExitEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       const JValue& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodExitListeners())) {
      MethodExitEventImpl(thread, this_object, method, dex_pc, return_value);
    }
  }

  // Inform listeners that a method has been exited due to an exception.
  void MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                         ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Inform listeners that the dex pc has moved (only supported by the interpreter).
  void DexPcMovedEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasDexPcListeners())) {
      DexPcMovedEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a branch has been taken (only supported by the interpreter).
  void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasBranchListeners())) {
      BranchImpl(thread, method, dex_pc, offset);
    }
  }

  // Inform listeners that we read a field (only supported by the interpreter).
  void FieldReadEvent(Thread* thread, mirror::Object* this_object,
                      ArtMethod* method, uint32_t dex_pc,
                      ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldReadListeners())) {
      FieldReadEventImpl(thread, this_object, method, dex_pc, field);
    }
  }

  // Inform listeners that we write a field (only supported by the interpreter).
  void FieldWriteEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       ArtField* field, const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldWriteListeners())) {
      FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
    }
  }

  void InvokeVirtualOrInterface(Thread* thread,
                                mirror::Object* this_object,
                                ArtMethod* caller,
                                uint32_t dex_pc,
                                ArtMethod* callee) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasInvokeVirtualOrInterfaceListeners())) {
      InvokeVirtualOrInterfaceImpl(thread, this_object, caller, dex_pc, callee);
    }
  }

  // Inform listeners that an exception was caught.
  void ExceptionCaughtEvent(Thread* thread, mirror::Throwable* exception_object) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called when an instrumented method is entered. The intended link register (lr) is saved so
  // that returning causes a branch to the method exit stub. Generates method enter events.
  void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                     ArtMethod* method, uintptr_t lr,
                                     bool interpreter_entry)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called when an instrumented method is exited. Removes the pushed instrumentation frame,
  // returning the intended link register. Generates method exit events.
  TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                             uint64_t gpr_result, uint64_t fpr_result)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Pops an instrumentation frame from the current thread and generates an unwind event.
  // Returns the return pc for the instrumentation frame that was popped.
  uintptr_t PopMethodForUnwind(Thread* self, bool is_deoptimization) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Call back for configure stubs.
  void InstallStubsForClass(mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!deoptimized_methods_lock_);

  void InstallStubsForMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Install instrumentation exit stub on every method of the stack of the given thread.
  // This is used by the debugger to cause a deoptimization of the thread's stack after updating
  // local variable(s).
  void InstrumentThreadStack(Thread* thread)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::thread_list_lock_);

  static size_t ComputeFrameId(Thread* self,
                               size_t frame_depth,
                               size_t inlined_frames_before_frame)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Does not hold a lock; used to check whether someone changed from not instrumented to
  // instrumented during a GC suspend point.
  bool AllocEntrypointsInstrumented() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return alloc_entrypoints_instrumented_;
  }

  InstrumentationLevel GetCurrentInstrumentationLevel() const;

 private:
  // Returns true if moving to the given instrumentation level requires the installation of stubs.
  // False otherwise.
  bool RequiresInstrumentationInstallation(InstrumentationLevel new_level) const;

  // Does the job of installing or removing instrumentation code within methods.
  // In order to support multiple clients using instrumentation at the same time,
  // the caller must pass a unique key (a string) identifying it so we remember which
  // instrumentation level it needs. The current instrumentation level therefore becomes the
  // highest instrumentation level required by any client. For example, if one client requires the
  // interpreter while another only requires entry/exit stubs, the interpreter level is used.
  void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!deoptimized_methods_lock_,
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);

  void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) {
    /*
     * TUNING: Dalvik's mterp stashes the actual current handler table base in a
     * tls field.  For Arm, this enables all suspend, debug & tracing checks to be
     * collapsed into a single conditionally-executed ldw instruction.
     * Move to Dalvik-style handler-table management for both the goto interpreter and
     * mterp.
     */
    interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
  }

  // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
  // exclusive access to the mutator lock, which you can't get if the runtime isn't started.
  void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;

  void MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                            ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method,
                           uint32_t dex_pc, const JValue& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void InvokeVirtualOrInterfaceImpl(Thread* thread,
                                    mirror::Object* this_object,
                                    ArtMethod* caller,
                                    uint32_t dex_pc,
                                    ArtMethod* callee) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method, uint32_t dex_pc,
                           ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method, uint32_t dex_pc,
                           ArtField* field, const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read barrier-aware utility functions for accessing deoptimized_methods_
  bool AddDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
  bool IsDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool RemoveDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
  ArtMethod* BeginDeoptimizedMethod()
      REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool IsDeoptimizedMethodsEmpty() const
      REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
  void UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);


  // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
  bool instrumentation_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs?
  bool entry_exit_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub?
  bool interpreter_stubs_installed_;

  // Do we need the fidelity of events that we only get from running within the interpreter?
  bool interpret_only_;

  // Did the runtime request that we only run in the interpreter? i.e. -Xint mode.
  bool forced_interpret_only_;

  // Do we have any listeners for method entry events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method exit events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method unwind events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for dex move events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field read events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field write events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception caught listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any invoke listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_invoke_virtual_or_interface_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Contains the instrumentation level required by each client of the instrumentation identified
  // by a string key.
  typedef SafeMap<const char*, InstrumentationLevel> InstrumentationLevelTable;
  InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);

  // The event listeners, written to with the mutator_lock_ exclusively held.
  // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
  // added or removed while iterating. The modifying thread holds the exclusive lock,
  // so other threads cannot iterate (i.e. read the data of the list) at the same time, but they
  // do keep iterators that need to remain valid. This is the reason these listeners are std::list
  // and not, for example, std::vector: the existing storage of a std::list does not move.
  // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
  // listeners can also be deleted concurrently.
  // As a result, these lists are never trimmed. That's acceptable given the low number of
  // listeners we have.
  std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> invoke_virtual_or_interface_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
  // only.
  mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
  bool deoptimization_enabled_;

  // Current interpreter handler table. This is updated each time the thread state flags are
  // modified.
  InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_);

  // Greater than 0 if quick alloc entry points instrumented.
  size_t quick_alloc_entry_points_instrumentation_counter_;

  // alloc_entrypoints_instrumented_ is only updated with all the threads suspended; this is done
  // to prevent races with the GC, which relies on thread suspension to only see
  // alloc_entrypoints_instrumented_ change during suspend points.
  bool alloc_entrypoints_instrumented_;

  friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.

  DISALLOW_COPY_AND_ASSIGN(Instrumentation);
};
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationEvent& rhs);
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationLevel& rhs);
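
// Illustrative sketch only (the helper below is hypothetical and not part of the runtime): how a
// client might register a listener for a masked-together set of events, per the AddListener
// contract above. The caller must hold the mutator lock exclusively, i.e. with all other threads
// suspended, which is why the lock appears in the REQUIRES clause.
inline void RegisterExampleListener(Instrumentation* instrumentation,
                                    InstrumentationListener* listener)
    REQUIRES(Locks::mutator_lock_,
             !Locks::thread_list_lock_,
             !Locks::classlinker_classes_lock_) {
  // Listen for method entry and exit only; the event bits are OR-ed into a single mask.
  const uint32_t events = Instrumentation::kMethodEntered | Instrumentation::kMethodExited;
  instrumentation->AddListener(listener, events);
  // The listener is later removed symmetrically with RemoveListener(listener, events).
}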

// An element in the instrumentation side stack maintained in art::Thread.
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object, ArtMethod* method,
                            uintptr_t return_pc, size_t frame_id, bool interpreter_entry)
      : this_object_(this_object), method_(method), return_pc_(return_pc), frame_id_(frame_id),
        interpreter_entry_(interpreter_entry) {
  }

  std::string Dump() const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* this_object_;
  ArtMethod* method_;
  uintptr_t return_pc_;
  size_t frame_id_;
  bool interpreter_entry_;
};

}  // namespace instrumentation
}  // namespace art

#endif  // ART_RUNTIME_INSTRUMENTATION_H_