instrumentation.h revision 59d9d668d4f4286813afe2b4e7c6db839222ce96
1/* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_RUNTIME_INSTRUMENTATION_H_ 18#define ART_RUNTIME_INSTRUMENTATION_H_ 19 20#include <stdint.h> 21#include <list> 22#include <map> 23 24#include "atomic.h" 25#include "instruction_set.h" 26#include "base/macros.h" 27#include "base/mutex.h" 28#include "gc_root.h" 29#include "object_callbacks.h" 30 31namespace art { 32namespace mirror { 33 class ArtField; 34 class ArtMethod; 35 class Class; 36 class Object; 37 class Throwable; 38} // namespace mirror 39union JValue; 40class Thread; 41class ThrowLocation; 42 43namespace instrumentation { 44 45// Interpreter handler tables. 46enum InterpreterHandlerTable { 47 kMainHandlerTable = 0, // Main handler table: no suspend check, no instrumentation. 48 kAlternativeHandlerTable = 1, // Alternative handler table: suspend check and/or instrumentation 49 // enabled. 50 kNumHandlerTables 51}; 52 53// Instrumentation event listener API. Registered listeners will get the appropriate call back for 54// the events they are listening for. The call backs supply the thread, method and dex_pc the event 55// occurred upon. The thread may or may not be Thread::Current(). 56struct InstrumentationListener { 57 InstrumentationListener() {} 58 virtual ~InstrumentationListener() {} 59 60 // Call-back for when a method is entered. 
61 virtual void MethodEntered(Thread* thread, mirror::Object* this_object, 62 mirror::ArtMethod* method, 63 uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0; 64 65 // Call-back for when a method is exited. 66 // TODO: its likely passing the return value would be useful, however, we may need to get and 67 // parse the shorty to determine what kind of register holds the result. 68 virtual void MethodExited(Thread* thread, mirror::Object* this_object, 69 mirror::ArtMethod* method, uint32_t dex_pc, 70 const JValue& return_value) 71 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0; 72 73 // Call-back for when a method is popped due to an exception throw. A method will either cause a 74 // MethodExited call-back or a MethodUnwind call-back when its activation is removed. 75 virtual void MethodUnwind(Thread* thread, mirror::Object* this_object, 76 mirror::ArtMethod* method, uint32_t dex_pc) 77 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0; 78 79 // Call-back for when the dex pc moves in a method. 80 virtual void DexPcMoved(Thread* thread, mirror::Object* this_object, 81 mirror::ArtMethod* method, uint32_t new_dex_pc) 82 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0; 83 84 // Call-back for when we read from a field. 85 virtual void FieldRead(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method, 86 uint32_t dex_pc, mirror::ArtField* field) = 0; 87 88 // Call-back for when we write into a field. 89 virtual void FieldWritten(Thread* thread, mirror::Object* this_object, mirror::ArtMethod* method, 90 uint32_t dex_pc, mirror::ArtField* field, const JValue& field_value) = 0; 91 92 // Call-back when an exception is caught. 
93 virtual void ExceptionCaught(Thread* thread, const ThrowLocation& throw_location, 94 mirror::ArtMethod* catch_method, uint32_t catch_dex_pc, 95 mirror::Throwable* exception_object) 96 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0; 97}; 98 99// Instrumentation is a catch-all for when extra information is required from the runtime. The 100// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs 101// to method entry and exit, it may also force execution to be switched to the interpreter and 102// trigger deoptimization. 103class Instrumentation { 104 public: 105 enum InstrumentationEvent { 106 kMethodEntered = 1 << 0, 107 kMethodExited = 1 << 1, 108 kMethodUnwind = 1 << 2, 109 kDexPcMoved = 1 << 3, 110 kFieldRead = 1 << 4, 111 kFieldWritten = 1 << 5, 112 kExceptionCaught = 1 << 6, 113 }; 114 115 Instrumentation(); 116 117 // Add a listener to be notified of the masked together sent of instrumentation events. This 118 // suspend the runtime to install stubs. You are expected to hold the mutator lock as a proxy 119 // for saying you should have suspended all threads (installing stubs while threads are running 120 // will break). 121 void AddListener(InstrumentationListener* listener, uint32_t events) 122 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) 123 LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_); 124 125 // Removes a listener possibly removing instrumentation stubs. 126 void RemoveListener(InstrumentationListener* listener, uint32_t events) 127 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) 128 LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_); 129 130 // Deoptimization. 
131 void EnableDeoptimization() 132 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) 133 LOCKS_EXCLUDED(deoptimized_methods_lock_); 134 void DisableDeoptimization() 135 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) 136 LOCKS_EXCLUDED(deoptimized_methods_lock_); 137 bool AreAllMethodsDeoptimized() const { 138 return interpreter_stubs_installed_; 139 } 140 bool ShouldNotifyMethodEnterExitEvents() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 141 142 // Executes everything with interpreter. 143 void DeoptimizeEverything() 144 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) 145 LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_); 146 147 // Executes everything with compiled code (or interpreter if there is no code). 148 void UndeoptimizeEverything() 149 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) 150 LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_); 151 152 // Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static 153 // method (except a class initializer) set to the resolution trampoline will be deoptimized only 154 // once its declaring class is initialized. 155 void Deoptimize(mirror::ArtMethod* method) 156 LOCKS_EXCLUDED(Locks::thread_list_lock_, deoptimized_methods_lock_) 157 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_); 158 159 // Undeoptimze the method by restoring its entrypoints. Nevertheless, a static method 160 // (except a class initializer) set to the resolution trampoline will be updated only once its 161 // declaring class is initialized. 162 void Undeoptimize(mirror::ArtMethod* method) 163 LOCKS_EXCLUDED(Locks::thread_list_lock_, deoptimized_methods_lock_) 164 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_); 165 166 bool IsDeoptimized(mirror::ArtMethod* method) 167 LOCKS_EXCLUDED(deoptimized_methods_lock_) 168 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 169 170 // Enable method tracing by installing instrumentation entry/exit stubs. 
171 void EnableMethodTracing() 172 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) 173 LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_); 174 175 // Disable method tracing by uninstalling instrumentation entry/exit stubs. 176 void DisableMethodTracing() 177 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) 178 LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_); 179 180 InterpreterHandlerTable GetInterpreterHandlerTable() const 181 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 182 return interpreter_handler_table_; 183 } 184 185 void InstrumentQuickAllocEntryPoints() LOCKS_EXCLUDED(Locks::thread_list_lock_, 186 Locks::runtime_shutdown_lock_); 187 void UninstrumentQuickAllocEntryPoints() LOCKS_EXCLUDED(Locks::thread_list_lock_, 188 Locks::runtime_shutdown_lock_); 189 void ResetQuickAllocEntryPoints() EXCLUSIVE_LOCKS_REQUIRED(Locks::runtime_shutdown_lock_); 190 191 // Update the code of a method respecting any installed stubs. 192 void UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code, 193 const void* portable_code, bool have_portable_code) 194 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 195 196 // Get the quick code for the given method. More efficient than asking the class linker as it 197 // will short-cut to GetCode if instrumentation and static method resolution stubs aren't 198 // installed. 199 const void* GetQuickCodeFor(mirror::ArtMethod* method) const 200 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 201 202 void ForceInterpretOnly() { 203 interpret_only_ = true; 204 forced_interpret_only_ = true; 205 } 206 207 // Called by ArtMethod::Invoke to determine dispatch mechanism. 
208 bool InterpretOnly() const { 209 return interpret_only_; 210 } 211 212 bool IsForcedInterpretOnly() const { 213 return forced_interpret_only_; 214 } 215 216 bool ShouldPortableCodeDeoptimize() const { 217 return instrumentation_stubs_installed_; 218 } 219 220 bool AreExitStubsInstalled() const { 221 return instrumentation_stubs_installed_; 222 } 223 224 bool HasMethodEntryListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 225 return have_method_entry_listeners_; 226 } 227 228 bool HasMethodExitListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 229 return have_method_exit_listeners_; 230 } 231 232 bool HasDexPcListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 233 return have_dex_pc_listeners_; 234 } 235 236 bool HasFieldReadListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 237 return have_field_read_listeners_; 238 } 239 240 bool HasFieldWriteListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 241 return have_field_write_listeners_; 242 } 243 244 bool HasExceptionCaughtListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 245 return have_exception_caught_listeners_; 246 } 247 248 bool IsActive() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 249 return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ || 250 have_field_read_listeners_ || have_field_write_listeners_ || 251 have_exception_caught_listeners_ || have_method_unwind_listeners_; 252 } 253 254 // Inform listeners that a method has been entered. A dex PC is provided as we may install 255 // listeners into executing code and get method enter events for methods already on the stack. 
256 void MethodEnterEvent(Thread* thread, mirror::Object* this_object, 257 mirror::ArtMethod* method, uint32_t dex_pc) const 258 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 259 if (UNLIKELY(HasMethodEntryListeners())) { 260 MethodEnterEventImpl(thread, this_object, method, dex_pc); 261 } 262 } 263 264 // Inform listeners that a method has been exited. 265 void MethodExitEvent(Thread* thread, mirror::Object* this_object, 266 mirror::ArtMethod* method, uint32_t dex_pc, 267 const JValue& return_value) const 268 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 269 if (UNLIKELY(HasMethodExitListeners())) { 270 MethodExitEventImpl(thread, this_object, method, dex_pc, return_value); 271 } 272 } 273 274 // Inform listeners that a method has been exited due to an exception. 275 void MethodUnwindEvent(Thread* thread, mirror::Object* this_object, 276 mirror::ArtMethod* method, uint32_t dex_pc) const 277 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 278 279 // Inform listeners that the dex pc has moved (only supported by the interpreter). 280 void DexPcMovedEvent(Thread* thread, mirror::Object* this_object, 281 mirror::ArtMethod* method, uint32_t dex_pc) const 282 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 283 if (UNLIKELY(HasDexPcListeners())) { 284 DexPcMovedEventImpl(thread, this_object, method, dex_pc); 285 } 286 } 287 288 // Inform listeners that we read a field (only supported by the interpreter). 289 void FieldReadEvent(Thread* thread, mirror::Object* this_object, 290 mirror::ArtMethod* method, uint32_t dex_pc, 291 mirror::ArtField* field) const 292 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 293 if (UNLIKELY(HasFieldReadListeners())) { 294 FieldReadEventImpl(thread, this_object, method, dex_pc, field); 295 } 296 } 297 298 // Inform listeners that we write a field (only supported by the interpreter). 
299 void FieldWriteEvent(Thread* thread, mirror::Object* this_object, 300 mirror::ArtMethod* method, uint32_t dex_pc, 301 mirror::ArtField* field, const JValue& field_value) const 302 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { 303 if (UNLIKELY(HasFieldWriteListeners())) { 304 FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value); 305 } 306 } 307 308 // Inform listeners that an exception was caught. 309 void ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location, 310 mirror::ArtMethod* catch_method, uint32_t catch_dex_pc, 311 mirror::Throwable* exception_object) const 312 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 313 314 // Called when an instrumented method is entered. The intended link register (lr) is saved so 315 // that returning causes a branch to the method exit stub. Generates method enter events. 316 void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object, 317 mirror::ArtMethod* method, uintptr_t lr, 318 bool interpreter_entry) 319 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 320 321 // Called when an instrumented method is exited. Removes the pushed instrumentation frame 322 // returning the intended link register. Generates method exit events. 323 TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc, 324 uint64_t gpr_result, uint64_t fpr_result) 325 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 326 327 // Pops an instrumentation frame from the current thread and generate an unwind event. 328 void PopMethodForUnwind(Thread* self, bool is_deoptimization) const 329 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 330 331 // Call back for configure stubs. 
332 bool InstallStubsForClass(mirror::Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 333 334 void InstallStubsForMethod(mirror::ArtMethod* method) 335 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 336 337 void VisitRoots(RootCallback* callback, void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) 338 LOCKS_EXCLUDED(deoptimized_methods_lock_); 339 340 private: 341 // Does the job of installing or removing instrumentation code within methods. 342 void ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) 343 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) 344 LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_, 345 deoptimized_methods_lock_); 346 347 void UpdateInterpreterHandlerTable() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) { 348 interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable; 349 } 350 351 // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring 352 // exclusive access to mutator lock which you can't get if the runtime isn't started. 
353 void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS; 354 355 void MethodEnterEventImpl(Thread* thread, mirror::Object* this_object, 356 mirror::ArtMethod* method, uint32_t dex_pc) const 357 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 358 void MethodExitEventImpl(Thread* thread, mirror::Object* this_object, 359 mirror::ArtMethod* method, 360 uint32_t dex_pc, const JValue& return_value) const 361 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 362 void DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object, 363 mirror::ArtMethod* method, uint32_t dex_pc) const 364 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 365 void FieldReadEventImpl(Thread* thread, mirror::Object* this_object, 366 mirror::ArtMethod* method, uint32_t dex_pc, 367 mirror::ArtField* field) const 368 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 369 void FieldWriteEventImpl(Thread* thread, mirror::Object* this_object, 370 mirror::ArtMethod* method, uint32_t dex_pc, 371 mirror::ArtField* field, const JValue& field_value) const 372 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); 373 374 // Read barrier-aware utility functions for accessing deoptimized_methods_ 375 bool AddDeoptimizedMethod(mirror::ArtMethod* method) 376 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) 377 EXCLUSIVE_LOCKS_REQUIRED(deoptimized_methods_lock_); 378 bool FindDeoptimizedMethod(mirror::ArtMethod* method) 379 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) 380 SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_); 381 bool RemoveDeoptimizedMethod(mirror::ArtMethod* method) 382 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) 383 EXCLUSIVE_LOCKS_REQUIRED(deoptimized_methods_lock_); 384 mirror::ArtMethod* BeginDeoptimizedMethod() 385 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) 386 SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_); 387 bool IsDeoptimizedMethodsEmpty() const 388 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) 389 SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_); 390 391 // Have we hijacked ArtMethod::code_ 
so that it calls instrumentation/interpreter code? 392 bool instrumentation_stubs_installed_; 393 394 // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs? 395 bool entry_exit_stubs_installed_; 396 397 // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub? 398 bool interpreter_stubs_installed_; 399 400 // Do we need the fidelity of events that we only get from running within the interpreter? 401 bool interpret_only_; 402 403 // Did the runtime request we only run in the interpreter? ie -Xint mode. 404 bool forced_interpret_only_; 405 406 // Do we have any listeners for method entry events? Short-cut to avoid taking the 407 // instrumentation_lock_. 408 bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_); 409 410 // Do we have any listeners for method exit events? Short-cut to avoid taking the 411 // instrumentation_lock_. 412 bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_); 413 414 // Do we have any listeners for method unwind events? Short-cut to avoid taking the 415 // instrumentation_lock_. 416 bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_); 417 418 // Do we have any listeners for dex move events? Short-cut to avoid taking the 419 // instrumentation_lock_. 420 bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_); 421 422 // Do we have any listeners for field read events? Short-cut to avoid taking the 423 // instrumentation_lock_. 424 bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_); 425 426 // Do we have any listeners for field write events? Short-cut to avoid taking the 427 // instrumentation_lock_. 428 bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_); 429 430 // Do we have any exception caught listeners? Short-cut to avoid taking the instrumentation_lock_. 431 bool have_exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_); 432 433 // The event listeners, written to with the mutator_lock_ exclusively held. 
434 std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_); 435 std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_); 436 std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_); 437 std::shared_ptr<std::list<InstrumentationListener*>> dex_pc_listeners_ 438 GUARDED_BY(Locks::mutator_lock_); 439 std::shared_ptr<std::list<InstrumentationListener*>> field_read_listeners_ 440 GUARDED_BY(Locks::mutator_lock_); 441 std::shared_ptr<std::list<InstrumentationListener*>> field_write_listeners_ 442 GUARDED_BY(Locks::mutator_lock_); 443 std::shared_ptr<std::list<InstrumentationListener*>> exception_caught_listeners_ 444 GUARDED_BY(Locks::mutator_lock_); 445 446 // The set of methods being deoptimized (by the debugger) which must be executed with interpreter 447 // only. 448 mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER; 449 std::multimap<int32_t, GcRoot<mirror::ArtMethod>> deoptimized_methods_ 450 GUARDED_BY(deoptimized_methods_lock_); 451 bool deoptimization_enabled_; 452 453 // Current interpreter handler table. This is updated each time the thread state flags are 454 // modified. 455 InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_); 456 457 // Greater than 0 if quick alloc entry points instrumented. 458 // TODO: The access and changes to this is racy and should be guarded by a lock. 459 AtomicInteger quick_alloc_entry_points_instrumentation_counter_; 460 461 DISALLOW_COPY_AND_ASSIGN(Instrumentation); 462}; 463 464// An element in the instrumentation side stack maintained in art::Thread. 
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object, mirror::ArtMethod* method,
                            uintptr_t return_pc, size_t frame_id, bool interpreter_entry)
      : this_object_(this_object), method_(method), return_pc_(return_pc), frame_id_(frame_id),
        interpreter_entry_(interpreter_entry) {
  }

  // Returns a human-readable description of this frame (requires the mutator lock as it
  // inspects mirror:: objects).
  std::string Dump() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Receiver at the time the frame was pushed (presumably null for static methods —
  // confirm at the PushInstrumentationStackFrame call sites).
  mirror::Object* this_object_;
  // The method whose activation this side-stack entry shadows.
  mirror::ArtMethod* method_;
  // The original return address (link register) that was hijacked so the method exit stub
  // runs when the method returns.
  uintptr_t return_pc_;
  // Identifier of the corresponding managed stack frame — TODO(review): confirm how this is
  // derived; not visible from this header.
  size_t frame_id_;
  // Whether the method was entered via the interpreter entry path (mirrors the
  // interpreter_entry argument of PushInstrumentationStackFrame).
  bool interpreter_entry_;
};

}  // namespace instrumentation
}  // namespace art

#endif  // ART_RUNTIME_INSTRUMENTATION_H_