events.cc revision 0fa1786bdcc333873ed65a1f77a4669d5701ac5e
1/* Copyright (C) 2016 The Android Open Source Project
2 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
3 *
4 * This file implements interfaces from the file jvmti.h. This implementation
5 * is licensed under the same terms as the file jvmti.h.  The
6 * copyright and license information for the file jvmti.h follows.
7 *
8 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
9 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
10 *
11 * This code is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License version 2 only, as
13 * published by the Free Software Foundation.  Oracle designates this
14 * particular file as subject to the "Classpath" exception as provided
15 * by Oracle in the LICENSE file that accompanied this code.
16 *
17 * This code is distributed in the hope that it will be useful, but WITHOUT
18 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
19 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
20 * version 2 for more details (a copy is included in the LICENSE file that
21 * accompanied this code).
22 *
23 * You should have received a copy of the GNU General Public License version
24 * 2 along with this work; if not, write to the Free Software Foundation,
25 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
26 *
27 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
28 * or visit www.oracle.com if you need additional information or have any
29 * questions.
30 */
31
32#include "events-inl.h"
33
34#include <array>
35
36#include "art_field-inl.h"
37#include "art_jvmti.h"
38#include "art_method-inl.h"
39#include "base/logging.h"
40#include "deopt_manager.h"
41#include "dex_file_types.h"
42#include "gc/allocation_listener.h"
43#include "gc/gc_pause_listener.h"
44#include "gc/heap.h"
45#include "gc/scoped_gc_critical_section.h"
46#include "handle_scope-inl.h"
47#include "instrumentation.h"
48#include "jni_env_ext-inl.h"
49#include "jni_internal.h"
50#include "mirror/class.h"
51#include "mirror/object-inl.h"
52#include "monitor.h"
53#include "nativehelper/scoped_local_ref.h"
54#include "runtime.h"
55#include "scoped_thread_state_change-inl.h"
56#include "stack.h"
57#include "thread-inl.h"
58#include "thread_list.h"
59#include "ti_phase.h"
60
61namespace openjdkjvmti {
62
63bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
64  return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
65}
66
67EventMask& EventMasks::GetEventMask(art::Thread* thread) {
68  if (thread == nullptr) {
69    return global_event_mask;
70  }
71
72  for (auto& pair : thread_event_masks) {
73    const UniqueThread& unique_thread = pair.first;
74    if (unique_thread.first == thread &&
75        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
76      return pair.second;
77    }
78  }
79
80  // TODO: Remove old UniqueThread with the same pointer, if exists.
81
82  thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
83  return thread_event_masks.back().second;
84}
85
86EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
87  if (thread == nullptr) {
88    return &global_event_mask;
89  }
90
91  for (auto& pair : thread_event_masks) {
92    const UniqueThread& unique_thread = pair.first;
93    if (unique_thread.first == thread &&
94        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
95      return &pair.second;
96    }
97  }
98
99  return nullptr;
100}
101
102
103void EventMasks::EnableEvent(art::Thread* thread, ArtJvmtiEvent event) {
104  DCHECK(EventMask::EventIsInRange(event));
105  GetEventMask(thread).Set(event);
106  if (thread != nullptr) {
107    unioned_thread_event_mask.Set(event, true);
108  }
109}
110
111void EventMasks::DisableEvent(art::Thread* thread, ArtJvmtiEvent event) {
112  DCHECK(EventMask::EventIsInRange(event));
113  GetEventMask(thread).Set(event, false);
114  if (thread != nullptr) {
115    // Regenerate union for the event.
116    bool union_value = false;
117    for (auto& pair : thread_event_masks) {
118      union_value |= pair.second.Test(event);
119      if (union_value) {
120        break;
121      }
122    }
123    unioned_thread_event_mask.Set(event, union_value);
124  }
125}
126
127void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
128  if (UNLIKELY(caps.can_retransform_classes == 1)) {
129    // If we are giving this env the retransform classes cap we need to switch all events of
130    // NonTransformable to Transformable and vice versa.
131    ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
132                                         : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
133    ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
134                                      : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
135    if (global_event_mask.Test(to_remove)) {
136      CHECK(!global_event_mask.Test(to_add));
137      global_event_mask.Set(to_remove, false);
138      global_event_mask.Set(to_add, true);
139    }
140
141    if (unioned_thread_event_mask.Test(to_remove)) {
142      CHECK(!unioned_thread_event_mask.Test(to_add));
143      unioned_thread_event_mask.Set(to_remove, false);
144      unioned_thread_event_mask.Set(to_add, true);
145    }
146    for (auto thread_mask : thread_event_masks) {
147      if (thread_mask.second.Test(to_remove)) {
148        CHECK(!thread_mask.second.Test(to_add));
149        thread_mask.second.Set(to_remove, false);
150        thread_mask.second.Set(to_add, true);
151      }
152    }
153  }
154}
155
156void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
157  // Since we never shrink this array we might as well try to fill gaps.
158  auto it = std::find(envs.begin(), envs.end(), nullptr);
159  if (it != envs.end()) {
160    *it = env;
161  } else {
162    envs.push_back(env);
163  }
164}
165
166void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
167  // Since we might be currently iterating over the envs list we cannot actually erase elements.
168  // Instead we will simply replace them with 'nullptr' and skip them manually.
169  auto it = std::find(envs.begin(), envs.end(), env);
170  if (it != envs.end()) {
171    *it = nullptr;
172    for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
173         i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
174         ++i) {
175      RecalculateGlobalEventMask(static_cast<ArtJvmtiEvent>(i));
176    }
177  }
178}
179
180static bool IsThreadControllable(ArtJvmtiEvent event) {
181  switch (event) {
182    case ArtJvmtiEvent::kVmInit:
183    case ArtJvmtiEvent::kVmStart:
184    case ArtJvmtiEvent::kVmDeath:
185    case ArtJvmtiEvent::kThreadStart:
186    case ArtJvmtiEvent::kCompiledMethodLoad:
187    case ArtJvmtiEvent::kCompiledMethodUnload:
188    case ArtJvmtiEvent::kDynamicCodeGenerated:
189    case ArtJvmtiEvent::kDataDumpRequest:
190      return false;
191
192    default:
193      return true;
194  }
195}
196
197template<typename Type>
198static Type AddLocalRef(art::JNIEnvExt* e, art::mirror::Object* obj)
199    REQUIRES_SHARED(art::Locks::mutator_lock_) {
200  return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
201}
202
203template<ArtJvmtiEvent kEvent, typename ...Args>
204static void RunEventCallback(EventHandler* handler,
205                             art::Thread* self,
206                             art::JNIEnvExt* jnienv,
207                             Args... args)
208    REQUIRES_SHARED(art::Locks::mutator_lock_) {
209  ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
210  handler->DispatchEvent<kEvent>(self,
211                                 static_cast<JNIEnv*>(jnienv),
212                                 thread_jni.get(),
213                                 args...);
214}
215
216class JvmtiAllocationListener : public art::gc::AllocationListener {
217 public:
218  explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}
219
220  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
221      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
222    DCHECK_EQ(self, art::Thread::Current());
223
224    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
225      art::StackHandleScope<1> hs(self);
226      auto h = hs.NewHandleWrapper(obj);
227      // jvmtiEventVMObjectAlloc parameters:
228      //      jvmtiEnv *jvmti_env,
229      //      JNIEnv* jni_env,
230      //      jthread thread,
231      //      jobject object,
232      //      jclass object_klass,
233      //      jlong size
234      art::JNIEnvExt* jni_env = self->GetJniEnv();
235      ScopedLocalRef<jobject> object(
236          jni_env, jni_env->AddLocalReference<jobject>(*obj));
237      ScopedLocalRef<jclass> klass(
238          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));
239
240      RunEventCallback<ArtJvmtiEvent::kVmObjectAlloc>(handler_,
241                                                      self,
242                                                      jni_env,
243                                                      object.get(),
244                                                      klass.get(),
245                                                      static_cast<jlong>(byte_count));
246    }
247  }
248
249 private:
250  EventHandler* handler_;
251};
252
253static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
254  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
255  // now, do a workaround: (possibly) acquire and release.
256  art::ScopedObjectAccess soa(art::Thread::Current());
257  art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
258  if (enable) {
259    art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
260  } else {
261    art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
262  }
263}
264
265class JvmtiMonitorListener : public art::MonitorCallback {
266 public:
267  explicit JvmtiMonitorListener(EventHandler* handler) : handler_(handler) {}
268
269  void MonitorContendedLocking(art::Monitor* m)
270      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
271    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEnter)) {
272      art::Thread* self = art::Thread::Current();
273      art::JNIEnvExt* jnienv = self->GetJniEnv();
274      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
275      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEnter>(
276          handler_,
277          self,
278          jnienv,
279          mon.get());
280    }
281  }
282
283  void MonitorContendedLocked(art::Monitor* m)
284      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
285    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEntered)) {
286      art::Thread* self = art::Thread::Current();
287      art::JNIEnvExt* jnienv = self->GetJniEnv();
288      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
289      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEntered>(
290          handler_,
291          self,
292          jnienv,
293          mon.get());
294    }
295  }
296
297  void ObjectWaitStart(art::Handle<art::mirror::Object> obj, int64_t timeout)
298      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
299    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
300      art::Thread* self = art::Thread::Current();
301      art::JNIEnvExt* jnienv = self->GetJniEnv();
302      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, obj.Get()));
303      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
304          handler_,
305          self,
306          jnienv,
307          mon.get(),
308          static_cast<jlong>(timeout));
309    }
310  }
311
312
313  // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED will be sent immediately
314  // after a thread has woken up from a sleep caused by a call to Object#wait. If the thread will
315  // never go to sleep (due to not having the lock, having bad arguments, or having an exception
316  // propogated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
317  //
318  // This does not fully match the RI semantics. Specifically, we will not send the
319  // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would, there was an exception in
320  // the JVMTI_EVENT_MONITOR_WAIT event but otherwise the call was fine. In that case the RI would
321  // send this event and return without going to sleep.
322  //
323  // See b/65558434 for more discussion.
324  void MonitorWaitFinished(art::Monitor* m, bool timeout)
325      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
326    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
327      art::Thread* self = art::Thread::Current();
328      art::JNIEnvExt* jnienv = self->GetJniEnv();
329      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
330      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
331          handler_,
332          self,
333          jnienv,
334          mon.get(),
335          static_cast<jboolean>(timeout));
336    }
337  }
338
339 private:
340  EventHandler* handler_;
341};
342
343static void SetupMonitorListener(art::MonitorCallback* listener, bool enable) {
344  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
345  // now, do a workaround: (possibly) acquire and release.
346  art::ScopedObjectAccess soa(art::Thread::Current());
347  if (enable) {
348    art::Runtime::Current()->GetRuntimeCallbacks()->AddMonitorCallback(listener);
349  } else {
350    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveMonitorCallback(listener);
351  }
352}
353
354// Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_END.
355class JvmtiGcPauseListener : public art::gc::GcPauseListener {
356 public:
357  explicit JvmtiGcPauseListener(EventHandler* handler)
358      : handler_(handler),
359        start_enabled_(false),
360        finish_enabled_(false) {}
361
362  void StartPause() OVERRIDE {
363    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(nullptr);
364  }
365
366  void EndPause() OVERRIDE {
367    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(nullptr);
368  }
369
370  bool IsEnabled() {
371    return start_enabled_ || finish_enabled_;
372  }
373
374  void SetStartEnabled(bool e) {
375    start_enabled_ = e;
376  }
377
378  void SetFinishEnabled(bool e) {
379    finish_enabled_ = e;
380  }
381
382 private:
383  EventHandler* handler_;
384  bool start_enabled_;
385  bool finish_enabled_;
386};
387
388static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
389  bool old_state = listener->IsEnabled();
390
391  if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
392    listener->SetStartEnabled(enable);
393  } else {
394    listener->SetFinishEnabled(enable);
395  }
396
397  bool new_state = listener->IsEnabled();
398
399  if (old_state != new_state) {
400    if (new_state) {
401      art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
402    } else {
403      art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
404    }
405  }
406}
407
408class JvmtiMethodTraceListener FINAL : public art::instrumentation::InstrumentationListener {
409 public:
410  explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}
411
412  // Call-back for when a method is entered.
413  void MethodEntered(art::Thread* self,
414                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
415                     art::ArtMethod* method,
416                     uint32_t dex_pc ATTRIBUTE_UNUSED)
417      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
418    if (!method->IsRuntimeMethod() &&
419        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
420      art::JNIEnvExt* jnienv = self->GetJniEnv();
421      RunEventCallback<ArtJvmtiEvent::kMethodEntry>(event_handler_,
422                                                    self,
423                                                    jnienv,
424                                                    art::jni::EncodeArtMethod(method));
425    }
426  }
427
428  // Callback for when a method is exited with a reference return value.
429  void MethodExited(art::Thread* self,
430                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
431                    art::ArtMethod* method,
432                    uint32_t dex_pc ATTRIBUTE_UNUSED,
433                    art::Handle<art::mirror::Object> return_value)
434      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
435    if (!method->IsRuntimeMethod() &&
436        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
437      DCHECK_EQ(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
438          << method->PrettyMethod();
439      DCHECK(!self->IsExceptionPending());
440      jvalue val;
441      art::JNIEnvExt* jnienv = self->GetJniEnv();
442      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
443      val.l = return_jobj.get();
444      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
445          event_handler_,
446          self,
447          jnienv,
448          art::jni::EncodeArtMethod(method),
449          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
450          val);
451    }
452  }
453
454  // Call-back for when a method is exited.
455  void MethodExited(art::Thread* self,
456                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
457                    art::ArtMethod* method,
458                    uint32_t dex_pc ATTRIBUTE_UNUSED,
459                    const art::JValue& return_value)
460      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
461    if (!method->IsRuntimeMethod() &&
462        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
463      DCHECK_NE(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
464          << method->PrettyMethod();
465      DCHECK(!self->IsExceptionPending());
466      jvalue val;
467      art::JNIEnvExt* jnienv = self->GetJniEnv();
468      // 64bit integer is the largest value in the union so we should be fine simply copying it into
469      // the union.
470      val.j = return_value.GetJ();
471      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
472          event_handler_,
473          self,
474          jnienv,
475          art::jni::EncodeArtMethod(method),
476          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
477          val);
478    }
479  }
480
  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  // Reported to agents as MethodExit with was_popped_by_exception == JNI_TRUE.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // Save and clear the pending exception before running the agent callback; the
      // handle keeps the throwable alive across the call.
      art::StackHandleScope<1> hs(self);
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }
511
512  // Call-back for when the dex pc moves in a method.
513  void DexPcMoved(art::Thread* self,
514                  art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
515                  art::ArtMethod* method,
516                  uint32_t new_dex_pc)
517      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
518    DCHECK(!method->IsRuntimeMethod());
519    // Default methods might be copied to multiple classes. We need to get the canonical version of
520    // this method so that we can check for breakpoints correctly.
521    // TODO We should maybe do this on other events to ensure that we are consistent WRT default
522    // methods. This could interact with obsolete methods if we ever let interface redefinition
523    // happen though.
524    method = method->GetCanonicalMethod();
525    art::JNIEnvExt* jnienv = self->GetJniEnv();
526    jmethodID jmethod = art::jni::EncodeArtMethod(method);
527    jlocation location = static_cast<jlocation>(new_dex_pc);
528    // Step event is reported first according to the spec.
529    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
530      RunEventCallback<ArtJvmtiEvent::kSingleStep>(event_handler_, self, jnienv, jmethod, location);
531    }
532    // Next we do the Breakpoint events. The Dispatch code will filter the individual
533    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
534      RunEventCallback<ArtJvmtiEvent::kBreakpoint>(event_handler_, self, jnienv, jmethod, location);
535    }
536  }
537
538  // Call-back for when we read from a field.
539  void FieldRead(art::Thread* self,
540                 art::Handle<art::mirror::Object> this_object,
541                 art::ArtMethod* method,
542                 uint32_t dex_pc,
543                 art::ArtField* field)
544      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
545    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
546      art::JNIEnvExt* jnienv = self->GetJniEnv();
547      // DCHECK(!self->IsExceptionPending());
548      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
549      ScopedLocalRef<jobject> fklass(jnienv,
550                                     AddLocalRef<jobject>(jnienv,
551                                                          field->GetDeclaringClass().Ptr()));
552      RunEventCallback<ArtJvmtiEvent::kFieldAccess>(event_handler_,
553                                                    self,
554                                                    jnienv,
555                                                    art::jni::EncodeArtMethod(method),
556                                                    static_cast<jlocation>(dex_pc),
557                                                    static_cast<jclass>(fklass.get()),
558                                                    this_ref.get(),
559                                                    art::jni::EncodeArtField(field));
560    }
561  }
562
563  void FieldWritten(art::Thread* self,
564                    art::Handle<art::mirror::Object> this_object,
565                    art::ArtMethod* method,
566                    uint32_t dex_pc,
567                    art::ArtField* field,
568                    art::Handle<art::mirror::Object> new_val)
569      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
570    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
571      art::JNIEnvExt* jnienv = self->GetJniEnv();
572      // DCHECK(!self->IsExceptionPending());
573      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
574      ScopedLocalRef<jobject> fklass(jnienv,
575                                     AddLocalRef<jobject>(jnienv,
576                                                          field->GetDeclaringClass().Ptr()));
577      ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
578      jvalue val;
579      val.l = fval.get();
580      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
581          event_handler_,
582          self,
583          jnienv,
584          art::jni::EncodeArtMethod(method),
585          static_cast<jlocation>(dex_pc),
586          static_cast<jclass>(fklass.get()),
587          field->IsStatic() ? nullptr :  this_ref.get(),
588          art::jni::EncodeArtField(field),
589          'L',  // type_char
590          val);
591    }
592  }
593
594  // Call-back for when we write into a field.
595  void FieldWritten(art::Thread* self,
596                    art::Handle<art::mirror::Object> this_object,
597                    art::ArtMethod* method,
598                    uint32_t dex_pc,
599                    art::ArtField* field,
600                    const art::JValue& field_value)
601      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
602    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
603      art::JNIEnvExt* jnienv = self->GetJniEnv();
604      DCHECK(!self->IsExceptionPending());
605      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
606      ScopedLocalRef<jobject> fklass(jnienv,
607                                     AddLocalRef<jobject>(jnienv,
608                                                          field->GetDeclaringClass().Ptr()));
609      char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
610      jvalue val;
611      // 64bit integer is the largest value in the union so we should be fine simply copying it into
612      // the union.
613      val.j = field_value.GetJ();
614      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
615          event_handler_,
616          self,
617          jnienv,
618          art::jni::EncodeArtMethod(method),
619          static_cast<jlocation>(dex_pc),
620          static_cast<jclass>(fklass.get()),
621          field->IsStatic() ? nullptr :  this_ref.get(),  // nb static field modification get given
622                                                          // the class as this_object for some
623                                                          // reason.
624          art::jni::EncodeArtField(field),
625          type_char,
626          val);
627    }
628  }
629
630  void WatchedFramePop(art::Thread* self, const art::ShadowFrame& frame)
631      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
632      art::JNIEnvExt* jnienv = self->GetJniEnv();
633    jboolean is_exception_pending = self->IsExceptionPending();
634    RunEventCallback<ArtJvmtiEvent::kFramePop>(
635        event_handler_,
636        self,
637        jnienv,
638        art::jni::EncodeArtMethod(frame.GetMethod()),
639        is_exception_pending,
640        &frame);
641  }
642
  // Walks the stack of 'self' to predict where 'exception' will be caught, writing the
  // catching method and dex pc into the out-parameters. If no catch location is found,
  // *out_method stays nullptr and *dex_pc stays 0.
  static void FindCatchMethodsFromThrow(art::Thread* self,
                                        art::Handle<art::mirror::Throwable> exception,
                                        /*out*/ art::ArtMethod** out_method,
                                        /*out*/ uint32_t* dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Finds the location where this exception will most likely be caught. We ignore intervening
    // native frames (which could catch the exception) and return the closest java frame with a
    // compatible catch statement.
    class CatchLocationFinder FINAL : public art::StackVisitor {
     public:
      CatchLocationFinder(art::Thread* target,
                          art::Handle<art::mirror::Class> exception_class,
                          art::Context* context,
                          /*out*/ art::ArtMethod** out_catch_method,
                          /*out*/ uint32_t* out_catch_pc)
          REQUIRES_SHARED(art::Locks::mutator_lock_)
        : StackVisitor(target, context, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
          exception_class_(exception_class),
          catch_method_ptr_(out_catch_method),
          catch_dex_pc_ptr_(out_catch_pc) {}

      // Returning true continues the walk; returning false stops at the found catch.
      bool VisitFrame() OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
        art::ArtMethod* method = GetMethod();
        DCHECK(method != nullptr);
        if (method->IsRuntimeMethod()) {
          return true;
        }

        if (!method->IsNative()) {
          uint32_t cur_dex_pc = GetDexPc();
          if (cur_dex_pc == art::dex::kDexNoIndex) {
            // This frame looks opaque. Just keep on going.
            return true;
          }
          bool has_no_move_exception = false;
          uint32_t found_dex_pc = method->FindCatchBlock(
              exception_class_, cur_dex_pc, &has_no_move_exception);
          if (found_dex_pc != art::dex::kDexNoIndex) {
            // We found the catch. Store the result and return.
            *catch_method_ptr_ = method;
            *catch_dex_pc_ptr_ = found_dex_pc;
            return false;
          }
        }
        return true;
      }

     private:
      art::Handle<art::mirror::Class> exception_class_;
      art::ArtMethod** catch_method_ptr_;
      uint32_t* catch_dex_pc_ptr_;

      DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
    };

    art::StackHandleScope<1> hs(self);
    // Default outputs for the "no catch found" case.
    *out_method = nullptr;
    *dex_pc = 0;
    std::unique_ptr<art::Context> context(art::Context::Create());

    CatchLocationFinder clf(self,
                            hs.NewHandle(exception->GetClass()),
                            context.get(),
                            /*out*/ out_method,
                            /*out*/ dex_pc);
    clf.WalkStack(/* include_transitions */ false);
  }
710
  // Call-back when an exception is thrown. Reports JVMTI Exception with both the
  // throw location (current method/dex pc) and the predicted catch location.
  void ExceptionThrown(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    DCHECK(self->IsExceptionThrownByCurrentMethod(exception_object.Get()));
    // The instrumentation events get rid of this for us.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kException)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtMethod* catch_method;
      uint32_t catch_pc;
      // Predict where the exception will be caught (best effort; see the caveats
      // documented on FindCatchMethodsFromThrow).
      FindCatchMethodsFromThrow(self, exception_object, &catch_method, &catch_pc);
      uint32_t dex_pc = 0;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended */ true,
                                                      /* abort_on_error */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kException>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get(),
          art::jni::EncodeArtMethod(catch_method),
          static_cast<jlocation>(catch_pc));
    }
    return;
  }
740
741  // Call-back when an exception is handled.
742  void ExceptionHandled(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
743      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
744    // Since the exception has already been handled there shouldn't be one pending.
745    DCHECK(!self->IsExceptionPending());
746    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kExceptionCatch)) {
747      art::JNIEnvExt* jnienv = self->GetJniEnv();
748      uint32_t dex_pc;
749      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
750                                                      /* check_suspended */ true,
751                                                      /* abort_on_error */ art::kIsDebugBuild);
752      ScopedLocalRef<jobject> exception(jnienv,
753                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
754      RunEventCallback<ArtJvmtiEvent::kExceptionCatch>(
755          event_handler_,
756          self,
757          jnienv,
758          art::jni::EncodeArtMethod(method),
759          static_cast<jlocation>(dex_pc),
760          exception.get());
761    }
762    return;
763  }
764
765  // Call-back for when we execute a branch.
766  void Branch(art::Thread* self ATTRIBUTE_UNUSED,
767              art::ArtMethod* method ATTRIBUTE_UNUSED,
768              uint32_t dex_pc ATTRIBUTE_UNUSED,
769              int32_t dex_pc_offset ATTRIBUTE_UNUSED)
770      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
771    return;
772  }
773
774  // Call-back for when we get an invokevirtual or an invokeinterface.
775  void InvokeVirtualOrInterface(art::Thread* self ATTRIBUTE_UNUSED,
776                                art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
777                                art::ArtMethod* caller ATTRIBUTE_UNUSED,
778                                uint32_t dex_pc ATTRIBUTE_UNUSED,
779                                art::ArtMethod* callee ATTRIBUTE_UNUSED)
780      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
781    return;
782  }
783
784 private:
785  EventHandler* const event_handler_;
786};
787
788static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
789  switch (event) {
790    case ArtJvmtiEvent::kMethodEntry:
791      return art::instrumentation::Instrumentation::kMethodEntered;
792    case ArtJvmtiEvent::kMethodExit:
793      return art::instrumentation::Instrumentation::kMethodExited |
794             art::instrumentation::Instrumentation::kMethodUnwind;
795    case ArtJvmtiEvent::kFieldModification:
796      return art::instrumentation::Instrumentation::kFieldWritten;
797    case ArtJvmtiEvent::kFieldAccess:
798      return art::instrumentation::Instrumentation::kFieldRead;
799    case ArtJvmtiEvent::kBreakpoint:
800    case ArtJvmtiEvent::kSingleStep:
801      return art::instrumentation::Instrumentation::kDexPcMoved;
802    case ArtJvmtiEvent::kFramePop:
803      return art::instrumentation::Instrumentation::kWatchedFramePop;
804    case ArtJvmtiEvent::kException:
805      return art::instrumentation::Instrumentation::kExceptionThrown;
806    case ArtJvmtiEvent::kExceptionCatch:
807      return art::instrumentation::Instrumentation::kExceptionHandled;
808    default:
809      LOG(FATAL) << "Unknown event ";
810      return 0;
811  }
812}
813
814static bool EventNeedsFullDeopt(ArtJvmtiEvent event) {
815  switch (event) {
816    case ArtJvmtiEvent::kBreakpoint:
817    case ArtJvmtiEvent::kException:
818      return false;
819    // TODO We should support more of these or at least do something to make them discriminate by
820    // thread.
821    case ArtJvmtiEvent::kMethodEntry:
822    case ArtJvmtiEvent::kExceptionCatch:
823    case ArtJvmtiEvent::kMethodExit:
824    case ArtJvmtiEvent::kFieldModification:
825    case ArtJvmtiEvent::kFieldAccess:
826    case ArtJvmtiEvent::kSingleStep:
827    case ArtJvmtiEvent::kFramePop:
828      return true;
829    default:
830      LOG(FATAL) << "Unexpected event type!";
831      UNREACHABLE();
832  }
833}
834
835static void SetupTraceListener(JvmtiMethodTraceListener* listener,
836                               ArtJvmtiEvent event,
837                               bool enable) {
838  bool needs_full_deopt = EventNeedsFullDeopt(event);
839  // Make sure we can deopt.
840  {
841    art::ScopedObjectAccess soa(art::Thread::Current());
842    DeoptManager* deopt_manager = DeoptManager::Get();
843    if (enable) {
844      deopt_manager->AddDeoptimizationRequester();
845      if (needs_full_deopt) {
846        deopt_manager->AddDeoptimizeAllMethods();
847      }
848    } else {
849      if (needs_full_deopt) {
850        deopt_manager->RemoveDeoptimizeAllMethods();
851      }
852      deopt_manager->RemoveDeoptimizationRequester();
853    }
854  }
855
856  // Add the actual listeners.
857  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
858  uint32_t new_events = GetInstrumentationEventsFor(event);
859  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
860  art::gc::ScopedGCCriticalSection gcs(art::Thread::Current(),
861                                       art::gc::kGcCauseInstrumentation,
862                                       art::gc::kCollectorTypeInstrumentation);
863  art::ScopedSuspendAll ssa("jvmti method tracing installation");
864  if (enable) {
865    instr->AddListener(listener, new_events);
866  } else {
867    instr->RemoveListener(listener, new_events);
868  }
869}
870
871// Makes sure that all compiled methods are AsyncDeoptimizable so we can deoptimize (and force to
872// the switch interpreter) when we try to get or set a local variable.
873void EventHandler::HandleLocalAccessCapabilityAdded() {
874  class UpdateEntryPointsClassVisitor : public art::ClassVisitor {
875   public:
876    explicit UpdateEntryPointsClassVisitor(art::Runtime* runtime)
877        : runtime_(runtime) {}
878
879    bool operator()(art::ObjPtr<art::mirror::Class> klass)
880        OVERRIDE REQUIRES(art::Locks::mutator_lock_) {
881      if (!klass->IsLoaded()) {
882        // Skip classes that aren't loaded since they might not have fully allocated and initialized
883        // their methods. Furthemore since the jvmti-plugin must have been loaded by this point
884        // these methods will definitately be using debuggable code.
885        return true;
886      }
887      for (auto& m : klass->GetMethods(art::kRuntimePointerSize)) {
888        const void* code = m.GetEntryPointFromQuickCompiledCode();
889        if (m.IsNative() || m.IsProxyMethod()) {
890          continue;
891        } else if (!runtime_->GetClassLinker()->IsQuickToInterpreterBridge(code) &&
892                   !runtime_->IsAsyncDeoptimizeable(reinterpret_cast<uintptr_t>(code))) {
893          runtime_->GetInstrumentation()->UpdateMethodsCodeToInterpreterEntryPoint(&m);
894        }
895      }
896      return true;
897    }
898
899   private:
900    art::Runtime* runtime_;
901  };
902  art::ScopedObjectAccess soa(art::Thread::Current());
903  UpdateEntryPointsClassVisitor visitor(art::Runtime::Current());
904  art::Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
905}
906
907bool EventHandler::OtherMonitorEventsEnabledAnywhere(ArtJvmtiEvent event) {
908  std::array<ArtJvmtiEvent, 4> events {
909    {
910      ArtJvmtiEvent::kMonitorContendedEnter,
911      ArtJvmtiEvent::kMonitorContendedEntered,
912      ArtJvmtiEvent::kMonitorWait,
913      ArtJvmtiEvent::kMonitorWaited
914    }
915  };
916  for (ArtJvmtiEvent e : events) {
917    if (e != event && IsEventEnabledAnywhere(e)) {
918      return true;
919    }
920  }
921  return false;
922}
923
924// Handle special work for the given event type, if necessary.
925void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
926  switch (event) {
927    case ArtJvmtiEvent::kVmObjectAlloc:
928      SetupObjectAllocationTracking(alloc_listener_.get(), enable);
929      return;
930
931    case ArtJvmtiEvent::kGarbageCollectionStart:
932    case ArtJvmtiEvent::kGarbageCollectionFinish:
933      SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
934      return;
935
936    case ArtJvmtiEvent::kBreakpoint:
937    case ArtJvmtiEvent::kSingleStep: {
938      ArtJvmtiEvent other = (event == ArtJvmtiEvent::kBreakpoint) ? ArtJvmtiEvent::kSingleStep
939                                                                  : ArtJvmtiEvent::kBreakpoint;
940      // We only need to do anything if there isn't already a listener installed/held-on by the
941      // other jvmti event that uses DexPcMoved.
942      if (!IsEventEnabledAnywhere(other)) {
943        SetupTraceListener(method_trace_listener_.get(), event, enable);
944      }
945      return;
946    }
947    // FramePop can never be disabled once it's been turned on since we would either need to deal
948    // with dangling pointers or have missed events.
949    // TODO We really need to make this not the case anymore.
950    case ArtJvmtiEvent::kFramePop:
951      if (!enable || (enable && frame_pop_enabled)) {
952        break;
953      } else {
954        SetupTraceListener(method_trace_listener_.get(), event, enable);
955        break;
956      }
957    case ArtJvmtiEvent::kMethodEntry:
958    case ArtJvmtiEvent::kMethodExit:
959    case ArtJvmtiEvent::kFieldAccess:
960    case ArtJvmtiEvent::kFieldModification:
961    case ArtJvmtiEvent::kException:
962    case ArtJvmtiEvent::kExceptionCatch:
963      SetupTraceListener(method_trace_listener_.get(), event, enable);
964      return;
965    case ArtJvmtiEvent::kMonitorContendedEnter:
966    case ArtJvmtiEvent::kMonitorContendedEntered:
967    case ArtJvmtiEvent::kMonitorWait:
968    case ArtJvmtiEvent::kMonitorWaited:
969      if (!OtherMonitorEventsEnabledAnywhere(event)) {
970        SetupMonitorListener(monitor_listener_.get(), enable);
971      }
972      return;
973    default:
974      break;
975  }
976}
977
978// Checks to see if the env has the capabilities associated with the given event.
979static bool HasAssociatedCapability(ArtJvmTiEnv* env,
980                                    ArtJvmtiEvent event) {
981  jvmtiCapabilities caps = env->capabilities;
982  switch (event) {
983    case ArtJvmtiEvent::kBreakpoint:
984      return caps.can_generate_breakpoint_events == 1;
985
986    case ArtJvmtiEvent::kCompiledMethodLoad:
987    case ArtJvmtiEvent::kCompiledMethodUnload:
988      return caps.can_generate_compiled_method_load_events == 1;
989
990    case ArtJvmtiEvent::kException:
991    case ArtJvmtiEvent::kExceptionCatch:
992      return caps.can_generate_exception_events == 1;
993
994    case ArtJvmtiEvent::kFieldAccess:
995      return caps.can_generate_field_access_events == 1;
996
997    case ArtJvmtiEvent::kFieldModification:
998      return caps.can_generate_field_modification_events == 1;
999
1000    case ArtJvmtiEvent::kFramePop:
1001      return caps.can_generate_frame_pop_events == 1;
1002
1003    case ArtJvmtiEvent::kGarbageCollectionStart:
1004    case ArtJvmtiEvent::kGarbageCollectionFinish:
1005      return caps.can_generate_garbage_collection_events == 1;
1006
1007    case ArtJvmtiEvent::kMethodEntry:
1008      return caps.can_generate_method_entry_events == 1;
1009
1010    case ArtJvmtiEvent::kMethodExit:
1011      return caps.can_generate_method_exit_events == 1;
1012
1013    case ArtJvmtiEvent::kMonitorContendedEnter:
1014    case ArtJvmtiEvent::kMonitorContendedEntered:
1015    case ArtJvmtiEvent::kMonitorWait:
1016    case ArtJvmtiEvent::kMonitorWaited:
1017      return caps.can_generate_monitor_events == 1;
1018
1019    case ArtJvmtiEvent::kNativeMethodBind:
1020      return caps.can_generate_native_method_bind_events == 1;
1021
1022    case ArtJvmtiEvent::kObjectFree:
1023      return caps.can_generate_object_free_events == 1;
1024
1025    case ArtJvmtiEvent::kSingleStep:
1026      return caps.can_generate_single_step_events == 1;
1027
1028    case ArtJvmtiEvent::kVmObjectAlloc:
1029      return caps.can_generate_vm_object_alloc_events == 1;
1030
1031    default:
1032      return true;
1033  }
1034}
1035
1036jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
1037                                  art::Thread* thread,
1038                                  ArtJvmtiEvent event,
1039                                  jvmtiEventMode mode) {
1040  if (thread != nullptr) {
1041    art::ThreadState state = thread->GetState();
1042    if (state == art::ThreadState::kStarting ||
1043        state == art::ThreadState::kTerminated ||
1044        thread->IsStillStarting()) {
1045      return ERR(THREAD_NOT_ALIVE);
1046    }
1047    if (!IsThreadControllable(event)) {
1048      return ERR(ILLEGAL_ARGUMENT);
1049    }
1050  }
1051
1052  if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
1053    return ERR(ILLEGAL_ARGUMENT);
1054  }
1055
1056  if (!EventMask::EventIsInRange(event)) {
1057    return ERR(INVALID_EVENT_TYPE);
1058  }
1059
1060  if (!HasAssociatedCapability(env, event)) {
1061    return ERR(MUST_POSSESS_CAPABILITY);
1062  }
1063
1064  bool old_state = global_mask.Test(event);
1065
1066  if (mode == JVMTI_ENABLE) {
1067    env->event_masks.EnableEvent(thread, event);
1068    global_mask.Set(event);
1069  } else {
1070    DCHECK_EQ(mode, JVMTI_DISABLE);
1071
1072    env->event_masks.DisableEvent(thread, event);
1073    RecalculateGlobalEventMask(event);
1074  }
1075
1076  bool new_state = global_mask.Test(event);
1077
1078  // Handle any special work required for the event type.
1079  if (new_state != old_state) {
1080    HandleEventType(event, mode == JVMTI_ENABLE);
1081  }
1082
1083  return ERR(NONE);
1084}
1085
1086void EventHandler::HandleBreakpointEventsChanged(bool added) {
1087  if (added) {
1088    DeoptManager::Get()->AddDeoptimizationRequester();
1089  } else {
1090    DeoptManager::Get()->RemoveDeoptimizationRequester();
1091  }
1092}
1093
1094void EventHandler::Shutdown() {
1095  // Need to remove the method_trace_listener_ if it's there.
1096  art::Thread* self = art::Thread::Current();
1097  art::gc::ScopedGCCriticalSection gcs(self,
1098                                       art::gc::kGcCauseInstrumentation,
1099                                       art::gc::kCollectorTypeInstrumentation);
1100  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
1101  // Just remove every possible event.
1102  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
1103}
1104
1105EventHandler::EventHandler() {
1106  alloc_listener_.reset(new JvmtiAllocationListener(this));
1107  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
1108  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
1109  monitor_listener_.reset(new JvmtiMonitorListener(this));
1110}
1111
// Nothing explicit to do; the listener objects are released by their owning smart-pointer
// members.
EventHandler::~EventHandler() {
}
1114
1115}  // namespace openjdkjvmti
1116