events.cc revision b284f8d775ac32d8109744d94b99da451570beef
1/* Copyright (C) 2016 The Android Open Source Project
2 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
3 *
4 * This file implements interfaces from the file jvmti.h. This implementation
5 * is licensed under the same terms as the file jvmti.h.  The
6 * copyright and license information for the file jvmti.h follows.
7 *
8 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
9 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
10 *
11 * This code is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License version 2 only, as
13 * published by the Free Software Foundation.  Oracle designates this
14 * particular file as subject to the "Classpath" exception as provided
15 * by Oracle in the LICENSE file that accompanied this code.
16 *
17 * This code is distributed in the hope that it will be useful, but WITHOUT
18 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
19 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
20 * version 2 for more details (a copy is included in the LICENSE file that
21 * accompanied this code).
22 *
23 * You should have received a copy of the GNU General Public License version
24 * 2 along with this work; if not, write to the Free Software Foundation,
25 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
26 *
27 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
28 * or visit www.oracle.com if you need additional information or have any
29 * questions.
30 */
31
32#include "events-inl.h"
33
34#include <array>
35
36#include "art_field-inl.h"
37#include "art_jvmti.h"
38#include "art_method-inl.h"
39#include "base/logging.h"
40#include "deopt_manager.h"
41#include "dex_file_types.h"
42#include "gc/allocation_listener.h"
43#include "gc/gc_pause_listener.h"
44#include "gc/heap.h"
45#include "gc/scoped_gc_critical_section.h"
46#include "handle_scope-inl.h"
47#include "instrumentation.h"
48#include "jni_env_ext-inl.h"
49#include "jni_internal.h"
50#include "mirror/class.h"
51#include "mirror/object-inl.h"
52#include "monitor.h"
53#include "nativehelper/scoped_local_ref.h"
54#include "runtime.h"
55#include "scoped_thread_state_change-inl.h"
56#include "stack.h"
57#include "thread-inl.h"
58#include "thread_list.h"
59#include "ti_phase.h"
60
61namespace openjdkjvmti {
62
63void ArtJvmtiEventCallbacks::CopyExtensionsFrom(const ArtJvmtiEventCallbacks* cb) {
64  if (art::kIsDebugBuild) {
65    ArtJvmtiEventCallbacks clean;
66    DCHECK_EQ(memcmp(&clean, this, sizeof(clean)), 0)
67        << "CopyExtensionsFrom called with initialized eventsCallbacks!";
68  }
69  if (cb != nullptr) {
70    memcpy(this, cb, sizeof(*this));
71  } else {
72    memset(this, 0, sizeof(*this));
73  }
74}
75
76jvmtiError ArtJvmtiEventCallbacks::Set(jint index, jvmtiExtensionEvent cb) {
77  switch (index) {
78    case static_cast<jint>(ArtJvmtiEvent::kDdmPublishChunk):
79      DdmPublishChunk = reinterpret_cast<ArtJvmtiEventDdmPublishChunk>(cb);
80      return OK;
81    default:
82      return ERR(ILLEGAL_ARGUMENT);
83  }
84}
85
86
87bool IsExtensionEvent(jint e) {
88  return e >= static_cast<jint>(ArtJvmtiEvent::kMinEventTypeVal) &&
89      e <= static_cast<jint>(ArtJvmtiEvent::kMaxEventTypeVal) &&
90      IsExtensionEvent(static_cast<ArtJvmtiEvent>(e));
91}
92
93bool IsExtensionEvent(ArtJvmtiEvent e) {
94  switch (e) {
95    case ArtJvmtiEvent::kDdmPublishChunk:
96      return true;
97    default:
98      return false;
99  }
100}
101
102bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
103  return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
104}
105
106EventMask& EventMasks::GetEventMask(art::Thread* thread) {
107  if (thread == nullptr) {
108    return global_event_mask;
109  }
110
111  for (auto& pair : thread_event_masks) {
112    const UniqueThread& unique_thread = pair.first;
113    if (unique_thread.first == thread &&
114        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
115      return pair.second;
116    }
117  }
118
119  // TODO: Remove old UniqueThread with the same pointer, if exists.
120
121  thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
122  return thread_event_masks.back().second;
123}
124
125EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
126  if (thread == nullptr) {
127    return &global_event_mask;
128  }
129
130  for (auto& pair : thread_event_masks) {
131    const UniqueThread& unique_thread = pair.first;
132    if (unique_thread.first == thread &&
133        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
134      return &pair.second;
135    }
136  }
137
138  return nullptr;
139}
140
141
142void EventMasks::EnableEvent(art::Thread* thread, ArtJvmtiEvent event) {
143  DCHECK(EventMask::EventIsInRange(event));
144  GetEventMask(thread).Set(event);
145  if (thread != nullptr) {
146    unioned_thread_event_mask.Set(event, true);
147  }
148}
149
150void EventMasks::DisableEvent(art::Thread* thread, ArtJvmtiEvent event) {
151  DCHECK(EventMask::EventIsInRange(event));
152  GetEventMask(thread).Set(event, false);
153  if (thread != nullptr) {
154    // Regenerate union for the event.
155    bool union_value = false;
156    for (auto& pair : thread_event_masks) {
157      union_value |= pair.second.Test(event);
158      if (union_value) {
159        break;
160      }
161    }
162    unioned_thread_event_mask.Set(event, union_value);
163  }
164}
165
166void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
167  if (UNLIKELY(caps.can_retransform_classes == 1)) {
168    // If we are giving this env the retransform classes cap we need to switch all events of
169    // NonTransformable to Transformable and vice versa.
170    ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
171                                         : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
172    ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
173                                      : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
174    if (global_event_mask.Test(to_remove)) {
175      CHECK(!global_event_mask.Test(to_add));
176      global_event_mask.Set(to_remove, false);
177      global_event_mask.Set(to_add, true);
178    }
179
180    if (unioned_thread_event_mask.Test(to_remove)) {
181      CHECK(!unioned_thread_event_mask.Test(to_add));
182      unioned_thread_event_mask.Set(to_remove, false);
183      unioned_thread_event_mask.Set(to_add, true);
184    }
185    for (auto thread_mask : thread_event_masks) {
186      if (thread_mask.second.Test(to_remove)) {
187        CHECK(!thread_mask.second.Test(to_add));
188        thread_mask.second.Set(to_remove, false);
189        thread_mask.second.Set(to_add, true);
190      }
191    }
192  }
193}
194
// Adds a newly attached JVMTI env to the handler's list, under envs_lock_.
void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::MutexLock mu(art::Thread::Current(), envs_lock_);
  envs.push_back(env);
}
199
// Removes a detaching JVMTI env from the handler's list and recomputes the
// global event masks, since events only this env had requested must now be
// turned off.
// NOTE(review): an earlier comment here claimed envs are replaced with nullptr
// (to survive concurrent iteration) rather than erased, but this code erases
// the element outright — confirm which behavior callers/iterators expect.
void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::MutexLock mu(art::Thread::Current(), envs_lock_);
  auto it = std::find(envs.begin(), envs.end(), env);
  if (it != envs.end()) {
    envs.erase(it);
    // Recalculate the global mask for every event type now that the env is gone.
    for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
         i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
         ++i) {
      RecalculateGlobalEventMaskLocked(static_cast<ArtJvmtiEvent>(i));
    }
  }
}
214
215static bool IsThreadControllable(ArtJvmtiEvent event) {
216  switch (event) {
217    case ArtJvmtiEvent::kVmInit:
218    case ArtJvmtiEvent::kVmStart:
219    case ArtJvmtiEvent::kVmDeath:
220    case ArtJvmtiEvent::kThreadStart:
221    case ArtJvmtiEvent::kCompiledMethodLoad:
222    case ArtJvmtiEvent::kCompiledMethodUnload:
223    case ArtJvmtiEvent::kDynamicCodeGenerated:
224    case ArtJvmtiEvent::kDataDumpRequest:
225      return false;
226
227    default:
228      return true;
229  }
230}
231
232template<typename Type>
233static Type AddLocalRef(art::JNIEnvExt* e, art::mirror::Object* obj)
234    REQUIRES_SHARED(art::Locks::mutator_lock_) {
235  return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
236}
237
238template<ArtJvmtiEvent kEvent, typename ...Args>
239static void RunEventCallback(EventHandler* handler,
240                             art::Thread* self,
241                             art::JNIEnvExt* jnienv,
242                             Args... args)
243    REQUIRES_SHARED(art::Locks::mutator_lock_) {
244  ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
245  handler->DispatchEvent<kEvent>(self,
246                                 static_cast<JNIEnv*>(jnienv),
247                                 thread_jni.get(),
248                                 args...);
249}
250
251static void SetupDdmTracking(art::DdmCallback* listener, bool enable) {
252  art::ScopedObjectAccess soa(art::Thread::Current());
253  if (enable) {
254    art::Runtime::Current()->GetRuntimeCallbacks()->AddDdmCallback(listener);
255  } else {
256    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveDdmCallback(listener);
257  }
258}
259
260class JvmtiDdmChunkListener : public art::DdmCallback {
261 public:
262  explicit JvmtiDdmChunkListener(EventHandler* handler) : handler_(handler) {}
263
264  void DdmPublishChunk(uint32_t type, const art::ArrayRef<const uint8_t>& data)
265      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
266    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kDdmPublishChunk)) {
267      art::Thread* self = art::Thread::Current();
268      handler_->DispatchEvent<ArtJvmtiEvent::kDdmPublishChunk>(
269          self,
270          static_cast<JNIEnv*>(self->GetJniEnv()),
271          static_cast<jint>(type),
272          static_cast<jint>(data.size()),
273          reinterpret_cast<const jbyte*>(data.data()));
274    }
275  }
276
277 private:
278  EventHandler* handler_;
279
280  DISALLOW_COPY_AND_ASSIGN(JvmtiDdmChunkListener);
281};
282
283class JvmtiAllocationListener : public art::gc::AllocationListener {
284 public:
285  explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}
286
287  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
288      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
289    DCHECK_EQ(self, art::Thread::Current());
290
291    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
292      art::StackHandleScope<1> hs(self);
293      auto h = hs.NewHandleWrapper(obj);
294      // jvmtiEventVMObjectAlloc parameters:
295      //      jvmtiEnv *jvmti_env,
296      //      JNIEnv* jni_env,
297      //      jthread thread,
298      //      jobject object,
299      //      jclass object_klass,
300      //      jlong size
301      art::JNIEnvExt* jni_env = self->GetJniEnv();
302      ScopedLocalRef<jobject> object(
303          jni_env, jni_env->AddLocalReference<jobject>(*obj));
304      ScopedLocalRef<jclass> klass(
305          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));
306
307      RunEventCallback<ArtJvmtiEvent::kVmObjectAlloc>(handler_,
308                                                      self,
309                                                      jni_env,
310                                                      object.get(),
311                                                      klass.get(),
312                                                      static_cast<jlong>(byte_count));
313    }
314  }
315
316 private:
317  EventHandler* handler_;
318};
319
320static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
321  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
322  // now, do a workaround: (possibly) acquire and release.
323  art::ScopedObjectAccess soa(art::Thread::Current());
324  art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
325  if (enable) {
326    art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
327  } else {
328    art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
329  }
330}
331
332class JvmtiMonitorListener : public art::MonitorCallback {
333 public:
334  explicit JvmtiMonitorListener(EventHandler* handler) : handler_(handler) {}
335
336  void MonitorContendedLocking(art::Monitor* m)
337      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
338    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEnter)) {
339      art::Thread* self = art::Thread::Current();
340      art::JNIEnvExt* jnienv = self->GetJniEnv();
341      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
342      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEnter>(
343          handler_,
344          self,
345          jnienv,
346          mon.get());
347    }
348  }
349
350  void MonitorContendedLocked(art::Monitor* m)
351      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
352    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEntered)) {
353      art::Thread* self = art::Thread::Current();
354      art::JNIEnvExt* jnienv = self->GetJniEnv();
355      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
356      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEntered>(
357          handler_,
358          self,
359          jnienv,
360          mon.get());
361    }
362  }
363
364  void ObjectWaitStart(art::Handle<art::mirror::Object> obj, int64_t timeout)
365      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
366    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
367      art::Thread* self = art::Thread::Current();
368      art::JNIEnvExt* jnienv = self->GetJniEnv();
369      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, obj.Get()));
370      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
371          handler_,
372          self,
373          jnienv,
374          mon.get(),
375          static_cast<jlong>(timeout));
376    }
377  }
378
379
380  // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED will be sent immediately
381  // after a thread has woken up from a sleep caused by a call to Object#wait. If the thread will
382  // never go to sleep (due to not having the lock, having bad arguments, or having an exception
383  // propogated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
384  //
385  // This does not fully match the RI semantics. Specifically, we will not send the
386  // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would, there was an exception in
387  // the JVMTI_EVENT_MONITOR_WAIT event but otherwise the call was fine. In that case the RI would
388  // send this event and return without going to sleep.
389  //
390  // See b/65558434 for more discussion.
391  void MonitorWaitFinished(art::Monitor* m, bool timeout)
392      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
393    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
394      art::Thread* self = art::Thread::Current();
395      art::JNIEnvExt* jnienv = self->GetJniEnv();
396      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
397      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
398          handler_,
399          self,
400          jnienv,
401          mon.get(),
402          static_cast<jboolean>(timeout));
403    }
404  }
405
406 private:
407  EventHandler* handler_;
408};
409
410static void SetupMonitorListener(art::MonitorCallback* listener, bool enable) {
411  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
412  // now, do a workaround: (possibly) acquire and release.
413  art::ScopedObjectAccess soa(art::Thread::Current());
414  if (enable) {
415    art::Runtime::Current()->GetRuntimeCallbacks()->AddMonitorCallback(listener);
416  } else {
417    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveMonitorCallback(listener);
418  }
419}
420
421// Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_END.
422class JvmtiGcPauseListener : public art::gc::GcPauseListener {
423 public:
424  explicit JvmtiGcPauseListener(EventHandler* handler)
425      : handler_(handler),
426        start_enabled_(false),
427        finish_enabled_(false) {}
428
429  void StartPause() OVERRIDE {
430    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(art::Thread::Current());
431  }
432
433  void EndPause() OVERRIDE {
434    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(art::Thread::Current());
435  }
436
437  bool IsEnabled() {
438    return start_enabled_ || finish_enabled_;
439  }
440
441  void SetStartEnabled(bool e) {
442    start_enabled_ = e;
443  }
444
445  void SetFinishEnabled(bool e) {
446    finish_enabled_ = e;
447  }
448
449 private:
450  EventHandler* handler_;
451  bool start_enabled_;
452  bool finish_enabled_;
453};
454
455static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
456  bool old_state = listener->IsEnabled();
457
458  if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
459    listener->SetStartEnabled(enable);
460  } else {
461    listener->SetFinishEnabled(enable);
462  }
463
464  bool new_state = listener->IsEnabled();
465
466  if (old_state != new_state) {
467    if (new_state) {
468      art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
469    } else {
470      art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
471    }
472  }
473}
474
475class JvmtiMethodTraceListener FINAL : public art::instrumentation::InstrumentationListener {
476 public:
477  explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}
478
479  // Call-back for when a method is entered.
480  void MethodEntered(art::Thread* self,
481                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
482                     art::ArtMethod* method,
483                     uint32_t dex_pc ATTRIBUTE_UNUSED)
484      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
485    if (!method->IsRuntimeMethod() &&
486        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
487      art::JNIEnvExt* jnienv = self->GetJniEnv();
488      RunEventCallback<ArtJvmtiEvent::kMethodEntry>(event_handler_,
489                                                    self,
490                                                    jnienv,
491                                                    art::jni::EncodeArtMethod(method));
492    }
493  }
494
495  // Callback for when a method is exited with a reference return value.
496  void MethodExited(art::Thread* self,
497                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
498                    art::ArtMethod* method,
499                    uint32_t dex_pc ATTRIBUTE_UNUSED,
500                    art::Handle<art::mirror::Object> return_value)
501      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
502    if (!method->IsRuntimeMethod() &&
503        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
504      DCHECK_EQ(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
505          << method->PrettyMethod();
506      DCHECK(!self->IsExceptionPending());
507      jvalue val;
508      art::JNIEnvExt* jnienv = self->GetJniEnv();
509      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
510      val.l = return_jobj.get();
511      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
512          event_handler_,
513          self,
514          jnienv,
515          art::jni::EncodeArtMethod(method),
516          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
517          val);
518    }
519  }
520
521  // Call-back for when a method is exited.
522  void MethodExited(art::Thread* self,
523                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
524                    art::ArtMethod* method,
525                    uint32_t dex_pc ATTRIBUTE_UNUSED,
526                    const art::JValue& return_value)
527      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
528    if (!method->IsRuntimeMethod() &&
529        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
530      DCHECK_NE(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
531          << method->PrettyMethod();
532      DCHECK(!self->IsExceptionPending());
533      jvalue val;
534      art::JNIEnvExt* jnienv = self->GetJniEnv();
535      // 64bit integer is the largest value in the union so we should be fine simply copying it into
536      // the union.
537      val.j = return_value.GetJ();
538      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
539          event_handler_,
540          self,
541          jnienv,
542          art::jni::EncodeArtMethod(method),
543          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
544          val);
545    }
546  }
547
  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  // Unwinds are reported to agents as MethodExit events with
  // was_popped_by_exception == JNI_TRUE and a dummy return value.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::StackHandleScope<1> hs(self);
      // Save and clear the in-flight exception so the callback runs with no
      // exception pending; it is restored after the callback returns.
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }
578
579  // Call-back for when the dex pc moves in a method.
580  void DexPcMoved(art::Thread* self,
581                  art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
582                  art::ArtMethod* method,
583                  uint32_t new_dex_pc)
584      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
585    DCHECK(!method->IsRuntimeMethod());
586    // Default methods might be copied to multiple classes. We need to get the canonical version of
587    // this method so that we can check for breakpoints correctly.
588    // TODO We should maybe do this on other events to ensure that we are consistent WRT default
589    // methods. This could interact with obsolete methods if we ever let interface redefinition
590    // happen though.
591    method = method->GetCanonicalMethod();
592    art::JNIEnvExt* jnienv = self->GetJniEnv();
593    jmethodID jmethod = art::jni::EncodeArtMethod(method);
594    jlocation location = static_cast<jlocation>(new_dex_pc);
595    // Step event is reported first according to the spec.
596    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
597      RunEventCallback<ArtJvmtiEvent::kSingleStep>(event_handler_, self, jnienv, jmethod, location);
598    }
599    // Next we do the Breakpoint events. The Dispatch code will filter the individual
600    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
601      RunEventCallback<ArtJvmtiEvent::kBreakpoint>(event_handler_, self, jnienv, jmethod, location);
602    }
603  }
604
605  // Call-back for when we read from a field.
606  void FieldRead(art::Thread* self,
607                 art::Handle<art::mirror::Object> this_object,
608                 art::ArtMethod* method,
609                 uint32_t dex_pc,
610                 art::ArtField* field)
611      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
612    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
613      art::JNIEnvExt* jnienv = self->GetJniEnv();
614      // DCHECK(!self->IsExceptionPending());
615      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
616      ScopedLocalRef<jobject> fklass(jnienv,
617                                     AddLocalRef<jobject>(jnienv,
618                                                          field->GetDeclaringClass().Ptr()));
619      RunEventCallback<ArtJvmtiEvent::kFieldAccess>(event_handler_,
620                                                    self,
621                                                    jnienv,
622                                                    art::jni::EncodeArtMethod(method),
623                                                    static_cast<jlocation>(dex_pc),
624                                                    static_cast<jclass>(fklass.get()),
625                                                    this_ref.get(),
626                                                    art::jni::EncodeArtField(field));
627    }
628  }
629
630  void FieldWritten(art::Thread* self,
631                    art::Handle<art::mirror::Object> this_object,
632                    art::ArtMethod* method,
633                    uint32_t dex_pc,
634                    art::ArtField* field,
635                    art::Handle<art::mirror::Object> new_val)
636      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
637    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
638      art::JNIEnvExt* jnienv = self->GetJniEnv();
639      // DCHECK(!self->IsExceptionPending());
640      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
641      ScopedLocalRef<jobject> fklass(jnienv,
642                                     AddLocalRef<jobject>(jnienv,
643                                                          field->GetDeclaringClass().Ptr()));
644      ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
645      jvalue val;
646      val.l = fval.get();
647      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
648          event_handler_,
649          self,
650          jnienv,
651          art::jni::EncodeArtMethod(method),
652          static_cast<jlocation>(dex_pc),
653          static_cast<jclass>(fklass.get()),
654          field->IsStatic() ? nullptr :  this_ref.get(),
655          art::jni::EncodeArtField(field),
656          'L',  // type_char
657          val);
658    }
659  }
660
661  // Call-back for when we write into a field.
662  void FieldWritten(art::Thread* self,
663                    art::Handle<art::mirror::Object> this_object,
664                    art::ArtMethod* method,
665                    uint32_t dex_pc,
666                    art::ArtField* field,
667                    const art::JValue& field_value)
668      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
669    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
670      art::JNIEnvExt* jnienv = self->GetJniEnv();
671      DCHECK(!self->IsExceptionPending());
672      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
673      ScopedLocalRef<jobject> fklass(jnienv,
674                                     AddLocalRef<jobject>(jnienv,
675                                                          field->GetDeclaringClass().Ptr()));
676      char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
677      jvalue val;
678      // 64bit integer is the largest value in the union so we should be fine simply copying it into
679      // the union.
680      val.j = field_value.GetJ();
681      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
682          event_handler_,
683          self,
684          jnienv,
685          art::jni::EncodeArtMethod(method),
686          static_cast<jlocation>(dex_pc),
687          static_cast<jclass>(fklass.get()),
688          field->IsStatic() ? nullptr :  this_ref.get(),  // nb static field modification get given
689                                                          // the class as this_object for some
690                                                          // reason.
691          art::jni::EncodeArtField(field),
692          type_char,
693          val);
694    }
695  }
696
697  void WatchedFramePop(art::Thread* self, const art::ShadowFrame& frame)
698      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
699      art::JNIEnvExt* jnienv = self->GetJniEnv();
700    jboolean is_exception_pending = self->IsExceptionPending();
701    RunEventCallback<ArtJvmtiEvent::kFramePop>(
702        event_handler_,
703        self,
704        jnienv,
705        art::jni::EncodeArtMethod(frame.GetMethod()),
706        is_exception_pending,
707        &frame);
708  }
709
  // Walks the stack to find the method and dex pc of the catch handler that is
  // expected to receive `exception`; writes the result to *out_method/*dex_pc,
  // or nullptr/0 when no Java-level handler is found.
  static void FindCatchMethodsFromThrow(art::Thread* self,
                                        art::Handle<art::mirror::Throwable> exception,
                                        /*out*/ art::ArtMethod** out_method,
                                        /*out*/ uint32_t* dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Finds the location where this exception will most likely be caught. We ignore intervening
    // native frames (which could catch the exception) and return the closest java frame with a
    // compatible catch statement.
    class CatchLocationFinder FINAL : public art::StackVisitor {
     public:
      CatchLocationFinder(art::Thread* target,
                          art::Handle<art::mirror::Class> exception_class,
                          art::Context* context,
                          /*out*/ art::ArtMethod** out_catch_method,
                          /*out*/ uint32_t* out_catch_pc)
          REQUIRES_SHARED(art::Locks::mutator_lock_)
        : StackVisitor(target, context, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
          exception_class_(exception_class),
          catch_method_ptr_(out_catch_method),
          catch_dex_pc_ptr_(out_catch_pc) {}

      // Returns true to keep walking; false once a matching catch block is found.
      bool VisitFrame() OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
        art::ArtMethod* method = GetMethod();
        DCHECK(method != nullptr);
        // Skip runtime (trampoline) frames entirely.
        if (method->IsRuntimeMethod()) {
          return true;
        }

        if (!method->IsNative()) {
          uint32_t cur_dex_pc = GetDexPc();
          if (cur_dex_pc == art::dex::kDexNoIndex) {
            // This frame looks opaque. Just keep on going.
            return true;
          }
          bool has_no_move_exception = false;
          uint32_t found_dex_pc = method->FindCatchBlock(
              exception_class_, cur_dex_pc, &has_no_move_exception);
          if (found_dex_pc != art::dex::kDexNoIndex) {
            // We found the catch. Store the result and return.
            *catch_method_ptr_ = method;
            *catch_dex_pc_ptr_ = found_dex_pc;
            return false;
          }
        }
        return true;
      }

     private:
      art::Handle<art::mirror::Class> exception_class_;
      art::ArtMethod** catch_method_ptr_;
      uint32_t* catch_dex_pc_ptr_;

      DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
    };

    art::StackHandleScope<1> hs(self);
    // Default to "no handler found".
    *out_method = nullptr;
    *dex_pc = 0;
    std::unique_ptr<art::Context> context(art::Context::Create());

    CatchLocationFinder clf(self,
                            hs.NewHandle(exception->GetClass()),
                            context.get(),
                            /*out*/ out_method,
                            /*out*/ dex_pc);
    clf.WalkStack(/* include_transitions */ false);
  }
777
778  // Call-back when an exception is thrown.
779  void ExceptionThrown(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
780      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
781    DCHECK(self->IsExceptionThrownByCurrentMethod(exception_object.Get()));
782    // The instrumentation events get rid of this for us.
783    DCHECK(!self->IsExceptionPending());
784    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kException)) {
785      art::JNIEnvExt* jnienv = self->GetJniEnv();
786      art::ArtMethod* catch_method;
787      uint32_t catch_pc;
788      FindCatchMethodsFromThrow(self, exception_object, &catch_method, &catch_pc);
789      uint32_t dex_pc = 0;
790      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
791                                                      /* check_suspended */ true,
792                                                      /* abort_on_error */ art::kIsDebugBuild);
793      ScopedLocalRef<jobject> exception(jnienv,
794                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
795      RunEventCallback<ArtJvmtiEvent::kException>(
796          event_handler_,
797          self,
798          jnienv,
799          art::jni::EncodeArtMethod(method),
800          static_cast<jlocation>(dex_pc),
801          exception.get(),
802          art::jni::EncodeArtMethod(catch_method),
803          static_cast<jlocation>(catch_pc));
804    }
805    return;
806  }
807
808  // Call-back when an exception is handled.
809  void ExceptionHandled(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
810      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
811    // Since the exception has already been handled there shouldn't be one pending.
812    DCHECK(!self->IsExceptionPending());
813    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kExceptionCatch)) {
814      art::JNIEnvExt* jnienv = self->GetJniEnv();
815      uint32_t dex_pc;
816      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
817                                                      /* check_suspended */ true,
818                                                      /* abort_on_error */ art::kIsDebugBuild);
819      ScopedLocalRef<jobject> exception(jnienv,
820                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
821      RunEventCallback<ArtJvmtiEvent::kExceptionCatch>(
822          event_handler_,
823          self,
824          jnienv,
825          art::jni::EncodeArtMethod(method),
826          static_cast<jlocation>(dex_pc),
827          exception.get());
828    }
829    return;
830  }
831
832  // Call-back for when we execute a branch.
833  void Branch(art::Thread* self ATTRIBUTE_UNUSED,
834              art::ArtMethod* method ATTRIBUTE_UNUSED,
835              uint32_t dex_pc ATTRIBUTE_UNUSED,
836              int32_t dex_pc_offset ATTRIBUTE_UNUSED)
837      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
838    return;
839  }
840
841  // Call-back for when we get an invokevirtual or an invokeinterface.
842  void InvokeVirtualOrInterface(art::Thread* self ATTRIBUTE_UNUSED,
843                                art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
844                                art::ArtMethod* caller ATTRIBUTE_UNUSED,
845                                uint32_t dex_pc ATTRIBUTE_UNUSED,
846                                art::ArtMethod* callee ATTRIBUTE_UNUSED)
847      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
848    return;
849  }
850
 private:
  // The EventHandler that owns this listener; used to check enabled events and to dispatch the
  // jvmti callbacks. Not owned.
  EventHandler* const event_handler_;
};
854
855static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
856  switch (event) {
857    case ArtJvmtiEvent::kMethodEntry:
858      return art::instrumentation::Instrumentation::kMethodEntered;
859    case ArtJvmtiEvent::kMethodExit:
860      return art::instrumentation::Instrumentation::kMethodExited |
861             art::instrumentation::Instrumentation::kMethodUnwind;
862    case ArtJvmtiEvent::kFieldModification:
863      return art::instrumentation::Instrumentation::kFieldWritten;
864    case ArtJvmtiEvent::kFieldAccess:
865      return art::instrumentation::Instrumentation::kFieldRead;
866    case ArtJvmtiEvent::kBreakpoint:
867    case ArtJvmtiEvent::kSingleStep:
868      return art::instrumentation::Instrumentation::kDexPcMoved;
869    case ArtJvmtiEvent::kFramePop:
870      return art::instrumentation::Instrumentation::kWatchedFramePop;
871    case ArtJvmtiEvent::kException:
872      return art::instrumentation::Instrumentation::kExceptionThrown;
873    case ArtJvmtiEvent::kExceptionCatch:
874      return art::instrumentation::Instrumentation::kExceptionHandled;
875    default:
876      LOG(FATAL) << "Unknown event ";
877      return 0;
878  }
879}
880
881static bool EventNeedsFullDeopt(ArtJvmtiEvent event) {
882  switch (event) {
883    case ArtJvmtiEvent::kBreakpoint:
884    case ArtJvmtiEvent::kException:
885      return false;
886    // TODO We should support more of these or at least do something to make them discriminate by
887    // thread.
888    case ArtJvmtiEvent::kMethodEntry:
889    case ArtJvmtiEvent::kExceptionCatch:
890    case ArtJvmtiEvent::kMethodExit:
891    case ArtJvmtiEvent::kFieldModification:
892    case ArtJvmtiEvent::kFieldAccess:
893    case ArtJvmtiEvent::kSingleStep:
894    case ArtJvmtiEvent::kFramePop:
895      return true;
896    default:
897      LOG(FATAL) << "Unexpected event type!";
898      UNREACHABLE();
899  }
900}
901
// Adds or removes 'listener' for the instrumentation events that implement 'event', after first
// setting up (or tearing down) the deoptimization state the event requires.
static void SetupTraceListener(JvmtiMethodTraceListener* listener,
                               ArtJvmtiEvent event,
                               bool enable) {
  bool needs_full_deopt = EventNeedsFullDeopt(event);
  // Make sure we can deopt.
  {
    art::ScopedObjectAccess soa(art::Thread::Current());
    DeoptManager* deopt_manager = DeoptManager::Get();
    if (enable) {
      deopt_manager->AddDeoptimizationRequester();
      if (needs_full_deopt) {
        deopt_manager->AddDeoptimizeAllMethods();
      }
    } else {
      // Tear down in the reverse order of the enable path above.
      if (needs_full_deopt) {
        deopt_manager->RemoveDeoptimizeAllMethods();
      }
      deopt_manager->RemoveDeoptimizationRequester();
    }
  }

  // Add the actual listeners.
  // Move to a native (suspendable) thread state before the ScopedSuspendAll below.
  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
  uint32_t new_events = GetInstrumentationEventsFor(event);
  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
  // Changing instrumentation listeners is done with the GC quiesced and every other thread
  // suspended so no thread observes a half-installed listener set.
  art::gc::ScopedGCCriticalSection gcs(art::Thread::Current(),
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing installation");
  if (enable) {
    instr->AddListener(listener, new_events);
  } else {
    instr->RemoveListener(listener, new_events);
  }
}
937
// Makes sure that all compiled methods are AsyncDeoptimizable so we can deoptimize (and force to
// the switch interpreter) when we try to get or set a local variable.
void EventHandler::HandleLocalAccessCapabilityAdded() {
  // Visitor that pushes every method with non-async-deoptimizable compiled code back onto the
  // interpreter entry point.
  class UpdateEntryPointsClassVisitor : public art::ClassVisitor {
   public:
    explicit UpdateEntryPointsClassVisitor(art::Runtime* runtime)
        : runtime_(runtime) {}

    // Called once per class; returning true continues the iteration over all classes.
    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES(art::Locks::mutator_lock_) {
      if (!klass->IsLoaded()) {
        // Skip classes that aren't loaded since they might not have fully allocated and initialized
        // their methods. Furthermore since the jvmti-plugin must have been loaded by this point
        // these methods will definitely be using debuggable code.
        return true;
      }
      for (auto& m : klass->GetMethods(art::kRuntimePointerSize)) {
        const void* code = m.GetEntryPointFromQuickCompiledCode();
        if (m.IsNative() || m.IsProxyMethod()) {
          // Native and proxy methods have no dex code to interpret; leave them alone.
          continue;
        } else if (!runtime_->GetClassLinker()->IsQuickToInterpreterBridge(code) &&
                   !runtime_->IsAsyncDeoptimizeable(reinterpret_cast<uintptr_t>(code))) {
          // Compiled code that cannot be async-deoptimized: route the method back through the
          // interpreter entry point.
          runtime_->GetInstrumentation()->UpdateMethodsCodeToInterpreterEntryPoint(&m);
        }
      }
      return true;
    }

   private:
    art::Runtime* runtime_;  // Not owned.
  };
  art::ScopedObjectAccess soa(art::Thread::Current());
  UpdateEntryPointsClassVisitor visitor(art::Runtime::Current());
  art::Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
}
973
974bool EventHandler::OtherMonitorEventsEnabledAnywhere(ArtJvmtiEvent event) {
975  std::array<ArtJvmtiEvent, 4> events {
976    {
977      ArtJvmtiEvent::kMonitorContendedEnter,
978      ArtJvmtiEvent::kMonitorContendedEntered,
979      ArtJvmtiEvent::kMonitorWait,
980      ArtJvmtiEvent::kMonitorWaited
981    }
982  };
983  for (ArtJvmtiEvent e : events) {
984    if (e != event && IsEventEnabledAnywhere(e)) {
985      return true;
986    }
987  }
988  return false;
989}
990
991// Handle special work for the given event type, if necessary.
992void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
993  switch (event) {
994    case ArtJvmtiEvent::kDdmPublishChunk:
995      SetupDdmTracking(ddm_listener_.get(), enable);
996      return;
997    case ArtJvmtiEvent::kVmObjectAlloc:
998      SetupObjectAllocationTracking(alloc_listener_.get(), enable);
999      return;
1000
1001    case ArtJvmtiEvent::kGarbageCollectionStart:
1002    case ArtJvmtiEvent::kGarbageCollectionFinish:
1003      SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
1004      return;
1005
1006    case ArtJvmtiEvent::kBreakpoint:
1007    case ArtJvmtiEvent::kSingleStep: {
1008      ArtJvmtiEvent other = (event == ArtJvmtiEvent::kBreakpoint) ? ArtJvmtiEvent::kSingleStep
1009                                                                  : ArtJvmtiEvent::kBreakpoint;
1010      // We only need to do anything if there isn't already a listener installed/held-on by the
1011      // other jvmti event that uses DexPcMoved.
1012      if (!IsEventEnabledAnywhere(other)) {
1013        SetupTraceListener(method_trace_listener_.get(), event, enable);
1014      }
1015      return;
1016    }
1017    // FramePop can never be disabled once it's been turned on since we would either need to deal
1018    // with dangling pointers or have missed events.
1019    // TODO We really need to make this not the case anymore.
1020    case ArtJvmtiEvent::kFramePop:
1021      if (!enable || (enable && frame_pop_enabled)) {
1022        break;
1023      } else {
1024        SetupTraceListener(method_trace_listener_.get(), event, enable);
1025        break;
1026      }
1027    case ArtJvmtiEvent::kMethodEntry:
1028    case ArtJvmtiEvent::kMethodExit:
1029    case ArtJvmtiEvent::kFieldAccess:
1030    case ArtJvmtiEvent::kFieldModification:
1031    case ArtJvmtiEvent::kException:
1032    case ArtJvmtiEvent::kExceptionCatch:
1033      SetupTraceListener(method_trace_listener_.get(), event, enable);
1034      return;
1035    case ArtJvmtiEvent::kMonitorContendedEnter:
1036    case ArtJvmtiEvent::kMonitorContendedEntered:
1037    case ArtJvmtiEvent::kMonitorWait:
1038    case ArtJvmtiEvent::kMonitorWaited:
1039      if (!OtherMonitorEventsEnabledAnywhere(event)) {
1040        SetupMonitorListener(monitor_listener_.get(), enable);
1041      }
1042      return;
1043    default:
1044      break;
1045  }
1046}
1047
1048// Checks to see if the env has the capabilities associated with the given event.
1049static bool HasAssociatedCapability(ArtJvmTiEnv* env,
1050                                    ArtJvmtiEvent event) {
1051  jvmtiCapabilities caps = env->capabilities;
1052  switch (event) {
1053    case ArtJvmtiEvent::kBreakpoint:
1054      return caps.can_generate_breakpoint_events == 1;
1055
1056    case ArtJvmtiEvent::kCompiledMethodLoad:
1057    case ArtJvmtiEvent::kCompiledMethodUnload:
1058      return caps.can_generate_compiled_method_load_events == 1;
1059
1060    case ArtJvmtiEvent::kException:
1061    case ArtJvmtiEvent::kExceptionCatch:
1062      return caps.can_generate_exception_events == 1;
1063
1064    case ArtJvmtiEvent::kFieldAccess:
1065      return caps.can_generate_field_access_events == 1;
1066
1067    case ArtJvmtiEvent::kFieldModification:
1068      return caps.can_generate_field_modification_events == 1;
1069
1070    case ArtJvmtiEvent::kFramePop:
1071      return caps.can_generate_frame_pop_events == 1;
1072
1073    case ArtJvmtiEvent::kGarbageCollectionStart:
1074    case ArtJvmtiEvent::kGarbageCollectionFinish:
1075      return caps.can_generate_garbage_collection_events == 1;
1076
1077    case ArtJvmtiEvent::kMethodEntry:
1078      return caps.can_generate_method_entry_events == 1;
1079
1080    case ArtJvmtiEvent::kMethodExit:
1081      return caps.can_generate_method_exit_events == 1;
1082
1083    case ArtJvmtiEvent::kMonitorContendedEnter:
1084    case ArtJvmtiEvent::kMonitorContendedEntered:
1085    case ArtJvmtiEvent::kMonitorWait:
1086    case ArtJvmtiEvent::kMonitorWaited:
1087      return caps.can_generate_monitor_events == 1;
1088
1089    case ArtJvmtiEvent::kNativeMethodBind:
1090      return caps.can_generate_native_method_bind_events == 1;
1091
1092    case ArtJvmtiEvent::kObjectFree:
1093      return caps.can_generate_object_free_events == 1;
1094
1095    case ArtJvmtiEvent::kSingleStep:
1096      return caps.can_generate_single_step_events == 1;
1097
1098    case ArtJvmtiEvent::kVmObjectAlloc:
1099      return caps.can_generate_vm_object_alloc_events == 1;
1100
1101    default:
1102      return true;
1103  }
1104}
1105
// Enables or disables 'event' for 'env' (optionally restricted to 'thread'), performing the
// jvmti-mandated validation first and running event-specific setup/teardown only when the
// event's global enabled-ness actually changes.
jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
                                  art::Thread* thread,
                                  ArtJvmtiEvent event,
                                  jvmtiEventMode mode) {
  if (thread != nullptr) {
    // Per-thread control: the target thread must be alive and the event thread-controllable.
    art::ThreadState state = thread->GetState();
    if (state == art::ThreadState::kStarting ||
        state == art::ThreadState::kTerminated ||
        thread->IsStillStarting()) {
      return ERR(THREAD_NOT_ALIVE);
    }
    if (!IsThreadControllable(event)) {
      return ERR(ILLEGAL_ARGUMENT);
    }
  }

  if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
    return ERR(ILLEGAL_ARGUMENT);
  }

  if (!EventMask::EventIsInRange(event)) {
    return ERR(INVALID_EVENT_TYPE);
  }

  if (!HasAssociatedCapability(env, event)) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  // Whether the event was enabled by any environment/thread before this change.
  bool old_state = global_mask.Test(event);

  if (mode == JVMTI_ENABLE) {
    env->event_masks.EnableEvent(thread, event);
    global_mask.Set(event);
  } else {
    DCHECK_EQ(mode, JVMTI_DISABLE);

    // Other environments may still have the event enabled, so the global mask has to be
    // recomputed rather than simply cleared.
    env->event_masks.DisableEvent(thread, event);
    RecalculateGlobalEventMask(event);
  }

  bool new_state = global_mask.Test(event);

  // Handle any special work required for the event type.
  if (new_state != old_state) {
    HandleEventType(event, mode == JVMTI_ENABLE);
  }

  return ERR(NONE);
}
1155
1156void EventHandler::HandleBreakpointEventsChanged(bool added) {
1157  if (added) {
1158    DeoptManager::Get()->AddDeoptimizationRequester();
1159  } else {
1160    DeoptManager::Get()->RemoveDeoptimizationRequester();
1161  }
1162}
1163
1164void EventHandler::Shutdown() {
1165  // Need to remove the method_trace_listener_ if it's there.
1166  art::Thread* self = art::Thread::Current();
1167  art::gc::ScopedGCCriticalSection gcs(self,
1168                                       art::gc::kGcCauseInstrumentation,
1169                                       art::gc::kCollectorTypeInstrumentation);
1170  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
1171  // Just remove every possible event.
1172  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
1173}
1174
// Constructs the handler and eagerly allocates every listener adapter. The listeners are only
// registered with the runtime later, when their corresponding events get enabled (see
// HandleEventType).
EventHandler::EventHandler() : envs_lock_("JVMTI Environment List Lock",
                                          art::LockLevel::kTopLockLevel) {
  alloc_listener_.reset(new JvmtiAllocationListener(this));
  ddm_listener_.reset(new JvmtiDdmChunkListener(this));
  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
  monitor_listener_.reset(new JvmtiMonitorListener(this));
}
1183
1184EventHandler::~EventHandler() {
1185}
1186
1187}  // namespace openjdkjvmti
1188