events.cc revision 373a9b5c718a45ac484afcf4fe6ce84f4bb562b3
/* Copyright (C) 2016 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h.  The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "events-inl.h"

#include <algorithm>
#include <array>

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "dex_file_types.h"
#include "gc/allocation_listener.h"
#include "gc/gc_pause_listener.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "handle_scope-inl.h"
#include "instrumentation.h"
#include "jni_env_ext-inl.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "monitor.h"
#include "nativehelper/scoped_local_ref.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"
#include "ti_phase.h"

namespace openjdkjvmti {

bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
  return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
}

EventMask& EventMasks::GetEventMask(art::Thread* thread) {
  if (thread == nullptr) {
    return global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return pair.second;
    }
  }

  // TODO: Remove the old UniqueThread with the same pointer, if one exists.

  thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
  return thread_event_masks.back().second;
}

EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
  if (thread == nullptr) {
    return &global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return &pair.second;
    }
  }

  return nullptr;
}


void EventMasks::EnableEvent(art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event);
  if (thread != nullptr) {
    unioned_thread_event_mask.Set(event, true);
  }
}

void EventMasks::DisableEvent(art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event, false);
  if (thread != nullptr) {
    // Regenerate union for the event.
    bool union_value = false;
    for (auto& pair : thread_event_masks) {
      union_value |= pair.second.Test(event);
      if (union_value) {
        break;
      }
    }
    unioned_thread_event_mask.Set(event, union_value);
  }
}
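
// Note that a bit in unioned_thread_event_mask only clears once no thread has the event set in
// its per-thread mask. For example, if threads A and B both enabled kBreakpoint for themselves
// and A then disables it, the loop above still finds B's bit and keeps the union bit set, so
// IsEnabledAnywhere(kBreakpoint) remains true.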

void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
  if (UNLIKELY(caps.can_retransform_classes == 1)) {
    // If we are giving this env the retransform classes cap we need to switch all
    // ClassFileLoadHook events from NonRetransformable to Retransformable and vice versa.
    ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
                                         : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
    ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
                                      : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
    if (global_event_mask.Test(to_remove)) {
      CHECK(!global_event_mask.Test(to_add));
      global_event_mask.Set(to_remove, false);
      global_event_mask.Set(to_add, true);
    }

    if (unioned_thread_event_mask.Test(to_remove)) {
      CHECK(!unioned_thread_event_mask.Test(to_add));
      unioned_thread_event_mask.Set(to_remove, false);
      unioned_thread_event_mask.Set(to_add, true);
    }
    for (auto& thread_mask : thread_event_masks) {
      if (thread_mask.second.Test(to_remove)) {
        CHECK(!thread_mask.second.Test(to_add));
        thread_mask.second.Set(to_remove, false);
        thread_mask.second.Set(to_add, true);
      }
    }
  }
}
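
// For illustration only: an agent typically reaches this path with the standard JVMTI calls shown
// below (an example sketch, not code from this runtime).
//
//   jvmtiCapabilities caps = {};
//   caps.can_retransform_classes = 1;
//   jvmti_env->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_CLASS_FILE_LOAD_HOOK, nullptr);
//   jvmti_env->AddCapabilities(&caps);  // The already-enabled hook moves from the
//                                       // NonRetransformable mask bit to the Retransformable one.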

void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
  // Since we never shrink this array we might as well try to fill gaps.
  auto it = std::find(envs.begin(), envs.end(), nullptr);
  if (it != envs.end()) {
    *it = env;
  } else {
    envs.push_back(env);
  }
}

void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
  // Since we might be currently iterating over the envs list we cannot actually erase elements.
  // Instead we will simply replace them with 'nullptr' and skip them manually.
  auto it = std::find(envs.begin(), envs.end(), env);
  if (it != envs.end()) {
    *it = nullptr;
    for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
         i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
         ++i) {
      RecalculateGlobalEventMask(static_cast<ArtJvmtiEvent>(i));
    }
  }
}

static bool IsThreadControllable(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kVmInit:
    case ArtJvmtiEvent::kVmStart:
    case ArtJvmtiEvent::kVmDeath:
    case ArtJvmtiEvent::kThreadStart:
    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
    case ArtJvmtiEvent::kDynamicCodeGenerated:
    case ArtJvmtiEvent::kDataDumpRequest:
      return false;

    default:
      return true;
  }
}

template<typename Type>
static Type AddLocalRef(art::JNIEnvExt* e, art::mirror::Object* obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
}

template<ArtJvmtiEvent kEvent, typename ...Args>
static void RunEventCallback(EventHandler* handler,
                             art::Thread* self,
                             art::JNIEnvExt* jnienv,
                             Args... args)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
  handler->DispatchEvent<kEvent>(self,
                                 static_cast<JNIEnv*>(jnienv),
                                 thread_jni.get(),
                                 args...);
}
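
// Example instantiation (mirroring the listeners below): the monitor-wait hook calls
//   RunEventCallback<ArtJvmtiEvent::kMonitorWait>(handler_, self, jnienv, mon.get(),
//                                                 static_cast<jlong>(timeout));
// which attaches the current thread's peer as the jthread argument and forwards the remaining
// arguments to EventHandler::DispatchEvent.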

class JvmtiAllocationListener : public art::gc::AllocationListener {
 public:
  explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}

  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK_EQ(self, art::Thread::Current());

    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
      art::StackHandleScope<1> hs(self);
      auto h = hs.NewHandleWrapper(obj);
      // jvmtiEventVMObjectAlloc parameters:
      //      jvmtiEnv *jvmti_env,
      //      JNIEnv* jni_env,
      //      jthread thread,
      //      jobject object,
      //      jclass object_klass,
      //      jlong size
      art::JNIEnvExt* jni_env = self->GetJniEnv();
      ScopedLocalRef<jobject> object(
          jni_env, jni_env->AddLocalReference<jobject>(*obj));
      ScopedLocalRef<jclass> klass(
          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));

      RunEventCallback<ArtJvmtiEvent::kVmObjectAlloc>(handler_,
                                                      self,
                                                      jni_env,
                                                      object.get(),
                                                      klass.get(),
                                                      static_cast<jlong>(byte_count));
    }
  }

 private:
  EventHandler* handler_;
};
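
// For reference, the agent-side callback this eventually reaches has the standard JVMTI
// jvmtiEventVMObjectAlloc shape, e.g. (example only):
//
//   void JNICALL VMObjectAlloc(jvmtiEnv* jvmti_env, JNIEnv* jni_env, jthread thread,
//                              jobject object, jclass object_klass, jlong size) {
//     // Inspect the freshly allocated object here.
//   }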

static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
  // now, do a workaround: (possibly) acquire and release.
  art::ScopedObjectAccess soa(art::Thread::Current());
  art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
  if (enable) {
    art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
  } else {
    art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
  }
}

class JvmtiMonitorListener : public art::MonitorCallback {
 public:
  explicit JvmtiMonitorListener(EventHandler* handler) : handler_(handler) {}

  void MonitorContendedLocking(art::Monitor* m)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEnter)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEnter>(
          handler_,
          self,
          jnienv,
          mon.get());
    }
  }

  void MonitorContendedLocked(art::Monitor* m)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEntered)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEntered>(
          handler_,
          self,
          jnienv,
          mon.get());
    }
  }

  void ObjectWaitStart(art::Handle<art::mirror::Object> obj, int64_t timeout)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, obj.Get()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
          handler_,
          self,
          jnienv,
          mon.get(),
          static_cast<jlong>(timeout));
    }
  }


  // Our interpretation of the spec is that JVMTI_EVENT_MONITOR_WAITED will be sent immediately
  // after a thread has woken up from a sleep caused by a call to Object#wait. If the thread never
  // goes to sleep (because it does not hold the lock, was given bad arguments, or had an exception
  // propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
  //
  // This does not fully match the RI semantics. Specifically, we will not send the
  // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would: when an exception was
  // thrown from the JVMTI_EVENT_MONITOR_WAIT event handler but the wait call was otherwise fine.
  // In that case the RI would send this event and return without going to sleep.
  //
  // See b/65558434 for more discussion.
  void MonitorWaitFinished(art::Monitor* m, bool timeout)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
          handler_,
          self,
          jnienv,
          mon.get(),
          static_cast<jboolean>(timeout));
    }
  }

 private:
  EventHandler* handler_;
};
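
// For reference, MonitorWaitFinished above feeds the standard JVMTI MonitorWaited callback on the
// agent side, e.g. (example only):
//
//   void JNICALL MonitorWaited(jvmtiEnv* jvmti_env, JNIEnv* jni_env, jthread thread,
//                              jobject object, jboolean timed_out) {
//     // timed_out mirrors the 'timeout' flag passed to RunEventCallback above.
//   }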

static void SetupMonitorListener(art::MonitorCallback* listener, bool enable) {
  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
  // now, do a workaround: (possibly) acquire and release.
  art::ScopedObjectAccess soa(art::Thread::Current());
  if (enable) {
    art::Runtime::Current()->GetRuntimeCallbacks()->AddMonitorCallback(listener);
  } else {
    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveMonitorCallback(listener);
  }
}

// Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_FINISH.
class JvmtiGcPauseListener : public art::gc::GcPauseListener {
 public:
  explicit JvmtiGcPauseListener(EventHandler* handler)
      : handler_(handler),
        start_enabled_(false),
        finish_enabled_(false) {}

  void StartPause() OVERRIDE {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(nullptr);
  }

  void EndPause() OVERRIDE {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(nullptr);
  }

  bool IsEnabled() {
    return start_enabled_ || finish_enabled_;
  }

  void SetStartEnabled(bool e) {
    start_enabled_ = e;
  }

  void SetFinishEnabled(bool e) {
    finish_enabled_ = e;
  }

 private:
  EventHandler* handler_;
  bool start_enabled_;
  bool finish_enabled_;
};

static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
  bool old_state = listener->IsEnabled();

  if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
    listener->SetStartEnabled(enable);
  } else {
    listener->SetFinishEnabled(enable);
  }

  bool new_state = listener->IsEnabled();

  if (old_state != new_state) {
    if (new_state) {
      art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
    } else {
      art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
    }
  }
}

class JvmtiMethodTraceListener FINAL : public art::instrumentation::InstrumentationListener {
 public:
  explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}

  // Call-back for when a method is entered.
  void MethodEntered(art::Thread* self,
                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                     art::ArtMethod* method,
                     uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      RunEventCallback<ArtJvmtiEvent::kMethodEntry>(event_handler_,
                                                    self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method));
    }
  }

  // Call-back for when a method is exited with a reference return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    art::Handle<art::mirror::Object> return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_EQ(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
          << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
      val.l = return_jobj.get();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is exited with a non-reference (primitive or void) return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    const art::JValue& return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_NE(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
          << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // A 64-bit integer is the largest value in the union so we should be fine simply copying it
      // into the union.
      val.j = return_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::StackHandleScope<1> hs(self);
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }

  // Call-back for when the dex pc moves in a method.
  void DexPcMoved(art::Thread* self,
                  art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                  art::ArtMethod* method,
                  uint32_t new_dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    DCHECK(!method->IsRuntimeMethod());
    // Default methods might be copied to multiple classes. We need to get the canonical version of
    // this method so that we can check for breakpoints correctly.
    // TODO We should maybe do this on other events to ensure that we are consistent WRT default
    // methods. This could interact with obsolete methods if we ever let interface redefinition
    // happen though.
    method = method->GetCanonicalMethod();
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jmethodID jmethod = art::jni::EncodeArtMethod(method);
    jlocation location = static_cast<jlocation>(new_dex_pc);
    // Step event is reported first according to the spec.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
      RunEventCallback<ArtJvmtiEvent::kSingleStep>(event_handler_, self, jnienv, jmethod, location);
    }
    // Next we do the Breakpoint events. The dispatch code will filter the individual events down
    // to the envs that actually have a breakpoint set at this location.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
      RunEventCallback<ArtJvmtiEvent::kBreakpoint>(event_handler_, self, jnienv, jmethod, location);
    }
  }

  // Call-back for when we read from a field.
  void FieldRead(art::Thread* self,
                 art::Handle<art::mirror::Object> this_object,
                 art::ArtMethod* method,
                 uint32_t dex_pc,
                 art::ArtField* field)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      RunEventCallback<ArtJvmtiEvent::kFieldAccess>(event_handler_,
                                                    self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method),
                                                    static_cast<jlocation>(dex_pc),
                                                    static_cast<jclass>(fklass.get()),
                                                    this_ref.get(),
                                                    art::jni::EncodeArtField(field));
    }
  }

  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    art::Handle<art::mirror::Object> new_val)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
      jvalue val;
      val.l = fval.get();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),
          art::jni::EncodeArtField(field),
          'L',  // type_char
          val);
    }
  }

  // Call-back for when we write a primitive value into a field.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    const art::JValue& field_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
      jvalue val;
      // A 64-bit integer is the largest value in the union so we should be fine simply copying it
      // into the union.
      val.j = field_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),  // NB: static field modifications are
                                                         // given the class as this_object for
                                                         // some reason.
          art::jni::EncodeArtField(field),
          type_char,
          val);
    }
  }

  void WatchedFramePop(art::Thread* self, const art::ShadowFrame& frame)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jboolean is_exception_pending = self->IsExceptionPending();
    RunEventCallback<ArtJvmtiEvent::kFramePop>(
        event_handler_,
        self,
        jnienv,
        art::jni::EncodeArtMethod(frame.GetMethod()),
        is_exception_pending,
        &frame);
  }

  static void FindCatchMethodsFromThrow(art::Thread* self,
                                        art::Handle<art::mirror::Throwable> exception,
                                        /*out*/ art::ArtMethod** out_method,
                                        /*out*/ uint32_t* dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Finds the location where this exception will most likely be caught. We ignore intervening
    // native frames (which could catch the exception) and return the closest java frame with a
    // compatible catch statement.
    class CatchLocationFinder FINAL : public art::StackVisitor {
     public:
      CatchLocationFinder(art::Thread* target,
                          art::Handle<art::mirror::Class> exception_class,
                          art::Context* context,
                          /*out*/ art::ArtMethod** out_catch_method,
                          /*out*/ uint32_t* out_catch_pc)
          REQUIRES_SHARED(art::Locks::mutator_lock_)
        : StackVisitor(target, context, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
          exception_class_(exception_class),
          catch_method_ptr_(out_catch_method),
          catch_dex_pc_ptr_(out_catch_pc) {}

      bool VisitFrame() OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
        art::ArtMethod* method = GetMethod();
        DCHECK(method != nullptr);
        if (method->IsRuntimeMethod()) {
          return true;
        }

        if (!method->IsNative()) {
          uint32_t cur_dex_pc = GetDexPc();
          if (cur_dex_pc == art::dex::kDexNoIndex) {
            // This frame looks opaque. Just keep on going.
            return true;
          }
          bool has_no_move_exception = false;
          uint32_t found_dex_pc = method->FindCatchBlock(
              exception_class_, cur_dex_pc, &has_no_move_exception);
          if (found_dex_pc != art::dex::kDexNoIndex) {
            // We found the catch. Store the result and return.
            *catch_method_ptr_ = method;
            *catch_dex_pc_ptr_ = found_dex_pc;
            return false;
          }
        }
        return true;
      }

     private:
      art::Handle<art::mirror::Class> exception_class_;
      art::ArtMethod** catch_method_ptr_;
      uint32_t* catch_dex_pc_ptr_;

      DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
    };

    art::StackHandleScope<1> hs(self);
    *out_method = nullptr;
    *dex_pc = 0;
    std::unique_ptr<art::Context> context(art::Context::Create());

    CatchLocationFinder clf(self,
                            hs.NewHandle(exception->GetClass()),
                            context.get(),
                            /*out*/ out_method,
                            /*out*/ dex_pc);
    clf.WalkStack(/* include_transitions */ false);
  }

  // Call-back when an exception is thrown.
  void ExceptionThrown(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    DCHECK(self->IsExceptionThrownByCurrentMethod(exception_object.Get()));
    // The instrumentation events get rid of this for us.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kException)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtMethod* catch_method;
      uint32_t catch_pc;
      FindCatchMethodsFromThrow(self, exception_object, &catch_method, &catch_pc);
      uint32_t dex_pc = 0;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended */ true,
                                                      /* abort_on_error */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kException>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get(),
          art::jni::EncodeArtMethod(catch_method),
          static_cast<jlocation>(catch_pc));
    }
    return;
  }

  // Call-back when an exception is handled.
  void ExceptionHandled(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    // Since the exception has already been handled there shouldn't be one pending.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kExceptionCatch)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      uint32_t dex_pc;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended */ true,
                                                      /* abort_on_error */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kExceptionCatch>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get());
    }
    return;
  }

  // Call-back for when we execute a branch.
  void Branch(art::Thread* self ATTRIBUTE_UNUSED,
              art::ArtMethod* method ATTRIBUTE_UNUSED,
              uint32_t dex_pc ATTRIBUTE_UNUSED,
              int32_t dex_pc_offset ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

  // Call-back for when we get an invokevirtual or an invokeinterface.
  void InvokeVirtualOrInterface(art::Thread* self ATTRIBUTE_UNUSED,
                                art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                                art::ArtMethod* caller ATTRIBUTE_UNUSED,
                                uint32_t dex_pc ATTRIBUTE_UNUSED,
                                art::ArtMethod* callee ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

 private:
  EventHandler* const event_handler_;
};

static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kMethodEntry:
      return art::instrumentation::Instrumentation::kMethodEntered;
    case ArtJvmtiEvent::kMethodExit:
      return art::instrumentation::Instrumentation::kMethodExited |
             art::instrumentation::Instrumentation::kMethodUnwind;
    case ArtJvmtiEvent::kFieldModification:
      return art::instrumentation::Instrumentation::kFieldWritten;
    case ArtJvmtiEvent::kFieldAccess:
      return art::instrumentation::Instrumentation::kFieldRead;
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep:
      return art::instrumentation::Instrumentation::kDexPcMoved;
    case ArtJvmtiEvent::kFramePop:
      return art::instrumentation::Instrumentation::kWatchedFramePop;
    case ArtJvmtiEvent::kException:
      return art::instrumentation::Instrumentation::kExceptionThrown;
    case ArtJvmtiEvent::kExceptionCatch:
      return art::instrumentation::Instrumentation::kExceptionHandled;
    default:
      LOG(FATAL) << "Unknown event " << static_cast<uint32_t>(event);
      return 0;
  }
}
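
// Note that kBreakpoint and kSingleStep intentionally map to the same underlying instrumentation
// event (kDexPcMoved); HandleEventType() below only installs or removes the shared listener when
// the other of the two is not enabled anywhere.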

static void SetupTraceListener(JvmtiMethodTraceListener* listener,
                               ArtJvmtiEvent event,
                               bool enable) {
  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
  uint32_t new_events = GetInstrumentationEventsFor(event);
  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
  art::gc::ScopedGCCriticalSection gcs(art::Thread::Current(),
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing installation");
  if (enable) {
    // TODO Depending on the features being used we should be able to avoid deoptimizing everything
    // like we do here.
    if (!instr->AreAllMethodsDeoptimized()) {
      instr->EnableMethodTracing("jvmti-tracing", /*needs_interpreter*/true);
    }
    instr->AddListener(listener, new_events);
  } else {
    instr->RemoveListener(listener, new_events);
  }
}

// Makes sure that all compiled methods are AsyncDeoptimizable so we can deoptimize (and force to
// the switch interpreter) when we try to get or set a local variable.
void EventHandler::HandleLocalAccessCapabilityAdded() {
  class UpdateEntryPointsClassVisitor : public art::ClassVisitor {
   public:
    explicit UpdateEntryPointsClassVisitor(art::Runtime* runtime)
        : runtime_(runtime) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES(art::Locks::mutator_lock_) {
      if (!klass->IsLoaded()) {
        // Skip classes that aren't loaded since they might not have fully allocated and initialized
        // their methods. Furthermore, since the jvmti-plugin must have been loaded by this point
        // these methods will definitely be using debuggable code.
        return true;
      }
      for (auto& m : klass->GetMethods(art::kRuntimePointerSize)) {
        const void* code = m.GetEntryPointFromQuickCompiledCode();
        if (m.IsNative() || m.IsProxyMethod()) {
          continue;
        } else if (!runtime_->GetClassLinker()->IsQuickToInterpreterBridge(code) &&
                   !runtime_->IsAsyncDeoptimizeable(reinterpret_cast<uintptr_t>(code))) {
          runtime_->GetInstrumentation()->UpdateMethodsCodeToInterpreterEntryPoint(&m);
        }
      }
      return true;
    }

   private:
    art::Runtime* runtime_;
  };
  art::ScopedObjectAccess soa(art::Thread::Current());
  UpdateEntryPointsClassVisitor visitor(art::Runtime::Current());
  art::Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
}

bool EventHandler::OtherMonitorEventsEnabledAnywhere(ArtJvmtiEvent event) {
  std::array<ArtJvmtiEvent, 4> events {
    {
      ArtJvmtiEvent::kMonitorContendedEnter,
      ArtJvmtiEvent::kMonitorContendedEntered,
      ArtJvmtiEvent::kMonitorWait,
      ArtJvmtiEvent::kMonitorWaited
    }
  };
  for (ArtJvmtiEvent e : events) {
    if (e != event && IsEventEnabledAnywhere(e)) {
      return true;
    }
  }
  return false;
}

// Handle special work for the given event type, if necessary.
void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
  switch (event) {
    case ArtJvmtiEvent::kVmObjectAlloc:
      SetupObjectAllocationTracking(alloc_listener_.get(), enable);
      return;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
      return;

    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep: {
      ArtJvmtiEvent other = (event == ArtJvmtiEvent::kBreakpoint) ? ArtJvmtiEvent::kSingleStep
                                                                  : ArtJvmtiEvent::kBreakpoint;
      // We only need to do anything if there isn't already a listener installed (and held on to)
      // by the other jvmti event that uses DexPcMoved.
      if (!IsEventEnabledAnywhere(other)) {
        SetupTraceListener(method_trace_listener_.get(), event, enable);
      }
      return;
    }
    // FramePop can never be disabled once it's been turned on since we would either need to deal
    // with dangling pointers or have missed events.
    case ArtJvmtiEvent::kFramePop:
      if (!enable || frame_pop_enabled) {
        // Either we are disabling (a no-op, see above) or the listener is already installed.
        break;
      } else {
        SetupTraceListener(method_trace_listener_.get(), event, enable);
        frame_pop_enabled = true;
        break;
      }
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kFieldModification:
    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
      SetupTraceListener(method_trace_listener_.get(), event, enable);
      return;
    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      if (!OtherMonitorEventsEnabledAnywhere(event)) {
        SetupMonitorListener(monitor_listener_.get(), enable);
      }
      return;
    default:
      break;
  }
}

// Checks to see if the env has the capabilities associated with the given event.
static bool HasAssociatedCapability(ArtJvmTiEnv* env,
                                    ArtJvmtiEvent event) {
  jvmtiCapabilities caps = env->capabilities;
  switch (event) {
    case ArtJvmtiEvent::kBreakpoint:
      return caps.can_generate_breakpoint_events == 1;

    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
      return caps.can_generate_compiled_method_load_events == 1;

    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
      return caps.can_generate_exception_events == 1;

    case ArtJvmtiEvent::kFieldAccess:
      return caps.can_generate_field_access_events == 1;

    case ArtJvmtiEvent::kFieldModification:
      return caps.can_generate_field_modification_events == 1;

    case ArtJvmtiEvent::kFramePop:
      return caps.can_generate_frame_pop_events == 1;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      return caps.can_generate_garbage_collection_events == 1;

    case ArtJvmtiEvent::kMethodEntry:
      return caps.can_generate_method_entry_events == 1;

    case ArtJvmtiEvent::kMethodExit:
      return caps.can_generate_method_exit_events == 1;

    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      return caps.can_generate_monitor_events == 1;

    case ArtJvmtiEvent::kNativeMethodBind:
      return caps.can_generate_native_method_bind_events == 1;

    case ArtJvmtiEvent::kObjectFree:
      return caps.can_generate_object_free_events == 1;

    case ArtJvmtiEvent::kSingleStep:
      return caps.can_generate_single_step_events == 1;

    case ArtJvmtiEvent::kVmObjectAlloc:
      return caps.can_generate_vm_object_alloc_events == 1;

    default:
      return true;
  }
}

jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
                                  art::Thread* thread,
                                  ArtJvmtiEvent event,
                                  jvmtiEventMode mode) {
  if (thread != nullptr) {
    art::ThreadState state = thread->GetState();
    if (state == art::ThreadState::kStarting ||
        state == art::ThreadState::kTerminated ||
        thread->IsStillStarting()) {
      return ERR(THREAD_NOT_ALIVE);
    }
    if (!IsThreadControllable(event)) {
      return ERR(ILLEGAL_ARGUMENT);
    }
  }

  if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
    return ERR(ILLEGAL_ARGUMENT);
  }

  if (!EventMask::EventIsInRange(event)) {
    return ERR(INVALID_EVENT_TYPE);
  }

  if (!HasAssociatedCapability(env, event)) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  bool old_state = global_mask.Test(event);

  if (mode == JVMTI_ENABLE) {
    env->event_masks.EnableEvent(thread, event);
    global_mask.Set(event);
  } else {
    DCHECK_EQ(mode, JVMTI_DISABLE);

    env->event_masks.DisableEvent(thread, event);
    RecalculateGlobalEventMask(event);
  }

  bool new_state = global_mask.Test(event);

  // Handle any special work required for the event type.
  if (new_state != old_state) {
    HandleEventType(event, mode == JVMTI_ENABLE);
  }

  return ERR(NONE);
}
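
// For reference, this is the backend of the standard JVMTI SetEventNotificationMode entry point;
// an agent would reach it with something like (example only):
//
//   jvmtiError err = jvmti_env->SetEventNotificationMode(JVMTI_ENABLE,
//                                                        JVMTI_EVENT_VM_OBJECT_ALLOC,
//                                                        nullptr /* all threads */);
//
// A nullptr thread toggles the global mask; passing a live jthread only touches that thread's
// mask, subject to the IsThreadControllable() check above.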

void EventHandler::Shutdown() {
  // Need to remove the method_trace_listener_ if it's there.
  art::Thread* self = art::Thread::Current();
  art::gc::ScopedGCCriticalSection gcs(self,
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
  // Just remove every possible event.
  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
}

EventHandler::EventHandler() {
  alloc_listener_.reset(new JvmtiAllocationListener(this));
  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
  monitor_listener_.reset(new JvmtiMonitorListener(this));
}

EventHandler::~EventHandler() {
}

}  // namespace openjdkjvmti
