events.cc revision 06c42a571358b5e5adb69104b183af8f32f4c07d
/* Copyright (C) 2016 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h.  The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "events-inl.h"

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "gc/allocation_listener.h"
#include "gc/gc_pause_listener.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "handle_scope-inl.h"
#include "instrumentation.h"
#include "jni_env_ext-inl.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "nativehelper/ScopedLocalRef.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"
#include "ti_phase.h"

namespace openjdkjvmti {

bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
  return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
}

EventMask& EventMasks::GetEventMask(art::Thread* thread) {
  if (thread == nullptr) {
    return global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return pair.second;
    }
  }

  // TODO: Remove old UniqueThread with the same pointer, if exists.

  thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
  return thread_event_masks.back().second;
}

EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
  if (thread == nullptr) {
    return &global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return &pair.second;
    }
  }

  return nullptr;
}


void EventMasks::EnableEvent(art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event);
  if (thread != nullptr) {
    unioned_thread_event_mask.Set(event, true);
  }
}

void EventMasks::DisableEvent(art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event, false);
  if (thread != nullptr) {
    // Regenerate union for the event.
    bool union_value = false;
    for (auto& pair : thread_event_masks) {
      union_value |= pair.second.Test(event);
      if (union_value) {
        break;
      }
    }
    unioned_thread_event_mask.Set(event, union_value);
  }
}
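
// Illustrative sketch (not part of the original file): enabling an event only for a specific
// thread updates that thread's mask and the cached union, while the global mask stays clear.
// Assuming `masks` is an EventMasks instance and `self` is a live art::Thread*, the expected
// observable state is roughly:
//
//   masks.EnableEvent(self, ArtJvmtiEvent::kBreakpoint);
//   masks.global_event_mask.Test(ArtJvmtiEvent::kBreakpoint);          // false
//   masks.unioned_thread_event_mask.Test(ArtJvmtiEvent::kBreakpoint);  // true
//   masks.IsEnabledAnywhere(ArtJvmtiEvent::kBreakpoint);               // true
//   masks.DisableEvent(self, ArtJvmtiEvent::kBreakpoint);
//   masks.IsEnabledAnywhere(ArtJvmtiEvent::kBreakpoint);               // false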

void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
  if (UNLIKELY(caps.can_retransform_classes == 1)) {
    // If we are giving this env the retransform classes cap, we need to switch all events from
    // NonTransformable to Transformable and vice versa.
    ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
                                         : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
    ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
                                      : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
    if (global_event_mask.Test(to_remove)) {
      CHECK(!global_event_mask.Test(to_add));
      global_event_mask.Set(to_remove, false);
      global_event_mask.Set(to_add, true);
    }

    if (unioned_thread_event_mask.Test(to_remove)) {
      CHECK(!unioned_thread_event_mask.Test(to_add));
      unioned_thread_event_mask.Set(to_remove, false);
      unioned_thread_event_mask.Set(to_add, true);
    }
    for (auto& thread_mask : thread_event_masks) {
      if (thread_mask.second.Test(to_remove)) {
        CHECK(!thread_mask.second.Test(to_add));
        thread_mask.second.Set(to_remove, false);
        thread_mask.second.Set(to_add, true);
      }
    }
  }
}

void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
  // Since we never shrink this array we might as well try to fill gaps.
  auto it = std::find(envs.begin(), envs.end(), nullptr);
  if (it != envs.end()) {
    *it = env;
  } else {
    envs.push_back(env);
  }
}

void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
  // Since we might be currently iterating over the envs list we cannot actually erase elements.
  // Instead we will simply replace them with 'nullptr' and skip them manually.
  auto it = std::find(envs.begin(), envs.end(), env);
  if (it != envs.end()) {
    *it = nullptr;
    for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
         i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
         ++i) {
      RecalculateGlobalEventMask(static_cast<ArtJvmtiEvent>(i));
    }
  }
}

static bool IsThreadControllable(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kVmInit:
    case ArtJvmtiEvent::kVmStart:
    case ArtJvmtiEvent::kVmDeath:
    case ArtJvmtiEvent::kThreadStart:
    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
    case ArtJvmtiEvent::kDynamicCodeGenerated:
    case ArtJvmtiEvent::kDataDumpRequest:
      return false;

    default:
      return true;
  }
}
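
// Illustrative note: the events above cannot be controlled at thread granularity, so a per-thread
// request such as the hypothetical agent call below is rejected by SetEvent with
// JVMTI_ERROR_ILLEGAL_ARGUMENT (`jvmti` and `some_thread` are assumed agent-side values):
//
//   jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_VM_INIT, some_thread);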

class JvmtiAllocationListener : public art::gc::AllocationListener {
 public:
  explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}

  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK_EQ(self, art::Thread::Current());

    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
      art::StackHandleScope<1> hs(self);
      auto h = hs.NewHandleWrapper(obj);
      // jvmtiEventVMObjectAlloc parameters:
      //      jvmtiEnv *jvmti_env,
      //      JNIEnv* jni_env,
      //      jthread thread,
      //      jobject object,
      //      jclass object_klass,
      //      jlong size
      art::JNIEnvExt* jni_env = self->GetJniEnv();

      jthread thread_peer;
      if (self->IsStillStarting()) {
        thread_peer = nullptr;
      } else {
        thread_peer = jni_env->AddLocalReference<jthread>(self->GetPeer());
      }

      ScopedLocalRef<jthread> thread(jni_env, thread_peer);
      ScopedLocalRef<jobject> object(
          jni_env, jni_env->AddLocalReference<jobject>(*obj));
      ScopedLocalRef<jclass> klass(
          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));

      handler_->DispatchEvent<ArtJvmtiEvent::kVmObjectAlloc>(self,
                                                             reinterpret_cast<JNIEnv*>(jni_env),
                                                             thread.get(),
                                                             object.get(),
                                                             klass.get(),
                                                             static_cast<jlong>(byte_count));
    }
  }

 private:
  EventHandler* handler_;
};
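
// For reference, the agent-side callback that this dispatch eventually reaches has the standard
// JVMTI prototype:
//
//   void JNICALL VMObjectAlloc(jvmtiEnv* jvmti_env,
//                              JNIEnv* jni_env,
//                              jthread thread,
//                              jobject object,
//                              jclass object_klass,
//                              jlong size);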

static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
  // now, do a workaround: (possibly) acquire and release.
  art::ScopedObjectAccess soa(art::Thread::Current());
  art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
  if (enable) {
    art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
  } else {
    art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
  }
}

// Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_FINISH.
class JvmtiGcPauseListener : public art::gc::GcPauseListener {
 public:
  explicit JvmtiGcPauseListener(EventHandler* handler)
      : handler_(handler),
        start_enabled_(false),
        finish_enabled_(false) {}

  void StartPause() OVERRIDE {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(nullptr);
  }

  void EndPause() OVERRIDE {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(nullptr);
  }

  bool IsEnabled() {
    return start_enabled_ || finish_enabled_;
  }

  void SetStartEnabled(bool e) {
    start_enabled_ = e;
  }

  void SetFinishEnabled(bool e) {
    finish_enabled_ = e;
  }

 private:
  EventHandler* handler_;
  bool start_enabled_;
  bool finish_enabled_;
};
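
// For reference, the matching agent-side callbacks carry no thread or JNI arguments (prototypes
// from the JVMTI spec):
//
//   void JNICALL GarbageCollectionStart(jvmtiEnv* jvmti_env);
//   void JNICALL GarbageCollectionFinish(jvmtiEnv* jvmti_env);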

static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
  bool old_state = listener->IsEnabled();

  if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
    listener->SetStartEnabled(enable);
  } else {
    listener->SetFinishEnabled(enable);
  }

  bool new_state = listener->IsEnabled();

  if (old_state != new_state) {
    if (new_state) {
      art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
    } else {
      art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
    }
  }
}

template<typename Type>
static Type AddLocalRef(art::JNIEnvExt* e, art::mirror::Object* obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
}

class JvmtiMethodTraceListener FINAL : public art::instrumentation::InstrumentationListener {
 public:
  explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}

  template<ArtJvmtiEvent kEvent, typename ...Args>
  void RunEventCallback(art::Thread* self, art::JNIEnvExt* jnienv, Args... args)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
    // Just give the event a good sized JNI frame. 100 should be fine.
    jnienv->PushFrame(100);
    {
      // Need to do trampoline! :(
      art::ScopedThreadSuspension sts(self, art::ThreadState::kNative);
      event_handler_->DispatchEvent<kEvent>(self,
                                            static_cast<JNIEnv*>(jnienv),
                                            thread_jni.get(),
                                            args...);
    }
    jnienv->PopFrame();
  }
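
  // Note: the PushFrame(100)/PopFrame() pair above brackets the local references created while
  // running the agent callback (similar in spirit to JNI PushLocalFrame/PopLocalFrame), and the
  // transition to kNative mirrors what a regular JNI downcall does before handing control to
  // agent code.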

  // Call-back for when a method is entered.
  void MethodEntered(art::Thread* self,
                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                     art::ArtMethod* method,
                     uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      RunEventCallback<ArtJvmtiEvent::kMethodEntry>(self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method));
    }
  }

  // Call-back for when a method is exited with a reference return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    art::Handle<art::mirror::Object> return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_EQ(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
          << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
      val.l = return_jobj.get();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is exited with a non-reference return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    const art::JValue& return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_NE(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
          << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // A 64-bit integer is the largest member of the jvalue union, so simply copying the 64-bit
      // value covers every primitive return type.
      val.j = return_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::StackHandleScope<1> hs(self);
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }
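
  // For reference, the two MethodExited overloads above and MethodUnwind all funnel into the
  // single MethodExit callback from the JVMTI spec:
  //
  //   void JNICALL MethodExit(jvmtiEnv* jvmti_env,
  //                           JNIEnv* jni_env,
  //                           jthread thread,
  //                           jmethodID method,
  //                           jboolean was_popped_by_exception,
  //                           jvalue return_value);
  //
  // with was_popped_by_exception set to JNI_TRUE only on the unwind path.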

  // Call-back for when the dex pc moves in a method.
  void DexPcMoved(art::Thread* self,
                  art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                  art::ArtMethod* method,
                  uint32_t new_dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    DCHECK(!method->IsRuntimeMethod());
    // Default methods might be copied to multiple classes. We need to get the canonical version of
    // this method so that we can check for breakpoints correctly.
    // TODO We should maybe do this on other events to ensure that we are consistent WRT default
    // methods. This could interact with obsolete methods if we ever let interface redefinition
    // happen though.
    method = method->GetCanonicalMethod();
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jmethodID jmethod = art::jni::EncodeArtMethod(method);
    jlocation location = static_cast<jlocation>(new_dex_pc);
    // Step event is reported first according to the spec.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
      RunEventCallback<ArtJvmtiEvent::kSingleStep>(self, jnienv, jmethod, location);
    }
    // Next we do the Breakpoint events. The Dispatch code will filter the individual breakpoints.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
      RunEventCallback<ArtJvmtiEvent::kBreakpoint>(self, jnienv, jmethod, location);
    }
  }
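
  // For reference, the two events reported here share the same callback shape in the JVMTI spec:
  //
  //   void JNICALL SingleStep(jvmtiEnv* jvmti_env, JNIEnv* jni_env, jthread thread,
  //                           jmethodID method, jlocation location);
  //   void JNICALL Breakpoint(jvmtiEnv* jvmti_env, JNIEnv* jni_env, jthread thread,
  //                           jmethodID method, jlocation location);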

  // Call-back for when we read from a field.
  void FieldRead(art::Thread* self,
                 art::Handle<art::mirror::Object> this_object,
                 art::ArtMethod* method,
                 uint32_t dex_pc,
                 art::ArtField* field)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      RunEventCallback<ArtJvmtiEvent::kFieldAccess>(self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method),
                                                    static_cast<jlocation>(dex_pc),
                                                    static_cast<jclass>(fklass.get()),
                                                    this_ref.get(),
                                                    art::jni::EncodeArtField(field));
    }
  }

  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    art::Handle<art::mirror::Object> new_val)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
      jvalue val;
      val.l = fval.get();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),
          art::jni::EncodeArtField(field),
          'L',  // type_char
          val);
    }
  }

  // Call-back for when we write into a field.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    const art::JValue& field_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
      jvalue val;
      // A 64-bit integer is the largest member of the jvalue union, so simply copying the 64-bit
      // value covers every primitive type.
      val.j = field_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),  // NB: for static field writes the
                                                         // instrumentation passes the declaring
                                                         // class as this_object, so pass nullptr.
          art::jni::EncodeArtField(field),
          type_char,
          val);
    }
  }
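
  // For reference, both FieldWritten overloads map onto the single FieldModification callback
  // from the JVMTI spec, with signature_type distinguishing reference ('L') from primitive
  // writes:
  //
  //   void JNICALL FieldModification(jvmtiEnv* jvmti_env, JNIEnv* jni_env, jthread thread,
  //                                  jmethodID method, jlocation location, jclass field_klass,
  //                                  jobject object, jfieldID field, char signature_type,
  //                                  jvalue new_value);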

  // Call-back when an exception is caught.
  void ExceptionCaught(art::Thread* self ATTRIBUTE_UNUSED,
                       art::Handle<art::mirror::Throwable> exception_object ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

  // Call-back for when we execute a branch.
  void Branch(art::Thread* self ATTRIBUTE_UNUSED,
              art::ArtMethod* method ATTRIBUTE_UNUSED,
              uint32_t dex_pc ATTRIBUTE_UNUSED,
              int32_t dex_pc_offset ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

  // Call-back for when we get an invokevirtual or an invokeinterface.
  void InvokeVirtualOrInterface(art::Thread* self ATTRIBUTE_UNUSED,
                                art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                                art::ArtMethod* caller ATTRIBUTE_UNUSED,
                                uint32_t dex_pc ATTRIBUTE_UNUSED,
                                art::ArtMethod* callee ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

 private:
  EventHandler* const event_handler_;
};

static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kMethodEntry:
      return art::instrumentation::Instrumentation::kMethodEntered;
    case ArtJvmtiEvent::kMethodExit:
      return art::instrumentation::Instrumentation::kMethodExited |
             art::instrumentation::Instrumentation::kMethodUnwind;
    case ArtJvmtiEvent::kFieldModification:
      return art::instrumentation::Instrumentation::kFieldWritten;
    case ArtJvmtiEvent::kFieldAccess:
      return art::instrumentation::Instrumentation::kFieldRead;
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep:
      return art::instrumentation::Instrumentation::kDexPcMoved;
    default:
      LOG(FATAL) << "Unknown event ";
      return 0;
  }
}

static void SetupTraceListener(JvmtiMethodTraceListener* listener,
                               ArtJvmtiEvent event,
                               bool enable) {
  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
  uint32_t new_events = GetInstrumentationEventsFor(event);
  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
  art::gc::ScopedGCCriticalSection gcs(art::Thread::Current(),
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing installation");
  if (enable) {
    // TODO Depending on the features being used we should be able to avoid deoptimizing everything
    // like we do here.
    if (!instr->AreAllMethodsDeoptimized()) {
      instr->EnableMethodTracing("jvmti-tracing", /*needs_interpreter*/true);
    }
    instr->AddListener(listener, new_events);
  } else {
    instr->RemoveListener(listener, new_events);
  }
}
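
// Note: as elsewhere in the runtime, the instrumentation change above is performed inside a GC
// critical section and with all other threads suspended, so that installing or removing the
// listener cannot race with a concurrent collection or with executing Java code.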

void EventHandler::HandleLocalAccessCapabilityAdded() {
  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
  art::gc::ScopedGCCriticalSection gcs(art::Thread::Current(),
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("Deoptimize everything for local variable access", true);
  // TODO This should be disabled when there are no environments using it.
  if (!instr->CanDeoptimize()) {
    instr->EnableDeoptimization();
  }
  // TODO We should be able to support can_access_local_variables without this.
  instr->DeoptimizeEverything("jvmti-local-variable-access");
}

// Handle special work for the given event type, if necessary.
void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
  switch (event) {
    case ArtJvmtiEvent::kVmObjectAlloc:
      SetupObjectAllocationTracking(alloc_listener_.get(), enable);
      return;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
      return;

    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep: {
      ArtJvmtiEvent other = (event == ArtJvmtiEvent::kBreakpoint) ? ArtJvmtiEvent::kSingleStep
                                                                  : ArtJvmtiEvent::kBreakpoint;
      // We only need to do anything if there isn't already a listener installed (and held on to)
      // by the other jvmti event that uses DexPcMoved.
      if (!IsEventEnabledAnywhere(other)) {
        SetupTraceListener(method_trace_listener_.get(), event, enable);
      }
      return;
    }
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kFieldModification:
      SetupTraceListener(method_trace_listener_.get(), event, enable);
      return;

    default:
      break;
  }
}

// Checks to see if the env has the capabilities associated with the given event.
static bool HasAssociatedCapability(ArtJvmTiEnv* env,
                                    ArtJvmtiEvent event) {
  jvmtiCapabilities caps = env->capabilities;
  switch (event) {
    case ArtJvmtiEvent::kBreakpoint:
      return caps.can_generate_breakpoint_events == 1;

    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
      return caps.can_generate_compiled_method_load_events == 1;

    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
      return caps.can_generate_exception_events == 1;

    case ArtJvmtiEvent::kFieldAccess:
      return caps.can_generate_field_access_events == 1;

    case ArtJvmtiEvent::kFieldModification:
      return caps.can_generate_field_modification_events == 1;

    case ArtJvmtiEvent::kFramePop:
      return caps.can_generate_frame_pop_events == 1;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      return caps.can_generate_garbage_collection_events == 1;

    case ArtJvmtiEvent::kMethodEntry:
      return caps.can_generate_method_entry_events == 1;

    case ArtJvmtiEvent::kMethodExit:
      return caps.can_generate_method_exit_events == 1;

    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      return caps.can_generate_monitor_events == 1;

    case ArtJvmtiEvent::kNativeMethodBind:
      return caps.can_generate_native_method_bind_events == 1;

    case ArtJvmtiEvent::kObjectFree:
      return caps.can_generate_object_free_events == 1;

    case ArtJvmtiEvent::kSingleStep:
      return caps.can_generate_single_step_events == 1;

    case ArtJvmtiEvent::kVmObjectAlloc:
      return caps.can_generate_vm_object_alloc_events == 1;

    default:
      return true;
  }
}
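
// Illustrative agent-side sketch (hypothetical agent code, not part of this file): an event with
// an associated capability must be requested only after that capability has been added, otherwise
// SetEvent below reports JVMTI_ERROR_MUST_POSSESS_CAPABILITY:
//
//   jvmtiCapabilities caps;
//   memset(&caps, 0, sizeof(caps));
//   caps.can_generate_field_access_events = 1;
//   jvmti->AddCapabilities(&caps);
//   jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_FIELD_ACCESS, nullptr);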

jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
                                  art::Thread* thread,
                                  ArtJvmtiEvent event,
                                  jvmtiEventMode mode) {
  if (thread != nullptr) {
    art::ThreadState state = thread->GetState();
    if (state == art::ThreadState::kStarting ||
        state == art::ThreadState::kTerminated ||
        thread->IsStillStarting()) {
      return ERR(THREAD_NOT_ALIVE);
    }
    if (!IsThreadControllable(event)) {
      return ERR(ILLEGAL_ARGUMENT);
    }
  }

  if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
    return ERR(ILLEGAL_ARGUMENT);
  }

  if (!EventMask::EventIsInRange(event)) {
    return ERR(INVALID_EVENT_TYPE);
  }

  if (!HasAssociatedCapability(env, event)) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  bool old_state = global_mask.Test(event);

  if (mode == JVMTI_ENABLE) {
    env->event_masks.EnableEvent(thread, event);
    global_mask.Set(event);
  } else {
    DCHECK_EQ(mode, JVMTI_DISABLE);

    env->event_masks.DisableEvent(thread, event);
    RecalculateGlobalEventMask(event);
  }

  bool new_state = global_mask.Test(event);

  // Handle any special work required for the event type.
  if (new_state != old_state) {
    HandleEventType(event, mode == JVMTI_ENABLE);
  }

  return ERR(NONE);
}

void EventHandler::Shutdown() {
  // Need to remove the method_trace_listener_ if it's there.
  art::Thread* self = art::Thread::Current();
  art::gc::ScopedGCCriticalSection gcs(self,
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
  // Just remove every possible event.
  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
}

EventHandler::EventHandler() {
  alloc_listener_.reset(new JvmtiAllocationListener(this));
  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
}

EventHandler::~EventHandler() {
}

}  // namespace openjdkjvmti