1/*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
#include "ti_heap.h"

#include <unordered_set>

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "jvmti_weak_table-inl.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "primitive.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"
40
41namespace openjdkjvmti {
42
43namespace {
44
// Per-class data cached to avoid recomputing field-index offsets on every visit.
struct IndexCache {
  // The number of interface fields implemented by the class. This is a prefix to all assigned
  // field indices.
  size_t interface_fields;

  // It would be nice to also cache the following, but it is complicated to wire up into the
  // generic visit:
  // The number of fields in interfaces and superclasses. This is the first index assigned to
  // fields of the class.
  // size_t superclass_fields;
};
// Weak table keyed by class objects, so cache entries vanish when a class is collected.
using IndexCachingTable = JvmtiWeakTable<IndexCache>;

static IndexCachingTable gIndexCachingTable;
59
60// Report the contents of a string, if a callback is set.
61jint ReportString(art::ObjPtr<art::mirror::Object> obj,
62                  jvmtiEnv* env,
63                  ObjectTagTable* tag_table,
64                  const jvmtiHeapCallbacks* cb,
65                  const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
66  if (UNLIKELY(cb->string_primitive_value_callback != nullptr) && obj->IsString()) {
67    art::ObjPtr<art::mirror::String> str = obj->AsString();
68    int32_t string_length = str->GetLength();
69    JvmtiUniquePtr<uint16_t[]> data;
70
71    if (string_length > 0) {
72      jvmtiError alloc_error;
73      data = AllocJvmtiUniquePtr<uint16_t[]>(env, string_length, &alloc_error);
74      if (data == nullptr) {
75        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
76        //       back? For now just warn.
77        LOG(WARNING) << "Unable to allocate buffer for string reporting! Silently dropping value."
78                     << " >" << str->ToModifiedUtf8() << "<";
79        return 0;
80      }
81
82      if (str->IsCompressed()) {
83        uint8_t* compressed_data = str->GetValueCompressed();
84        for (int32_t i = 0; i != string_length; ++i) {
85          data[i] = compressed_data[i];
86        }
87      } else {
88        // Can copy directly.
89        memcpy(data.get(), str->GetValue(), string_length * sizeof(uint16_t));
90      }
91    }
92
93    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
94    jlong string_tag = tag_table->GetTagOrZero(obj.Ptr());
95    const jlong saved_string_tag = string_tag;
96
97    jint result = cb->string_primitive_value_callback(class_tag,
98                                                      obj->SizeOf(),
99                                                      &string_tag,
100                                                      data.get(),
101                                                      string_length,
102                                                      const_cast<void*>(user_data));
103    if (string_tag != saved_string_tag) {
104      tag_table->Set(obj.Ptr(), string_tag);
105    }
106
107    return result;
108  }
109  return 0;
110}
111
112// Report the contents of a primitive array, if a callback is set.
113jint ReportPrimitiveArray(art::ObjPtr<art::mirror::Object> obj,
114                          jvmtiEnv* env,
115                          ObjectTagTable* tag_table,
116                          const jvmtiHeapCallbacks* cb,
117                          const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
118  if (UNLIKELY(cb->array_primitive_value_callback != nullptr) &&
119      obj->IsArrayInstance() &&
120      !obj->IsObjectArray()) {
121    art::ObjPtr<art::mirror::Array> array = obj->AsArray();
122    int32_t array_length = array->GetLength();
123    size_t component_size = array->GetClass()->GetComponentSize();
124    art::Primitive::Type art_prim_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
125    jvmtiPrimitiveType prim_type =
126        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
127    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
128           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
129           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
130           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
131           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
132           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
133           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
134           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);
135
136    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
137    jlong array_tag = tag_table->GetTagOrZero(obj.Ptr());
138    const jlong saved_array_tag = array_tag;
139
140    jint result;
141    if (array_length == 0) {
142      result = cb->array_primitive_value_callback(class_tag,
143                                                  obj->SizeOf(),
144                                                  &array_tag,
145                                                  0,
146                                                  prim_type,
147                                                  nullptr,
148                                                  const_cast<void*>(user_data));
149    } else {
150      jvmtiError alloc_error;
151      JvmtiUniquePtr<char[]> data = AllocJvmtiUniquePtr<char[]>(env,
152                                                                array_length * component_size,
153                                                                &alloc_error);
154      if (data == nullptr) {
155        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
156        //       back? For now just warn.
157        LOG(WARNING) << "Unable to allocate buffer for array reporting! Silently dropping value.";
158        return 0;
159      }
160
161      memcpy(data.get(), array->GetRawData(component_size, 0), array_length * component_size);
162
163      result = cb->array_primitive_value_callback(class_tag,
164                                                  obj->SizeOf(),
165                                                  &array_tag,
166                                                  array_length,
167                                                  prim_type,
168                                                  data.get(),
169                                                  const_cast<void*>(user_data));
170    }
171
172    if (array_tag != saved_array_tag) {
173      tag_table->Set(obj.Ptr(), array_tag);
174    }
175
176    return result;
177  }
178  return 0;
179}
180
// No-op field visitor used to fill visitor slots whose fields should not be reported.
// Always returns false, i.e., never requests an abort of the visit.
template <typename UserData>
bool VisitorFalse(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
                  art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
                  art::ArtField& field ATTRIBUTE_UNUSED,
                  size_t field_index ATTRIBUTE_UNUSED,
                  UserData* user_data ATTRIBUTE_UNUSED) {
  return false;
}
189
// Visits the fields of an object or class, assigning each field the index mandated by the
// JVMTI heap callbacks: indices start with fields of (transitively) implemented interfaces,
// continue with the superclass chain beginning at java.lang.Object, and end with the class
// itself. UserData is the opaque payload passed to every visitor. If kCallVisitorOnRecursion
// is true, the given visitors are also invoked for superclass fields; otherwise superclasses
// are only traversed to keep the field indices correct.
template <typename UserData, bool kCallVisitorOnRecursion>
class FieldVisitor {
 public:
  // Report the contents of a primitive fields of the given object, if a callback is set.
  // Returns true if one of the visitors aborted the visit.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  static bool ReportFields(art::ObjPtr<art::mirror::Object> obj,
                           UserData* user_data,
                           StaticPrimitiveVisitor& static_prim_visitor,
                           StaticReferenceVisitor& static_ref_visitor,
                           InstancePrimitiveVisitor& instance_prim_visitor,
                           InstanceReferenceVisitor& instance_ref_visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    FieldVisitor fv(user_data);

    if (obj->IsClass()) {
      // When visiting a class, we only visit the static fields of the given class. No field of
      // superclasses is visited.
      art::ObjPtr<art::mirror::Class> klass = obj->AsClass();
      // Only report fields on resolved classes. We need valid field data.
      if (!klass->IsResolved()) {
        return false;
      }
      return fv.ReportFieldsImpl(nullptr,
                                 obj->AsClass(),
                                 obj->AsClass()->IsInterface(),
                                 static_prim_visitor,
                                 static_ref_visitor,
                                 instance_prim_visitor,
                                 instance_ref_visitor);
    } else {
      // See comment above. Just double-checking here, but an instance *should* mean the class was
      // resolved.
      DCHECK(obj->GetClass()->IsResolved() || obj->GetClass()->IsErroneousResolved());
      return fv.ReportFieldsImpl(obj,
                                 obj->GetClass(),
                                 false,
                                 static_prim_visitor,
                                 static_ref_visitor,
                                 instance_prim_visitor,
                                 instance_ref_visitor);
    }
  }

 private:
  explicit FieldVisitor(UserData* user_data) : user_data_(user_data) {}

  // Report the contents of fields of the given object. If obj is null, report the static fields,
  // otherwise the instance fields. Returns true if the visit was aborted.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  bool ReportFieldsImpl(art::ObjPtr<art::mirror::Object> obj,
                        art::ObjPtr<art::mirror::Class> klass,
                        bool skip_java_lang_object,
                        StaticPrimitiveVisitor& static_prim_visitor,
                        StaticReferenceVisitor& static_ref_visitor,
                        InstancePrimitiveVisitor& instance_prim_visitor,
                        InstanceReferenceVisitor& instance_ref_visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Compute the offset of field indices.
    size_t interface_field_count = CountInterfaceFields(klass);

    size_t tmp;  // Receives the final field index; unused at the top level.
    bool aborted = ReportFieldsRecursive(obj,
                                         klass,
                                         interface_field_count,
                                         skip_java_lang_object,
                                         static_prim_visitor,
                                         static_ref_visitor,
                                         instance_prim_visitor,
                                         instance_ref_visitor,
                                         &tmp);
    return aborted;
  }

  // Visit primitive fields in an object (instance). Return true if the visit was aborted.
  // The first index for klass's own fields is obtained by recursing into the superclass chain
  // first; the recursion hands back the next free index through field_index_out.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  bool ReportFieldsRecursive(art::ObjPtr<art::mirror::Object> obj,
                             art::ObjPtr<art::mirror::Class> klass,
                             size_t interface_fields,
                             bool skip_java_lang_object,
                             StaticPrimitiveVisitor& static_prim_visitor,
                             StaticReferenceVisitor& static_ref_visitor,
                             InstancePrimitiveVisitor& instance_prim_visitor,
                             InstanceReferenceVisitor& instance_ref_visitor,
                             size_t* field_index_out)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK(klass != nullptr);
    size_t field_index;
    if (klass->GetSuperClass() == nullptr) {
      // j.l.Object. Start with the fields from interfaces.
      field_index = interface_fields;
      if (skip_java_lang_object) {
        *field_index_out = field_index;
        return false;
      }
    } else {
      // Report superclass fields.
      if (kCallVisitorOnRecursion) {
        if (ReportFieldsRecursive(obj,
                                  klass->GetSuperClass(),
                                  interface_fields,
                                  skip_java_lang_object,
                                  static_prim_visitor,
                                  static_ref_visitor,
                                  instance_prim_visitor,
                                  instance_ref_visitor,
                                  &field_index)) {
          return true;
        }
      } else {
        // Still call, but with empty visitor. This is required for correct counting.
        ReportFieldsRecursive(obj,
                              klass->GetSuperClass(),
                              interface_fields,
                              skip_java_lang_object,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              &field_index);
      }
    }

    // Now visit fields for the current klass.

    for (auto& static_field : klass->GetSFields()) {
      if (static_field.IsPrimitiveType()) {
        if (static_prim_visitor(obj,
                                klass,
                                static_field,
                                field_index,
                                user_data_)) {
          return true;
        }
      } else {
        if (static_ref_visitor(obj,
                               klass,
                               static_field,
                               field_index,
                               user_data_)) {
          return true;
        }
      }
      field_index++;
    }

    for (auto& instance_field : klass->GetIFields()) {
      if (instance_field.IsPrimitiveType()) {
        if (instance_prim_visitor(obj,
                                  klass,
                                  instance_field,
                                  field_index,
                                  user_data_)) {
          return true;
        }
      } else {
        if (instance_ref_visitor(obj,
                                 klass,
                                 instance_field,
                                 field_index,
                                 user_data_)) {
          return true;
        }
      }
      field_index++;
    }

    *field_index_out = field_index;
    return false;
  }

  // Implements a visit of the implemented interfaces of a given class.
  // Each distinct interface is visited exactly once, tracked via visited_interfaces.
  template <typename T>
  struct RecursiveInterfaceVisit {
    static void VisitStatic(art::Thread* self, art::ObjPtr<art::mirror::Class> klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      RecursiveInterfaceVisit rv;
      rv.Visit(self, klass, visitor);
    }

    void Visit(art::Thread* self, art::ObjPtr<art::mirror::Class> klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // First visit the parent, to get the order right.
      // (We do this in preparation for actual visiting of interface fields.)
      if (klass->GetSuperClass() != nullptr) {
        Visit(self, klass->GetSuperClass(), visitor);
      }
      for (uint32_t i = 0; i != klass->NumDirectInterfaces(); ++i) {
        art::ObjPtr<art::mirror::Class> inf_klass =
            art::mirror::Class::GetDirectInterface(self, klass, i);
        DCHECK(inf_klass != nullptr);
        VisitInterface(self, inf_klass, visitor);
      }
    }

    void VisitInterface(art::Thread* self, art::ObjPtr<art::mirror::Class> inf_klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // Skip interfaces already seen; interface graphs may reach the same type repeatedly.
      auto it = visited_interfaces.find(inf_klass.Ptr());
      if (it != visited_interfaces.end()) {
        return;
      }
      visited_interfaces.insert(inf_klass.Ptr());

      // Let the visitor know about this one. Note that this order is acceptable, as the ordering
      // of these fields never matters for known visitors.
      visitor(inf_klass);

      // Now visit the superinterfaces.
      for (uint32_t i = 0; i != inf_klass->NumDirectInterfaces(); ++i) {
        art::ObjPtr<art::mirror::Class> super_inf_klass =
            art::mirror::Class::GetDirectInterface(self, inf_klass, i);
        DCHECK(super_inf_klass != nullptr);
        VisitInterface(self, super_inf_klass, visitor);
      }
    }

    std::unordered_set<art::mirror::Class*> visited_interfaces;
  };

  // Counting interface fields. Note that we cannot use the interface table, as that only contains
  // "non-marker" interfaces (= interfaces with methods).
  static size_t CountInterfaceFields(art::ObjPtr<art::mirror::Class> klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Do we have a cached value?
    IndexCache tmp;
    if (gIndexCachingTable.GetTag(klass.Ptr(), &tmp)) {
      return tmp.interface_fields;
    }

    size_t count = 0;
    auto visitor = [&count](art::ObjPtr<art::mirror::Class> inf_klass)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      DCHECK(inf_klass->IsInterface());
      DCHECK_EQ(0u, inf_klass->NumInstanceFields());
      count += inf_klass->NumStaticFields();
    };
    RecursiveInterfaceVisit<decltype(visitor)>::VisitStatic(art::Thread::Current(), klass, visitor);

    // Store this into the cache.
    tmp.interface_fields = count;
    gIndexCachingTable.Set(klass.Ptr(), tmp);

    return count;
  }

  // Opaque payload forwarded to every visitor invocation.
  UserData* user_data_;
};
445
446// Debug helper. Prints the structure of an object.
447template <bool kStatic, bool kRef>
448struct DumpVisitor {
449  static bool Callback(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
450                       art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
451                       art::ArtField& field,
452                       size_t field_index,
453                       void* user_data ATTRIBUTE_UNUSED)
454      REQUIRES_SHARED(art::Locks::mutator_lock_) {
455    LOG(ERROR) << (kStatic ? "static " : "instance ")
456               << (kRef ? "ref " : "primitive ")
457               << field.PrettyField()
458               << " @ "
459               << field_index;
460    return false;
461  }
462};
463ATTRIBUTE_UNUSED
464void DumpObjectFields(art::ObjPtr<art::mirror::Object> obj)
465    REQUIRES_SHARED(art::Locks::mutator_lock_) {
466  if (obj->IsClass()) {
467    FieldVisitor<void, false>:: ReportFields(obj,
468                                             nullptr,
469                                             DumpVisitor<true, false>::Callback,
470                                             DumpVisitor<true, true>::Callback,
471                                             DumpVisitor<false, false>::Callback,
472                                             DumpVisitor<false, true>::Callback);
473  } else {
474    FieldVisitor<void, true>::ReportFields(obj,
475                                           nullptr,
476                                           DumpVisitor<true, false>::Callback,
477                                           DumpVisitor<true, true>::Callback,
478                                           DumpVisitor<false, false>::Callback,
479                                           DumpVisitor<false, true>::Callback);
480  }
481}
482
// Reports primitive field values through the JVMTI primitive_field_callback, using
// FieldVisitor to compute the spec-mandated field indices.
class ReportPrimitiveField {
 public:
  // Entry point: reports static fields for a class object, instance fields otherwise.
  // Returns true if the callback requested an abort of the iteration.
  static bool Report(art::ObjPtr<art::mirror::Object> obj,
                     ObjectTagTable* tag_table,
                     const jvmtiHeapCallbacks* cb,
                     const void* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (UNLIKELY(cb->primitive_field_callback != nullptr)) {
      jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
      ReportPrimitiveField rpf(tag_table, class_tag, cb, user_data);
      if (obj->IsClass()) {
        // Class object: only its own static primitive fields are reported.
        return FieldVisitor<ReportPrimitiveField, false>::ReportFields(
            obj,
            &rpf,
            ReportPrimitiveFieldCallback<true>,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>);
      } else {
        // Instance: primitive instance fields of the class and its superclasses.
        return FieldVisitor<ReportPrimitiveField, true>::ReportFields(
            obj,
            &rpf,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>,
            ReportPrimitiveFieldCallback<false>,
            VisitorFalse<ReportPrimitiveField>);
      }
    }
    return false;
  }


 private:
  ReportPrimitiveField(ObjectTagTable* tag_table,
                       jlong class_tag,
                       const jvmtiHeapCallbacks* cb,
                       const void* user_data)
      : tag_table_(tag_table), class_tag_(class_tag), cb_(cb), user_data_(user_data) {}

  // Visitor invoked by FieldVisitor for each primitive field. kReportStatic selects whether
  // the value is read from the class (static field) or the instance.
  template <bool kReportStatic>
  static bool ReportPrimitiveFieldCallback(art::ObjPtr<art::mirror::Object> obj,
                                           art::ObjPtr<art::mirror::Class> klass,
                                           art::ArtField& field,
                                           size_t field_index,
                                           ReportPrimitiveField* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    art::Primitive::Type art_prim_type = field.GetTypeAsPrimitiveType();
    // The jvmtiPrimitiveType constants are the JNI descriptor characters of the type.
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);
    jvmtiHeapReferenceInfo info;
    info.field.index = field_index;

    // Zero-initialize so unused union bytes are deterministic.
    jvalue value;
    memset(&value, 0, sizeof(jvalue));
    art::ObjPtr<art::mirror::Object> src = kReportStatic ? klass : obj;
    switch (art_prim_type) {
      case art::Primitive::Type::kPrimBoolean:
        value.z = field.GetBoolean(src) == 0 ? JNI_FALSE : JNI_TRUE;
        break;
      case art::Primitive::Type::kPrimByte:
        value.b = field.GetByte(src);
        break;
      case art::Primitive::Type::kPrimChar:
        value.c = field.GetChar(src);
        break;
      case art::Primitive::Type::kPrimShort:
        value.s = field.GetShort(src);
        break;
      case art::Primitive::Type::kPrimInt:
        value.i = field.GetInt(src);
        break;
      case art::Primitive::Type::kPrimLong:
        value.j = field.GetLong(src);
        break;
      case art::Primitive::Type::kPrimFloat:
        value.f = field.GetFloat(src);
        break;
      case art::Primitive::Type::kPrimDouble:
        value.d = field.GetDouble(src);
        break;
      case art::Primitive::Type::kPrimVoid:
      case art::Primitive::Type::kPrimNot: {
        // This visitor is only ever called for primitive fields.
        LOG(FATAL) << "Should not reach here";
        UNREACHABLE();
      }
    }

    jlong obj_tag = user_data->tag_table_->GetTagOrZero(src.Ptr());
    const jlong saved_obj_tag = obj_tag;

    jint ret = user_data->cb_->primitive_field_callback(kReportStatic
                                                            ? JVMTI_HEAP_REFERENCE_STATIC_FIELD
                                                            : JVMTI_HEAP_REFERENCE_FIELD,
                                                        &info,
                                                        user_data->class_tag_,
                                                        &obj_tag,
                                                        value,
                                                        prim_type,
                                                        const_cast<void*>(user_data->user_data_));

    // The callback may change the tag of the source object through the pointer.
    if (saved_obj_tag != obj_tag) {
      user_data->tag_table_->Set(src.Ptr(), obj_tag);
    }

    // Abort the whole visit if the callback set the abort bit.
    if ((ret & JVMTI_VISIT_ABORT) != 0) {
      return true;
    }

    return false;
  }

  ObjectTagTable* tag_table_;
  jlong class_tag_;
  const jvmtiHeapCallbacks* cb_;
  const void* user_data_;
};
607
608struct HeapFilter {
609  explicit HeapFilter(jint heap_filter)
610      : filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
611        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
612        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
613        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
614        any_filter(filter_out_tagged ||
615                   filter_out_untagged ||
616                   filter_out_class_tagged ||
617                   filter_out_class_untagged) {
618  }
619
620  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) const {
621    if (!any_filter) {
622      return true;
623    }
624
625    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
626      return false;
627    }
628
629    if ((class_tag == 0 && filter_out_class_untagged) ||
630        (class_tag != 0 && filter_out_class_tagged)) {
631      return false;
632    }
633
634    return true;
635  }
636
637  const bool filter_out_tagged;
638  const bool filter_out_untagged;
639  const bool filter_out_class_tagged;
640  const bool filter_out_class_untagged;
641  const bool any_filter;
642};
643
644}  // namespace
645
// Registers the class-index cache as a system-weak holder so its entries are swept with GC.
void HeapUtil::Register() {
  art::Runtime::Current()->AddSystemWeakHolder(&gIndexCachingTable);
}
649
// Removes the class-index cache from the runtime's system-weak holders.
void HeapUtil::Unregister() {
  art::Runtime::Current()->RemoveSystemWeakHolder(&gIndexCachingTable);
}
653
// Carries the state of one IterateThroughHeap call and implements the per-object visitor.
// Callback is the function invoked for every object that passes the filters.
template <typename Callback>
struct IterateThroughHeapData {
  IterateThroughHeapData(Callback _cb,
                         ObjectTagTable* _tag_table,
                         jvmtiEnv* _env,
                         art::ObjPtr<art::mirror::Class> klass,
                         jint _heap_filter,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : cb(_cb),
        tag_table(_tag_table),
        heap_filter(_heap_filter),
        filter_klass(klass),
        env(_env),
        callbacks(_callbacks),
        user_data(_user_data),
        stop_reports(false) {
  }

  // Static trampoline matching the gc::Heap::VisitObjects callback signature.
  static void ObjectCallback(art::mirror::Object* obj, void* arg)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
    ithd->ObjectCallback(obj);
  }

  // Per-object visitor: applies the filters, invokes the main callback, then the optional
  // string / primitive-array / primitive-field reports.
  void ObjectCallback(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Early return, as we can't really stop visiting.
    if (stop_reports) {
      return;
    }

    // Callbacks into the agent must not allow thread suspension while we hold raw pointers.
    art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

    jlong tag = 0;
    tag_table->GetTag(obj, &tag);

    jlong class_tag = 0;
    art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
    tag_table->GetTag(klass.Ptr(), &class_tag);
    // For simplicity, even if we find a tag = 0, assume 0 = not tagged.

    if (!heap_filter.ShouldReportByHeapFilter(tag, class_tag)) {
      return;
    }

    // Optional exact-class filter (no subclass matching).
    if (filter_klass != nullptr) {
      if (filter_klass != klass) {
        return;
      }
    }

    jlong size = obj->SizeOf();

    // Per the JVMTI spec, length is -1 for non-arrays.
    jint length = -1;
    if (obj->IsArrayInstance()) {
      length = obj->AsArray()->GetLength();
    }

    jlong saved_tag = tag;
    jint ret = cb(obj, callbacks, class_tag, size, &tag, length, const_cast<void*>(user_data));

    // Persist a tag update made by the callback.
    if (tag != saved_tag) {
      tag_table->Set(obj, tag);
    }

    stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

    // Follow-on reports, each skipped once an abort has been requested.
    if (!stop_reports) {
      jint string_ret = ReportString(obj, env, tag_table, callbacks, user_data);
      stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
    }

    if (!stop_reports) {
      jint array_ret = ReportPrimitiveArray(obj, env, tag_table, callbacks, user_data);
      stop_reports = (array_ret & JVMTI_VISIT_ABORT) != 0;
    }

    if (!stop_reports) {
      stop_reports = ReportPrimitiveField::Report(obj, tag_table, callbacks, user_data);
    }
  }

  Callback cb;
  ObjectTagTable* tag_table;
  const HeapFilter heap_filter;
  art::ObjPtr<art::mirror::Class> filter_klass;
  jvmtiEnv* env;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;

  // Sticky abort flag; once set, all further objects are skipped.
  bool stop_reports;
};
747
// Shared implementation of IterateThroughHeap: acquires the mutator lock, decodes the class
// filter, and walks all heap objects with IterateThroughHeapData<T> as the visitor state.
template <typename T>
static jvmtiError DoIterateThroughHeap(T fn,
                                       jvmtiEnv* env,
                                       ObjectTagTable* tag_table,
                                       jint heap_filter,
                                       jclass klass,
                                       const jvmtiHeapCallbacks* callbacks,
                                       const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);      // Now we know we have the shared lock.

  using Iterator = IterateThroughHeapData<T>;
  // Note: Decode requires the mutator lock acquired by soa above.
  Iterator ithd(fn,
                tag_table,
                env,
                soa.Decode<art::mirror::Class>(klass),
                heap_filter,
                callbacks,
                user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(Iterator::ObjectCallback, &ithd);

  return ERR(NONE);
}
776
// JVMTI IterateThroughHeap entry point: wires the agent's heap_iteration_callback into the
// generic DoIterateThroughHeap machinery.
jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  // Adapter: drops the raw object pointer and forwards the rest to the agent callback.
  auto JvmtiIterateHeap = [](art::mirror::Object* obj ATTRIBUTE_UNUSED,
                             const jvmtiHeapCallbacks* cb_callbacks,
                             jlong class_tag,
                             jlong size,
                             jlong* tag,
                             jint length,
                             void* cb_user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    return cb_callbacks->heap_iteration_callback(class_tag,
                                                 size,
                                                 tag,
                                                 length,
                                                 cb_user_data);
  };
  return DoIterateThroughHeap(JvmtiIterateHeap,
                              env,
                              ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get(),
                              heap_filter,
                              klass,
                              callbacks,
                              user_data);
}
804
805class FollowReferencesHelper FINAL {
806 public:
807  FollowReferencesHelper(HeapUtil* h,
808                         jvmtiEnv* jvmti_env,
809                         art::ObjPtr<art::mirror::Object> initial_object,
810                         const jvmtiHeapCallbacks* callbacks,
811                         art::ObjPtr<art::mirror::Class> class_filter,
812                         jint heap_filter,
813                         const void* user_data)
814      : env(jvmti_env),
815        tag_table_(h->GetTags()),
816        initial_object_(initial_object),
817        callbacks_(callbacks),
818        class_filter_(class_filter),
819        heap_filter_(heap_filter),
820        user_data_(user_data),
821        start_(0),
822        stop_reports_(false) {
823  }
824
825  void Init()
826      REQUIRES_SHARED(art::Locks::mutator_lock_)
827      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
828    if (initial_object_.IsNull()) {
829      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);
830
831      // We need precise info (e.g., vregs).
832      constexpr art::VisitRootFlags kRootFlags = static_cast<art::VisitRootFlags>(
833          art::VisitRootFlags::kVisitRootFlagAllRoots | art::VisitRootFlags::kVisitRootFlagPrecise);
834      art::Runtime::Current()->VisitRoots(&carrv, kRootFlags);
835
836      art::Runtime::Current()->VisitImageRoots(&carrv);
837      stop_reports_ = carrv.IsStopReports();
838
839      if (stop_reports_) {
840        worklist_.clear();
841      }
842    } else {
843      visited_.insert(initial_object_.Ptr());
844      worklist_.push_back(initial_object_.Ptr());
845    }
846  }
847
848  void Work()
849      REQUIRES_SHARED(art::Locks::mutator_lock_)
850      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
851    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
852    // from the head of the work list, instead postponing until there's a gap that's "large."
853    //
854    // Alternatively, we can implement a DFS and use the work list as a stack.
855    while (start_ < worklist_.size()) {
856      art::mirror::Object* cur_obj = worklist_[start_];
857      start_++;
858
859      if (start_ >= kMaxStart) {
860        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
861        start_ = 0;
862      }
863
864      VisitObject(cur_obj);
865
866      if (stop_reports_) {
867        break;
868      }
869    }
870  }
871
872 private:
873  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
874   public:
875    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
876                                 ObjectTagTable* tag_table,
877                                 std::vector<art::mirror::Object*>* worklist,
878                                 std::unordered_set<art::mirror::Object*>* visited)
879        : helper_(helper),
880          tag_table_(tag_table),
881          worklist_(worklist),
882          visited_(visited),
883          stop_reports_(false) {}
884
885    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
886        OVERRIDE
887        REQUIRES_SHARED(art::Locks::mutator_lock_)
888        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
889      for (size_t i = 0; i != count; ++i) {
890        AddRoot(*roots[i], info);
891      }
892    }
893
894    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
895                    size_t count,
896                    const art::RootInfo& info)
897        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
898        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
899      for (size_t i = 0; i != count; ++i) {
900        AddRoot(roots[i]->AsMirrorPtr(), info);
901      }
902    }
903
904    bool IsStopReports() {
905      return stop_reports_;
906    }
907
908   private:
909    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
910        REQUIRES_SHARED(art::Locks::mutator_lock_)
911        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
912      if (stop_reports_) {
913        return;
914      }
915      bool add_to_worklist = ReportRoot(root_obj, info);
916      // We use visited_ to mark roots already so we do not need another set.
917      if (visited_->find(root_obj) == visited_->end()) {
918        if (add_to_worklist) {
919          visited_->insert(root_obj);
920          worklist_->push_back(root_obj);
921        }
922      }
923    }
924
925    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
926    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
927      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
928      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
929    }
930
931    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
932                                            jvmtiHeapReferenceInfo* ref_info)
933        REQUIRES_SHARED(art::Locks::mutator_lock_) {
934      // TODO: Fill in ref_info.
935      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));
936
937      switch (info.GetType()) {
938        case art::RootType::kRootJNIGlobal:
939          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;
940
941        case art::RootType::kRootJNILocal:
942        {
943          uint32_t thread_id = info.GetThreadId();
944          ref_info->jni_local.thread_id = thread_id;
945
946          art::Thread* thread = FindThread(info);
947          if (thread != nullptr) {
948            art::mirror::Object* thread_obj;
949            if (thread->IsStillStarting()) {
950              thread_obj = nullptr;
951            } else {
952              thread_obj = thread->GetPeerFromOtherThread();
953            }
954            if (thread_obj != nullptr) {
955              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
956            }
957          }
958
959          // TODO: We don't have this info.
960          if (thread != nullptr) {
961            ref_info->jni_local.depth = 0;
962            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
963            if (method != nullptr) {
964              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
965            }
966          }
967
968          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
969        }
970
971        case art::RootType::kRootJavaFrame:
972        {
973          uint32_t thread_id = info.GetThreadId();
974          ref_info->stack_local.thread_id = thread_id;
975
976          art::Thread* thread = FindThread(info);
977          if (thread != nullptr) {
978            art::mirror::Object* thread_obj;
979            if (thread->IsStillStarting()) {
980              thread_obj = nullptr;
981            } else {
982              thread_obj = thread->GetPeerFromOtherThread();
983            }
984            if (thread_obj != nullptr) {
985              ref_info->stack_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
986            }
987          }
988
989          auto& java_info = static_cast<const art::JavaFrameRootInfo&>(info);
990          ref_info->stack_local.slot = static_cast<jint>(java_info.GetVReg());
991          const art::StackVisitor* visitor = java_info.GetVisitor();
992          ref_info->stack_local.location =
993              static_cast<jlocation>(visitor->GetDexPc(false /* abort_on_failure */));
994          ref_info->stack_local.depth = static_cast<jint>(visitor->GetFrameDepth());
995          art::ArtMethod* method = visitor->GetMethod();
996          if (method != nullptr) {
997            ref_info->stack_local.method = art::jni::EncodeArtMethod(method);
998          }
999
1000          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;
1001        }
1002
1003        case art::RootType::kRootNativeStack:
1004        case art::RootType::kRootThreadBlock:
1005        case art::RootType::kRootThreadObject:
1006          return JVMTI_HEAP_REFERENCE_THREAD;
1007
1008        case art::RootType::kRootStickyClass:
1009        case art::RootType::kRootInternedString:
1010          // Note: this isn't a root in the RI.
1011          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;
1012
1013        case art::RootType::kRootMonitorUsed:
1014        case art::RootType::kRootJNIMonitor:
1015          return JVMTI_HEAP_REFERENCE_MONITOR;
1016
1017        case art::RootType::kRootFinalizing:
1018        case art::RootType::kRootDebugger:
1019        case art::RootType::kRootReferenceCleanup:
1020        case art::RootType::kRootVMInternal:
1021        case art::RootType::kRootUnknown:
1022          return JVMTI_HEAP_REFERENCE_OTHER;
1023      }
1024      LOG(FATAL) << "Unreachable";
1025      UNREACHABLE();
1026    }
1027
1028    bool ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
1029        REQUIRES_SHARED(art::Locks::mutator_lock_)
1030        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
1031      jvmtiHeapReferenceInfo ref_info;
1032      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
1033      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
1034      if ((result & JVMTI_VISIT_ABORT) != 0) {
1035        stop_reports_ = true;
1036      }
1037      return (result & JVMTI_VISIT_OBJECTS) != 0;
1038    }
1039
1040   private:
1041    FollowReferencesHelper* helper_;
1042    ObjectTagTable* tag_table_;
1043    std::vector<art::mirror::Object*>* worklist_;
1044    std::unordered_set<art::mirror::Object*>* visited_;
1045    bool stop_reports_;
1046  };
1047
1048  void VisitObject(art::mirror::Object* obj)
1049      REQUIRES_SHARED(art::Locks::mutator_lock_)
1050      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
1051    if (obj->IsClass()) {
1052      VisitClass(obj->AsClass());
1053      return;
1054    }
1055    if (obj->IsArrayInstance()) {
1056      VisitArray(obj);
1057      return;
1058    }
1059
1060    // All instance fields.
1061    auto report_instance_field = [&](art::ObjPtr<art::mirror::Object> src,
1062                                     art::ObjPtr<art::mirror::Class> obj_klass ATTRIBUTE_UNUSED,
1063                                     art::ArtField& field,
1064                                     size_t field_index,
1065                                     void* user_data ATTRIBUTE_UNUSED)
1066        REQUIRES_SHARED(art::Locks::mutator_lock_)
1067        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
1068      art::ObjPtr<art::mirror::Object> field_value = field.GetObject(src);
1069      if (field_value != nullptr) {
1070        jvmtiHeapReferenceInfo reference_info;
1071        memset(&reference_info, 0, sizeof(reference_info));
1072
1073        reference_info.field.index = field_index;
1074
1075        jvmtiHeapReferenceKind kind =
1076            field.GetOffset().Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
1077                ? JVMTI_HEAP_REFERENCE_CLASS
1078                : JVMTI_HEAP_REFERENCE_FIELD;
1079        const jvmtiHeapReferenceInfo* reference_info_ptr =
1080            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;
1081
1082        return !ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src.Ptr(), field_value.Ptr());
1083      }
1084      return false;
1085    };
1086    stop_reports_ = FieldVisitor<void, true>::ReportFields(obj,
1087                                                           nullptr,
1088                                                           VisitorFalse<void>,
1089                                                           VisitorFalse<void>,
1090                                                           VisitorFalse<void>,
1091                                                           report_instance_field);
1092    if (stop_reports_) {
1093      return;
1094    }
1095
1096    jint string_ret = ReportString(obj, env, tag_table_, callbacks_, user_data_);
1097    stop_reports_ = (string_ret & JVMTI_VISIT_ABORT) != 0;
1098    if (stop_reports_) {
1099      return;
1100    }
1101
1102    stop_reports_ = ReportPrimitiveField::Report(obj, tag_table_, callbacks_, user_data_);
1103  }
1104
1105  void VisitArray(art::mirror::Object* array)
1106      REQUIRES_SHARED(art::Locks::mutator_lock_)
1107      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
1108    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
1109                                                 nullptr,
1110                                                 array,
1111                                                 array->GetClass());
1112    if (stop_reports_) {
1113      return;
1114    }
1115
1116    if (array->IsObjectArray()) {
1117      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
1118          array->AsObjectArray<art::mirror::Object>();
1119      int32_t length = obj_array->GetLength();
1120      for (int32_t i = 0; i != length; ++i) {
1121        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
1122        if (elem != nullptr) {
1123          jvmtiHeapReferenceInfo reference_info;
1124          reference_info.array.index = i;
1125          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
1126                                                       &reference_info,
1127                                                       array,
1128                                                       elem);
1129          if (stop_reports_) {
1130            break;
1131          }
1132        }
1133      }
1134    } else {
1135      if (!stop_reports_) {
1136        jint array_ret = ReportPrimitiveArray(array, env, tag_table_, callbacks_, user_data_);
1137        stop_reports_ = (array_ret & JVMTI_VISIT_ABORT) != 0;
1138      }
1139    }
1140  }
1141
1142  void VisitClass(art::mirror::Class* klass)
1143      REQUIRES_SHARED(art::Locks::mutator_lock_)
1144      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
1145    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
1146    if (!klass->IsResolved()) {
1147      return;
1148    }
1149
1150    // Superclass.
1151    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
1152                                                 nullptr,
1153                                                 klass,
1154                                                 klass->GetSuperClass());
1155    if (stop_reports_) {
1156      return;
1157    }
1158
1159    // Directly implemented or extended interfaces.
1160    art::Thread* self = art::Thread::Current();
1161    art::StackHandleScope<1> hs(self);
1162    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
1163    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
1164      art::ObjPtr<art::mirror::Class> inf_klass =
1165          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
1166      if (inf_klass == nullptr) {
1167        // TODO: With a resolved class this should not happen...
1168        self->ClearException();
1169        break;
1170      }
1171
1172      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
1173                                                   nullptr,
1174                                                   klass,
1175                                                   inf_klass.Ptr());
1176      if (stop_reports_) {
1177        return;
1178      }
1179    }
1180
1181    // Classloader.
1182    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
1183    //       fake BootClassLoader?
1184    if (klass->GetClassLoader() != nullptr) {
1185      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
1186                                                   nullptr,
1187                                                   klass,
1188                                                   klass->GetClassLoader());
1189      if (stop_reports_) {
1190        return;
1191      }
1192    }
1193    DCHECK_EQ(h_klass.Get(), klass);
1194
1195    // Declared static fields.
1196    auto report_static_field = [&](art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
1197                                   art::ObjPtr<art::mirror::Class> obj_klass,
1198                                   art::ArtField& field,
1199                                   size_t field_index,
1200                                   void* user_data ATTRIBUTE_UNUSED)
1201        REQUIRES_SHARED(art::Locks::mutator_lock_)
1202        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
1203      art::ObjPtr<art::mirror::Object> field_value = field.GetObject(obj_klass);
1204      if (field_value != nullptr) {
1205        jvmtiHeapReferenceInfo reference_info;
1206        memset(&reference_info, 0, sizeof(reference_info));
1207
1208        reference_info.field.index = static_cast<jint>(field_index);
1209
1210        return !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
1211                                            &reference_info,
1212                                            obj_klass.Ptr(),
1213                                            field_value.Ptr());
1214      }
1215      return false;
1216    };
1217    stop_reports_ = FieldVisitor<void, false>::ReportFields(klass,
1218                                                            nullptr,
1219                                                            VisitorFalse<void>,
1220                                                            report_static_field,
1221                                                            VisitorFalse<void>,
1222                                                            VisitorFalse<void>);
1223    if (stop_reports_) {
1224      return;
1225    }
1226
1227    stop_reports_ = ReportPrimitiveField::Report(klass, tag_table_, callbacks_, user_data_);
1228  }
1229
1230  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
1231    if (visited_.find(obj) == visited_.end()) {
1232      worklist_.push_back(obj);
1233      visited_.insert(obj);
1234    }
1235  }
1236
1237  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
1238                                   const jvmtiHeapReferenceInfo* reference_info,
1239                                   art::mirror::Object* referree,
1240                                   art::mirror::Object* referrer)
1241      REQUIRES_SHARED(art::Locks::mutator_lock_)
1242      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
1243    jint result = ReportReference(kind, reference_info, referree, referrer);
1244    if ((result & JVMTI_VISIT_ABORT) == 0) {
1245      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
1246        MaybeEnqueue(referrer);
1247      }
1248      return true;
1249    } else {
1250      return false;
1251    }
1252  }
1253
1254  jint ReportReference(jvmtiHeapReferenceKind kind,
1255                       const jvmtiHeapReferenceInfo* reference_info,
1256                       art::mirror::Object* referrer,
1257                       art::mirror::Object* referree)
1258      REQUIRES_SHARED(art::Locks::mutator_lock_)
1259      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
1260    if (referree == nullptr || stop_reports_) {
1261      return 0;
1262    }
1263
1264    if (UNLIKELY(class_filter_ != nullptr) && class_filter_ != referree->GetClass()) {
1265      return JVMTI_VISIT_OBJECTS;
1266    }
1267
1268    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
1269    jlong tag = tag_table_->GetTagOrZero(referree);
1270
1271    if (!heap_filter_.ShouldReportByHeapFilter(tag, class_tag)) {
1272      return JVMTI_VISIT_OBJECTS;
1273    }
1274
1275    const jlong referrer_class_tag =
1276        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
1277    const jlong size = static_cast<jlong>(referree->SizeOf());
1278    jlong saved_tag = tag;
1279    jlong referrer_tag = 0;
1280    jlong saved_referrer_tag = 0;
1281    jlong* referrer_tag_ptr;
1282    if (referrer == nullptr) {
1283      referrer_tag_ptr = nullptr;
1284    } else {
1285      if (referrer == referree) {
1286        referrer_tag_ptr = &tag;
1287      } else {
1288        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
1289        referrer_tag_ptr = &referrer_tag;
1290      }
1291    }
1292
1293    jint length = -1;
1294    if (referree->IsArrayInstance()) {
1295      length = referree->AsArray()->GetLength();
1296    }
1297
1298    jint result = callbacks_->heap_reference_callback(kind,
1299                                                      reference_info,
1300                                                      class_tag,
1301                                                      referrer_class_tag,
1302                                                      size,
1303                                                      &tag,
1304                                                      referrer_tag_ptr,
1305                                                      length,
1306                                                      const_cast<void*>(user_data_));
1307
1308    if (tag != saved_tag) {
1309      tag_table_->Set(referree, tag);
1310    }
1311    if (referrer_tag != saved_referrer_tag) {
1312      tag_table_->Set(referrer, referrer_tag);
1313    }
1314
1315    return result;
1316  }
1317
1318  jvmtiEnv* env;
1319  ObjectTagTable* tag_table_;
1320  art::ObjPtr<art::mirror::Object> initial_object_;
1321  const jvmtiHeapCallbacks* callbacks_;
1322  art::ObjPtr<art::mirror::Class> class_filter_;
1323  const HeapFilter heap_filter_;
1324  const void* user_data_;
1325
1326  std::vector<art::mirror::Object*> worklist_;
1327  size_t start_;
1328  static constexpr size_t kMaxStart = 1000000U;
1329
1330  std::unordered_set<art::mirror::Object*> visited_;
1331
1332  bool stop_reports_;
1333
1334  friend class CollectAndReportRootsVisitor;
1335};
1336
// JVMTI FollowReferences: walk the object graph from the given initial object
// (or from all roots when initial_object is null), reporting references via
// `callbacks`. Runs with all threads suspended and, for concurrent moving
// collectors, with moving GC disabled around the suspend scope.
jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env,
                                      jint heap_filter,
                                      jclass klass,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    // Suspend everything: the traversal needs a stable object graph, and root
    // collection walks other threads' stacks. The ordering of these scoped
    // objects (object access -> state change -> suspend-all) is significant.
    art::ScopedObjectAccess soa(self);      // Now we know we have the shared lock.
    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    art::ObjPtr<art::mirror::Class> class_filter = klass == nullptr
        ? nullptr
        : art::ObjPtr<art::mirror::Class>::DownCast(self->DecodeJObject(klass));
    FollowReferencesHelper frh(this,
                               env,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               class_filter,
                               heap_filter,
                               user_data);
    frh.Init();
    frh.Work();
  }
  // Re-enable moving GC (matches the increment above; the concurrent-and-
  // moving property is fixed at runtime startup, so both checks agree).
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}
1379
1380jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
1381                                      jint* class_count_ptr,
1382                                      jclass** classes_ptr) {
1383  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
1384    return ERR(NULL_POINTER);
1385  }
1386
1387  class ReportClassVisitor : public art::ClassVisitor {
1388   public:
1389    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}
1390
1391    bool operator()(art::ObjPtr<art::mirror::Class> klass)
1392        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
1393      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
1394      return true;
1395    }
1396
1397    art::Thread* self_;
1398    std::vector<jclass> classes_;
1399  };
1400
1401  art::Thread* self = art::Thread::Current();
1402  ReportClassVisitor rcv(self);
1403  {
1404    art::ScopedObjectAccess soa(self);
1405    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
1406  }
1407
1408  size_t size = rcv.classes_.size();
1409  jclass* classes = nullptr;
1410  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
1411                                       reinterpret_cast<unsigned char**>(&classes));
1412  if (alloc_ret != ERR(NONE)) {
1413    return alloc_ret;
1414  }
1415
1416  for (size_t i = 0; i < size; ++i) {
1417    classes[i] = rcv.classes_[i];
1418  }
1419  *classes_ptr = classes;
1420  *class_count_ptr = static_cast<jint>(size);
1421
1422  return ERR(NONE);
1423}
1424
// JVMTI ForceGarbageCollection: trigger a collection synchronously.
jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
  // NOTE(review): `false` presumably means "do not clear soft references" --
  // confirm against gc::Heap::CollectGarbage's parameter name.
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}
1430
// Heap ids reported by the GetObjectHeapId extension; GetHeapName below maps
// them to "default"/"image"/"zygote"/"app" respectively.
static constexpr jint kHeapIdDefault = 0;
static constexpr jint kHeapIdImage = 1;
static constexpr jint kHeapIdZygote = 2;
static constexpr jint kHeapIdApp = 3;
1435
1436static jint GetHeapId(art::ObjPtr<art::mirror::Object> obj)
1437    REQUIRES_SHARED(art::Locks::mutator_lock_) {
1438  if (obj == nullptr) {
1439    return -1;
1440  }
1441
1442  art::gc::Heap* const heap = art::Runtime::Current()->GetHeap();
1443  const art::gc::space::ContinuousSpace* const space =
1444      heap->FindContinuousSpaceFromObject(obj, true);
1445  jint heap_type = kHeapIdApp;
1446  if (space != nullptr) {
1447    if (space->IsZygoteSpace()) {
1448      heap_type = kHeapIdZygote;
1449    } else if (space->IsImageSpace() && heap->ObjectIsInBootImageSpace(obj)) {
1450      // Only count objects in the boot image as HPROF_HEAP_IMAGE, this leaves app image objects
1451      // as HPROF_HEAP_APP. b/35762934
1452      heap_type = kHeapIdImage;
1453    }
1454  } else {
1455    const auto* los = heap->GetLargeObjectsSpace();
1456    if (los->Contains(obj.Ptr()) && los->IsZygoteLargeObject(art::Thread::Current(), obj.Ptr())) {
1457      heap_type = kHeapIdZygote;
1458    }
1459  }
1460  return heap_type;
1461};
1462
// ART extension API: map a tag to the id of the heap containing the tagged
// object. Trailing varargs are part of the extension-function ABI and are
// ignored. Returns NOT_FOUND when no object carries the tag.
jvmtiError HeapExtensions::GetObjectHeapId(jvmtiEnv* env, jlong tag, jint* heap_id, ...) {
  if (heap_id == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  // The actual lookup; requires the shared mutator lock to be held.
  auto work = [&]() REQUIRES_SHARED(art::Locks::mutator_lock_) {
    ObjectTagTable* tag_table = ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get();
    art::ObjPtr<art::mirror::Object> obj = tag_table->Find(tag);
    jint heap_type = GetHeapId(obj);
    if (heap_type == -1) {
      // GetHeapId returns -1 only for null, i.e. the tag was not found.
      return ERR(NOT_FOUND);
    }
    *heap_id = heap_type;
    return ERR(NONE);
  };

  // Acquire the mutator lock if we don't already hold it; otherwise run
  // directly under the already-held lock.
  if (!art::Locks::mutator_lock_->IsSharedHeld(self)) {
    if (!self->IsThreadSuspensionAllowable()) {
      return ERR(INTERNAL);
    }
    art::ScopedObjectAccess soa(self);
    return work();
  } else {
    // We cannot use SOA in this case. We might be holding the lock, but may not be in the
    // runnable state (e.g., during GC).
    art::Locks::mutator_lock_->AssertSharedHeld(self);
    // TODO: Investigate why ASSERT_SHARED_CAPABILITY doesn't work.
    auto annotalysis_workaround = [&]() NO_THREAD_SAFETY_ANALYSIS {
      return work();
    };
    return annotalysis_workaround();
  }
}
1498
1499static jvmtiError CopyStringAndReturn(jvmtiEnv* env, const char* in, char** out) {
1500  jvmtiError error;
1501  JvmtiUniquePtr<char[]> param_name = CopyString(env, in, &error);
1502  if (param_name == nullptr) {
1503    return error;
1504  }
1505  *out = param_name.release();
1506  return ERR(NONE);
1507}
1508
// Names for the kHeapId* constants above, handed out by GetHeapName.
static constexpr const char* kHeapIdDefaultName = "default";
static constexpr const char* kHeapIdImageName = "image";
static constexpr const char* kHeapIdZygoteName = "zygote";
static constexpr const char* kHeapIdAppName = "app";
1513
1514jvmtiError HeapExtensions::GetHeapName(jvmtiEnv* env, jint heap_id, char** heap_name, ...) {
1515  switch (heap_id) {
1516    case kHeapIdDefault:
1517      return CopyStringAndReturn(env, kHeapIdDefaultName, heap_name);
1518    case kHeapIdImage:
1519      return CopyStringAndReturn(env, kHeapIdImageName, heap_name);
1520    case kHeapIdZygote:
1521      return CopyStringAndReturn(env, kHeapIdZygoteName, heap_name);
1522    case kHeapIdApp:
1523      return CopyStringAndReturn(env, kHeapIdAppName, heap_name);
1524
1525    default:
1526      return ERR(ILLEGAL_ARGUMENT);
1527  }
1528}
1529
1530jvmtiError HeapExtensions::IterateThroughHeapExt(jvmtiEnv* env,
1531                                                 jint heap_filter,
1532                                                 jclass klass,
1533                                                 const jvmtiHeapCallbacks* callbacks,
1534                                                 const void* user_data) {
1535  if (ArtJvmTiEnv::AsArtJvmTiEnv(env)->capabilities.can_tag_objects != 1) { \
1536    return ERR(MUST_POSSESS_CAPABILITY); \
1537  }
1538
1539  // ART extension API: Also pass the heap id.
1540  auto ArtIterateHeap = [](art::mirror::Object* obj,
1541                           const jvmtiHeapCallbacks* cb_callbacks,
1542                           jlong class_tag,
1543                           jlong size,
1544                           jlong* tag,
1545                           jint length,
1546                           void* cb_user_data)
1547      REQUIRES_SHARED(art::Locks::mutator_lock_) {
1548    jint heap_id = GetHeapId(obj);
1549    using ArtExtensionAPI = jint (*)(jlong, jlong, jlong*, jint length, void*, jint);
1550    return reinterpret_cast<ArtExtensionAPI>(cb_callbacks->heap_iteration_callback)(
1551        class_tag, size, tag, length, cb_user_data, heap_id);
1552  };
1553  return DoIterateThroughHeap(ArtIterateHeap,
1554                              env,
1555                              ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get(),
1556                              heap_filter,
1557                              klass,
1558                              callbacks,
1559                              user_data);
1560}
1561
1562}  // namespace openjdkjvmti
1563