art_method.h revision cdca476bf3394ce9d97a369e84e701b427009318
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include "base/bit_utils.h"
#include "base/casts.h"
#include "dex_file.h"
#include "gc_root.h"
#include "invoke_type.h"
#include "method_reference.h"
#include "modifiers.h"
#include "mirror/object.h"
#include "read_barrier_option.h"
#include "stack.h"
#include "utils.h"

namespace art {

union JValue;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class StringPiece;
class ShadowFrame;

namespace mirror {
class Array;
class Class;
class IfTable;
class PointerArray;
}  // namespace mirror

// Table to resolve IMT conflicts at runtime. The table is attached to
// the jni entrypoint of IMT conflict ArtMethods.
// The table contains a list of pairs of { interface_method, implementation_method }
// with the last entry being null to make an assembly implementation of a lookup
// faster.
class ImtConflictTable {
  enum MethodIndex {
    kMethodInterface,
    kMethodImplementation,
    kMethodCount,  // Number of elements in enum.
  };

 public:
  // Build a new table copying `other` and adding the new entry formed by
  // the pair { `interface_method`, `implementation_method` }.
  ImtConflictTable(ImtConflictTable* other,
                   ArtMethod* interface_method,
                   ArtMethod* implementation_method,
                   size_t pointer_size) {
    const size_t count = other->NumEntries(pointer_size);
    for (size_t i = 0; i < count; ++i) {
      SetInterfaceMethod(i, pointer_size, other->GetInterfaceMethod(i, pointer_size));
      SetImplementationMethod(i, pointer_size, other->GetImplementationMethod(i, pointer_size));
    }
    SetInterfaceMethod(count, pointer_size, interface_method);
    SetImplementationMethod(count, pointer_size, implementation_method);
    // Add the null marker.
    SetInterfaceMethod(count + 1, pointer_size, nullptr);
    SetImplementationMethod(count + 1, pointer_size, nullptr);
  }

  // num_entries excludes the null end marker.
  ImtConflictTable(size_t num_entries, size_t pointer_size) {
    SetInterfaceMethod(num_entries, pointer_size, nullptr);
    SetImplementationMethod(num_entries, pointer_size, nullptr);
  }

  // Set an entry at an index.
  void SetInterfaceMethod(size_t index, size_t pointer_size, ArtMethod* method) {
    SetMethod(index * kMethodCount + kMethodInterface, pointer_size, method);
  }

  void SetImplementationMethod(size_t index, size_t pointer_size, ArtMethod* method) {
    SetMethod(index * kMethodCount + kMethodImplementation, pointer_size, method);
  }

  ArtMethod* GetInterfaceMethod(size_t index, size_t pointer_size) const {
    return GetMethod(index * kMethodCount + kMethodInterface, pointer_size);
  }

  ArtMethod* GetImplementationMethod(size_t index, size_t pointer_size) const {
    return GetMethod(index * kMethodCount + kMethodImplementation, pointer_size);
  }

  // Visit all of the entries.
  // NO_THREAD_SAFETY_ANALYSIS for calling with held locks. Visitor is passed a pair of ArtMethod*
  // and also returns one. The order is <interface, implementation>.
  template<typename Visitor>
  void Visit(const Visitor& visitor, size_t pointer_size) NO_THREAD_SAFETY_ANALYSIS {
    uint32_t table_index = 0;
    for (;;) {
      ArtMethod* interface_method = GetInterfaceMethod(table_index, pointer_size);
      if (interface_method == nullptr) {
        break;
      }
      ArtMethod* implementation_method = GetImplementationMethod(table_index, pointer_size);
      auto input = std::make_pair(interface_method, implementation_method);
      std::pair<ArtMethod*, ArtMethod*> updated = visitor(input);
      if (input.first != updated.first) {
        SetInterfaceMethod(table_index, pointer_size, updated.first);
      }
      if (input.second != updated.second) {
        SetImplementationMethod(table_index, pointer_size, updated.second);
      }
      ++table_index;
    }
  }
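
  // Illustrative sketch: updating implementation methods through a visitor,
  // e.g. during image relocation. `Relocate` is a hypothetical helper standing
  // in for a real pointer updater.
  //
  //   table->Visit([](const std::pair<ArtMethod*, ArtMethod*>& entry) {
  //     return std::make_pair(entry.first, Relocate(entry.second));
  //   }, sizeof(void*));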

  // Look up the implementation ArtMethod associated with `interface_method`. Return null
  // if not found.
  ArtMethod* Lookup(ArtMethod* interface_method, size_t pointer_size) const {
    uint32_t table_index = 0;
    for (;;) {
      ArtMethod* current_interface_method = GetInterfaceMethod(table_index, pointer_size);
      if (current_interface_method == nullptr) {
        break;
      }
      if (current_interface_method == interface_method) {
        return GetImplementationMethod(table_index, pointer_size);
      }
      ++table_index;
    }
    return nullptr;
  }
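
  // Layout sketch (illustrative): a table with two conflict entries is scanned
  // linearly by Lookup until the null marker is reached:
  //
  //   index 0: { iface_a, impl_a }
  //   index 1: { iface_b, impl_b }
  //   index 2: { nullptr, nullptr }  // end marker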

  // Compute the number of entries in this table.
  size_t NumEntries(size_t pointer_size) const {
    uint32_t table_index = 0;
    while (GetInterfaceMethod(table_index, pointer_size) != nullptr) {
      ++table_index;
    }
    return table_index;
  }

  // Compute the size in bytes taken by this table.
  size_t ComputeSize(size_t pointer_size) const {
    // Add the end marker.
    return ComputeSize(NumEntries(pointer_size), pointer_size);
  }

  // Compute the size in bytes needed for copying the given `table` plus
  // one more entry.
  static size_t ComputeSizeWithOneMoreEntry(ImtConflictTable* table, size_t pointer_size) {
    return table->ComputeSize(pointer_size) + EntrySize(pointer_size);
  }

  // Compute size with a fixed number of entries.
  static size_t ComputeSize(size_t num_entries, size_t pointer_size) {
    return (num_entries + 1) * EntrySize(pointer_size);  // Add one for null terminator.
  }

  static size_t EntrySize(size_t pointer_size) {
    return pointer_size * static_cast<size_t>(kMethodCount);
  }
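
  // Worked example (illustrative): with 64-bit pointers, EntrySize(8) is
  // 8 * kMethodCount = 16 bytes, so ComputeSize(2, 8) = (2 + 1) * 16 = 48
  // bytes for two entries plus the null terminator.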

 private:
  ArtMethod* GetMethod(size_t index, size_t pointer_size) const {
    if (pointer_size == 8) {
      return reinterpret_cast<ArtMethod*>(static_cast<uintptr_t>(data64_[index]));
    } else {
      DCHECK_EQ(pointer_size, 4u);
      return reinterpret_cast<ArtMethod*>(static_cast<uintptr_t>(data32_[index]));
    }
  }

  void SetMethod(size_t index, size_t pointer_size, ArtMethod* method) {
    if (pointer_size == 8) {
      data64_[index] = dchecked_integral_cast<uint64_t>(reinterpret_cast<uintptr_t>(method));
    } else {
      DCHECK_EQ(pointer_size, 4u);
      data32_[index] = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(method));
    }
  }

  // Array of entries that the assembly stubs will iterate over. Note that this is
  // not fixed size, and we allocate data prior to calling the constructor
  // of ImtConflictTable.
  union {
    uint32_t data32_[0];
    uint64_t data64_[0];
  };

  DISALLOW_COPY_AND_ASSIGN(ImtConflictTable);
};
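
// A minimal usage sketch for growing a conflict table, assuming a hypothetical
// `Alloc` that returns zero-initialized storage. Because the entry array is
// variable-length, memory must be allocated before the constructor runs:
//
//   size_t size = ImtConflictTable::ComputeSizeWithOneMoreEntry(old_table, 8u);
//   void* storage = Alloc(size);
//   auto* new_table =
//       new (storage) ImtConflictTable(old_table, iface_method, impl_method, 8u);
//   conflict_method->SetImtConflictTable(new_table, 8u);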

class ArtMethod FINAL {
 public:
  ArtMethod() : access_flags_(0), dex_code_item_offset_(0), dex_method_index_(0),
      method_index_(0) { }

  ArtMethod(ArtMethod* src, size_t image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClass() SHARED_REQUIRES(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
      SHARED_REQUIRES(Locks::mutator_lock_);

  void SetDeclaringClass(mirror::Class *new_declaring_class)
      SHARED_REQUIRES(Locks::mutator_lock_);

  bool CASDeclaringClass(mirror::Class* expected_class, mirror::Class* desired_class)
      SHARED_REQUIRES(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  // Note: GetAccessFlags acquires the mutator lock in debug mode to check that it is not called for
  // a proxy method.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE uint32_t GetAccessFlags();

  void SetAccessFlags(uint32_t new_access_flags) {
    // Not called within a transaction.
    access_flags_ = new_access_flags;
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor.
  bool IsConstructor() {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer.
  bool IsClassInitializer() {
    return IsConstructor() && IsStatic();
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() {
    constexpr uint32_t synchronized_flags = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized_flags) != 0;
  }

  bool IsFinal() {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsCopied() {
    const bool copied = (GetAccessFlags() & kAccCopied) != 0;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda() || IsDefaultConflicting()) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() {
    return !IsAbstract() && !IsDefaultConflicting();
  }

  bool IsCompilable() {
    return (GetAccessFlags() & kAccCompileDontBother) == 0;
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked; attempting to do so throws an
  // IncompatibleClassChangeError.
  bool IsDefaultConflicting() {
    return (GetAccessFlags() & kAccDefaultConflict) != 0u;
  }

  // This is set by the class linker.
  bool IsDefault() {
    return (GetAccessFlags() & kAccDefault) != 0;
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsNative() {
    return (GetAccessFlags<kReadBarrierOption>() & kAccNative) != 0;
  }

  bool IsFastNative() {
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  bool IsAbstract() {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsProxyMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool SkipAccessChecks() {
    return (GetAccessFlags() & kAccSkipAccessChecks) != 0;
  }

  void SetSkipAccessChecks() {
    DCHECK(!SkipAccessChecks());
    SetAccessFlags(GetAccessFlags() | kAccSkipAccessChecks);
  }

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) SHARED_REQUIRES(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError or AbstractMethodError). Only call if !IsInvokable().
  void ThrowInvocationTimeError() SHARED_REQUIRES(Locks::mutator_lock_);

  uint16_t GetMethodIndex() SHARED_REQUIRES(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() SHARED_REQUIRES(Locks::mutator_lock_);

  size_t GetVtableIndex() SHARED_REQUIRES(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) SHARED_REQUIRES(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static MemberOffset DexMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_method_index_);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() {
    return dex_code_item_offset_;
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    // Not called within a transaction.
    dex_code_item_offset_ = new_code_off;
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const StringPiece& shorty);
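
  // Illustrative example (assuming long/double each occupy two registers and
  // the receiver is not counted): for shorty "VJI", i.e. void f(long, int),
  // NumArgRegisters returns 2 + 1 = 3.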

  ALWAYS_INLINE uint32_t GetDexMethodIndex() SHARED_REQUIRES(Locks::mutator_lock_);

  void SetDexMethodIndex(uint32_t new_idx) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  ALWAYS_INLINE ArtMethod** GetDexCacheResolvedMethods(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index,
                                               ArtMethod* new_method,
                                               size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  ALWAYS_INLINE void SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasDexCacheResolvedMethods(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod* other, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod** other_cache, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  template <bool kWithCheck = true>
  mirror::Class* GetDexCacheResolvedType(uint32_t type_idx, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void SetDexCacheResolvedTypes(GcRoot<mirror::Class>* new_dex_cache_types, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasDexCacheResolvedTypes(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedTypes(ArtMethod* other, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedTypes(GcRoot<mirror::Class>* other_cache, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Get the Class* from the type index into this method's dex cache.
  mirror::Class* GetClassFromTypeIndex(uint16_t type_idx, bool resolve, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) SHARED_REQUIRES(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return DexFile::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      SHARED_REQUIRES(Locks::mutator_lock_);
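
  // Illustrative call sketch (assuming args_size is in bytes and a long is
  // split low/high across two 32-bit slots). For a static method whose shorty
  // is "VJ", i.e. void f(long):
  //
  //   uint32_t args[2] = { static_cast<uint32_t>(value),
  //                        static_cast<uint32_t>(value >> 32) };
  //   JValue result;
  //   method->Invoke(self, args, sizeof(args), &result, "VJ");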

  const void* GetEntryPointFromQuickCompiledCode() {
    return GetEntryPointFromQuickCompiledCodePtrSize(sizeof(void*));
  }
  ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(size_t pointer_size) {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              sizeof(void*));
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, size_t pointer_size) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code, pointer_size);
  }

  void RegisterNative(const void* native_method, bool is_fast)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void UnregisterNative() SHARED_REQUIRES(Locks::mutator_lock_);

  static MemberOffset DexCacheResolvedMethodsOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, dex_cache_resolved_methods_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset DexCacheResolvedTypesOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, dex_cache_resolved_types_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset EntryPointFromJniOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_jni_) / sizeof(void*) * pointer_size);
  }

  static MemberOffset EntryPointFromQuickCompiledCodeOffset(size_t pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*) * pointer_size);
  }

  ProfilingInfo* GetProfilingInfo(size_t pointer_size) {
    return reinterpret_cast<ProfilingInfo*>(GetEntryPointFromJniPtrSize(pointer_size));
  }

  ImtConflictTable* GetImtConflictTable(size_t pointer_size) {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetEntryPointFromJniPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, size_t pointer_size) {
    SetEntryPointFromJniPtrSize(table, pointer_size);
  }

  ALWAYS_INLINE void SetProfilingInfo(ProfilingInfo* info) {
    SetEntryPointFromJniPtrSize(info, sizeof(void*));
  }

  ALWAYS_INLINE void SetProfilingInfoPtrSize(ProfilingInfo* info, size_t pointer_size) {
    SetEntryPointFromJniPtrSize(info, pointer_size);
  }

  static MemberOffset ProfilingInfoOffset() {
    return EntryPointFromJniOffset(sizeof(void*));
  }

  void* GetEntryPointFromJni() {
    return GetEntryPointFromJniPtrSize(sizeof(void*));
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(size_t pointer_size) {
    return GetNativePointer<void*>(EntryPointFromJniOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint) {
    DCHECK(IsNative());
    SetEntryPointFromJniPtrSize(entrypoint, sizeof(void*));
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, size_t pointer_size) {
    SetNativePointer(EntryPointFromJniOffset(pointer_size), entrypoint, pointer_size);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore doesn't adhere to normal
  // conventions for a method of managed code? Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod();

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool IsResolutionMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() SHARED_REQUIRES(Locks::mutator_lock_);

  MethodReference ToMethodReference() SHARED_REQUIRES(Locks::mutator_lock_) {
    return MethodReference(GetDexFile(), GetDexMethodIndex());
  }

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, size_t pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetShorty() SHARED_REQUIRES(Locks::mutator_lock_) {
    uint32_t unused_length;
    return GetShorty(&unused_length);
  }

  const char* GetShorty(uint32_t* out_length) SHARED_REQUIRES(Locks::mutator_lock_);

  const Signature GetSignature() SHARED_REQUIRES(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() SHARED_REQUIRES(Locks::mutator_lock_);

  mirror::String* GetNameAsString(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::CodeItem* GetCodeItem() SHARED_REQUIRES(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(uint16_t type_idx, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::ProtoId& GetPrototype() SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::TypeList* GetParameterTypeList() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() SHARED_REQUIRES(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() SHARED_REQUIRES(Locks::mutator_lock_);

  const DexFile::ClassDef& GetClassDef() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() SHARED_REQUIRES(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(uint16_t type_idx)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // May cause thread suspension due to GetClassFromTypeIdx calling ResolveType;
  // this has caused a large number of bugs at call sites.
  mirror::Class* GetReturnType(bool resolve, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  mirror::ClassLoader* GetClassLoader() SHARED_REQUIRES(Locks::mutator_lock_);

  mirror::DexCache* GetDexCache() SHARED_REQUIRES(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static size_t Size(size_t pointer_size) {
    return RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * pointer_size;
  }
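
  // Worked example (illustrative, assuming GcRoot<> is a 32-bit compressed
  // reference): on a 64-bit target the fields up to ptr_sized_fields_ take
  // 4 + 4 + 4 + 4 + 2 + 2 = 20 bytes, RoundUp(20, 8) = 24, and the four
  // pointer-sized fields add 4 * 8 = 32, so Size(8) = 56.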

  // Alignment of an instance of this native class.
  static size_t Alignment(size_t pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return pointer_size;
  }

  void CopyFrom(ArtMethod* src, size_t image_pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  ALWAYS_INLINE GcRoot<mirror::Class>* GetDexCacheResolvedTypes(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Note: hotness_count_ updates are non-atomic, but the count doesn't need to be precise.
  // Also, given that the counter is only 16 bits wide, we can expect wrap-around in some
  // situations. Consumers of hotness_count_ must be able to deal with that.
  uint16_t IncrementCounter() {
    return ++hotness_count_;
  }

  void ClearCounter() {
    hotness_count_ = 0;
  }

  void SetCounter(int16_t hotness_count) {
    hotness_count_ = hotness_count;
  }

  uint16_t GetCounter() const {
    return hotness_count_;
  }

  const uint8_t* GetQuickenedInfo() SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods will return null for this method, as they are not oat based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns whether the method has any compiled code, JIT or AOT.
  bool HasAnyCompiledCode() SHARED_REQUIRES(Locks::mutator_lock_);

  // Update heap objects and non-entrypoint pointers by the passed in visitor for image relocation.
  // Does not use read barrier.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor, size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Update entry points by passing them through the visitor.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, size_t pointer_size);

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Offset to the CodeItem.
  uint32_t dex_code_item_offset_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is
  // into the declaringClass.directMethods; for virtual methods, the vtable; and for
  // interface methods, the ifTable.
  uint16_t method_index_;

  // The hotness we measure for this method. Managed by the interpreter. Not atomic, as we allow
  // missing increments: if the method is hot, we will see it eventually.
  uint16_t hotness_count_;

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  // PACKED(4) is necessary for the correctness of
  // RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size).
  struct PACKED(4) PtrSizedFields {
    // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
    ArtMethod** dex_cache_resolved_methods_;

    // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
    GcRoot<mirror::Class>* dex_cache_resolved_types_;

    // Pointer to JNI function registered to this method, or a function to resolve the JNI function,
    // or the profiling data for non-native methods, or an ImtConflictTable.
    void* entry_point_from_jni_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  static size_t PtrSizedFieldsOffset(size_t pointer_size) {
    // Round up to pointer size for padding field.
    return RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size);
  }

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, size_t pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    DCHECK(ValidPointerSize(pointer_size)) << pointer_size;
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == sizeof(uint32_t)) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, size_t pointer_size) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    DCHECK(ValidPointerSize(pointer_size)) << pointer_size;
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == sizeof(uint32_t)) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_