// art_method.cc revision 77a48ae01bbc5b05ca009cf09e2fcb53e4c8ff23
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "art_method.h"
18
19#include "arch/context.h"
20#include "art_field-inl.h"
21#include "art_method-inl.h"
22#include "base/stringpiece.h"
23#include "class_linker-inl.h"
24#include "debugger.h"
25#include "dex_file-inl.h"
26#include "dex_instruction.h"
27#include "entrypoints/entrypoint_utils.h"
28#include "entrypoints/runtime_asm_entrypoints.h"
29#include "gc/accounting/card_table-inl.h"
30#include "interpreter/interpreter.h"
31#include "jit/jit.h"
32#include "jit/jit_code_cache.h"
33#include "jit/profiling_info.h"
34#include "jni_internal.h"
35#include "mapping_table.h"
36#include "mirror/abstract_method.h"
37#include "mirror/class-inl.h"
38#include "mirror/object_array-inl.h"
39#include "mirror/object-inl.h"
40#include "mirror/string.h"
41#include "oat_file-inl.h"
42#include "scoped_thread_state_change.h"
43#include "well_known_classes.h"
44
45namespace art {
46
47extern "C" void art_quick_invoke_stub(ArtMethod*, uint32_t*, uint32_t, Thread*, JValue*,
48                                      const char*);
49extern "C" void art_quick_invoke_static_stub(ArtMethod*, uint32_t*, uint32_t, Thread*, JValue*,
50                                             const char*);
51
52ArtMethod* ArtMethod::FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
53                                          jobject jlr_method) {
54  auto* abstract_method = soa.Decode<mirror::AbstractMethod*>(jlr_method);
55  DCHECK(abstract_method != nullptr);
56  return abstract_method->GetArtMethod();
57}
58
59mirror::String* ArtMethod::GetNameAsString(Thread* self) {
60  CHECK(!IsProxyMethod());
61  StackHandleScope<1> hs(self);
62  Handle<mirror::DexCache> dex_cache(hs.NewHandle(GetDexCache()));
63  auto* dex_file = dex_cache->GetDexFile();
64  uint32_t dex_method_idx = GetDexMethodIndex();
65  const DexFile::MethodId& method_id = dex_file->GetMethodId(dex_method_idx);
66  return Runtime::Current()->GetClassLinker()->ResolveString(*dex_file, method_id.name_idx_,
67                                                             dex_cache);
68}
69
70InvokeType ArtMethod::GetInvokeType() {
71  // TODO: kSuper?
72  if (GetDeclaringClass()->IsInterface()) {
73    return kInterface;
74  } else if (IsStatic()) {
75    return kStatic;
76  } else if (IsDirect()) {
77    return kDirect;
78  } else {
79    return kVirtual;
80  }
81}
82
83size_t ArtMethod::NumArgRegisters(const StringPiece& shorty) {
84  CHECK_LE(1U, shorty.length());
85  uint32_t num_registers = 0;
86  for (size_t i = 1; i < shorty.length(); ++i) {
87    char ch = shorty[i];
88    if (ch == 'D' || ch == 'J') {
89      num_registers += 2;
90    } else {
91      num_registers += 1;
92    }
93  }
94  return num_registers;
95}
96
// Returns true if the two methods have the same name and signature (prototype).
// Thread suspension is asserted to be disallowed for the duration, since raw
// dex file data is being compared.
static bool HasSameNameAndSignature(ArtMethod* method1, ArtMethod* method2)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(Thread::Current(), "HasSameNameAndSignature");
  const DexFile* dex_file = method1->GetDexFile();
  const DexFile::MethodId& mid = dex_file->GetMethodId(method1->GetDexMethodIndex());
  if (method1->GetDexCache() == method2->GetDexCache()) {
    // Fast path: same dex cache implies same dex file, so the raw name/proto
    // indices are directly comparable.
    const DexFile::MethodId& mid2 = dex_file->GetMethodId(method2->GetDexMethodIndex());
    return mid.name_idx_ == mid2.name_idx_ && mid.proto_idx_ == mid2.proto_idx_;
  }
  // Slow path: different dex files, compare by string. Check the name first
  // since it is the cheaper comparison.
  const DexFile* dex_file2 = method2->GetDexFile();
  const DexFile::MethodId& mid2 = dex_file2->GetMethodId(method2->GetDexMethodIndex());
  if (!DexFileStringEquals(dex_file, mid.name_idx_, dex_file2, mid2.name_idx_)) {
    return false;  // Name mismatch.
  }
  return dex_file->GetMethodSignature(mid) == dex_file2->GetMethodSignature(mid2);
}
113
114ArtMethod* ArtMethod::FindOverriddenMethod(size_t pointer_size) {
115  if (IsStatic()) {
116    return nullptr;
117  }
118  mirror::Class* declaring_class = GetDeclaringClass();
119  mirror::Class* super_class = declaring_class->GetSuperClass();
120  uint16_t method_index = GetMethodIndex();
121  ArtMethod* result = nullptr;
122  // Did this method override a super class method? If so load the result from the super class'
123  // vtable
124  if (super_class->HasVTable() && method_index < super_class->GetVTableLength()) {
125    result = super_class->GetVTableEntry(method_index, pointer_size);
126  } else {
127    // Method didn't override superclass method so search interfaces
128    if (IsProxyMethod()) {
129      result = mirror::DexCache::GetElementPtrSize(GetDexCacheResolvedMethods(pointer_size),
130                                                   GetDexMethodIndex(),
131                                                   pointer_size);
132      CHECK_EQ(result,
133               Runtime::Current()->GetClassLinker()->FindMethodForProxy(GetDeclaringClass(), this));
134    } else {
135      mirror::IfTable* iftable = GetDeclaringClass()->GetIfTable();
136      for (size_t i = 0; i < iftable->Count() && result == nullptr; i++) {
137        mirror::Class* interface = iftable->GetInterface(i);
138        for (size_t j = 0; j < interface->NumVirtualMethods(); ++j) {
139          ArtMethod* interface_method = interface->GetVirtualMethod(j, pointer_size);
140          if (HasSameNameAndSignature(
141              this, interface_method->GetInterfaceMethodIfProxy(sizeof(void*)))) {
142            result = interface_method;
143            break;
144          }
145        }
146      }
147    }
148  }
149  DCHECK(result == nullptr || HasSameNameAndSignature(
150      GetInterfaceMethodIfProxy(sizeof(void*)), result->GetInterfaceMethodIfProxy(sizeof(void*))));
151  return result;
152}
153
154uint32_t ArtMethod::FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
155                                                     uint32_t name_and_signature_idx) {
156  const DexFile* dexfile = GetDexFile();
157  const uint32_t dex_method_idx = GetDexMethodIndex();
158  const DexFile::MethodId& mid = dexfile->GetMethodId(dex_method_idx);
159  const DexFile::MethodId& name_and_sig_mid = other_dexfile.GetMethodId(name_and_signature_idx);
160  DCHECK_STREQ(dexfile->GetMethodName(mid), other_dexfile.GetMethodName(name_and_sig_mid));
161  DCHECK_EQ(dexfile->GetMethodSignature(mid), other_dexfile.GetMethodSignature(name_and_sig_mid));
162  if (dexfile == &other_dexfile) {
163    return dex_method_idx;
164  }
165  const char* mid_declaring_class_descriptor = dexfile->StringByTypeIdx(mid.class_idx_);
166  const DexFile::StringId* other_descriptor =
167      other_dexfile.FindStringId(mid_declaring_class_descriptor);
168  if (other_descriptor != nullptr) {
169    const DexFile::TypeId* other_type_id =
170        other_dexfile.FindTypeId(other_dexfile.GetIndexForStringId(*other_descriptor));
171    if (other_type_id != nullptr) {
172      const DexFile::MethodId* other_mid = other_dexfile.FindMethodId(
173          *other_type_id, other_dexfile.GetStringId(name_and_sig_mid.name_idx_),
174          other_dexfile.GetProtoId(name_and_sig_mid.proto_idx_));
175      if (other_mid != nullptr) {
176        return other_dexfile.GetIndexForMethodId(*other_mid);
177      }
178    }
179  }
180  return DexFile::kDexNoIndex;
181}
182
// Map a native (quick code) return |pc| back to a dex pc in this method.
// Returns DexFile::kDexNoIndex when no mapping exists; aborts instead when
// |abort_on_failure| is set.
uint32_t ArtMethod::ToDexPc(const uintptr_t pc, bool abort_on_failure) {
  const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
  // Work in terms of the offset from the method's entry point.
  uint32_t sought_offset = pc - reinterpret_cast<uintptr_t>(entry_point);
  if (IsOptimized(sizeof(void*))) {
    // Optimizing-compiler code: look the native pc offset up in the stack maps.
    CodeInfo code_info = GetOptimizedCodeInfo();
    StackMapEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(sought_offset, encoding);
    if (stack_map.IsValid()) {
      return stack_map.GetDexPc(encoding);
    }
  } else {
    // Quick-compiler code: consult the pc <-> dex mapping table.
    MappingTable table(entry_point != nullptr ?
        GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
    if (table.TotalSize() == 0) {
      // NOTE: Special methods (see Mir2Lir::GenSpecialCase()) have an empty mapping
      // but they have no suspend checks and, consequently, we never call ToDexPc() for them.
      DCHECK(IsNative() || IsCalleeSaveMethod() || IsProxyMethod()) << PrettyMethod(this);
      return DexFile::kDexNoIndex;   // Special no mapping case
    }
    // Assume the caller wants a pc-to-dex mapping so check here first.
    typedef MappingTable::PcToDexIterator It;
    for (It cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
      if (cur.NativePcOffset() == sought_offset) {
        return cur.DexPc();
      }
    }
    // Now check dex-to-pc mappings.
    typedef MappingTable::DexToPcIterator It2;
    for (It2 cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
      if (cur.NativePcOffset() == sought_offset) {
        return cur.DexPc();
      }
    }
  }
  if (abort_on_failure) {
      LOG(FATAL) << "Failed to find Dex offset for PC offset " << reinterpret_cast<void*>(sought_offset)
             << "(PC " << reinterpret_cast<void*>(pc) << ", entry_point=" << entry_point
             << " current entry_point=" << GetQuickOatEntryPoint(sizeof(void*))
             << ") in " << PrettyMethod(this);
  }
  return DexFile::kDexNoIndex;
}
225
// Map a |dex_pc| to a native (quick code) pc in this method's compiled code.
// Returns 0 for methods with no mapping table (dex_pc must be 0 then), and
// UINTPTR_MAX on failure when |abort_on_failure| is false.
uintptr_t ArtMethod::ToNativeQuickPc(const uint32_t dex_pc, bool abort_on_failure) {
  const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
  if (IsOptimized(sizeof(void*))) {
    // Optimized code does not have a mapping table. Search for the dex-to-pc
    // mapping in stack maps.
    CodeInfo code_info = GetOptimizedCodeInfo();
    StackMapEncoding encoding = code_info.ExtractEncoding();

    // Assume the caller needs the mapping for a catch handler. If there are
    // multiple stack maps for this dex_pc, it will hit the catch stack map first.
    StackMap stack_map = code_info.GetCatchStackMapForDexPc(dex_pc, encoding);
    if (stack_map.IsValid()) {
      return reinterpret_cast<uintptr_t>(entry_point) + stack_map.GetNativePcOffset(encoding);
    }
  } else {
    // Quick-compiler code: consult the dex <-> pc mapping table.
    MappingTable table(entry_point != nullptr ?
        GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
    if (table.TotalSize() == 0) {
      DCHECK_EQ(dex_pc, 0U);
      return 0;   // Special no mapping/pc == 0 case
    }
    // Assume the caller wants a dex-to-pc mapping so check here first.
    typedef MappingTable::DexToPcIterator It;
    for (It cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
      if (cur.DexPc() == dex_pc) {
        return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
      }
    }
    // Now check pc-to-dex mappings.
    typedef MappingTable::PcToDexIterator It2;
    for (It2 cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
      if (cur.DexPc() == dex_pc) {
        return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
      }
    }
  }

  if (abort_on_failure) {
    LOG(FATAL) << "Failed to find native offset for dex pc 0x" << std::hex << dex_pc
               << " in " << PrettyMethod(this);
  }
  return UINTPTR_MAX;
}
269
// Find the dex pc of the catch handler in this method that applies to
// |exception_type| thrown at |dex_pc|. Returns DexFile::kDexNoIndex if no
// handler matches. *has_no_move_exception is set to true when the found
// handler does not begin with a MOVE_EXCEPTION instruction.
uint32_t ArtMethod::FindCatchBlock(Handle<mirror::Class> exception_type,
                                   uint32_t dex_pc, bool* has_no_move_exception) {
  const DexFile::CodeItem* code_item = GetCodeItem();
  // Set aside the exception while we resolve its type.
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  self->ClearException();
  // Default to handler not found.
  uint32_t found_dex_pc = DexFile::kDexNoIndex;
  // Iterate over the catch handlers associated with dex_pc.
  size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
  for (CatchHandlerIterator it(*code_item, dex_pc); it.HasNext(); it.Next()) {
    uint16_t iter_type_idx = it.GetHandlerTypeIndex();
    // Catch all case
    if (iter_type_idx == DexFile::kDexNoIndex16) {
      found_dex_pc = it.GetHandlerAddress();
      break;
    }
    // Does this catch exception type apply?
    mirror::Class* iter_exception_type = GetClassFromTypeIndex(iter_type_idx,
                                                               true /* resolve */,
                                                               pointer_size);
    if (UNLIKELY(iter_exception_type == nullptr)) {
      // Now have a NoClassDefFoundError as exception. Ignore in case the exception class was
      // removed by a pro-guard like tool.
      // Note: this is not RI behavior. RI would have failed when loading the class.
      self->ClearException();
      // Delete any long jump context as this routine is called during a stack walk which will
      // release its in use context at the end.
      delete self->GetLongJumpContext();
      LOG(WARNING) << "Unresolved exception class when finding catch block: "
        << DescriptorToDot(GetTypeDescriptorFromTypeIdx(iter_type_idx));
    } else if (iter_exception_type->IsAssignableFrom(exception_type.Get())) {
      found_dex_pc = it.GetHandlerAddress();
      break;
    }
  }
  if (found_dex_pc != DexFile::kDexNoIndex) {
    // Inspect the first instruction of the handler to tell the caller whether a
    // MOVE_EXCEPTION needs to be emulated.
    const Instruction* first_catch_instr =
        Instruction::At(&code_item->insns_[found_dex_pc]);
    *has_no_move_exception = (first_catch_instr->Opcode() != Instruction::MOVE_EXCEPTION);
  }
  // Put the exception back.
  if (exception.Get() != nullptr) {
    self->SetException(exception.Get());
  }
  return found_dex_pc;
}
319
// Debug aid: CHECK that |pc| lies within this method's quick compiled code.
// Methods whose entry point does not denote fixed compiled code (native,
// runtime, proxy methods; instrumentation/resolution stubs; interpreter
// bridge; freshly JITted code) are excluded before the range check.
void ArtMethod::AssertPcIsWithinQuickCode(uintptr_t pc) {
  if (IsNative() || IsRuntimeMethod() || IsProxyMethod()) {
    return;
  }
  if (pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
    return;
  }
  const void* code = GetEntryPointFromQuickCompiledCode();
  if (code == GetQuickInstrumentationEntryPoint()) {
    return;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickResolutionStub(code)) {
    return;
  }
  // If we are the JIT then we may have just compiled the method after the
  // IsQuickToInterpreterBridge check.
  jit::Jit* const jit = Runtime::Current()->GetJit();
  if (jit != nullptr &&
      jit->GetCodeCache()->ContainsCodePtr(reinterpret_cast<const void*>(code))) {
    return;
  }
  /*
   * During a stack walk, a return PC may point past-the-end of the code
   * in the case that the last instruction is a call that isn't expected to
   * return.  Thus, we check <= code + GetCodeSize().
   *
   * NOTE: For Thumb both pc and code are offset by 1 indicating the Thumb state.
   */
  CHECK(PcIsWithinQuickCode(reinterpret_cast<uintptr_t>(code), pc))
      << PrettyMethod(this)
      << " pc=" << std::hex << pc
      << " code=" << code
      << " size=" << GetCodeSize(
          EntryPointToCodePointer(reinterpret_cast<const void*>(code)));
}
357
358bool ArtMethod::IsEntrypointInterpreter() {
359  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
360  const void* oat_quick_code = class_linker->GetOatMethodQuickCodeFor(this);
361  return oat_quick_code == nullptr || oat_quick_code != GetEntryPointFromQuickCompiledCode();
362}
363
364const void* ArtMethod::GetQuickOatEntryPoint(size_t pointer_size) {
365  if (IsAbstract() || IsRuntimeMethod() || IsProxyMethod()) {
366    return nullptr;
367  }
368  Runtime* runtime = Runtime::Current();
369  ClassLinker* class_linker = runtime->GetClassLinker();
370  const void* code = runtime->GetInstrumentation()->GetQuickCodeFor(this, pointer_size);
371  // On failure, instead of null we get the quick-generic-jni-trampoline for native method
372  // indicating the generic JNI, or the quick-to-interpreter-bridge (but not the trampoline)
373  // for non-native methods.
374  if (class_linker->IsQuickToInterpreterBridge(code) ||
375      class_linker->IsQuickGenericJniStub(code)) {
376    return nullptr;
377  }
378  return code;
379}
380
381#ifndef NDEBUG
// Debug-only: compute the offset of |pc| from |quick_entry_point|, after
// checking that the entry point is this method's current instrumented quick
// code and not the interpreter bridge.
uintptr_t ArtMethod::NativeQuickPcOffset(const uintptr_t pc, const void* quick_entry_point) {
  CHECK_NE(quick_entry_point, GetQuickToInterpreterBridge());
  CHECK_EQ(quick_entry_point,
           Runtime::Current()->GetInstrumentation()->GetQuickCodeFor(this, sizeof(void*)));
  return pc - reinterpret_cast<uintptr_t>(quick_entry_point);
}
388#endif
389
// Invoke this method from runtime/native code. |args| are the raw argument
// words (the receiver first for non-static methods), |args_size| their size in
// bytes, |shorty| the method signature shorty. Dispatches to the interpreter
// when the runtime is not started or the debugger forces it; otherwise calls
// the quick compiled code through the invoke stubs, handling deoptimization on
// the way back.
void ArtMethod::Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result,
                       const char* shorty) {
  // Bail out early with a StackOverflowError if we are too close to the stack limit.
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEnd())) {
    ThrowStackOverflowError(self);
    return;
  }

  if (kIsDebugBuild) {
    self->AssertThreadSuspensionIsAllowable();
    CHECK_EQ(kRunnable, self->GetState());
    CHECK_STREQ(GetInterfaceMethodIfProxy(sizeof(void*))->GetShorty(), shorty);
  }

  // Push a transition back into managed code onto the linked list in thread.
  ManagedStack fragment;
  self->PushManagedStackFragment(&fragment);

  Runtime* runtime = Runtime::Current();
  // Call the invoke stub, passing everything as arguments.
  // If the runtime is not yet started or it is required by the debugger, then perform the
  // Invocation by the interpreter.
  if (UNLIKELY(!runtime->IsStarted() || Dbg::IsForcedInterpreterNeededForCalling(self, this))) {
    if (IsStatic()) {
      art::interpreter::EnterInterpreterFromInvoke(self, this, nullptr, args, result);
    } else {
      // For instance methods args[0] holds the receiver; pass it separately and
      // shift the remaining arguments.
      mirror::Object* receiver =
          reinterpret_cast<StackReference<mirror::Object>*>(&args[0])->AsMirrorPtr();
      art::interpreter::EnterInterpreterFromInvoke(self, this, receiver, args + 1, result);
    }
  } else {
    DCHECK_EQ(runtime->GetClassLinker()->GetImagePointerSize(), sizeof(void*));

    constexpr bool kLogInvocationStartAndReturn = false;
    bool have_quick_code = GetEntryPointFromQuickCompiledCode() != nullptr;
    if (LIKELY(have_quick_code)) {
      if (kLogInvocationStartAndReturn) {
        LOG(INFO) << StringPrintf(
            "Invoking '%s' quick code=%p static=%d", PrettyMethod(this).c_str(),
            GetEntryPointFromQuickCompiledCode(), static_cast<int>(IsStatic() ? 1 : 0));
      }

      // Ensure that we won't be accidentally calling quick compiled code when -Xint.
      if (kIsDebugBuild && runtime->GetInstrumentation()->IsForcedInterpretOnly()) {
        DCHECK(!runtime->UseJit());
        CHECK(IsEntrypointInterpreter())
            << "Don't call compiled code when -Xint " << PrettyMethod(this);
      }

      // Static and instance invokes use different assembly stubs.
      if (!IsStatic()) {
        (*art_quick_invoke_stub)(this, args, args_size, self, result, shorty);
      } else {
        (*art_quick_invoke_static_stub)(this, args, args_size, self, result, shorty);
      }
      if (UNLIKELY(self->GetException() == Thread::GetDeoptimizationException())) {
        // Unusual case where we were running generated code and an
        // exception was thrown to force the activations to be removed from the
        // stack. Continue execution in the interpreter.
        self->ClearException();
        ShadowFrame* shadow_frame =
            self->PopStackedShadowFrame(StackedShadowFrameType::kDeoptimizationShadowFrame);
        mirror::Throwable* pending_exception = nullptr;
        self->PopDeoptimizationContext(result, &pending_exception);
        self->SetTopOfStack(nullptr);
        self->SetTopOfShadowStack(shadow_frame);

        // Restore the exception that was pending before deoptimization then interpret the
        // deoptimized frames.
        if (pending_exception != nullptr) {
          self->SetException(pending_exception);
        }
        interpreter::EnterInterpreterFromDeoptimize(self, shadow_frame, result);
      }
      if (kLogInvocationStartAndReturn) {
        LOG(INFO) << StringPrintf("Returned '%s' quick code=%p", PrettyMethod(this).c_str(),
                                  GetEntryPointFromQuickCompiledCode());
      }
    } else {
      // No quick code: report and return a zeroed result.
      LOG(INFO) << "Not invoking '" << PrettyMethod(this) << "' code=null";
      if (result != nullptr) {
        result->SetJ(0);
      }
    }
  }

  // Pop transition.
  self->PopManagedStackFragment(fragment);
}
477
478// Counts the number of references in the parameter list of the corresponding method.
479// Note: Thus does _not_ include "this" for non-static methods.
480static uint32_t GetNumberOfReferenceArgsWithoutReceiver(ArtMethod* method)
481    SHARED_REQUIRES(Locks::mutator_lock_) {
482  uint32_t shorty_len;
483  const char* shorty = method->GetShorty(&shorty_len);
484  uint32_t refs = 0;
485  for (uint32_t i = 1; i < shorty_len ; ++i) {
486    if (shorty[i] == 'L') {
487      refs++;
488    }
489  }
490  return refs;
491}
492
// Compute the quick frame layout (frame size and core/FP spill masks) for this
// method, handling the stub-backed cases (abstract, runtime, proxy and generic
// JNI methods) that have no regular compiled frame of their own.
QuickMethodFrameInfo ArtMethod::GetQuickFrameInfo() {
  Runtime* runtime = Runtime::Current();

  if (UNLIKELY(IsAbstract())) {
    // Abstract methods run through a stub using the refs-and-args callee-save frame.
    return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
  }

  // This goes before IsProxyMethod since runtime methods have a null declaring class.
  if (UNLIKELY(IsRuntimeMethod())) {
    return runtime->GetRuntimeMethodFrameInfo(this);
  }

  // For Proxy method we add special handling for the direct method case  (there is only one
  // direct method - constructor). Direct method is cloned from original
  // java.lang.reflect.Proxy class together with code and as a result it is executed as usual
  // quick compiled method without any stubs. So the frame info should be returned as it is a
  // quick method not a stub. However, if instrumentation stubs are installed, the
  // instrumentation->GetQuickCodeFor() returns the artQuickProxyInvokeHandler instead of an
  // oat code pointer, thus we have to add a special case here.
  if (UNLIKELY(IsProxyMethod())) {
    if (IsDirect()) {
      CHECK(IsConstructor());
      return GetQuickFrameInfo(EntryPointToCodePointer(GetEntryPointFromQuickCompiledCode()));
    } else {
      return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
    }
  }

  const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(this, sizeof(void*));
  ClassLinker* class_linker = runtime->GetClassLinker();
  // On failure, instead of null we get the quick-generic-jni-trampoline for native method
  // indicating the generic JNI, or the quick-to-interpreter-bridge (but not the trampoline)
  // for non-native methods. And we really shouldn't see a failure for non-native methods here.
  DCHECK(!class_linker->IsQuickToInterpreterBridge(entry_point));

  if (class_linker->IsQuickGenericJniStub(entry_point)) {
    // Generic JNI frame.
    DCHECK(IsNative());
    // Reference arguments plus the jclass/jobject go into the handle scope.
    uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(this) + 1;
    size_t scope_size = HandleScope::SizeOf(handle_refs);
    QuickMethodFrameInfo callee_info = runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);

    // Callee saves + handle scope + method ref + alignment
    // Note: -sizeof(void*) since callee-save frame stores a whole method pointer.
    size_t frame_size = RoundUp(callee_info.FrameSizeInBytes() - sizeof(void*) +
                                sizeof(ArtMethod*) + scope_size, kStackAlignment);
    return QuickMethodFrameInfo(frame_size, callee_info.CoreSpillMask(), callee_info.FpSpillMask());
  }

  // Regular compiled method: read the frame info from the oat code header.
  const void* code_pointer = EntryPointToCodePointer(entry_point);
  return GetQuickFrameInfo(code_pointer);
}
545
// Bind |native_method| as this native method's implementation, optionally
// tagging the method as fast-native. The method must not already be
// registered as fast native.
void ArtMethod::RegisterNative(const void* native_method, bool is_fast) {
  CHECK(IsNative()) << PrettyMethod(this);
  CHECK(!IsFastNative()) << PrettyMethod(this);
  CHECK(native_method != nullptr) << PrettyMethod(this);
  // Set the fast-native access flag before publishing the code pointer.
  if (is_fast) {
    SetAccessFlags(GetAccessFlags() | kAccFastNative);
  }
  SetEntryPointFromJni(native_method);
}
555
// Drop any explicitly registered native implementation for this method.
void ArtMethod::UnregisterNative() {
  CHECK(IsNative() && !IsFastNative()) << PrettyMethod(this);
  // restore stub to lookup native pointer via dlsym
  RegisterNative(GetJniDlsymLookupStub(), false);
}
561
562bool ArtMethod::EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params) {
563  auto* dex_cache = GetDexCache();
564  auto* dex_file = dex_cache->GetDexFile();
565  const auto& method_id = dex_file->GetMethodId(GetDexMethodIndex());
566  const auto& proto_id = dex_file->GetMethodPrototype(method_id);
567  const DexFile::TypeList* proto_params = dex_file->GetProtoParameters(proto_id);
568  auto count = proto_params != nullptr ? proto_params->Size() : 0u;
569  auto param_len = params.Get() != nullptr ? params->GetLength() : 0u;
570  if (param_len != count) {
571    return false;
572  }
573  auto* cl = Runtime::Current()->GetClassLinker();
574  for (size_t i = 0; i < count; ++i) {
575    auto type_idx = proto_params->GetTypeItem(i).type_idx_;
576    auto* type = cl->ResolveType(type_idx, this);
577    if (type == nullptr) {
578      Thread::Current()->AssertPendingException();
579      return false;
580    }
581    if (type != params->GetWithoutChecks(i)) {
582      return false;
583    }
584  }
585  return true;
586}
587
588const uint8_t* ArtMethod::GetQuickenedInfo() {
589  bool found = false;
590  OatFile::OatMethod oat_method =
591      Runtime::Current()->GetClassLinker()->FindOatMethodFor(this, &found);
592  if (!found || (oat_method.GetQuickCode() != nullptr)) {
593    return nullptr;
594  }
595  return oat_method.GetVmapTable();
596}
597
// Lazily install a ProfilingInfo for this method. The pointer is published
// into the field at EntryPointFromJniOffset via a strong CAS from nullptr, so
// concurrent callers race benignly: the loser returns the winner's info.
// NOTE(review): on a lost race the ProfilingInfo allocated above is not
// visibly freed here -- presumably reclaimed by its allocator; confirm.
ProfilingInfo* ArtMethod::CreateProfilingInfo() {
  ProfilingInfo* info = ProfilingInfo::Create(this);
  MemberOffset offset = ArtMethod::EntryPointFromJniOffset(sizeof(void*));
  uintptr_t pointer = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
  if (!reinterpret_cast<Atomic<ProfilingInfo*>*>(pointer)->
          CompareExchangeStrongSequentiallyConsistent(nullptr, info)) {
    // CAS failed: another thread installed an info first; return theirs.
    return GetProfilingInfo();
  } else {
    return info;
  }
}
609
610}  // namespace art
611