art_method.cc revision 40c8141b48275afd1680b99878782848ab3a6761
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method.h"

#include "arch/context.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/stringpiece.h"
#include "dex_file-inl.h"
#include "dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jni_internal.h"
#include "mapping_table.h"
#include "mirror/abstract_method.h"
#include "mirror/class-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "mirror/string.h"
#include "oat_file-inl.h"
#include "scoped_thread_state_change.h"
#include "well_known_classes.h"

namespace art {

extern "C" void art_quick_invoke_stub(ArtMethod*, uint32_t*, uint32_t, Thread*, JValue*,
                                      const char*);
#if defined(__LP64__) || defined(__arm__) || defined(__i386__)
extern "C" void art_quick_invoke_static_stub(ArtMethod*, uint32_t*, uint32_t, Thread*, JValue*,
                                             const char*);
#endif

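// Decodes a java.lang.reflect.Method/Constructor reference (a mirror::AbstractMethod) and
// returns the ArtMethod* it wraps.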
ArtMethod* ArtMethod::FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                          jobject jlr_method) {
  auto* abstract_method = soa.Decode<mirror::AbstractMethod*>(jlr_method);
  DCHECK(abstract_method != nullptr);
  return abstract_method->GetArtMethod();
}

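// Resolves this method's name through the class linker, using the declaring dex cache, and
// returns it as a managed String. Not valid for proxy methods.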
mirror::String* ArtMethod::GetNameAsString(Thread* self) {
  CHECK(!IsProxyMethod());
  StackHandleScope<1> hs(self);
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(GetDexCache()));
  auto* dex_file = dex_cache->GetDexFile();
  uint32_t dex_method_idx = GetDexMethodIndex();
  const DexFile::MethodId& method_id = dex_file->GetMethodId(dex_method_idx);
  return Runtime::Current()->GetClassLinker()->ResolveString(*dex_file, method_id.name_idx_,
                                                             dex_cache);
}

InvokeType ArtMethod::GetInvokeType() {
  // TODO: kSuper?
  if (GetDeclaringClass()->IsInterface()) {
    return kInterface;
  } else if (IsStatic()) {
    return kStatic;
  } else if (IsDirect()) {
    return kDirect;
  } else {
    return kVirtual;
  }
}

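// Counts the virtual registers needed for the arguments described by |shorty|, skipping the
// leading return-type character: wide types ('J' and 'D') take two registers, everything else
// takes one. For example, the shorty "VJIL" needs 2 + 1 + 1 = 4 registers.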
size_t ArtMethod::NumArgRegisters(const StringPiece& shorty) {
  CHECK_LE(1U, shorty.length());
  uint32_t num_registers = 0;
  for (size_t i = 1; i < shorty.length(); ++i) {
    char ch = shorty[i];
    if (ch == 'D' || ch == 'J') {
      num_registers += 2;
    } else {
      num_registers += 1;
    }
  }
  return num_registers;
}

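// Returns true if the two methods have the same name and signature. When both methods share a
// dex cache, the comparison is done on dex indices alone; otherwise the name strings and method
// signatures are compared across the two dex files.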
static bool HasSameNameAndSignature(ArtMethod* method1, ArtMethod* method2)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(Thread::Current(), "HasSameNameAndSignature");
  const DexFile* dex_file = method1->GetDexFile();
  const DexFile::MethodId& mid = dex_file->GetMethodId(method1->GetDexMethodIndex());
  if (method1->GetDexCache() == method2->GetDexCache()) {
    const DexFile::MethodId& mid2 = dex_file->GetMethodId(method2->GetDexMethodIndex());
    return mid.name_idx_ == mid2.name_idx_ && mid.proto_idx_ == mid2.proto_idx_;
  }
  const DexFile* dex_file2 = method2->GetDexFile();
  const DexFile::MethodId& mid2 = dex_file2->GetMethodId(method2->GetDexMethodIndex());
  if (!DexFileStringEquals(dex_file, mid.name_idx_, dex_file2, mid2.name_idx_)) {
    return false;  // Name mismatch.
  }
  return dex_file->GetMethodSignature(mid) == dex_file2->GetMethodSignature(mid2);
}

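// Returns the method that this method overrides: the superclass vtable entry at the same method
// index if there is one, otherwise a matching interface method (resolved specially for proxy
// methods). Returns null for static methods and when nothing is overridden.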
ArtMethod* ArtMethod::FindOverriddenMethod(size_t pointer_size) {
  if (IsStatic()) {
    return nullptr;
  }
  mirror::Class* declaring_class = GetDeclaringClass();
  mirror::Class* super_class = declaring_class->GetSuperClass();
  uint16_t method_index = GetMethodIndex();
  ArtMethod* result = nullptr;
  // Did this method override a superclass method? If so, load the result from the superclass's
  // vtable.
  if (super_class->HasVTable() && method_index < super_class->GetVTableLength()) {
    result = super_class->GetVTableEntry(method_index, pointer_size);
  } else {
    // The method didn't override a superclass method, so search the interfaces.
    if (IsProxyMethod()) {
      result = GetDexCacheResolvedMethods()->GetElementPtrSize<ArtMethod*>(
          GetDexMethodIndex(), pointer_size);
      CHECK_EQ(result,
               Runtime::Current()->GetClassLinker()->FindMethodForProxy(GetDeclaringClass(), this));
    } else {
      mirror::IfTable* iftable = GetDeclaringClass()->GetIfTable();
      for (size_t i = 0; i < iftable->Count() && result == nullptr; i++) {
        mirror::Class* interface = iftable->GetInterface(i);
        for (size_t j = 0; j < interface->NumVirtualMethods(); ++j) {
          ArtMethod* interface_method = interface->GetVirtualMethod(j, pointer_size);
          if (HasSameNameAndSignature(
              this, interface_method->GetInterfaceMethodIfProxy(sizeof(void*)))) {
            result = interface_method;
            break;
          }
        }
      }
    }
  }
  DCHECK(result == nullptr || HasSameNameAndSignature(
      GetInterfaceMethodIfProxy(sizeof(void*)), result->GetInterfaceMethodIfProxy(sizeof(void*))));
  return result;
}

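// Translates this method's dex method index into the corresponding index in |other_dexfile|.
// |name_and_signature_idx| must identify a method id in |other_dexfile| with the same name and
// signature as this method. Returns DexFile::kDexNoIndex if the declaring class, name, or proto
// cannot be found in |other_dexfile|.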
uint32_t ArtMethod::FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                                     uint32_t name_and_signature_idx) {
  const DexFile* dexfile = GetDexFile();
  const uint32_t dex_method_idx = GetDexMethodIndex();
  const DexFile::MethodId& mid = dexfile->GetMethodId(dex_method_idx);
  const DexFile::MethodId& name_and_sig_mid = other_dexfile.GetMethodId(name_and_signature_idx);
  DCHECK_STREQ(dexfile->GetMethodName(mid), other_dexfile.GetMethodName(name_and_sig_mid));
  DCHECK_EQ(dexfile->GetMethodSignature(mid), other_dexfile.GetMethodSignature(name_and_sig_mid));
  if (dexfile == &other_dexfile) {
    return dex_method_idx;
  }
  const char* mid_declaring_class_descriptor = dexfile->StringByTypeIdx(mid.class_idx_);
  const DexFile::StringId* other_descriptor =
      other_dexfile.FindStringId(mid_declaring_class_descriptor);
  if (other_descriptor != nullptr) {
    const DexFile::TypeId* other_type_id =
        other_dexfile.FindTypeId(other_dexfile.GetIndexForStringId(*other_descriptor));
    if (other_type_id != nullptr) {
      const DexFile::MethodId* other_mid = other_dexfile.FindMethodId(
          *other_type_id, other_dexfile.GetStringId(name_and_sig_mid.name_idx_),
          other_dexfile.GetProtoId(name_and_sig_mid.proto_idx_));
      if (other_mid != nullptr) {
        return other_dexfile.GetIndexForMethodId(*other_mid);
      }
    }
  }
  return DexFile::kDexNoIndex;
}

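// Maps a native PC within this method's quick code back to a dex PC, using the optimized
// CodeInfo stack maps when available and the legacy mapping table otherwise. Returns
// DexFile::kDexNoIndex (or aborts, if |abort_on_failure|) when no mapping is found.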
uint32_t ArtMethod::ToDexPc(const uintptr_t pc, bool abort_on_failure) {
  const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
  uint32_t sought_offset = pc - reinterpret_cast<uintptr_t>(entry_point);
  if (IsOptimized(sizeof(void*))) {
    CodeInfo code_info = GetOptimizedCodeInfo();
    StackMapEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(sought_offset, encoding);
    if (stack_map.IsValid()) {
      return stack_map.GetDexPc(encoding);
    }
  } else {
    MappingTable table(entry_point != nullptr ?
        GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
    if (table.TotalSize() == 0) {
      // NOTE: Special methods (see Mir2Lir::GenSpecialCase()) have an empty mapping
      // but they have no suspend checks and, consequently, we never call ToDexPc() for them.
      DCHECK(IsNative() || IsCalleeSaveMethod() || IsProxyMethod()) << PrettyMethod(this);
      return DexFile::kDexNoIndex;   // Special no-mapping case.
    }
    // Assume the caller wants a pc-to-dex mapping, so check that first.
    typedef MappingTable::PcToDexIterator It;
    for (It cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
      if (cur.NativePcOffset() == sought_offset) {
        return cur.DexPc();
      }
    }
    // Now check dex-to-pc mappings.
    typedef MappingTable::DexToPcIterator It2;
    for (It2 cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
      if (cur.NativePcOffset() == sought_offset) {
        return cur.DexPc();
      }
    }
  }
  if (abort_on_failure) {
    LOG(FATAL) << "Failed to find Dex offset for PC offset " << reinterpret_cast<void*>(sought_offset)
               << " (PC " << reinterpret_cast<void*>(pc) << ", entry_point=" << entry_point
               << " current entry_point=" << GetQuickOatEntryPoint(sizeof(void*))
               << ") in " << PrettyMethod(this);
  }
  return DexFile::kDexNoIndex;
}

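// Maps a dex PC to a native PC within this method's quick code via the mapping table (returning
// 0 for methods with no mapping table). Returns UINTPTR_MAX, or aborts if |abort_on_failure|,
// when no mapping is found.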
uintptr_t ArtMethod::ToNativeQuickPc(const uint32_t dex_pc, bool abort_on_failure) {
  const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
  MappingTable table(entry_point != nullptr ?
      GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
  if (table.TotalSize() == 0) {
    DCHECK_EQ(dex_pc, 0U);
    return 0;   // Special no-mapping/pc == 0 case.
  }
  // Assume the caller wants a dex-to-pc mapping, so check that first.
  typedef MappingTable::DexToPcIterator It;
  for (It cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
    if (cur.DexPc() == dex_pc) {
      return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
    }
  }
  // Now check pc-to-dex mappings.
  typedef MappingTable::PcToDexIterator It2;
  for (It2 cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
    if (cur.DexPc() == dex_pc) {
      return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
    }
  }
  if (abort_on_failure) {
    LOG(FATAL) << "Failed to find native offset for dex pc 0x" << std::hex << dex_pc
               << " in " << PrettyMethod(this);
  }
  return UINTPTR_MAX;
}

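// Searches the catch handlers covering |dex_pc| for one that handles |exception_type|, setting
// aside (and later restoring) any pending exception while types are resolved. Returns the
// handler address, or DexFile::kDexNoIndex if none applies. *has_no_move_exception is set to
// true when the found handler does not begin with a MOVE_EXCEPTION instruction.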
uint32_t ArtMethod::FindCatchBlock(Handle<mirror::Class> exception_type,
                                   uint32_t dex_pc, bool* has_no_move_exception) {
  const DexFile::CodeItem* code_item = GetCodeItem();
  // Set aside the exception while we resolve its type.
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  self->ClearException();
  // Default to handler not found.
  uint32_t found_dex_pc = DexFile::kDexNoIndex;
  // Iterate over the catch handlers associated with dex_pc.
  for (CatchHandlerIterator it(*code_item, dex_pc); it.HasNext(); it.Next()) {
    uint16_t iter_type_idx = it.GetHandlerTypeIndex();
    // Catch-all case.
    if (iter_type_idx == DexFile::kDexNoIndex16) {
      found_dex_pc = it.GetHandlerAddress();
      break;
    }
    // Does this catch exception type apply?
    mirror::Class* iter_exception_type = GetClassFromTypeIndex(iter_type_idx, true);
    if (UNLIKELY(iter_exception_type == nullptr)) {
      // We now have a NoClassDefFoundError as the pending exception. Ignore it, in case the
      // exception class was removed by a ProGuard-like tool.
      // Note: this is not RI behavior. The RI would have failed when loading the class.
      self->ClearException();
      // Delete any long jump context, as this routine is called during a stack walk which will
      // release its in-use context at the end.
      delete self->GetLongJumpContext();
      LOG(WARNING) << "Unresolved exception class when finding catch block: "
        << DescriptorToDot(GetTypeDescriptorFromTypeIdx(iter_type_idx));
    } else if (iter_exception_type->IsAssignableFrom(exception_type.Get())) {
      found_dex_pc = it.GetHandlerAddress();
      break;
    }
  }
  if (found_dex_pc != DexFile::kDexNoIndex) {
    const Instruction* first_catch_instr =
        Instruction::At(&code_item->insns_[found_dex_pc]);
    *has_no_move_exception = (first_catch_instr->Opcode() != Instruction::MOVE_EXCEPTION);
  }
  // Put the exception back.
  if (exception.Get() != nullptr) {
    self->SetException(exception.Get());
  }
  return found_dex_pc;
}

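// Asserts (via CHECK) that |pc| lies within this method's quick code, tolerating native,
// runtime, and proxy methods, instrumentation entry/exit points, interpreter and resolution
// stubs, and code that was just JIT-compiled.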
void ArtMethod::AssertPcIsWithinQuickCode(uintptr_t pc) {
  if (IsNative() || IsRuntimeMethod() || IsProxyMethod()) {
    return;
  }
  if (pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
    return;
  }
  const void* code = GetEntryPointFromQuickCompiledCode();
  if (code == GetQuickInstrumentationEntryPoint()) {
    return;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickResolutionStub(code)) {
    return;
  }
  // If the JIT is in use, it may have compiled the method just after the
  // IsQuickToInterpreterBridge check above.
  jit::Jit* const jit = Runtime::Current()->GetJit();
  if (jit != nullptr &&
      jit->GetCodeCache()->ContainsCodePtr(reinterpret_cast<const void*>(code))) {
    return;
  }
  /*
   * During a stack walk, a return PC may point past-the-end of the code
   * in the case that the last instruction is a call that isn't expected to
   * return.  Thus, we check <= code + GetCodeSize().
   *
   * NOTE: For Thumb both pc and code are offset by 1, indicating the Thumb state.
   */
  CHECK(PcIsWithinQuickCode(reinterpret_cast<uintptr_t>(code), pc))
      << PrettyMethod(this)
      << " pc=" << std::hex << pc
      << " code=" << code
      << " size=" << GetCodeSize(
          EntryPointToCodePointer(reinterpret_cast<const void*>(code)));
}

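// Returns true when the method has no oat-compiled quick code, or when its current quick entry
// point does not point at that code (so invoking it will not run the oat-compiled code).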
bool ArtMethod::IsEntrypointInterpreter() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  const void* oat_quick_code = class_linker->GetOatMethodQuickCodeFor(this);
  return oat_quick_code == nullptr || oat_quick_code != GetEntryPointFromQuickCompiledCode();
}

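// Returns the quick oat entry point for this method, or null for abstract, runtime, and proxy
// methods, and for methods whose current code is the quick-to-interpreter bridge or the generic
// JNI stub.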
const void* ArtMethod::GetQuickOatEntryPoint(size_t pointer_size) {
  if (IsAbstract() || IsRuntimeMethod() || IsProxyMethod()) {
    return nullptr;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  const void* code = runtime->GetInstrumentation()->GetQuickCodeFor(this, pointer_size);
  // On failure, instead of null we get the quick-generic-jni trampoline for native methods
  // (indicating generic JNI), or the quick-to-interpreter bridge (but not the trampoline)
  // for non-native methods.
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickGenericJniStub(code)) {
    return nullptr;
  }
  return code;
}

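// Debug-only helper: returns the offset of |pc| from |quick_entry_point|, after checking that
// the entry point is not the interpreter bridge and matches what instrumentation reports for
// this method.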
#ifndef NDEBUG
uintptr_t ArtMethod::NativeQuickPcOffset(const uintptr_t pc, const void* quick_entry_point) {
  CHECK_NE(quick_entry_point, GetQuickToInterpreterBridge());
  CHECK_EQ(quick_entry_point,
           Runtime::Current()->GetInstrumentation()->GetQuickCodeFor(this, sizeof(void*)));
  return pc - reinterpret_cast<uintptr_t>(quick_entry_point);
}
#endif

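// Invokes this method with the packed argument array |args| (including the receiver for
// non-static methods). Runs through the interpreter when the runtime is not started or the
// debugger forces interpretation; otherwise calls the quick invoke stubs, falling back to
// interpretation if the deoptimization exception is pending afterwards.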
void ArtMethod::Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result,
                       const char* shorty) {
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEnd())) {
    ThrowStackOverflowError(self);
    return;
  }

  if (kIsDebugBuild) {
    self->AssertThreadSuspensionIsAllowable();
    CHECK_EQ(kRunnable, self->GetState());
    CHECK_STREQ(GetInterfaceMethodIfProxy(sizeof(void*))->GetShorty(), shorty);
  }

  // Push a transition back into managed code onto the thread's linked list of stack fragments.
  ManagedStack fragment;
  self->PushManagedStackFragment(&fragment);

  Runtime* runtime = Runtime::Current();
  // Call the invoke stub, passing everything as arguments.
  // If the runtime is not yet started, or the debugger requires it, perform the invocation via
  // the interpreter.
  if (UNLIKELY(!runtime->IsStarted() || Dbg::IsForcedInterpreterNeededForCalling(self, this))) {
    if (IsStatic()) {
      art::interpreter::EnterInterpreterFromInvoke(self, this, nullptr, args, result);
    } else {
      mirror::Object* receiver =
          reinterpret_cast<StackReference<mirror::Object>*>(&args[0])->AsMirrorPtr();
      art::interpreter::EnterInterpreterFromInvoke(self, this, receiver, args + 1, result);
    }
  } else {
    DCHECK_EQ(runtime->GetClassLinker()->GetImagePointerSize(), sizeof(void*));

    constexpr bool kLogInvocationStartAndReturn = false;
    bool have_quick_code = GetEntryPointFromQuickCompiledCode() != nullptr;
    if (LIKELY(have_quick_code)) {
      if (kLogInvocationStartAndReturn) {
        LOG(INFO) << StringPrintf(
            "Invoking '%s' quick code=%p static=%d", PrettyMethod(this).c_str(),
            GetEntryPointFromQuickCompiledCode(), static_cast<int>(IsStatic() ? 1 : 0));
      }

      // Ensure that we won't accidentally call quick compiled code when running with -Xint.
      if (kIsDebugBuild && runtime->GetInstrumentation()->IsForcedInterpretOnly()) {
        DCHECK(!runtime->UseJit());
        CHECK(IsEntrypointInterpreter())
            << "Don't call compiled code when -Xint " << PrettyMethod(this);
      }

#if defined(__LP64__) || defined(__arm__) || defined(__i386__)
      if (!IsStatic()) {
        (*art_quick_invoke_stub)(this, args, args_size, self, result, shorty);
      } else {
        (*art_quick_invoke_static_stub)(this, args, args_size, self, result, shorty);
      }
#else
      (*art_quick_invoke_stub)(this, args, args_size, self, result, shorty);
#endif
      if (UNLIKELY(self->GetException() == Thread::GetDeoptimizationException())) {
        // Unusual case where we were running generated code and an
        // exception was thrown to force the activations to be removed from the
        // stack. Continue execution in the interpreter.
        self->ClearException();
        ShadowFrame* shadow_frame =
            self->PopStackedShadowFrame(StackedShadowFrameType::kDeoptimizationShadowFrame);
        result->SetJ(self->PopDeoptimizationReturnValue().GetJ());
        self->SetTopOfStack(nullptr);
        self->SetTopOfShadowStack(shadow_frame);
        interpreter::EnterInterpreterFromDeoptimize(self, shadow_frame, result);
      }
      if (kLogInvocationStartAndReturn) {
        LOG(INFO) << StringPrintf("Returned '%s' quick code=%p", PrettyMethod(this).c_str(),
                                  GetEntryPointFromQuickCompiledCode());
      }
    } else {
      LOG(INFO) << "Not invoking '" << PrettyMethod(this) << "' code=null";
      if (result != nullptr) {
        result->SetJ(0);
      }
    }
  }

  // Pop the transition.
  self->PopManagedStackFragment(fragment);
}

// Counts the number of references in the parameter list of the corresponding method.
// Note: This does _not_ include "this" for non-static methods.
static uint32_t GetNumberOfReferenceArgsWithoutReceiver(ArtMethod* method)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  uint32_t shorty_len;
  const char* shorty = method->GetShorty(&shorty_len);
  uint32_t refs = 0;
  for (uint32_t i = 1; i < shorty_len; ++i) {
    if (shorty[i] == 'L') {
      refs++;
    }
  }
  return refs;
}

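// Computes the quick frame layout (frame size and core/FP spill masks) for this method, with
// special cases for abstract methods, runtime methods, proxy methods, and native methods running
// through the generic JNI stub.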
QuickMethodFrameInfo ArtMethod::GetQuickFrameInfo() {
  Runtime* runtime = Runtime::Current();

  if (UNLIKELY(IsAbstract())) {
    return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
  }

  // This goes before IsProxyMethod since runtime methods have a null declaring class.
  if (UNLIKELY(IsRuntimeMethod())) {
    return runtime->GetRuntimeMethodFrameInfo(this);
  }

  // For proxy methods we add special handling for the direct-method case (there is only one
  // direct method: the constructor). The direct method is cloned from the original
  // java.lang.reflect.Proxy class together with its code, and as a result it is executed as a
  // regular quick-compiled method without any stubs, so the frame info should be returned as for
  // a quick method, not a stub. However, if instrumentation stubs are installed,
  // instrumentation->GetQuickCodeFor() returns artQuickProxyInvokeHandler instead of an oat code
  // pointer, so we have to add a special case here.
  if (UNLIKELY(IsProxyMethod())) {
    if (IsDirect()) {
      CHECK(IsConstructor());
      return GetQuickFrameInfo(EntryPointToCodePointer(GetEntryPointFromQuickCompiledCode()));
    } else {
      return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
    }
  }

  const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(this, sizeof(void*));
  ClassLinker* class_linker = runtime->GetClassLinker();
  // On failure, instead of null we get the quick-generic-jni trampoline for native methods
  // (indicating generic JNI), or the quick-to-interpreter bridge (but not the trampoline)
  // for non-native methods. And we really shouldn't see a failure for non-native methods here.
  DCHECK(!class_linker->IsQuickToInterpreterBridge(entry_point));

  if (class_linker->IsQuickGenericJniStub(entry_point)) {
    // Generic JNI frame.
    DCHECK(IsNative());
    uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(this) + 1;
    size_t scope_size = HandleScope::SizeOf(handle_refs);
    QuickMethodFrameInfo callee_info = runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);

    // Callee saves + handle scope + method ref + alignment.
    // Note: -sizeof(void*) since the callee-save frame stores a whole method pointer.
    size_t frame_size = RoundUp(callee_info.FrameSizeInBytes() - sizeof(void*) +
                                sizeof(ArtMethod*) + scope_size, kStackAlignment);
    return QuickMethodFrameInfo(frame_size, callee_info.CoreSpillMask(), callee_info.FpSpillMask());
  }

  const void* code_pointer = EntryPointToCodePointer(entry_point);
  return GetQuickFrameInfo(code_pointer);
}

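// Binds |native_method| as this native method's JNI implementation, optionally marking it as
// fast-native. The method must be native, not already fast-native, and |native_method| must be
// non-null.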
void ArtMethod::RegisterNative(const void* native_method, bool is_fast) {
  CHECK(IsNative()) << PrettyMethod(this);
  CHECK(!IsFastNative()) << PrettyMethod(this);
  CHECK(native_method != nullptr) << PrettyMethod(this);
  if (is_fast) {
    SetAccessFlags(GetAccessFlags() | kAccFastNative);
  }
  SetEntryPointFromJni(native_method);
}

void ArtMethod::UnregisterNative() {
  CHECK(IsNative() && !IsFastNative()) << PrettyMethod(this);
  // Restore the stub that looks up the native pointer via dlsym.
  RegisterNative(GetJniDlsymLookupStub(), false);
}

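// Returns true if this method's declared parameter types, resolved from its dex file, exactly
// match the classes in |params| (both in count and in order). Returns false, with a pending
// exception, if a parameter type cannot be resolved.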
bool ArtMethod::EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params) {
  auto* dex_cache = GetDexCache();
  auto* dex_file = dex_cache->GetDexFile();
  const auto& method_id = dex_file->GetMethodId(GetDexMethodIndex());
  const auto& proto_id = dex_file->GetMethodPrototype(method_id);
  const DexFile::TypeList* proto_params = dex_file->GetProtoParameters(proto_id);
  auto count = proto_params != nullptr ? proto_params->Size() : 0u;
  auto param_len = params.Get() != nullptr ? params->GetLength() : 0u;
  if (param_len != count) {
    return false;
  }
  auto* cl = Runtime::Current()->GetClassLinker();
  for (size_t i = 0; i < count; ++i) {
    auto type_idx = proto_params->GetTypeItem(i).type_idx_;
    auto* type = cl->ResolveType(type_idx, this);
    if (type == nullptr) {
      Thread::Current()->AssertPendingException();
      return false;
    }
    if (type != params->GetWithoutChecks(i)) {
      return false;
    }
  }
  return true;
}

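// Returns the quickening info for this method, which is stored where the vmap table would
// otherwise live in the oat method; returns null if no oat method was found or the method has
// compiled quick code.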
const uint8_t* ArtMethod::GetQuickenedInfo() {
  bool found = false;
  OatFile::OatMethod oat_method =
      Runtime::Current()->GetClassLinker()->FindOatMethodFor(this, &found);
  if (!found || (oat_method.GetQuickCode() != nullptr)) {
    return nullptr;
  }
  return oat_method.GetVmapTable();
}

}  // namespace art