// art_method.cc revision ff73498a5539d87424a964265e43765e788aec44
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method.h"

#include "arch/context.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/stringpiece.h"
#include "class_linker-inl.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jni_internal.h"
#include "mapping_table.h"
#include "mirror/abstract_method.h"
#include "mirror/class-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "mirror/string.h"
#include "oat_file-inl.h"
#include "scoped_thread_state_change.h"
#include "well_known_classes.h"

namespace art {

// Trampolines into quick (compiled) code, defined elsewhere (presumably in
// architecture-specific assembly, given the extern "C" linkage — confirm in arch/*/).
// They marshal the packed uint32_t argument array into the native calling
// convention and invoke the method's quick entry point.
extern "C" void art_quick_invoke_stub(ArtMethod*, uint32_t*, uint32_t, Thread*, JValue*,
                                      const char*);
extern "C" void art_quick_invoke_static_stub(ArtMethod*, uint32_t*, uint32_t, Thread*, JValue*,
                                             const char*);

// Extracts the runtime ArtMethod* backing a java.lang.reflect.Method/Constructor
// (a mirror::AbstractMethod) referenced by the given JNI jobject.
// The caller must already be runnable (see ScopedObjectAccessAlreadyRunnable).
ArtMethod* ArtMethod::FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                          jobject jlr_method) {
  auto* abstract_method = soa.Decode<mirror::AbstractMethod*>(jlr_method);
  DCHECK(abstract_method != nullptr);
  return abstract_method->GetArtMethod();
}
// Resolves and returns this method's name as a managed java.lang.String,
// going through the class linker so the result is cached in the dex cache.
// Must not be called on proxy methods (they have no dex file of their own).
mirror::String* ArtMethod::GetNameAsString(Thread* self) {
  CHECK(!IsProxyMethod());
  StackHandleScope<1> hs(self);
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(GetDexCache()));
  auto* dex_file = dex_cache->GetDexFile();
  uint32_t dex_method_idx = GetDexMethodIndex();
  const DexFile::MethodId& method_id = dex_file->GetMethodId(dex_method_idx);
  return Runtime::Current()->GetClassLinker()->ResolveString(*dex_file, method_id.name_idx_,
                                                             dex_cache);
}

// Derives the invoke type from the declaring class and the method's access flags.
// Note: kSuper is never returned (see TODO); interface membership wins over the
// static/direct distinction.
InvokeType ArtMethod::GetInvokeType() {
  // TODO: kSuper?
  if (GetDeclaringClass()->IsInterface()) {
    return kInterface;
  } else if (IsStatic()) {
    return kStatic;
  } else if (IsDirect()) {
    return kDirect;
  } else {
    return kVirtual;
  }
}

// Counts the dex argument registers needed for the given shorty.
// shorty[0] is the return type and is skipped; wide types ('D' double,
// 'J' long) occupy two registers, everything else one.
// Note: does not include an implicit "this" register.
size_t ArtMethod::NumArgRegisters(const StringPiece& shorty) {
  CHECK_LE(1U, shorty.length());
  uint32_t num_registers = 0;
  for (size_t i = 1; i < shorty.length(); ++i) {
    char ch = shorty[i];
    if (ch == 'D' || ch == 'J') {
      num_registers += 2;
    } else {
      num_registers += 1;
    }
  }
  return num_registers;
}

// Returns true if the two methods have the same name and signature.
// Fast path: if both methods share a dex cache, their MethodIds come from the
// same dex file, so comparing name/proto indices suffices. Otherwise the name
// and signature strings are compared across the two dex files.
// Thread suspension is forbidden for the duration (raw DexFile pointers in use).
static bool HasSameNameAndSignature(ArtMethod* method1, ArtMethod* method2)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(Thread::Current(), "HasSameNameAndSignature");
  const DexFile* dex_file = method1->GetDexFile();
  const DexFile::MethodId& mid = dex_file->GetMethodId(method1->GetDexMethodIndex());
  if (method1->GetDexCache() == method2->GetDexCache()) {
    const DexFile::MethodId& mid2 = dex_file->GetMethodId(method2->GetDexMethodIndex());
    return mid.name_idx_ == mid2.name_idx_ && mid.proto_idx_ == mid2.proto_idx_;
  }
  const DexFile* dex_file2 = method2->GetDexFile();
  const DexFile::MethodId& mid2 = dex_file2->GetMethodId(method2->GetDexMethodIndex());
  if (!DexFileStringEquals(dex_file, mid.name_idx_, dex_file2, mid2.name_idx_)) {
    return false;  // Name mismatch.
  }
  return dex_file->GetMethodSignature(mid) == dex_file2->GetMethodSignature(mid2);
}

// Returns the method this method overrides (superclass vtable entry) or
// implements (interface method), or nullptr for static methods / no match.
// Proxy methods resolve through their dex cache instead of scanning interfaces.
ArtMethod* ArtMethod::FindOverriddenMethod(size_t pointer_size) {
  if (IsStatic()) {
    return nullptr;
  }
  mirror::Class* declaring_class = GetDeclaringClass();
  mirror::Class* super_class = declaring_class->GetSuperClass();
  uint16_t method_index = GetMethodIndex();
  ArtMethod* result = nullptr;
  // Did this method override a super class method? If so load the result from the super class'
  // vtable
  if (super_class->HasVTable() && method_index < super_class->GetVTableLength()) {
    result = super_class->GetVTableEntry(method_index, pointer_size);
  } else {
    // Method didn't override superclass method so search interfaces
    if (IsProxyMethod()) {
      result = GetDexCacheResolvedMethods()->GetElementPtrSize<ArtMethod*>(
          GetDexMethodIndex(), pointer_size);
      CHECK_EQ(result,
               Runtime::Current()->GetClassLinker()->FindMethodForProxy(GetDeclaringClass(), this));
    } else {
      // Linear scan over every interface's virtual methods, stopping at the
      // first name+signature match.
      mirror::IfTable* iftable = GetDeclaringClass()->GetIfTable();
      for (size_t i = 0; i < iftable->Count() && result == nullptr; i++) {
        mirror::Class* interface = iftable->GetInterface(i);
        for (size_t j = 0; j < interface->NumVirtualMethods(); ++j) {
          ArtMethod* interface_method = interface->GetVirtualMethod(j, pointer_size);
          if (HasSameNameAndSignature(
              this, interface_method->GetInterfaceMethodIfProxy(sizeof(void*)))) {
            result = interface_method;
            break;
          }
        }
      }
    }
  }
  DCHECK(result == nullptr || HasSameNameAndSignature(
      GetInterfaceMethodIfProxy(sizeof(void*)), result->GetInterfaceMethodIfProxy(sizeof(void*))));
  return result;
}

// Translates this method's dex method index into the equivalent index in
// another dex file, by looking up the declaring class descriptor, name, and
// prototype there. name_and_signature_idx must be a MethodId index in
// other_dexfile whose name and signature already match this method (DCHECKed).
// Returns DexFile::kDexNoIndex when the other dex file has no such method.
uint32_t ArtMethod::FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                                     uint32_t name_and_signature_idx) {
  const DexFile* dexfile = GetDexFile();
  const uint32_t dex_method_idx = GetDexMethodIndex();
  const DexFile::MethodId& mid = dexfile->GetMethodId(dex_method_idx);
  const DexFile::MethodId& name_and_sig_mid = other_dexfile.GetMethodId(name_and_signature_idx);
  DCHECK_STREQ(dexfile->GetMethodName(mid), other_dexfile.GetMethodName(name_and_sig_mid));
  DCHECK_EQ(dexfile->GetMethodSignature(mid), other_dexfile.GetMethodSignature(name_and_sig_mid));
  if (dexfile == &other_dexfile) {
    // Same dex file: the index is already valid there.
    return dex_method_idx;
  }
  const char* mid_declaring_class_descriptor = dexfile->StringByTypeIdx(mid.class_idx_);
  const DexFile::StringId* other_descriptor =
      other_dexfile.FindStringId(mid_declaring_class_descriptor);
  if (other_descriptor != nullptr) {
    const DexFile::TypeId* other_type_id =
        other_dexfile.FindTypeId(other_dexfile.GetIndexForStringId(*other_descriptor));
    if (other_type_id != nullptr) {
      const DexFile::MethodId* other_mid = other_dexfile.FindMethodId(
          *other_type_id, other_dexfile.GetStringId(name_and_sig_mid.name_idx_),
          other_dexfile.GetProtoId(name_and_sig_mid.proto_idx_));
      if (other_mid != nullptr) {
        return other_dexfile.GetIndexForMethodId(*other_mid);
      }
    }
  }
  return DexFile::kDexNoIndex;
}

// Maps a native PC inside this method's quick code back to a dex pc.
// Optimized (stack-map) code is queried via CodeInfo; otherwise the legacy
// mapping table is scanned, first pc-to-dex then dex-to-pc.
// Returns DexFile::kDexNoIndex on failure unless abort_on_failure, which is fatal.
uint32_t ArtMethod::ToDexPc(const uintptr_t pc, bool abort_on_failure) {
  const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
  uint32_t sought_offset = pc - reinterpret_cast<uintptr_t>(entry_point);
  if (IsOptimized(sizeof(void*))) {
    CodeInfo code_info = GetOptimizedCodeInfo();
    StackMapEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(sought_offset, encoding);
    if (stack_map.IsValid()) {
      return stack_map.GetDexPc(encoding);
    }
  } else {
    MappingTable table(entry_point != nullptr ?
        GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
    if (table.TotalSize() == 0) {
      // NOTE: Special methods (see Mir2Lir::GenSpecialCase()) have an empty mapping
      // but they have no suspend checks and, consequently, we never call ToDexPc() for them.
      DCHECK(IsNative() || IsCalleeSaveMethod() || IsProxyMethod()) << PrettyMethod(this);
      return DexFile::kDexNoIndex;  // Special no mapping case
    }
    // Assume the caller wants a pc-to-dex mapping so check here first.
    typedef MappingTable::PcToDexIterator It;
    for (It cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
      if (cur.NativePcOffset() == sought_offset) {
        return cur.DexPc();
      }
    }
    // Now check dex-to-pc mappings.
    typedef MappingTable::DexToPcIterator It2;
    for (It2 cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
      if (cur.NativePcOffset() == sought_offset) {
        return cur.DexPc();
      }
    }
  }
  if (abort_on_failure) {
    LOG(FATAL) << "Failed to find Dex offset for PC offset " << reinterpret_cast<void*>(sought_offset)
               << "(PC " << reinterpret_cast<void*>(pc) << ", entry_point=" << entry_point
               << " current entry_point=" << GetQuickOatEntryPoint(sizeof(void*))
               << ") in " << PrettyMethod(this);
  }
  return DexFile::kDexNoIndex;
}

// Inverse of ToDexPc for legacy mapping-table code: maps a dex pc to the
// absolute native PC (entry point + offset). An empty table is only expected
// for dex_pc 0 (DCHECKed) and yields 0. Returns UINTPTR_MAX on failure
// unless abort_on_failure, which is fatal.
// NOTE: unlike ToDexPc there is no optimized/stack-map path here — presumably
// callers only use this for mapping-table code; confirm against call sites.
uintptr_t ArtMethod::ToNativeQuickPc(const uint32_t dex_pc, bool abort_on_failure) {
  const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
  MappingTable table(entry_point != nullptr ?
      GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
  if (table.TotalSize() == 0) {
    DCHECK_EQ(dex_pc, 0U);
    return 0;  // Special no mapping/pc == 0 case
  }
  // Assume the caller wants a dex-to-pc mapping so check here first.
  typedef MappingTable::DexToPcIterator It;
  for (It cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
    if (cur.DexPc() == dex_pc) {
      return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
    }
  }
  // Now check pc-to-dex mappings.
  typedef MappingTable::PcToDexIterator It2;
  for (It2 cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
    if (cur.DexPc() == dex_pc) {
      return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
    }
  }
  if (abort_on_failure) {
    LOG(FATAL) << "Failed to find native offset for dex pc 0x" << std::hex << dex_pc
               << " in " << PrettyMethod(this);
  }
  return UINTPTR_MAX;
}

// Finds the dex pc of the catch handler covering dex_pc for the given
// exception type, or DexFile::kDexNoIndex if none applies.
// The pending exception is stashed and cleared while handler types are
// resolved (resolution itself may throw), then restored before returning.
// *has_no_move_exception is set when the handler does not start with
// MOVE_EXCEPTION (i.e. it discards the exception object).
uint32_t ArtMethod::FindCatchBlock(Handle<mirror::Class> exception_type,
                                   uint32_t dex_pc, bool* has_no_move_exception) {
  const DexFile::CodeItem* code_item = GetCodeItem();
  // Set aside the exception while we resolve its type.
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  self->ClearException();
  // Default to handler not found.
  uint32_t found_dex_pc = DexFile::kDexNoIndex;
  // Iterate over the catch handlers associated with dex_pc.
  for (CatchHandlerIterator it(*code_item, dex_pc); it.HasNext(); it.Next()) {
    uint16_t iter_type_idx = it.GetHandlerTypeIndex();
    // Catch all case
    if (iter_type_idx == DexFile::kDexNoIndex16) {
      found_dex_pc = it.GetHandlerAddress();
      break;
    }
    // Does this catch exception type apply?
    mirror::Class* iter_exception_type = GetClassFromTypeIndex(iter_type_idx, true);
    if (UNLIKELY(iter_exception_type == nullptr)) {
      // Now have a NoClassDefFoundError as exception. Ignore in case the exception class was
      // removed by a pro-guard like tool.
      // Note: this is not RI behavior. RI would have failed when loading the class.
      self->ClearException();
      // Delete any long jump context as this routine is called during a stack walk which will
      // release its in use context at the end.
      delete self->GetLongJumpContext();
      LOG(WARNING) << "Unresolved exception class when finding catch block: "
                   << DescriptorToDot(GetTypeDescriptorFromTypeIdx(iter_type_idx));
    } else if (iter_exception_type->IsAssignableFrom(exception_type.Get())) {
      found_dex_pc = it.GetHandlerAddress();
      break;
    }
  }
  if (found_dex_pc != DexFile::kDexNoIndex) {
    const Instruction* first_catch_instr =
        Instruction::At(&code_item->insns_[found_dex_pc]);
    *has_no_move_exception = (first_catch_instr->Opcode() != Instruction::MOVE_EXCEPTION);
  }
  // Put the exception back.
  if (exception.Get() != nullptr) {
    self->SetException(exception.Get());
  }
  return found_dex_pc;
}

// Debug check that pc lies within this method's quick code.
// Several entry points legitimately bypass the range check: native/runtime/
// proxy methods, the instrumentation exit/entry trampolines, the
// quick-to-interpreter bridge, the resolution stub, and freshly JIT-compiled
// code (which may have replaced the entry point after we read it).
void ArtMethod::AssertPcIsWithinQuickCode(uintptr_t pc) {
  if (IsNative() || IsRuntimeMethod() || IsProxyMethod()) {
    return;
  }
  if (pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
    return;
  }
  const void* code = GetEntryPointFromQuickCompiledCode();
  if (code == GetQuickInstrumentationEntryPoint()) {
    return;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickResolutionStub(code)) {
    return;
  }
  // If we are the JIT then we may have just compiled the method after the
  // IsQuickToInterpreterBridge check.
  jit::Jit* const jit = Runtime::Current()->GetJit();
  if (jit != nullptr &&
      jit->GetCodeCache()->ContainsCodePtr(reinterpret_cast<const void*>(code))) {
    return;
  }
  /*
   * During a stack walk, a return PC may point past-the-end of the code
   * in the case that the last instruction is a call that isn't expected to
   * return. Thus, we check <= code + GetCodeSize().
   *
   * NOTE: For Thumb both pc and code are offset by 1 indicating the Thumb state.
   */
  CHECK(PcIsWithinQuickCode(reinterpret_cast<uintptr_t>(code), pc))
      << PrettyMethod(this)
      << " pc=" << std::hex << pc
      << " code=" << code
      << " size=" << GetCodeSize(
          EntryPointToCodePointer(reinterpret_cast<const void*>(code)));
}

// Returns true when the current quick entry point is NOT the method's oat
// code — i.e. we would enter the interpreter (or a stub) rather than
// compiled oat code. Also true when no oat code exists at all.
bool ArtMethod::IsEntrypointInterpreter() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  const void* oat_quick_code = class_linker->GetOatMethodQuickCodeFor(this);
  return oat_quick_code == nullptr || oat_quick_code != GetEntryPointFromQuickCompiledCode();
}

// Returns the quick code entry point for this method, or nullptr when there
// is none (abstract/runtime/proxy methods, or when the instrumentation
// returns only a bridge/trampoline stub instead of real code).
const void* ArtMethod::GetQuickOatEntryPoint(size_t pointer_size) {
  if (IsAbstract() || IsRuntimeMethod() || IsProxyMethod()) {
    return nullptr;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  const void* code = runtime->GetInstrumentation()->GetQuickCodeFor(this, pointer_size);
  // On failure, instead of null we get the quick-generic-jni-trampoline for native method
  // indicating the generic JNI, or the quick-to-interpreter-bridge (but not the trampoline)
  // for non-native methods.
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickGenericJniStub(code)) {
    return nullptr;
  }
  return code;
}

#ifndef NDEBUG
// Debug-only: computes pc's offset from the given quick entry point, after
// checking that the entry point is real code (not the interpreter bridge)
// and matches what the instrumentation currently reports for this method.
uintptr_t ArtMethod::NativeQuickPcOffset(const uintptr_t pc, const void* quick_entry_point) {
  CHECK_NE(quick_entry_point, GetQuickToInterpreterBridge());
  CHECK_EQ(quick_entry_point,
           Runtime::Current()->GetInstrumentation()->GetQuickCodeFor(this, sizeof(void*)));
  return pc - reinterpret_cast<uintptr_t>(quick_entry_point);
}
#endif

// Invokes this method on the current thread with the packed argument array
// `args` (`args_size` bytes; for non-static methods args[0] is the receiver
// reference). The result, if any, is written to `result` according to
// `shorty`. Dispatches to the interpreter when the runtime is not started or
// the debugger forces it; otherwise calls through the quick invoke stubs.
// Also handles the deoptimization path where compiled code bails back into
// the interpreter mid-execution.
void ArtMethod::Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result,
                       const char* shorty) {
  // Bail out early rather than overflowing the native stack below.
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEnd())) {
    ThrowStackOverflowError(self);
    return;
  }

  if (kIsDebugBuild) {
    self->AssertThreadSuspensionIsAllowable();
    CHECK_EQ(kRunnable, self->GetState());
    CHECK_STREQ(GetInterfaceMethodIfProxy(sizeof(void*))->GetShorty(), shorty);
  }

  // Push a transition back into managed code onto the linked list in thread.
  ManagedStack fragment;
  self->PushManagedStackFragment(&fragment);

  Runtime* runtime = Runtime::Current();
  // Call the invoke stub, passing everything as arguments.
  // If the runtime is not yet started or it is required by the debugger, then perform the
  // Invocation by the interpreter.
  if (UNLIKELY(!runtime->IsStarted() || Dbg::IsForcedInterpreterNeededForCalling(self, this))) {
    if (IsStatic()) {
      art::interpreter::EnterInterpreterFromInvoke(self, this, nullptr, args, result);
    } else {
      // Non-static: args[0] holds the receiver; pass the remaining args.
      mirror::Object* receiver =
          reinterpret_cast<StackReference<mirror::Object>*>(&args[0])->AsMirrorPtr();
      art::interpreter::EnterInterpreterFromInvoke(self, this, receiver, args + 1, result);
    }
  } else {
    DCHECK_EQ(runtime->GetClassLinker()->GetImagePointerSize(), sizeof(void*));

    constexpr bool kLogInvocationStartAndReturn = false;
    bool have_quick_code = GetEntryPointFromQuickCompiledCode() != nullptr;
    if (LIKELY(have_quick_code)) {
      if (kLogInvocationStartAndReturn) {
        LOG(INFO) << StringPrintf(
            "Invoking '%s' quick code=%p static=%d", PrettyMethod(this).c_str(),
            GetEntryPointFromQuickCompiledCode(), static_cast<int>(IsStatic() ? 1 : 0));
      }

      // Ensure that we won't be accidentally calling quick compiled code when -Xint.
      if (kIsDebugBuild && runtime->GetInstrumentation()->IsForcedInterpretOnly()) {
        DCHECK(!runtime->UseJit());
        CHECK(IsEntrypointInterpreter())
            << "Don't call compiled code when -Xint " << PrettyMethod(this);
      }

      if (!IsStatic()) {
        (*art_quick_invoke_stub)(this, args, args_size, self, result, shorty);
      } else {
        (*art_quick_invoke_static_stub)(this, args, args_size, self, result, shorty);
      }
      if (UNLIKELY(self->GetException() == Thread::GetDeoptimizationException())) {
        // Unusual case where we were running generated code and an
        // exception was thrown to force the activations to be removed from the
        // stack. Continue execution in the interpreter.
        self->ClearException();
        ShadowFrame* shadow_frame =
            self->PopStackedShadowFrame(StackedShadowFrameType::kDeoptimizationShadowFrame);
        result->SetJ(self->PopDeoptimizationReturnValue().GetJ());
        self->SetTopOfStack(nullptr);
        self->SetTopOfShadowStack(shadow_frame);
        interpreter::EnterInterpreterFromDeoptimize(self, shadow_frame, result);
      }
      if (kLogInvocationStartAndReturn) {
        LOG(INFO) << StringPrintf("Returned '%s' quick code=%p", PrettyMethod(this).c_str(),
                                  GetEntryPointFromQuickCompiledCode());
      }
    } else {
      // No quick code at all: report and return a zeroed result.
      LOG(INFO) << "Not invoking '" << PrettyMethod(this) << "' code=null";
      if (result != nullptr) {
        result->SetJ(0);
      }
    }
  }

  // Pop transition.
  self->PopManagedStackFragment(fragment);
}

// Counts the number of references in the parameter list of the corresponding method.
// Note: Thus does _not_ include "this" for non-static methods.
static uint32_t GetNumberOfReferenceArgsWithoutReceiver(ArtMethod* method)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  uint32_t shorty_len;
  const char* shorty = method->GetShorty(&shorty_len);
  uint32_t refs = 0;
  // shorty[0] is the return type; 'L' marks a reference parameter.
  for (uint32_t i = 1; i < shorty_len ; ++i) {
    if (shorty[i] == 'L') {
      refs++;
    }
  }
  return refs;
}

// Computes the quick frame layout (frame size and core/FP spill masks) for
// this method, handling the special cases that have no ordinary compiled
// frame: abstract methods, runtime methods, proxies, and generic JNI.
QuickMethodFrameInfo ArtMethod::GetQuickFrameInfo() {
  Runtime* runtime = Runtime::Current();

  if (UNLIKELY(IsAbstract())) {
    return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
  }

  // This goes before IsProxyMethod since runtime methods have a null declaring class.
  if (UNLIKELY(IsRuntimeMethod())) {
    return runtime->GetRuntimeMethodFrameInfo(this);
  }

  // For Proxy method we add special handling for the direct method case (there is only one
  // direct method - constructor). Direct method is cloned from original
  // java.lang.reflect.Proxy class together with code and as a result it is executed as usual
  // quick compiled method without any stubs. So the frame info should be returned as it is a
  // quick method not a stub. However, if instrumentation stubs are installed, the
  // instrumentation->GetQuickCodeFor() returns the artQuickProxyInvokeHandler instead of an
  // oat code pointer, thus we have to add a special case here.
  if (UNLIKELY(IsProxyMethod())) {
    if (IsDirect()) {
      CHECK(IsConstructor());
      return GetQuickFrameInfo(EntryPointToCodePointer(GetEntryPointFromQuickCompiledCode()));
    } else {
      return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
    }
  }

  const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(this, sizeof(void*));
  ClassLinker* class_linker = runtime->GetClassLinker();
  // On failure, instead of null we get the quick-generic-jni-trampoline for native method
  // indicating the generic JNI, or the quick-to-interpreter-bridge (but not the trampoline)
  // for non-native methods. And we really shouldn't see a failure for non-native methods here.
  DCHECK(!class_linker->IsQuickToInterpreterBridge(entry_point));

  if (class_linker->IsQuickGenericJniStub(entry_point)) {
    // Generic JNI frame.
    DCHECK(IsNative());
    // +1 handle for the receiver/class reference alongside the reference args.
    uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(this) + 1;
    size_t scope_size = HandleScope::SizeOf(handle_refs);
    QuickMethodFrameInfo callee_info = runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);

    // Callee saves + handle scope + method ref + alignment
    // Note: -sizeof(void*) since callee-save frame stores a whole method pointer.
    size_t frame_size = RoundUp(callee_info.FrameSizeInBytes() - sizeof(void*) +
                                sizeof(ArtMethod*) + scope_size, kStackAlignment);
    return QuickMethodFrameInfo(frame_size, callee_info.CoreSpillMask(), callee_info.FpSpillMask());
  }

  const void* code_pointer = EntryPointToCodePointer(entry_point);
  return GetQuickFrameInfo(code_pointer);
}

// Binds a native implementation to this JNI method, optionally flagging it
// as "fast native". Must not already be registered as fast native.
void ArtMethod::RegisterNative(const void* native_method, bool is_fast) {
  CHECK(IsNative()) << PrettyMethod(this);
  CHECK(!IsFastNative()) << PrettyMethod(this);
  CHECK(native_method != nullptr) << PrettyMethod(this);
  if (is_fast) {
    SetAccessFlags(GetAccessFlags() | kAccFastNative);
  }
  SetEntryPointFromJni(native_method);
}

// Undoes RegisterNative by re-installing the dlsym lookup stub.
void ArtMethod::UnregisterNative() {
  CHECK(IsNative() && !IsFastNative()) << PrettyMethod(this);
  // restore stub to lookup native pointer via dlsym
  RegisterNative(GetJniDlsymLookupStub(), false);
}

// Returns true if this method's parameter types exactly match (in order)
// the given array of classes. Resolves each parameter type through the
// class linker; a resolution failure leaves the pending exception set and
// returns false.
bool ArtMethod::EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params) {
  auto* dex_cache = GetDexCache();
  auto* dex_file = dex_cache->GetDexFile();
  const auto& method_id = dex_file->GetMethodId(GetDexMethodIndex());
  const auto& proto_id = dex_file->GetMethodPrototype(method_id);
  const DexFile::TypeList* proto_params = dex_file->GetProtoParameters(proto_id);
  auto count = proto_params != nullptr ? proto_params->Size() : 0u;
  auto param_len = params.Get() != nullptr ? params->GetLength() : 0u;
  if (param_len != count) {
    return false;
  }
  auto* cl = Runtime::Current()->GetClassLinker();
  for (size_t i = 0; i < count; ++i) {
    auto type_idx = proto_params->GetTypeItem(i).type_idx_;
    auto* type = cl->ResolveType(type_idx, this);
    if (type == nullptr) {
      Thread::Current()->AssertPendingException();
      return false;
    }
    if (type != params->GetWithoutChecks(i)) {
      return false;
    }
  }
  return true;
}

// Returns the quickened-instruction info for this method, or nullptr when
// the method has compiled quick code (or no oat method was found). For
// quickened-only methods the info is stored in the oat vmap table slot.
const uint8_t* ArtMethod::GetQuickenedInfo() {
  bool found = false;
  OatFile::OatMethod oat_method =
      Runtime::Current()->GetClassLinker()->FindOatMethodFor(this, &found);
  if (!found || (oat_method.GetQuickCode() != nullptr)) {
    return nullptr;
  }
  return oat_method.GetVmapTable();
}

}  // namespace art