// interpreter.cc revision ef41db7a3f322a1feb305fdb457410c4cea94d00
1/* 2 * Copyright (C) 2012 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include "interpreter.h" 18 19#include <limits> 20 21#include "common_throws.h" 22#include "interpreter_common.h" 23#include "interpreter_mterp_impl.h" 24#include "interpreter_switch_impl.h" 25#include "jvalue-inl.h" 26#include "mirror/string-inl.h" 27#include "scoped_thread_state_change-inl.h" 28#include "ScopedLocalRef.h" 29#include "stack.h" 30#include "unstarted_runtime.h" 31#include "mterp/mterp.h" 32#include "jit/jit.h" 33#include "jit/jit_code_cache.h" 34 35namespace art { 36namespace interpreter { 37 38ALWAYS_INLINE static ObjPtr<mirror::Object> ObjArg(uint32_t arg) 39 REQUIRES_SHARED(Locks::mutator_lock_) { 40 return ObjPtr<mirror::Object>(reinterpret_cast<mirror::Object*>(arg)); 41} 42 43static void InterpreterJni(Thread* self, 44 ArtMethod* method, 45 const StringPiece& shorty, 46 ObjPtr<mirror::Object> receiver, 47 uint32_t* args, 48 JValue* result) 49 REQUIRES_SHARED(Locks::mutator_lock_) { 50 // TODO: The following enters JNI code using a typedef-ed function rather than the JNI compiler, 51 // it should be removed and JNI compiled stubs used instead. 
52 ScopedObjectAccessUnchecked soa(self); 53 if (method->IsStatic()) { 54 if (shorty == "L") { 55 typedef jobject (fntype)(JNIEnv*, jclass); 56 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 57 ScopedLocalRef<jclass> klass(soa.Env(), 58 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 59 jobject jresult; 60 { 61 ScopedThreadStateChange tsc(self, kNative); 62 jresult = fn(soa.Env(), klass.get()); 63 } 64 result->SetL(soa.Decode<mirror::Object>(jresult)); 65 } else if (shorty == "V") { 66 typedef void (fntype)(JNIEnv*, jclass); 67 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 68 ScopedLocalRef<jclass> klass(soa.Env(), 69 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 70 ScopedThreadStateChange tsc(self, kNative); 71 fn(soa.Env(), klass.get()); 72 } else if (shorty == "Z") { 73 typedef jboolean (fntype)(JNIEnv*, jclass); 74 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 75 ScopedLocalRef<jclass> klass(soa.Env(), 76 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 77 ScopedThreadStateChange tsc(self, kNative); 78 result->SetZ(fn(soa.Env(), klass.get())); 79 } else if (shorty == "BI") { 80 typedef jbyte (fntype)(JNIEnv*, jclass, jint); 81 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 82 ScopedLocalRef<jclass> klass(soa.Env(), 83 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 84 ScopedThreadStateChange tsc(self, kNative); 85 result->SetB(fn(soa.Env(), klass.get(), args[0])); 86 } else if (shorty == "II") { 87 typedef jint (fntype)(JNIEnv*, jclass, jint); 88 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 89 ScopedLocalRef<jclass> klass(soa.Env(), 90 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 91 ScopedThreadStateChange tsc(self, kNative); 92 result->SetI(fn(soa.Env(), klass.get(), args[0])); 93 } else if (shorty == "LL") { 94 typedef jobject 
(fntype)(JNIEnv*, jclass, jobject); 95 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 96 ScopedLocalRef<jclass> klass(soa.Env(), 97 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 98 ScopedLocalRef<jobject> arg0(soa.Env(), 99 soa.AddLocalReference<jobject>(ObjArg(args[0]))); 100 jobject jresult; 101 { 102 ScopedThreadStateChange tsc(self, kNative); 103 jresult = fn(soa.Env(), klass.get(), arg0.get()); 104 } 105 result->SetL(soa.Decode<mirror::Object>(jresult)); 106 } else if (shorty == "IIZ") { 107 typedef jint (fntype)(JNIEnv*, jclass, jint, jboolean); 108 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 109 ScopedLocalRef<jclass> klass(soa.Env(), 110 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 111 ScopedThreadStateChange tsc(self, kNative); 112 result->SetI(fn(soa.Env(), klass.get(), args[0], args[1])); 113 } else if (shorty == "ILI") { 114 typedef jint (fntype)(JNIEnv*, jclass, jobject, jint); 115 fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>( 116 method->GetEntryPointFromJni())); 117 ScopedLocalRef<jclass> klass(soa.Env(), 118 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 119 ScopedLocalRef<jobject> arg0(soa.Env(), 120 soa.AddLocalReference<jobject>(ObjArg(args[0]))); 121 ScopedThreadStateChange tsc(self, kNative); 122 result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1])); 123 } else if (shorty == "SIZ") { 124 typedef jshort (fntype)(JNIEnv*, jclass, jint, jboolean); 125 fntype* const fn = 126 reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni())); 127 ScopedLocalRef<jclass> klass(soa.Env(), 128 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 129 ScopedThreadStateChange tsc(self, kNative); 130 result->SetS(fn(soa.Env(), klass.get(), args[0], args[1])); 131 } else if (shorty == "VIZ") { 132 typedef void (fntype)(JNIEnv*, jclass, jint, jboolean); 133 fntype* const fn = 
reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 134 ScopedLocalRef<jclass> klass(soa.Env(), 135 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 136 ScopedThreadStateChange tsc(self, kNative); 137 fn(soa.Env(), klass.get(), args[0], args[1]); 138 } else if (shorty == "ZLL") { 139 typedef jboolean (fntype)(JNIEnv*, jclass, jobject, jobject); 140 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 141 ScopedLocalRef<jclass> klass(soa.Env(), 142 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 143 ScopedLocalRef<jobject> arg0(soa.Env(), 144 soa.AddLocalReference<jobject>(ObjArg(args[0]))); 145 ScopedLocalRef<jobject> arg1(soa.Env(), 146 soa.AddLocalReference<jobject>(ObjArg(args[1]))); 147 ScopedThreadStateChange tsc(self, kNative); 148 result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get())); 149 } else if (shorty == "ZILL") { 150 typedef jboolean (fntype)(JNIEnv*, jclass, jint, jobject, jobject); 151 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 152 ScopedLocalRef<jclass> klass(soa.Env(), 153 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 154 ScopedLocalRef<jobject> arg1(soa.Env(), 155 soa.AddLocalReference<jobject>(ObjArg(args[1]))); 156 ScopedLocalRef<jobject> arg2(soa.Env(), 157 soa.AddLocalReference<jobject>(ObjArg(args[2]))); 158 ScopedThreadStateChange tsc(self, kNative); 159 result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get())); 160 } else if (shorty == "VILII") { 161 typedef void (fntype)(JNIEnv*, jclass, jint, jobject, jint, jint); 162 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 163 ScopedLocalRef<jclass> klass(soa.Env(), 164 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 165 ScopedLocalRef<jobject> arg1(soa.Env(), 166 soa.AddLocalReference<jobject>(ObjArg(args[1]))); 167 ScopedThreadStateChange tsc(self, kNative); 168 fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], 
args[3]); 169 } else if (shorty == "VLILII") { 170 typedef void (fntype)(JNIEnv*, jclass, jobject, jint, jobject, jint, jint); 171 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 172 ScopedLocalRef<jclass> klass(soa.Env(), 173 soa.AddLocalReference<jclass>(method->GetDeclaringClass())); 174 ScopedLocalRef<jobject> arg0(soa.Env(), 175 soa.AddLocalReference<jobject>(ObjArg(args[0]))); 176 ScopedLocalRef<jobject> arg2(soa.Env(), 177 soa.AddLocalReference<jobject>(ObjArg(args[2]))); 178 ScopedThreadStateChange tsc(self, kNative); 179 fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]); 180 } else { 181 LOG(FATAL) << "Do something with static native method: " << method->PrettyMethod() 182 << " shorty: " << shorty; 183 } 184 } else { 185 if (shorty == "L") { 186 typedef jobject (fntype)(JNIEnv*, jobject); 187 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 188 ScopedLocalRef<jobject> rcvr(soa.Env(), 189 soa.AddLocalReference<jobject>(receiver)); 190 jobject jresult; 191 { 192 ScopedThreadStateChange tsc(self, kNative); 193 jresult = fn(soa.Env(), rcvr.get()); 194 } 195 result->SetL(soa.Decode<mirror::Object>(jresult)); 196 } else if (shorty == "V") { 197 typedef void (fntype)(JNIEnv*, jobject); 198 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 199 ScopedLocalRef<jobject> rcvr(soa.Env(), 200 soa.AddLocalReference<jobject>(receiver)); 201 ScopedThreadStateChange tsc(self, kNative); 202 fn(soa.Env(), rcvr.get()); 203 } else if (shorty == "LL") { 204 typedef jobject (fntype)(JNIEnv*, jobject, jobject); 205 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 206 ScopedLocalRef<jobject> rcvr(soa.Env(), 207 soa.AddLocalReference<jobject>(receiver)); 208 ScopedLocalRef<jobject> arg0(soa.Env(), 209 soa.AddLocalReference<jobject>(ObjArg(args[0]))); 210 jobject jresult; 211 { 212 ScopedThreadStateChange tsc(self, kNative); 213 jresult = 
fn(soa.Env(), rcvr.get(), arg0.get()); 214 } 215 result->SetL(soa.Decode<mirror::Object>(jresult)); 216 ScopedThreadStateChange tsc(self, kNative); 217 } else if (shorty == "III") { 218 typedef jint (fntype)(JNIEnv*, jobject, jint, jint); 219 fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni()); 220 ScopedLocalRef<jobject> rcvr(soa.Env(), 221 soa.AddLocalReference<jobject>(receiver)); 222 ScopedThreadStateChange tsc(self, kNative); 223 result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1])); 224 } else { 225 LOG(FATAL) << "Do something with native method: " << method->PrettyMethod() 226 << " shorty: " << shorty; 227 } 228 } 229} 230 231enum InterpreterImplKind { 232 kSwitchImplKind, // Switch-based interpreter implementation. 233 kMterpImplKind // Assembly interpreter 234}; 235static std::ostream& operator<<(std::ostream& os, const InterpreterImplKind& rhs) { 236 os << ((rhs == kSwitchImplKind) 237 ? "Switch-based interpreter" 238 : "Asm interpreter"); 239 return os; 240} 241 242static constexpr InterpreterImplKind kInterpreterImplKind = kMterpImplKind; 243 244static inline JValue Execute( 245 Thread* self, 246 const DexFile::CodeItem* code_item, 247 ShadowFrame& shadow_frame, 248 JValue result_register, 249 bool stay_in_interpreter = false) REQUIRES_SHARED(Locks::mutator_lock_) { 250 DCHECK(!shadow_frame.GetMethod()->IsAbstract()); 251 DCHECK(!shadow_frame.GetMethod()->IsNative()); 252 if (LIKELY(shadow_frame.GetDexPC() == 0)) { // Entering the method, but not via deoptimization. 
253 if (kIsDebugBuild) { 254 self->AssertNoPendingException(); 255 } 256 instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation(); 257 ArtMethod *method = shadow_frame.GetMethod(); 258 259 if (UNLIKELY(instrumentation->HasMethodEntryListeners())) { 260 instrumentation->MethodEnterEvent(self, shadow_frame.GetThisObject(code_item->ins_size_), 261 method, 0); 262 } 263 264 if (!stay_in_interpreter) { 265 jit::Jit* jit = Runtime::Current()->GetJit(); 266 if (jit != nullptr) { 267 jit->MethodEntered(self, shadow_frame.GetMethod()); 268 if (jit->CanInvokeCompiledCode(method)) { 269 JValue result; 270 271 // Pop the shadow frame before calling into compiled code. 272 self->PopShadowFrame(); 273 ArtInterpreterToCompiledCodeBridge(self, nullptr, code_item, &shadow_frame, &result); 274 // Push the shadow frame back as the caller will expect it. 275 self->PushShadowFrame(&shadow_frame); 276 277 return result; 278 } 279 } 280 } 281 } 282 283 shadow_frame.GetMethod()->GetDeclaringClass()->AssertInitializedOrInitializingInThread(self); 284 285 // Lock counting is a special version of accessibility checks, and for simplicity and 286 // reduction of template parameters, we gate it behind access-checks mode. 287 ArtMethod* method = shadow_frame.GetMethod(); 288 DCHECK(!method->SkipAccessChecks() || !method->MustCountLocks()); 289 290 bool transaction_active = Runtime::Current()->IsActiveTransaction(); 291 if (LIKELY(method->SkipAccessChecks())) { 292 // Enter the "without access check" interpreter. 293 if (kInterpreterImplKind == kMterpImplKind) { 294 if (transaction_active) { 295 // No Mterp variant - just use the switch interpreter. 
296 return ExecuteSwitchImpl<false, true>(self, code_item, shadow_frame, result_register, 297 false); 298 } else if (UNLIKELY(!Runtime::Current()->IsStarted())) { 299 return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register, 300 false); 301 } else { 302 while (true) { 303 // Mterp does not support all instrumentation/debugging. 304 if (MterpShouldSwitchInterpreters() != 0) { 305 return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register, 306 false); 307 } 308 bool returned = ExecuteMterpImpl(self, code_item, &shadow_frame, &result_register); 309 if (returned) { 310 return result_register; 311 } else { 312 // Mterp didn't like that instruction. Single-step it with the reference interpreter. 313 result_register = ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, 314 result_register, true); 315 if (shadow_frame.GetDexPC() == DexFile::kDexNoIndex) { 316 // Single-stepped a return or an exception not handled locally. Return to caller. 317 return result_register; 318 } 319 } 320 } 321 } 322 } else { 323 DCHECK_EQ(kInterpreterImplKind, kSwitchImplKind); 324 if (transaction_active) { 325 return ExecuteSwitchImpl<false, true>(self, code_item, shadow_frame, result_register, 326 false); 327 } else { 328 return ExecuteSwitchImpl<false, false>(self, code_item, shadow_frame, result_register, 329 false); 330 } 331 } 332 } else { 333 // Enter the "with access check" interpreter. 334 if (kInterpreterImplKind == kMterpImplKind) { 335 // No access check variants for Mterp. Just use the switch version. 
336 if (transaction_active) { 337 return ExecuteSwitchImpl<true, true>(self, code_item, shadow_frame, result_register, 338 false); 339 } else { 340 return ExecuteSwitchImpl<true, false>(self, code_item, shadow_frame, result_register, 341 false); 342 } 343 } else { 344 DCHECK_EQ(kInterpreterImplKind, kSwitchImplKind); 345 if (transaction_active) { 346 return ExecuteSwitchImpl<true, true>(self, code_item, shadow_frame, result_register, 347 false); 348 } else { 349 return ExecuteSwitchImpl<true, false>(self, code_item, shadow_frame, result_register, 350 false); 351 } 352 } 353 } 354} 355 356void EnterInterpreterFromInvoke(Thread* self, 357 ArtMethod* method, 358 ObjPtr<mirror::Object> receiver, 359 uint32_t* args, 360 JValue* result, 361 bool stay_in_interpreter) { 362 DCHECK_EQ(self, Thread::Current()); 363 bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks(); 364 if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) { 365 ThrowStackOverflowError(self); 366 return; 367 } 368 369 const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke"); 370 const DexFile::CodeItem* code_item = method->GetCodeItem(); 371 uint16_t num_regs; 372 uint16_t num_ins; 373 if (code_item != nullptr) { 374 num_regs = code_item->registers_size_; 375 num_ins = code_item->ins_size_; 376 } else if (!method->IsInvokable()) { 377 self->EndAssertNoThreadSuspension(old_cause); 378 method->ThrowInvocationTimeError(); 379 return; 380 } else { 381 DCHECK(method->IsNative()); 382 num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty()); 383 if (!method->IsStatic()) { 384 num_regs++; 385 num_ins++; 386 } 387 } 388 // Set up shadow frame with matching number of reference slots to vregs. 
389 ShadowFrame* last_shadow_frame = self->GetManagedStack()->GetTopShadowFrame(); 390 ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr = 391 CREATE_SHADOW_FRAME(num_regs, last_shadow_frame, method, /* dex pc */ 0); 392 ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get(); 393 self->PushShadowFrame(shadow_frame); 394 395 size_t cur_reg = num_regs - num_ins; 396 if (!method->IsStatic()) { 397 CHECK(receiver != nullptr); 398 shadow_frame->SetVRegReference(cur_reg, receiver.Ptr()); 399 ++cur_reg; 400 } 401 uint32_t shorty_len = 0; 402 const char* shorty = method->GetShorty(&shorty_len); 403 for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) { 404 DCHECK_LT(shorty_pos + 1, shorty_len); 405 switch (shorty[shorty_pos + 1]) { 406 case 'L': { 407 ObjPtr<mirror::Object> o = 408 reinterpret_cast<StackReference<mirror::Object>*>(&args[arg_pos])->AsMirrorPtr(); 409 shadow_frame->SetVRegReference(cur_reg, o.Ptr()); 410 break; 411 } 412 case 'J': case 'D': { 413 uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos]; 414 shadow_frame->SetVRegLong(cur_reg, wide_value); 415 cur_reg++; 416 arg_pos++; 417 break; 418 } 419 default: 420 shadow_frame->SetVReg(cur_reg, args[arg_pos]); 421 break; 422 } 423 } 424 self->EndAssertNoThreadSuspension(old_cause); 425 // Do this after populating the shadow frame in case EnsureInitialized causes a GC. 
426 if (method->IsStatic() && UNLIKELY(!method->GetDeclaringClass()->IsInitialized())) { 427 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 428 StackHandleScope<1> hs(self); 429 Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass())); 430 if (UNLIKELY(!class_linker->EnsureInitialized(self, h_class, true, true))) { 431 CHECK(self->IsExceptionPending()); 432 self->PopShadowFrame(); 433 return; 434 } 435 } 436 if (LIKELY(!method->IsNative())) { 437 JValue r = Execute(self, code_item, *shadow_frame, JValue(), stay_in_interpreter); 438 if (result != nullptr) { 439 *result = r; 440 } 441 } else { 442 // We don't expect to be asked to interpret native code (which is entered via a JNI compiler 443 // generated stub) except during testing and image writing. 444 // Update args to be the args in the shadow frame since the input ones could hold stale 445 // references pointers due to moving GC. 446 args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1); 447 if (!Runtime::Current()->IsStarted()) { 448 UnstartedRuntime::Jni(self, method, receiver.Ptr(), args, result); 449 } else { 450 InterpreterJni(self, method, shorty, receiver, args, result); 451 } 452 } 453 self->PopShadowFrame(); 454} 455 456static bool IsStringInit(const Instruction* instr, ArtMethod* caller) 457 REQUIRES_SHARED(Locks::mutator_lock_) { 458 if (instr->Opcode() == Instruction::INVOKE_DIRECT || 459 instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) { 460 // Instead of calling ResolveMethod() which has suspend point and can trigger 461 // GC, look up the callee method symbolically. 462 uint16_t callee_method_idx = (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ? 
463 instr->VRegB_3rc() : instr->VRegB_35c(); 464 const DexFile* dex_file = caller->GetDexFile(); 465 const DexFile::MethodId& method_id = dex_file->GetMethodId(callee_method_idx); 466 const char* class_name = dex_file->StringByTypeIdx(method_id.class_idx_); 467 const char* method_name = dex_file->GetMethodName(method_id); 468 // Compare method's class name and method name against string init. 469 // It's ok since it's not allowed to create your own java/lang/String. 470 // TODO: verify that assumption. 471 if ((strcmp(class_name, "Ljava/lang/String;") == 0) && 472 (strcmp(method_name, "<init>") == 0)) { 473 return true; 474 } 475 } 476 return false; 477} 478 479static int16_t GetReceiverRegisterForStringInit(const Instruction* instr) { 480 DCHECK(instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE || 481 instr->Opcode() == Instruction::INVOKE_DIRECT); 482 return (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ? 483 instr->VRegC_3rc() : instr->VRegC_35c(); 484} 485 486void EnterInterpreterFromDeoptimize(Thread* self, 487 ShadowFrame* shadow_frame, 488 bool from_code, 489 JValue* ret_val) 490 REQUIRES_SHARED(Locks::mutator_lock_) { 491 JValue value; 492 // Set value to last known result in case the shadow frame chain is empty. 493 value.SetJ(ret_val->GetJ()); 494 // Are we executing the first shadow frame? 495 bool first = true; 496 while (shadow_frame != nullptr) { 497 // We do not want to recover lock state for lock counting when deoptimizing. Currently, 498 // the compiler should not have compiled a method that failed structured-locking checks. 
499 DCHECK(!shadow_frame->GetMethod()->MustCountLocks()); 500 501 self->SetTopOfShadowStack(shadow_frame); 502 const DexFile::CodeItem* code_item = shadow_frame->GetMethod()->GetCodeItem(); 503 const uint32_t dex_pc = shadow_frame->GetDexPC(); 504 uint32_t new_dex_pc = dex_pc; 505 if (UNLIKELY(self->IsExceptionPending())) { 506 // If we deoptimize from the QuickExceptionHandler, we already reported the exception to 507 // the instrumentation. To prevent from reporting it a second time, we simply pass a 508 // null Instrumentation*. 509 const instrumentation::Instrumentation* const instrumentation = 510 first ? nullptr : Runtime::Current()->GetInstrumentation(); 511 uint32_t found_dex_pc = FindNextInstructionFollowingException(self, *shadow_frame, dex_pc, 512 instrumentation); 513 new_dex_pc = found_dex_pc; // the dex pc of a matching catch handler 514 // or DexFile::kDexNoIndex if there is none. 515 } else if (!from_code) { 516 // For the debugger and full deoptimization stack, we must go past the invoke 517 // instruction, as it already executed. 518 // TODO: should be tested more once b/17586779 is fixed. 519 const Instruction* instr = Instruction::At(&code_item->insns_[dex_pc]); 520 if (instr->IsInvoke()) { 521 if (IsStringInit(instr, shadow_frame->GetMethod())) { 522 uint16_t this_obj_vreg = GetReceiverRegisterForStringInit(instr); 523 // Move the StringFactory.newStringFromChars() result into the register representing 524 // "this object" when invoking the string constructor in the original dex instruction. 525 // Also move the result into all aliases. 526 DCHECK(value.GetL()->IsString()); 527 SetStringInitValueToAllAliases(shadow_frame, this_obj_vreg, value); 528 // Calling string constructor in the original dex code doesn't generate a result value. 
529 value.SetJ(0); 530 } 531 new_dex_pc = dex_pc + instr->SizeInCodeUnits(); 532 } else if (instr->Opcode() == Instruction::NEW_INSTANCE) { 533 // It's possible to deoptimize at a NEW_INSTANCE dex instruciton that's for a 534 // java string, which is turned into a call into StringFactory.newEmptyString(); 535 // Move the StringFactory.newEmptyString() result into the destination register. 536 DCHECK(value.GetL()->IsString()); 537 shadow_frame->SetVRegReference(instr->VRegA_21c(), value.GetL()); 538 // new-instance doesn't generate a result value. 539 value.SetJ(0); 540 // Skip the dex instruction since we essentially come back from an invocation. 541 new_dex_pc = dex_pc + instr->SizeInCodeUnits(); 542 if (kIsDebugBuild) { 543 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 544 // This is a suspend point. But it's ok since value has been set into shadow_frame. 545 ObjPtr<mirror::Class> klass = class_linker->ResolveType( 546 instr->VRegB_21c(), shadow_frame->GetMethod()); 547 DCHECK(klass->IsStringClass()); 548 } 549 } else { 550 CHECK(false) << "Unexpected instruction opcode " << instr->Opcode() 551 << " at dex_pc " << dex_pc 552 << " of method: " << ArtMethod::PrettyMethod(shadow_frame->GetMethod(), false); 553 } 554 } else { 555 // Nothing to do, the dex_pc is the one at which the code requested 556 // the deoptimization. 557 } 558 if (new_dex_pc != DexFile::kDexNoIndex) { 559 shadow_frame->SetDexPC(new_dex_pc); 560 value = Execute(self, code_item, *shadow_frame, value); 561 } 562 ShadowFrame* old_frame = shadow_frame; 563 shadow_frame = shadow_frame->GetLink(); 564 ShadowFrame::DeleteDeoptimizedFrame(old_frame); 565 // Following deoptimizations of shadow frames must pass the invoke instruction. 
566 from_code = false; 567 first = false; 568 } 569 ret_val->SetJ(value.GetJ()); 570} 571 572JValue EnterInterpreterFromEntryPoint(Thread* self, const DexFile::CodeItem* code_item, 573 ShadowFrame* shadow_frame) { 574 DCHECK_EQ(self, Thread::Current()); 575 bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks(); 576 if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) { 577 ThrowStackOverflowError(self); 578 return JValue(); 579 } 580 581 jit::Jit* jit = Runtime::Current()->GetJit(); 582 if (jit != nullptr) { 583 jit->NotifyCompiledCodeToInterpreterTransition(self, shadow_frame->GetMethod()); 584 } 585 return Execute(self, code_item, *shadow_frame, JValue()); 586} 587 588void ArtInterpreterToInterpreterBridge(Thread* self, 589 const DexFile::CodeItem* code_item, 590 ShadowFrame* shadow_frame, 591 JValue* result) { 592 bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks(); 593 if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) { 594 ThrowStackOverflowError(self); 595 return; 596 } 597 598 self->PushShadowFrame(shadow_frame); 599 ArtMethod* method = shadow_frame->GetMethod(); 600 // Ensure static methods are initialized. 
601 const bool is_static = method->IsStatic(); 602 if (is_static) { 603 ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass(); 604 if (UNLIKELY(!declaring_class->IsInitialized())) { 605 StackHandleScope<1> hs(self); 606 HandleWrapperObjPtr<mirror::Class> h_declaring_class(hs.NewHandleWrapper(&declaring_class)); 607 if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized( 608 self, h_declaring_class, true, true))) { 609 DCHECK(self->IsExceptionPending()); 610 self->PopShadowFrame(); 611 return; 612 } 613 CHECK(h_declaring_class->IsInitializing()); 614 } 615 } 616 617 if (LIKELY(!shadow_frame->GetMethod()->IsNative())) { 618 result->SetJ(Execute(self, code_item, *shadow_frame, JValue()).GetJ()); 619 } else { 620 // We don't expect to be asked to interpret native code (which is entered via a JNI compiler 621 // generated stub) except during testing and image writing. 622 CHECK(!Runtime::Current()->IsStarted()); 623 ObjPtr<mirror::Object> receiver = is_static ? nullptr : shadow_frame->GetVRegReference(0); 624 uint32_t* args = shadow_frame->GetVRegArgs(is_static ? 0 : 1); 625 UnstartedRuntime::Jni(self, shadow_frame->GetMethod(), receiver.Ptr(), args, result); 626 } 627 628 self->PopShadowFrame(); 629} 630 631void CheckInterpreterAsmConstants() { 632 CheckMterpAsmConstants(); 633} 634 635void InitInterpreterTls(Thread* self) { 636 InitMterpTls(self); 637} 638 639} // namespace interpreter 640} // namespace art 641