// interpreter_common.cc — revision a5b09a67034e57a6e10231dd4bd92f4cb50b824c
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter_common.h"

#include <cmath>

#include "base/enums.h"
#include "debugger.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "jit/jit.h"
#include "jvalue.h"
#include "method_handles.h"
#include "method_handles-inl.h"
#include "mirror/array-inl.h"
#include "mirror/class.h"
#include "mirror/emulated_stack_frame.h"
#include "mirror/method_handle_impl.h"
#include "reflection.h"
#include "reflection-inl.h"
#include "stack.h"
#include "unstarted_runtime.h"
#include "verifier/method_verifier.h"
#include "well_known_classes.h"

namespace art {
namespace interpreter {

// Throws a NullPointerException attributed to the dex pc the interpreter is
// currently executing.
void ThrowNullPointerExceptionFromInterpreter() {
  ThrowNullPointerExceptionFromDexPC();
}

// Reads field |field| (statically typed as |field_type|) from |obj| into
// |result|, reporting the access to any field-read instrumentation listeners
// first. The caller must have ensured the declaring class is initialized (or
// initializing in this thread); that is asserted below. |obj| is taken by
// reference so it can be updated if a listener suspends this thread and the
// object moves.
template<Primitive::Type field_type>
static ALWAYS_INLINE void DoFieldGetCommon(Thread* self,
                                           const ShadowFrame& shadow_frame,
                                           ObjPtr<mirror::Object>& obj,
                                           ArtField* field,
                                           JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  field->GetDeclaringClass()->AssertInitializedOrInitializingInThread(self);

  // Report this field access to instrumentation if needed.
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  if (UNLIKELY(instrumentation->HasFieldReadListeners())) {
    StackHandleScope<1> hs(self);
    // Wrap in handle wrapper in case the listener does thread suspension.
    HandleWrapperObjPtr<mirror::Object> h(hs.NewHandleWrapper(&obj));
    ObjPtr<mirror::Object> this_object;
    if (!field->IsStatic()) {
      // Static field reads report a null receiver.
      this_object = obj;
    }
    instrumentation->FieldReadEvent(self,
                                    this_object.Ptr(),
                                    shadow_frame.GetMethod(),
                                    shadow_frame.GetDexPC(),
                                    field);
  }

  // field_type is a compile-time constant, so this switch folds to one case.
  switch (field_type) {
    case Primitive::kPrimBoolean:
      result->SetZ(field->GetBoolean(obj));
      break;
    case Primitive::kPrimByte:
      result->SetB(field->GetByte(obj));
      break;
    case Primitive::kPrimChar:
      result->SetC(field->GetChar(obj));
      break;
    case Primitive::kPrimShort:
      result->SetS(field->GetShort(obj));
      break;
    case Primitive::kPrimInt:
      result->SetI(field->GetInt(obj));
      break;
    case Primitive::kPrimLong:
      result->SetJ(field->GetLong(obj));
      break;
    case Primitive::kPrimNot:
      result->SetL(field->GetObject(obj));
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
}

// Handles iget-XXX and sget-XXX instructions: resolves the field (with an
// access check when do_access_check is set), null-checks the receiver for
// instance gets, performs the read and writes the value into the destination
// vreg. Returns true on success, otherwise throws an exception and returns
// false.
template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check>
bool DoFieldGet(Thread* self, ShadowFrame& shadow_frame, const Instruction* inst,
                uint16_t inst_data) {
  const bool is_static = (find_type == StaticObjectRead) || (find_type == StaticPrimitiveRead);
  // sget uses the 21c instruction format, iget uses 22c.
  const uint32_t field_idx = is_static ? inst->VRegB_21c() : inst->VRegC_22c();
  ArtField* f =
      FindFieldFromCode<find_type, do_access_check>(field_idx, shadow_frame.GetMethod(), self,
                                                    Primitive::ComponentSize(field_type));
  if (UNLIKELY(f == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }
  ObjPtr<mirror::Object> obj;
  if (is_static) {
    // Static fields are read off the declaring class object.
    obj = f->GetDeclaringClass();
  } else {
    obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
    if (UNLIKELY(obj == nullptr)) {
      // true: report the access as a read in the exception message.
      ThrowNullPointerExceptionForFieldAccess(f, true);
      return false;
    }
  }

  JValue result;
  DoFieldGetCommon<field_type>(self, shadow_frame, obj, f, &result);
  uint32_t vregA = is_static ? inst->VRegA_21c(inst_data) : inst->VRegA_22c(inst_data);
  switch (field_type) {
    case Primitive::kPrimBoolean:
      shadow_frame.SetVReg(vregA, result.GetZ());
      break;
    case Primitive::kPrimByte:
      shadow_frame.SetVReg(vregA, result.GetB());
      break;
    case Primitive::kPrimChar:
      shadow_frame.SetVReg(vregA, result.GetC());
      break;
    case Primitive::kPrimShort:
      shadow_frame.SetVReg(vregA, result.GetS());
      break;
    case Primitive::kPrimInt:
      shadow_frame.SetVReg(vregA, result.GetI());
      break;
    case Primitive::kPrimLong:
      shadow_frame.SetVRegLong(vregA, result.GetJ());
      break;
    case Primitive::kPrimNot:
      shadow_frame.SetVRegReference(vregA, result.GetL());
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}

// Explicitly instantiate all DoFieldGet functions.
// Stamps out one explicit instantiation of DoFieldGet for a given
// (find_type, field_type, do_access_check) combination, so the definition is
// emitted in this translation unit for the interpreter switch to call.
#define EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, _do_check) \
  template bool DoFieldGet<_find_type, _field_type, _do_check>(Thread* self, \
                                                               ShadowFrame& shadow_frame, \
                                                               const Instruction* inst, \
                                                               uint16_t inst_data)

// Instantiates both the access-checked and unchecked variants.
#define EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(_find_type, _field_type)  \
  EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, false);  \
  EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL(_find_type, _field_type, true);

// iget-XXX
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstancePrimitiveRead, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(InstanceObjectRead, Primitive::kPrimNot)

// sget-XXX
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticPrimitiveRead, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL(StaticObjectRead, Primitive::kPrimNot)

#undef EXPLICIT_DO_FIELD_GET_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FIELD_GET_TEMPLATE_DECL

// Helper for getters in invoke-polymorphic.
// Helper for getters in invoke-polymorphic: dispatches on the runtime
// |field_type| value to the statically typed DoFieldGetCommon instantiation.
// Note: float and double are read through the kPrimInt/kPrimLong
// instantiations, i.e. as their raw 32-/64-bit patterns in |result|.
inline static void DoFieldGetForInvokePolymorphic(Thread* self,
                                                  const ShadowFrame& shadow_frame,
                                                  ObjPtr<mirror::Object>& obj,
                                                  ArtField* field,
                                                  Primitive::Type field_type,
                                                  JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  switch (field_type) {
    case Primitive::kPrimBoolean:
      DoFieldGetCommon<Primitive::kPrimBoolean>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimByte:
      DoFieldGetCommon<Primitive::kPrimByte>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimChar:
      DoFieldGetCommon<Primitive::kPrimChar>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimShort:
      DoFieldGetCommon<Primitive::kPrimShort>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimInt:
      DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimLong:
      DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimFloat:
      // Read as a raw 32-bit value.
      DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimDouble:
      // Read as a raw 64-bit value.
      DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimNot:
      DoFieldGetCommon<Primitive::kPrimNot>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
}

// Handles iget-quick, iget-wide-quick and iget-object-quick instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<Primitive::Type field_type>
bool DoIGetQuick(ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data) {
  ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
  if (UNLIKELY(obj == nullptr)) {
    // We lost the reference to the field index so we cannot get a more
    // precise exception message.
    ThrowNullPointerExceptionFromDexPC();
    return false;
  }
  // The quickened instruction carries the raw field offset, not a field index.
  MemberOffset field_offset(inst->VRegC_22c());
  // Report this field access to instrumentation if needed. Since we only have the offset of
  // the field from the base of the object, we need to look for it first.
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  if (UNLIKELY(instrumentation->HasFieldReadListeners())) {
    ArtField* f = ArtField::FindInstanceFieldWithOffset(obj->GetClass(),
                                                        field_offset.Uint32Value());
    DCHECK(f != nullptr);
    DCHECK(!f->IsStatic());
    StackHandleScope<1> hs(Thread::Current());
    // Save obj in case the instrumentation event has thread suspension.
    HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&obj);
    instrumentation->FieldReadEvent(Thread::Current(),
                                    obj.Ptr(),
                                    shadow_frame.GetMethod(),
                                    shadow_frame.GetDexPC(),
                                    f);
  }
  // Note: iget-x-quick instructions are only for non-volatile fields.
  const uint32_t vregA = inst->VRegA_22c(inst_data);
  switch (field_type) {
    case Primitive::kPrimInt:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetField32(field_offset)));
      break;
    case Primitive::kPrimBoolean:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldBoolean(field_offset)));
      break;
    case Primitive::kPrimByte:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldByte(field_offset)));
      break;
    case Primitive::kPrimChar:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldChar(field_offset)));
      break;
    case Primitive::kPrimShort:
      shadow_frame.SetVReg(vregA, static_cast<int32_t>(obj->GetFieldShort(field_offset)));
      break;
    case Primitive::kPrimLong:
      shadow_frame.SetVRegLong(vregA, static_cast<int64_t>(obj->GetField64(field_offset)));
      break;
    case Primitive::kPrimNot:
      shadow_frame.SetVRegReference(vregA, obj->GetFieldObject<mirror::Object>(field_offset));
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}

// Explicitly instantiate all DoIGetQuick functions.
#define EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(_field_type) \
  template bool DoIGetQuick<_field_type>(ShadowFrame& shadow_frame, const Instruction* inst, \
                                         uint16_t inst_data)

EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimInt);      // iget-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimBoolean);  // iget-boolean-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimByte);     // iget-byte-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimChar);     // iget-char-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimShort);    // iget-short-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimLong);     // iget-wide-quick.
EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL(Primitive::kPrimNot);      // iget-object-quick.
#undef EXPLICIT_DO_IGET_QUICK_TEMPLATE_DECL

// Reads the value to be stored to a field of runtime-determined type
// |field_type| out of vreg |vreg|. Float/double are fetched as their raw
// int/long bits; wide types read a vreg pair via GetVRegLong.
static JValue GetFieldValue(const ShadowFrame& shadow_frame,
                            Primitive::Type field_type,
                            uint32_t vreg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue field_value;
  switch (field_type) {
    case Primitive::kPrimBoolean:
      field_value.SetZ(static_cast<uint8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimByte:
      field_value.SetB(static_cast<int8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimChar:
      field_value.SetC(static_cast<uint16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimShort:
      field_value.SetS(static_cast<int16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      field_value.SetI(shadow_frame.GetVReg(vreg));
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      field_value.SetJ(shadow_frame.GetVRegLong(vreg));
      break;
    case Primitive::kPrimNot:
      field_value.SetL(shadow_frame.GetVRegReference(vreg));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return field_value;
}

// Compile-time-typed variant of the above. Callers with float/double values
// use the kPrimInt/kPrimLong instantiations (raw bits).
template<Primitive::Type field_type>
static JValue GetFieldValue(const ShadowFrame& shadow_frame, uint32_t vreg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue field_value;
  switch (field_type) {
    case Primitive::kPrimBoolean:
      field_value.SetZ(static_cast<uint8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimByte:
      field_value.SetB(static_cast<int8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimChar:
      field_value.SetC(static_cast<uint16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimShort:
      field_value.SetS(static_cast<int16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimInt:
      field_value.SetI(shadow_frame.GetVReg(vreg));
      break;
    case Primitive::kPrimLong:
      field_value.SetJ(shadow_frame.GetVRegLong(vreg));
      break;
    case Primitive::kPrimNot:
      field_value.SetL(shadow_frame.GetVRegReference(vreg));
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return field_value;
}

// Writes |value| into field |f| of |obj|, reporting the write to any
// field-write instrumentation listeners first. The caller must have ensured
// the declaring class is initialized (or initializing in this thread); that
// is asserted below. When do_assignability_check is set, reference stores are
// verified against the field's declared type and a VirtualMachineError is
// thrown on mismatch. transaction_active selects the transactional
// (rollback-recording) field setters. Returns true on success, false with a
// pending exception otherwise.
template<Primitive::Type field_type, bool do_assignability_check, bool transaction_active>
static inline bool DoFieldPutCommon(Thread* self,
                                    const ShadowFrame& shadow_frame,
                                    ObjPtr<mirror::Object>& obj,
                                    ArtField* f,
                                    const JValue& value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  f->GetDeclaringClass()->AssertInitializedOrInitializingInThread(self);

  // Report this field access to instrumentation if needed. Since we only have the offset of
  // the field from the base of the object, we need to look for it first.
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  if (UNLIKELY(instrumentation->HasFieldWriteListeners())) {
    StackHandleScope<1> hs(self);
    // Wrap in handle wrapper in case the listener does thread suspension.
    HandleWrapperObjPtr<mirror::Object> h(hs.NewHandleWrapper(&obj));
    // Static field writes report a null receiver.
    ObjPtr<mirror::Object> this_object = f->IsStatic() ? nullptr : obj;
    instrumentation->FieldWriteEvent(self, this_object.Ptr(),
                                     shadow_frame.GetMethod(),
                                     shadow_frame.GetDexPC(),
                                     f,
                                     value);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
      f->SetBoolean<transaction_active>(obj, value.GetZ());
      break;
    case Primitive::kPrimByte:
      f->SetByte<transaction_active>(obj, value.GetB());
      break;
    case Primitive::kPrimChar:
      f->SetChar<transaction_active>(obj, value.GetC());
      break;
    case Primitive::kPrimShort:
      f->SetShort<transaction_active>(obj, value.GetS());
      break;
    case Primitive::kPrimInt:
      f->SetInt<transaction_active>(obj, value.GetI());
      break;
    case Primitive::kPrimLong:
      f->SetLong<transaction_active>(obj, value.GetJ());
      break;
    case Primitive::kPrimNot: {
      ObjPtr<mirror::Object> reg = value.GetL();
      if (do_assignability_check && reg != nullptr) {
        // FieldHelper::GetType can resolve classes, use a handle wrapper which will restore the
        // object in the destructor.
        ObjPtr<mirror::Class> field_class;
        {
          StackHandleScope<2> hs(self);
          HandleWrapperObjPtr<mirror::Object> h_reg(hs.NewHandleWrapper(&reg));
          HandleWrapperObjPtr<mirror::Object> h_obj(hs.NewHandleWrapper(&obj));
          field_class = f->GetType<true>();
        }
        if (!reg->VerifierInstanceOf(field_class.Ptr())) {
          // This should never happen.
          std::string temp1, temp2, temp3;
          self->ThrowNewExceptionF("Ljava/lang/VirtualMachineError;",
                                   "Put '%s' that is not instance of field '%s' in '%s'",
                                   reg->GetClass()->GetDescriptor(&temp1),
                                   field_class->GetDescriptor(&temp2),
                                   f->GetDeclaringClass()->GetDescriptor(&temp3));
          return false;
        }
      }
      f->SetObj<transaction_active>(obj, reg);
      break;
    }
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}

// Handles iput-XXX and sput-XXX instructions: resolves the field (with an
// access check when do_access_check is set), null-checks the receiver for
// instance puts, reads the new value from the source vreg and stores it via
// DoFieldPutCommon. Returns true on success, otherwise throws an exception
// and returns false.
template<FindFieldType find_type, Primitive::Type field_type, bool do_access_check,
         bool transaction_active>
bool DoFieldPut(Thread* self, const ShadowFrame& shadow_frame, const Instruction* inst,
                uint16_t inst_data) {
  // Assignability checks are only performed when access checks are enabled.
  const bool do_assignability_check = do_access_check;
  bool is_static = (find_type == StaticObjectWrite) || (find_type == StaticPrimitiveWrite);
  // sput uses the 21c instruction format, iput uses 22c.
  uint32_t field_idx = is_static ? inst->VRegB_21c() : inst->VRegC_22c();
  ArtField* f =
      FindFieldFromCode<find_type, do_access_check>(field_idx, shadow_frame.GetMethod(), self,
                                                    Primitive::ComponentSize(field_type));
  if (UNLIKELY(f == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }
  ObjPtr<mirror::Object> obj;
  if (is_static) {
    obj = f->GetDeclaringClass();
  } else {
    obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
    if (UNLIKELY(obj == nullptr)) {
      // false: report the access as a write in the exception message.
      ThrowNullPointerExceptionForFieldAccess(f, false);
      return false;
    }
  }

  uint32_t vregA = is_static ? inst->VRegA_21c(inst_data) : inst->VRegA_22c(inst_data);
  JValue value = GetFieldValue<field_type>(shadow_frame, vregA);
  return DoFieldPutCommon<field_type, do_assignability_check, transaction_active>(self,
                                                                                  shadow_frame,
                                                                                  obj,
                                                                                  f,
                                                                                  value);
}

// Explicitly instantiate all DoFieldPut functions.
// Stamps out one explicit instantiation of DoFieldPut for a given
// (find_type, field_type, do_check, transaction_active) combination.
#define EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, _do_check, _transaction_active) \
  template bool DoFieldPut<_find_type, _field_type, _do_check, _transaction_active>(Thread* self, \
      const ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data)

// Instantiates all four (do_check, transaction_active) combinations.
#define EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(_find_type, _field_type)  \
  EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, false, false);  \
  EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, true, false);  \
  EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, false, true);  \
  EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL(_find_type, _field_type, true, true);

// iput-XXX
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstancePrimitiveWrite, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(InstanceObjectWrite, Primitive::kPrimNot)

// sput-XXX
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimBoolean)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimByte)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimChar)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimShort)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimInt)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticPrimitiveWrite, Primitive::kPrimLong)
EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL(StaticObjectWrite, Primitive::kPrimNot)

#undef EXPLICIT_DO_FIELD_PUT_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FIELD_PUT_TEMPLATE_DECL

// Helper for setters in invoke-polymorphic: dispatches on the runtime
// |field_type| value to the statically typed DoFieldPutCommon instantiation,
// with assignability checks disabled and outside of any transaction. Float
// and double are stored via the kPrimInt/kPrimLong instantiations (raw bits).
bool DoFieldPutForInvokePolymorphic(Thread* self,
                                    ShadowFrame& shadow_frame,
                                    ObjPtr<mirror::Object>& obj,
                                    ArtField* field,
                                    Primitive::Type field_type,
                                    const JValue& value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  static const bool kDoCheckAssignability = false;
  static const bool kTransaction = false;
  switch (field_type) {
    case Primitive::kPrimBoolean:
      return DoFieldPutCommon<Primitive::kPrimBoolean, kDoCheckAssignability, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimByte:
      return DoFieldPutCommon<Primitive::kPrimByte, kDoCheckAssignability, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimChar:
      return DoFieldPutCommon<Primitive::kPrimChar, kDoCheckAssignability, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimShort:
      return DoFieldPutCommon<Primitive::kPrimShort, kDoCheckAssignability, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return DoFieldPutCommon<Primitive::kPrimInt, kDoCheckAssignability, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return DoFieldPutCommon<Primitive::kPrimLong, kDoCheckAssignability, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimNot:
      return DoFieldPutCommon<Primitive::kPrimNot, kDoCheckAssignability, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
}

// Handles iput-quick, iput-wide-quick and iput-object-quick instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<Primitive::Type field_type, bool transaction_active>
bool DoIPutQuick(const ShadowFrame& shadow_frame, const Instruction* inst, uint16_t inst_data) {
  ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(inst->VRegB_22c(inst_data));
  if (UNLIKELY(obj == nullptr)) {
    // We lost the reference to the field index so we cannot get a more
    // precise exception message.
    ThrowNullPointerExceptionFromDexPC();
    return false;
  }
  // The quickened instruction carries the raw field offset, not a field index.
  MemberOffset field_offset(inst->VRegC_22c());
  const uint32_t vregA = inst->VRegA_22c(inst_data);
  // Report this field modification to instrumentation if needed. Since we only have the offset of
  // the field from the base of the object, we need to look for it first.
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  if (UNLIKELY(instrumentation->HasFieldWriteListeners())) {
    ArtField* f = ArtField::FindInstanceFieldWithOffset(obj->GetClass(),
                                                        field_offset.Uint32Value());
    DCHECK(f != nullptr);
    DCHECK(!f->IsStatic());
    JValue field_value = GetFieldValue<field_type>(shadow_frame, vregA);
    StackHandleScope<1> hs(Thread::Current());
    // Save obj in case the instrumentation event has thread suspension.
    HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&obj);
    instrumentation->FieldWriteEvent(Thread::Current(),
                                     obj.Ptr(),
                                     shadow_frame.GetMethod(),
                                     shadow_frame.GetDexPC(),
                                     f,
                                     field_value);
  }
  // Note: iput-x-quick instructions are only for non-volatile fields.
  switch (field_type) {
    case Primitive::kPrimBoolean:
      obj->SetFieldBoolean<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimByte:
      obj->SetFieldByte<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimChar:
      obj->SetFieldChar<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimShort:
      obj->SetFieldShort<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimInt:
      obj->SetField32<transaction_active>(field_offset, shadow_frame.GetVReg(vregA));
      break;
    case Primitive::kPrimLong:
      obj->SetField64<transaction_active>(field_offset, shadow_frame.GetVRegLong(vregA));
      break;
    case Primitive::kPrimNot:
      obj->SetFieldObject<transaction_active>(field_offset, shadow_frame.GetVRegReference(vregA));
      break;
    default:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return true;
}

// Explicitly instantiate all DoIPutQuick functions.
#define EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, _transaction_active) \
  template bool DoIPutQuick<_field_type, _transaction_active>(const ShadowFrame& shadow_frame, \
                                                              const Instruction* inst, \
                                                              uint16_t inst_data)

// Instantiates both the transactional and non-transactional variants.
#define EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(_field_type)  \
  EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, false);  \
  EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL(_field_type, true);

EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimInt)      // iput-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimBoolean)  // iput-boolean-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimByte)     // iput-byte-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimChar)     // iput-char-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimShort)    // iput-short-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimLong)     // iput-wide-quick.
EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL(Primitive::kPrimNot)      // iput-object-quick.
#undef EXPLICIT_DO_IPUT_QUICK_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_IPUT_QUICK_TEMPLATE_DECL

// We accept a null Instrumentation* meaning we must not report anything to the instrumentation.
// Finds the dex pc of the catch handler in the current method for the pending
// exception, reporting the catch or the impending unwind to instrumentation.
// Returns DexFile::kDexNoIndex when the exception is not caught here.
uint32_t FindNextInstructionFollowingException(
    Thread* self, ShadowFrame& shadow_frame, uint32_t dex_pc,
    const instrumentation::Instrumentation* instrumentation) {
  self->VerifyStack();
  StackHandleScope<2> hs(self);
  Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
  // Only report the caught event for exceptions thrown by the current method.
  if (instrumentation != nullptr && instrumentation->HasExceptionCaughtListeners()
      && self->IsExceptionThrownByCurrentMethod(exception.Get())) {
    instrumentation->ExceptionCaughtEvent(self, exception.Get());
  }
  bool clear_exception = false;
  uint32_t found_dex_pc = shadow_frame.GetMethod()->FindCatchBlock(
      hs.NewHandle(exception->GetClass()), dex_pc, &clear_exception);
  if (found_dex_pc == DexFile::kDexNoIndex && instrumentation != nullptr) {
    // Exception is not caught by the current method. We will unwind to the
    // caller. Notify any instrumentation listener.
    instrumentation->MethodUnwindEvent(self, shadow_frame.GetThisObject(),
                                       shadow_frame.GetMethod(), dex_pc);
  } else {
    // Exception is caught in the current method. We will jump to the found_dex_pc.
    if (clear_exception) {
      self->ClearException();
    }
  }
  return found_dex_pc;
}

// Aborts on an instruction the interpreter does not expect to execute.
void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame) {
  LOG(FATAL) << "Unexpected instruction: "
             << inst->DumpString(shadow_frame.GetMethod()->GetDexFile());
  UNREACHABLE();
}

// printf-style convenience wrapper around AbortTransactionV.
void AbortTransactionF(Thread* self, const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  AbortTransactionV(self, fmt, args);
  va_end(args);
}

// Aborts the active transaction with a formatted message. Requires a
// transaction to be active.
void AbortTransactionV(Thread* self, const char* fmt, va_list args) {
  CHECK(Runtime::Current()->IsActiveTransaction());
  // Constructs abort message.
  std::string abort_msg;
  StringAppendV(&abort_msg, fmt, args);
  // Throws an exception so we can abort the transaction and rollback every change.
  Runtime::Current()->AbortTransactionAndThrowAbortError(self, abort_msg);
}

// START DECLARATIONS :
//
// These additional declarations are required because clang complains
// about ALWAYS_INLINE (-Werror, -Wgcc-compat) in definitions.
//

template <bool is_range, bool do_assignability_check>
static ALWAYS_INLINE bool DoCallCommon(ArtMethod* called_method,
                                       Thread* self,
                                       ShadowFrame& shadow_frame,
                                       JValue* result,
                                       uint16_t number_of_inputs,
                                       uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                       uint32_t vregC) REQUIRES_SHARED(Locks::mutator_lock_);

template <bool is_range>
static ALWAYS_INLINE bool DoCallPolymorphic(ArtMethod* called_method,
                                            Handle<mirror::MethodType> callsite_type,
                                            Handle<mirror::MethodType> target_type,
                                            Thread* self,
                                            ShadowFrame& shadow_frame,
                                            JValue* result,
                                            uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                            uint32_t vregC,
                                            const MethodHandleKind handle_kind)
    REQUIRES_SHARED(Locks::mutator_lock_);

template <bool is_range>
static ALWAYS_INLINE bool DoCallTransform(ArtMethod* called_method,
                                          Handle<mirror::MethodType> callsite_type,
                                          Handle<mirror::MethodType> callee_type,
                                          Thread* self,
                                          ShadowFrame& shadow_frame,
                                          Handle<mirror::MethodHandleImpl> receiver,
                                          JValue* result,
                                          uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                          uint32_t vregC) REQUIRES_SHARED(Locks::mutator_lock_);

ALWAYS_INLINE void PerformCall(Thread* self,
                               const DexFile::CodeItem* code_item,
                               ArtMethod* caller_method,
                               const size_t first_dest_reg,
                               ShadowFrame* callee_frame,
                               JValue* result) REQUIRES_SHARED(Locks::mutator_lock_);

template <bool is_range>
ALWAYS_INLINE void CopyRegisters(ShadowFrame& caller_frame,
                                 ShadowFrame* callee_frame,
                                 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                 const size_t first_src_reg,
                                 const size_t first_dest_reg,
                                 const size_t num_regs) REQUIRES_SHARED(Locks::mutator_lock_);

// END DECLARATIONS.

// Transfers control from the interpreter to compiled code for the method in
// |shadow_frame|, first ensuring the declaring class of a static method is
// initialized and notifying the JIT of the transition.
void ArtInterpreterToCompiledCodeBridge(Thread* self,
                                        ArtMethod* caller,
                                        const DexFile::CodeItem* code_item,
                                        ShadowFrame* shadow_frame,
                                        JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods are initialized.
  if (method->IsStatic()) {
    ObjPtr<mirror::Class> declaringClass = method->GetDeclaringClass();
    if (UNLIKELY(!declaringClass->IsInitialized())) {
      // Push the frame so the method is visible to stack walks during init.
      self->PushShadowFrame(shadow_frame);
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(declaringClass));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true,
                                                                            true))) {
        self->PopShadowFrame();
        DCHECK(self->IsExceptionPending());
        return;
      }
      self->PopShadowFrame();
      CHECK(h_class->IsInitializing());
      // Reload from shadow frame in case the method moved, this is faster than adding a handle.
      method = shadow_frame->GetMethod();
    }
  }
  // Arguments occupy the last ins_size_ registers of the frame (or the whole
  // frame when there is no code item).
  uint16_t arg_offset = (code_item == nullptr)
                            ? 0
                            : code_item->registers_size_ - code_item->ins_size_;
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr && caller != nullptr) {
    jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
  }
  method->Invoke(self, shadow_frame->GetVRegArgs(arg_offset),
                 (shadow_frame->NumberOfVRegs() - arg_offset) * sizeof(uint32_t),
                 result, method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty());
}

// Propagates the String produced by a string-init invocation to every vreg
// that aliased the uninitialized "this" reference in |this_obj_vreg|.
void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
                                    uint16_t this_obj_vreg,
                                    JValue result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Object> existing = shadow_frame->GetVRegReference(this_obj_vreg);
  if (existing == nullptr) {
    // If it's null, we come from compiled code that was deoptimized. Nothing to do,
    // as the compiler verified there was no alias.
    // Set the new string result of the StringFactory.
    shadow_frame->SetVRegReference(this_obj_vreg, result.GetL());
    return;
  }
  // Set the string init result into all aliases.
  for (uint32_t i = 0, e = shadow_frame->NumberOfVRegs(); i < e; ++i) {
    if (shadow_frame->GetVRegReference(i) == existing) {
      DCHECK_EQ(shadow_frame->GetVRegReference(i),
                reinterpret_cast<mirror::Object*>(shadow_frame->GetVReg(i)));
      shadow_frame->SetVRegReference(i, result.GetL());
      DCHECK_EQ(shadow_frame->GetVRegReference(i),
                reinterpret_cast<mirror::Object*>(shadow_frame->GetVReg(i)));
    }
  }
}

// Returns true if the method named by |invoke_method_idx| is "invokeExact"
// (as opposed to "invoke").
inline static bool IsInvokeExact(const DexFile& dex_file, int invoke_method_idx) {
  // This check uses string comparison as it needs less code and data
  // to do than fetching the associated ArtMethod from the DexCache
  // and checking against ArtMethods in the well known classes. The
  // verifier needs to perform a more rigorous check.
  const char* method_name = dex_file.GetMethodName(dex_file.GetMethodId(invoke_method_idx));
  bool is_invoke_exact = (0 == strcmp(method_name, "invokeExact"));
  DCHECK(is_invoke_exact || (0 == strcmp(method_name, "invoke")));
  return is_invoke_exact;
}

// Returns |field|'s declaring class, ensuring it is initialized first.
// Returns null with an exception pending if initialization fails.
inline static ObjPtr<mirror::Class> GetAndInitializeDeclaringClass(Thread* self, ArtField* field)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Method handle invocations on static fields should ensure class is
  // initialized. This usually happens when an instance is constructed
  // or class members referenced, but this is not guaranteed when
  // looking up method handles.
  ObjPtr<mirror::Class> klass = field->GetDeclaringClass();
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    // Wrap in handle wrapper: initialization can suspend and move the class.
    HandleWrapperObjPtr<mirror::Class> h(hs.NewHandleWrapper(&klass));
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  }
  return klass;
}

// Returns true iff. the callsite type for a polymorphic invoke is transformer
// like, i.e that it has a single input argument whose type is
// dalvik.system.EmulatedStackFrame.
833static inline bool IsCallerTransformer(Handle<mirror::MethodType> callsite_type) 834 REQUIRES_SHARED(Locks::mutator_lock_) { 835 ObjPtr<mirror::ObjectArray<mirror::Class>> param_types(callsite_type->GetPTypes()); 836 if (param_types->GetLength() == 1) { 837 ObjPtr<mirror::Class> param(param_types->GetWithoutChecks(0)); 838 return param == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_EmulatedStackFrame); 839 } 840 841 return false; 842} 843 844template<bool is_range, bool do_access_check> 845inline bool DoInvokePolymorphic(Thread* self, 846 ShadowFrame& shadow_frame, 847 const Instruction* inst, 848 uint16_t inst_data, 849 JValue* result) 850 REQUIRES_SHARED(Locks::mutator_lock_) { 851 // Invoke-polymorphic instructions always take a receiver. i.e, they are never static. 852 const uint32_t vRegC = (is_range) ? inst->VRegC_4rcc() : inst->VRegC_45cc(); 853 const int invoke_method_idx = (is_range) ? inst->VRegB_4rcc() : inst->VRegB_45cc(); 854 855 // Initialize |result| to 0 as this is the default return value for 856 // polymorphic invocations of method handle types with void return 857 // and provides sane return result in error cases. 858 result->SetJ(0); 859 860 // Determine if this invocation is MethodHandle.invoke() or 861 // MethodHandle.invokeExact(). 862 bool is_invoke_exact = IsInvokeExact(shadow_frame.GetMethod()->GetDeclaringClass()->GetDexFile(), 863 invoke_method_idx); 864 865 // The invoke_method_idx here is the name of the signature polymorphic method that 866 // was symbolically invoked in bytecode (say MethodHandle.invoke or MethodHandle.invokeExact) 867 // and not the method that we'll dispatch to in the end. 868 // 869 // TODO(narayan) We'll have to check in the verifier that this is in fact a 870 // signature polymorphic method so that we disallow calls via invoke-polymorphic 871 // to non sig-poly methods. This would also have the side effect of verifying 872 // that vRegC really is a reference type. 
StackHandleScope<6> hs(self);
  Handle<mirror::MethodHandleImpl> method_handle(hs.NewHandle(
      ObjPtr<mirror::MethodHandleImpl>::DownCast(
          MakeObjPtr(shadow_frame.GetVRegReference(vRegC)))));
  if (UNLIKELY(method_handle.Get() == nullptr)) {
    // Note that the invoke type is kVirtual here because a call to a signature
    // polymorphic method is shaped like a virtual call at the bytecode level.
    ThrowNullPointerExceptionForMethodAccess(invoke_method_idx, InvokeType::kVirtual);
    return false;
  }

  // The vRegH value gives the index of the proto_id associated with this
  // signature polymorphic callsite.
  const uint32_t callsite_proto_id = (is_range) ? inst->VRegH_4rcc() : inst->VRegH_45cc();

  // Call through to the classlinker and ask it to resolve the static type associated
  // with the callsite. This information is stored in the dex cache so it's
  // guaranteed to be fast after the first resolution.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::Class> caller_class(hs.NewHandle(shadow_frame.GetMethod()->GetDeclaringClass()));
  Handle<mirror::MethodType> callsite_type(hs.NewHandle(class_linker->ResolveMethodType(
      caller_class->GetDexFile(), callsite_proto_id,
      hs.NewHandle<mirror::DexCache>(caller_class->GetDexCache()),
      hs.NewHandle<mirror::ClassLoader>(caller_class->GetClassLoader()))));

  // This implies we couldn't resolve one or more types in this method handle.
  if (UNLIKELY(callsite_type.Get() == nullptr)) {
    CHECK(self->IsExceptionPending());
    return false;
  }

  const MethodHandleKind handle_kind = method_handle->GetHandleKind();
  Handle<mirror::MethodType> handle_type(hs.NewHandle(method_handle->GetMethodType()));
  CHECK(handle_type.Get() != nullptr);
  {
    // We need to check the nominal type of the handle in addition to the
    // real type. The "nominal" type is present when MethodHandle.asType is
    // called on any handle, and results in the declared type of the handle
    // changing.
    ObjPtr<mirror::MethodType> nominal_type(method_handle->GetNominalType());
    ObjPtr<mirror::MethodType> check_type(nullptr);
    if (LIKELY(nominal_type.Ptr() == nullptr)) {
      check_type.Assign(handle_type.Get());
    } else {
      check_type.Assign(nominal_type.Ptr());
    }

    if (is_invoke_exact) {
      // invokeExact demands an exact method-type match.
      if (UNLIKELY(!callsite_type->IsExactMatch(check_type.Ptr()))) {
        ThrowWrongMethodTypeException(check_type.Ptr(), callsite_type.Get());
        return false;
      }
    } else {
      // invoke allows asType-style conversions; a transformer caller is
      // checked later when unmarshalling its EmulatedStackFrame.
      if (UNLIKELY(!IsCallerTransformer(callsite_type) &&
                   !callsite_type->IsConvertible(check_type.Ptr()))) {
        ThrowWrongMethodTypeException(check_type.Ptr(), callsite_type.Get());
        return false;
      }
    }
  }

  uint32_t arg[Instruction::kMaxVarArgRegs] = {};
  uint32_t first_src_reg = 0;
  if (is_range) {
    // Skip the method handle receiver in vRegC; the arguments start right after.
    first_src_reg = (inst->VRegC_4rcc() + 1);
  } else {
    inst->GetVarArgs(arg, inst_data);
    // Shift the var-args left by one to drop the method handle receiver.
    arg[0] = arg[1];
    arg[1] = arg[2];
    arg[2] = arg[3];
    arg[3] = arg[4];
    arg[4] = 0;
    first_src_reg = arg[0];
  }

  if (IsInvoke(handle_kind)) {
    // Get the method we're actually invoking along with the kind of
    // invoke that is desired. We don't need to perform access checks at this
    // point because they would have been performed on our behalf at the point
    // of creation of the method handle.
    ArtMethod* called_method = method_handle->GetTargetMethod();
    CHECK(called_method != nullptr);

    if (handle_kind == kInvokeVirtual || handle_kind == kInvokeInterface) {
      // TODO: Unfortunately, we have to postpone dynamic receiver based checks
      // because the receiver might be cast or might come from an emulated stack
      // frame, which means that it is unknown at this point. We perform these
      // checks inside DoCallPolymorphic right before we do the actual invoke.
    } else if (handle_kind == kInvokeDirect) {
      // String constructors are a special case, they are replaced with StringFactory
      // methods.
      if (called_method->IsConstructor() && called_method->GetDeclaringClass()->IsStringClass()) {
        DCHECK(handle_type->GetRType()->IsStringClass());
        called_method = WellKnownClasses::StringInitToStringFactory(called_method);
      }
    } else if (handle_kind == kInvokeSuper) {
      ObjPtr<mirror::Class> declaring_class = called_method->GetDeclaringClass();

      // Note that we're not dynamically dispatching on the type of the receiver
      // here. We use the static type of the "receiver" object that we've
      // recorded in the method handle's type, which will be the same as the
      // special caller that was specified at the point of lookup.
      ObjPtr<mirror::Class> referrer_class = handle_type->GetPTypes()->Get(0);
      if (!declaring_class->IsInterface()) {
        ObjPtr<mirror::Class> super_class = referrer_class->GetSuperClass();
        uint16_t vtable_index = called_method->GetMethodIndex();
        DCHECK(super_class != nullptr);
        DCHECK(super_class->HasVTable());
        // Note that super_class is a super of referrer_class and called_method
        // will always be declared by super_class (or one of its super classes).
        DCHECK_LT(vtable_index, super_class->GetVTableLength());
        called_method = super_class->GetVTableEntry(vtable_index, kRuntimePointerSize);
      } else {
        called_method = referrer_class->FindVirtualMethodForInterfaceSuper(
            called_method, kRuntimePointerSize);
      }

      CHECK(called_method != nullptr);
    }

    bool call_success;
    if (handle_kind == kInvokeTransform) {
      call_success = DoCallTransform<is_range>(called_method,
                                               callsite_type,
                                               handle_type,
                                               self,
                                               shadow_frame,
                                               method_handle /* receiver */,
                                               result,
                                               arg,
                                               first_src_reg);
    } else {
      call_success = DoCallPolymorphic<is_range>(called_method,
                                                 callsite_type,
                                                 handle_type,
                                                 self,
                                                 shadow_frame,
                                                 result,
                                                 arg,
                                                 first_src_reg,
                                                 handle_kind);
    }
    if (LIKELY(call_success && ConvertReturnValue(callsite_type, handle_type, result))) {
      return true;
    }
    DCHECK(self->IsExceptionPending());
    return false;
  } else {
    // Field-accessor method handle kinds (getters/setters).
    DCHECK(!is_range);
    ArtField* field = method_handle->GetTargetField();
    Primitive::Type field_type = field->GetTypeAsPrimitiveType();

    switch (handle_kind) {
      case kInstanceGet: {
        ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(first_src_reg);
        DoFieldGetForInvokePolymorphic(self, shadow_frame, obj, field, field_type, result);
        if (!ConvertReturnValue(callsite_type, handle_type, result)) {
          DCHECK(self->IsExceptionPending());
          return false;
        }
        return true;
      }
      case kStaticGet: {
        ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
        if (obj == nullptr) {
          DCHECK(self->IsExceptionPending());
          return false;
        }
        DoFieldGetForInvokePolymorphic(self, shadow_frame, obj, field, field_type, result);
        if (!ConvertReturnValue(callsite_type, handle_type, result)) {
          DCHECK(self->IsExceptionPending());
          return false;
        }
        return true;
      }
      case
kInstancePut: { 1049 JValue value = GetFieldValue(shadow_frame, field_type, arg[1]); 1050 if (!ConvertArgumentValue(callsite_type, handle_type, 1, &value)) { 1051 DCHECK(self->IsExceptionPending()); 1052 return false; 1053 } 1054 ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(first_src_reg); 1055 return DoFieldPutForInvokePolymorphic(self, shadow_frame, obj, field, field_type, value); 1056 } 1057 case kStaticPut: { 1058 JValue value = GetFieldValue(shadow_frame, field_type, arg[0]); 1059 if (!ConvertArgumentValue(callsite_type, handle_type, 0, &value)) { 1060 DCHECK(self->IsExceptionPending()); 1061 return false; 1062 } 1063 ObjPtr<mirror::Object> obj = field->GetDeclaringClass(); 1064 return DoFieldPutForInvokePolymorphic(self, shadow_frame, obj, field, field_type, value); 1065 } 1066 default: 1067 LOG(FATAL) << "Unreachable: " << handle_kind; 1068 UNREACHABLE(); 1069 } 1070 } 1071} 1072 1073// Calculate the number of ins for a proxy or native method, where we 1074// can't just look at the code item. 1075static inline size_t GetInsForProxyOrNativeMethod(ArtMethod* method) 1076 REQUIRES_SHARED(Locks::mutator_lock_) { 1077 DCHECK(method->IsNative() || method->IsProxyMethod()); 1078 1079 method = method->GetInterfaceMethodIfProxy(kRuntimePointerSize); 1080 size_t num_ins = 0; 1081 // Separate accounting for the receiver, which isn't a part of the 1082 // shorty. 
1083 if (!method->IsStatic()) { 1084 ++num_ins; 1085 } 1086 1087 uint32_t shorty_len = 0; 1088 const char* shorty = method->GetShorty(&shorty_len); 1089 for (size_t i = 1; i < shorty_len; ++i) { 1090 const char c = shorty[i]; 1091 ++num_ins; 1092 if (c == 'J' || c == 'D') { 1093 ++num_ins; 1094 } 1095 } 1096 1097 return num_ins; 1098} 1099 1100 1101inline void PerformCall(Thread* self, 1102 const DexFile::CodeItem* code_item, 1103 ArtMethod* caller_method, 1104 const size_t first_dest_reg, 1105 ShadowFrame* callee_frame, 1106 JValue* result) { 1107 if (LIKELY(Runtime::Current()->IsStarted())) { 1108 ArtMethod* target = callee_frame->GetMethod(); 1109 if (ClassLinker::ShouldUseInterpreterEntrypoint( 1110 target, 1111 target->GetEntryPointFromQuickCompiledCode())) { 1112 ArtInterpreterToInterpreterBridge(self, code_item, callee_frame, result); 1113 } else { 1114 ArtInterpreterToCompiledCodeBridge( 1115 self, caller_method, code_item, callee_frame, result); 1116 } 1117 } else { 1118 UnstartedRuntime::Invoke(self, code_item, callee_frame, result, first_dest_reg); 1119 } 1120} 1121 1122template <bool is_range> 1123inline void CopyRegisters(ShadowFrame& caller_frame, 1124 ShadowFrame* callee_frame, 1125 const uint32_t (&arg)[Instruction::kMaxVarArgRegs], 1126 const size_t first_src_reg, 1127 const size_t first_dest_reg, 1128 const size_t num_regs) { 1129 if (is_range) { 1130 const size_t dest_reg_bound = first_dest_reg + num_regs; 1131 for (size_t src_reg = first_src_reg, dest_reg = first_dest_reg; dest_reg < dest_reg_bound; 1132 ++dest_reg, ++src_reg) { 1133 AssignRegister(callee_frame, caller_frame, dest_reg, src_reg); 1134 } 1135 } else { 1136 DCHECK_LE(num_regs, arraysize(arg)); 1137 1138 for (size_t arg_index = 0; arg_index < num_regs; ++arg_index) { 1139 AssignRegister(callee_frame, caller_frame, first_dest_reg + arg_index, arg[arg_index]); 1140 } 1141 } 1142} 1143 1144template <bool is_range> 1145static inline bool DoCallPolymorphic(ArtMethod* called_method, 1146 
Handle<mirror::MethodType> callsite_type,
                                     Handle<mirror::MethodType> target_type,
                                     Thread* self,
                                     ShadowFrame& shadow_frame,
                                     JValue* result,
                                     uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                     uint32_t first_src_reg,
                                     const MethodHandleKind handle_kind) {
  // Compute method information.
  const DexFile::CodeItem* code_item = called_method->GetCodeItem();

  // Number of registers for the callee's call frame. Note that for non-exact
  // invokes, we always derive this information from the callee method. We
  // cannot guarantee during verification that the number of registers encoded
  // in the invoke is equal to the number of ins for the callee. This is because
  // some transformations (such as boxing a long -> Long or widening an
  // int -> long) will change that number.
  uint16_t num_regs;
  size_t num_input_regs;
  size_t first_dest_reg;
  if (LIKELY(code_item != nullptr)) {
    num_regs = code_item->registers_size_;
    first_dest_reg = num_regs - code_item->ins_size_;
    num_input_regs = code_item->ins_size_;
    // Parameter registers go at the end of the shadow frame.
    DCHECK_NE(first_dest_reg, (size_t)-1);
  } else {
    // No local regs for proxy and native methods.
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = num_input_regs = GetInsForProxyOrNativeMethod(called_method);
    first_dest_reg = 0;
  }

  // Allocate shadow frame on the stack.
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Whether this polymorphic invoke was issued by a transformer method.
  bool is_caller_transformer = false;
  // Thread might be suspended during PerformArgumentConversions due to the
  // allocations performed during boxing.
  {
    ScopedStackedShadowFramePusher pusher(
        self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
    if (callsite_type->IsExactMatch(target_type.Get())) {
      // This is an exact invoke, we can take the fast path of just copying all
      // registers without performing any argument conversions.
      CopyRegisters<is_range>(shadow_frame,
                              new_shadow_frame,
                              arg,
                              first_src_reg,
                              first_dest_reg,
                              num_input_regs);
    } else {
      // This includes the case where we're entering this invoke-polymorphic
      // from a transformer method. In that case, the callsite_type will contain
      // a single argument of type dalvik.system.EmulatedStackFrame. In that
      // case, we'll have to unmarshal the EmulatedStackFrame into the
      // new_shadow_frame and perform argument conversions on it.
      if (IsCallerTransformer(callsite_type)) {
        is_caller_transformer = true;
        // The emulated stack frame is the first and only argument when we're coming
        // through from a transformer.
        ObjPtr<mirror::EmulatedStackFrame> emulated_stack_frame(
            reinterpret_cast<mirror::EmulatedStackFrame*>(
                shadow_frame.GetVRegReference(first_src_reg)));
        if (!emulated_stack_frame->WriteToShadowFrame(self,
                                                      target_type,
                                                      first_dest_reg,
                                                      new_shadow_frame)) {
          DCHECK(self->IsExceptionPending());
          result->SetL(0);
          return false;
        }
      } else if (!ConvertAndCopyArgumentsFromCallerFrame<is_range>(self,
                                                                   callsite_type,
                                                                   target_type,
                                                                   shadow_frame,
                                                                   first_src_reg,
                                                                   first_dest_reg,
                                                                   arg,
                                                                   new_shadow_frame)) {
        DCHECK(self->IsExceptionPending());
        result->SetL(0);
        return false;
      }
    }
  }

  // See TODO in DoInvokePolymorphic : We need to perform this dynamic, receiver
  // based dispatch right before we perform the actual call, because the
  // receiver isn't known very early.
  if (handle_kind == kInvokeVirtual || handle_kind == kInvokeInterface) {
    ObjPtr<mirror::Object> receiver(new_shadow_frame->GetVRegReference(first_dest_reg));
    ObjPtr<mirror::Class> declaring_class(called_method->GetDeclaringClass());
    // Verify that _vRegC is an object reference and of the type expected by
    // the receiver.
    if (!VerifyObjectIsClass(receiver, declaring_class)) {
      DCHECK(self->IsExceptionPending());
      return false;
    }

    called_method = receiver->GetClass()->FindVirtualMethodForVirtualOrInterface(
        called_method, kRuntimePointerSize);
  }

  PerformCall(self, code_item, shadow_frame.GetMethod(), first_dest_reg, new_shadow_frame, result);

  // If the caller of this signature polymorphic method was a transformer,
  // we need to copy the result back out to the emulated stack frame.
  if (is_caller_transformer && !self->IsExceptionPending()) {
    ObjPtr<mirror::EmulatedStackFrame> emulated_stack_frame(
        reinterpret_cast<mirror::EmulatedStackFrame*>(
            shadow_frame.GetVRegReference(first_src_reg)));

    emulated_stack_frame->SetReturnValue(self, *result);
  }

  return !self->IsExceptionPending();
}

// Invokes a transformer method handle: packs the caller's arguments into an
// EmulatedStackFrame and calls MethodHandle.transformInternal on |receiver|.
template <bool is_range>
static inline bool DoCallTransform(ArtMethod* called_method,
                                   Handle<mirror::MethodType> callsite_type,
                                   Handle<mirror::MethodType> callee_type,
                                   Thread* self,
                                   ShadowFrame& shadow_frame,
                                   Handle<mirror::MethodHandleImpl> receiver,
                                   JValue* result,
                                   uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                   uint32_t first_src_reg) {
  // This can be fixed to two, because the method we're calling here
  // (MethodHandle.transformInternal) doesn't have any locals and the signature
  // is known :
  //
  // private MethodHandle.transformInternal(EmulatedStackFrame sf);
  //
  // This means we need only two vregs :
  // - One for the receiver object.
// - One for the only method argument (an EmulatedStackFrame).
  static constexpr size_t kNumRegsForTransform = 2;

  const DexFile::CodeItem* code_item = called_method->GetCodeItem();
  DCHECK(code_item != nullptr);
  DCHECK_EQ(kNumRegsForTransform, code_item->registers_size_);
  DCHECK_EQ(kNumRegsForTransform, code_item->ins_size_);

  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(kNumRegsForTransform, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  StackHandleScope<1> hs(self);
  MutableHandle<mirror::EmulatedStackFrame> sf(hs.NewHandle<mirror::EmulatedStackFrame>(nullptr));
  if (IsCallerTransformer(callsite_type)) {
    // If we're entering this transformer from another transformer, we can pass
    // through the handle directly to the callee, instead of having to
    // instantiate a new stack frame based on the shadow frame.
    sf.Assign(reinterpret_cast<mirror::EmulatedStackFrame*>(
        shadow_frame.GetVRegReference(first_src_reg)));
  } else {
    sf.Assign(mirror::EmulatedStackFrame::CreateFromShadowFrameAndArgs<is_range>(
        self,
        callsite_type,
        callee_type,
        shadow_frame,
        first_src_reg,
        arg));

    // Something went wrong while creating the emulated stack frame, we should
    // throw the pending exception.
    if (sf.Get() == nullptr) {
      DCHECK(self->IsExceptionPending());
      return false;
    }
  }

  // vreg 0 = receiver (the method handle), vreg 1 = the EmulatedStackFrame.
  new_shadow_frame->SetVRegReference(0, receiver.Get());
  new_shadow_frame->SetVRegReference(1, sf.Get());

  PerformCall(self,
              code_item,
              shadow_frame.GetMethod(),
              0 /* first dest reg */,
              new_shadow_frame,
              result);

  // If the called transformer method we called has returned a value, then we
  // need to copy it back to |result|.
  if (!self->IsExceptionPending()) {
    sf->GetReturnValue(self, result);
  }

  return !self->IsExceptionPending();
}

// Common path for the invoke-* family: builds the callee shadow frame, copies
// (and optionally assignability-checks) arguments, then performs the call.
template <bool is_range,
          bool do_assignability_check>
static inline bool DoCallCommon(ArtMethod* called_method,
                                Thread* self,
                                ShadowFrame& shadow_frame,
                                JValue* result,
                                uint16_t number_of_inputs,
                                uint32_t (&arg)[Instruction::kMaxVarArgRegs],
                                uint32_t vregC) {
  bool string_init = false;
  // Replace calls to String.<init> with equivalent StringFactory call.
  if (UNLIKELY(called_method->GetDeclaringClass()->IsStringClass()
               && called_method->IsConstructor())) {
    called_method = WellKnownClasses::StringInitToStringFactory(called_method);
    string_init = true;
  }

  // Compute method information.
  const DexFile::CodeItem* code_item = called_method->GetCodeItem();

  // Number of registers for the callee's call frame.
  uint16_t num_regs;
  if (LIKELY(code_item != nullptr)) {
    num_regs = code_item->registers_size_;
    DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, code_item->ins_size_);
  } else {
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = number_of_inputs;
  }

  // Hack for String init:
  //
  // Rewrite invoke-x java.lang.String.<init>(this, a, b, c, ...) into:
  //         invoke-x StringFactory(a, b, c, ...)
  // by effectively dropping the first virtual register from the invoke.
  //
  // (at this point the ArtMethod has already been replaced,
  // so we just need to fix-up the arguments)
  //
  // Note that FindMethodFromCode in entrypoint_utils-inl.h was also special-cased
  // to handle the compiler optimization of replacing `this` with null without
  // throwing NullPointerException.
  uint32_t string_init_vreg_this = is_range ?
vregC : arg[0];
  if (UNLIKELY(string_init)) {
    DCHECK_GT(num_regs, 0u);  // As the method is an instance method, there should be at least 1.

    // The new StringFactory call is static and has one fewer argument.
    if (code_item == nullptr) {
      DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
      num_regs--;
    }  // else ... don't need to change num_regs since it comes up from the string_init's code item
    number_of_inputs--;

    // Rewrite the var-args, dropping the 0th argument ("this")
    for (uint32_t i = 1; i < arraysize(arg); ++i) {
      arg[i - 1] = arg[i];
    }
    arg[arraysize(arg) - 1] = 0;

    // Rewrite the non-var-arg case
    vregC++;  // Skips the 0th vreg in the range ("this").
  }

  // Parameter registers go at the end of the shadow frame.
  DCHECK_GE(num_regs, number_of_inputs);
  size_t first_dest_reg = num_regs - number_of_inputs;
  DCHECK_NE(first_dest_reg, (size_t)-1);

  // Allocate shadow frame on the stack.
  const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Initialize new shadow frame by copying the registers from the caller's shadow frame.
  if (do_assignability_check) {
    // Slow path.
    // We might need to do class loading, which incurs a thread state change to kNative. So
    // register the shadow frame as under construction and allow suspension again.
    ScopedStackedShadowFramePusher pusher(
        self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
    self->EndAssertNoThreadSuspension(old_cause);

    // ArtMethod here is needed to check type information of the call site against the callee.
    // Type information is retrieved from a DexFile/DexCache for that respective declared method.
    //
    // As a special case for proxy methods, which are not dex-backed,
    // we have to retrieve type information from the proxy's method
    // interface method instead (which is dex backed since proxies are never interfaces).
    ArtMethod* method =
        new_shadow_frame->GetMethod()->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    // We need to do runtime check on reference assignment. We need to load the shorty
    // to get the exact type of each reference argument.
    const DexFile::TypeList* params = method->GetParameterTypeList();
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);

    // Handle receiver apart since it's not part of the shorty.
    size_t dest_reg = first_dest_reg;
    size_t arg_offset = 0;

    if (!method->IsStatic()) {
      size_t receiver_reg = is_range ? vregC : arg[0];
      new_shadow_frame->SetVRegReference(dest_reg, shadow_frame.GetVRegReference(receiver_reg));
      ++dest_reg;
      ++arg_offset;
      DCHECK(!string_init);  // All StringFactory methods are static.
    }

    // Copy the caller's invoke-* arguments into the callee's parameter registers.
    for (uint32_t shorty_pos = 0; dest_reg < num_regs; ++shorty_pos, ++dest_reg, ++arg_offset) {
      // Skip the 0th 'shorty' type since it represents the return type.
      DCHECK_LT(shorty_pos + 1, shorty_len) << "for shorty '" << shorty << "'";
      const size_t src_reg = (is_range) ? vregC + arg_offset : arg[arg_offset];
      switch (shorty[shorty_pos + 1]) {
        // Handle Object references. 1 virtual register slot.
        case 'L': {
          ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference(src_reg);
          if (do_assignability_check && o != nullptr) {
            PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
            const dex::TypeIndex type_idx = params->GetTypeItem(shorty_pos).type_idx_;
            ObjPtr<mirror::Class> arg_type = method->GetDexCacheResolvedType(type_idx,
                                                                             pointer_size);
            if (arg_type == nullptr) {
              StackHandleScope<1> hs(self);
              // Preserve o since it is used below and GetClassFromTypeIndex may cause thread
              // suspension.
              HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&o);
              arg_type = method->GetClassFromTypeIndex(type_idx, true /* resolve */, pointer_size);
              if (arg_type == nullptr) {
                CHECK(self->IsExceptionPending());
                return false;
              }
            }
            if (!o->VerifierInstanceOf(arg_type)) {
              // This should never happen.
              std::string temp1, temp2;
              self->ThrowNewExceptionF("Ljava/lang/VirtualMachineError;",
                                       "Invoking %s with bad arg %d, type '%s' not instance of '%s'",
                                       new_shadow_frame->GetMethod()->GetName(), shorty_pos,
                                       o->GetClass()->GetDescriptor(&temp1),
                                       arg_type->GetDescriptor(&temp2));
              return false;
            }
          }
          new_shadow_frame->SetVRegReference(dest_reg, o.Ptr());
          break;
        }
        // Handle doubles and longs. 2 consecutive virtual register slots.
        case 'J': case 'D': {
          uint64_t wide_value =
              (static_cast<uint64_t>(shadow_frame.GetVReg(src_reg + 1)) << BitSizeOf<uint32_t>()) |
              static_cast<uint32_t>(shadow_frame.GetVReg(src_reg));
          new_shadow_frame->SetVRegLong(dest_reg, wide_value);
          // Skip the next virtual register slot since we already used it.
          ++dest_reg;
          ++arg_offset;
          break;
        }
        // Handle all other primitives that are always 1 virtual register slot.
        default:
          new_shadow_frame->SetVReg(dest_reg, shadow_frame.GetVReg(src_reg));
          break;
      }
    }
  } else {
    if (is_range) {
      DCHECK_EQ(num_regs, first_dest_reg + number_of_inputs);
    }

    CopyRegisters<is_range>(shadow_frame,
                            new_shadow_frame,
                            arg,
                            vregC,
                            first_dest_reg,
                            number_of_inputs);
    self->EndAssertNoThreadSuspension(old_cause);
  }

  PerformCall(self, code_item, shadow_frame.GetMethod(), first_dest_reg, new_shadow_frame, result);

  if (string_init && !self->IsExceptionPending()) {
    SetStringInitValueToAllAliases(&shadow_frame, string_init_vreg_this, *result);
  }

  return !self->IsExceptionPending();
}

// Entry point for invoke-* instructions: extracts argument information from
// the instruction and forwards to DoCallCommon.
template<bool is_range, bool do_assignability_check>
bool DoCall(ArtMethod* called_method, Thread* self, ShadowFrame& shadow_frame,
            const Instruction* inst, uint16_t inst_data, JValue* result) {
  // Argument word count.
  const uint16_t number_of_inputs =
      (is_range) ? inst->VRegA_3rc(inst_data) : inst->VRegA_35c(inst_data);

  // TODO: find a cleaner way to separate non-range and range information without duplicating
  // code.
  uint32_t arg[Instruction::kMaxVarArgRegs] = {};  // only used in invoke-XXX.
  uint32_t vregC = 0;
  if (is_range) {
    vregC = inst->VRegC_3rc();
  } else {
    vregC = inst->VRegC_35c();
    inst->GetVarArgs(arg, inst_data);
  }

  return DoCallCommon<is_range, do_assignability_check>(
      called_method, self, shadow_frame,
      result, number_of_inputs, arg, vregC);
}

// Interprets filled-new-array / filled-new-array-range: allocates the array
// and fills it from the instruction's argument registers.
template <bool is_range, bool do_access_check, bool transaction_active>
bool DoFilledNewArray(const Instruction* inst,
                      const ShadowFrame& shadow_frame,
                      Thread* self,
                      JValue* result) {
  DCHECK(inst->Opcode() == Instruction::FILLED_NEW_ARRAY ||
         inst->Opcode() == Instruction::FILLED_NEW_ARRAY_RANGE);
  const int32_t length = is_range ?
inst->VRegA_3rc() : inst->VRegA_35c();
  if (!is_range) {
    // Checks FILLED_NEW_ARRAY's length does not exceed 5 arguments.
    CHECK_LE(length, 5);
  }
  if (UNLIKELY(length < 0)) {
    ThrowNegativeArraySizeException(length);
    return false;
  }
  uint16_t type_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
  ObjPtr<mirror::Class> array_class = ResolveVerifyAndClinit(dex::TypeIndex(type_idx),
                                                             shadow_frame.GetMethod(),
                                                             self,
                                                             false,
                                                             do_access_check);
  if (UNLIKELY(array_class == nullptr)) {
    DCHECK(self->IsExceptionPending());
    return false;
  }
  CHECK(array_class->IsArrayClass());
  ObjPtr<mirror::Class> component_class = array_class->GetComponentType();
  const bool is_primitive_int_component = component_class->IsPrimitiveInt();
  // filled-new-array only supports int among the primitive component types.
  if (UNLIKELY(component_class->IsPrimitive() && !is_primitive_int_component)) {
    if (component_class->IsPrimitiveLong() || component_class->IsPrimitiveDouble()) {
      ThrowRuntimeException("Bad filled array request for type %s",
                            component_class->PrettyDescriptor().c_str());
    } else {
      self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                               "Found type %s; filled-new-array not implemented for anything but 'int'",
                               component_class->PrettyDescriptor().c_str());
    }
    return false;
  }
  ObjPtr<mirror::Object> new_array = mirror::Array::Alloc<true>(
      self,
      array_class,
      length,
      array_class->GetComponentSizeShift(),
      Runtime::Current()->GetHeap()->GetCurrentAllocator());
  if (UNLIKELY(new_array == nullptr)) {
    self->AssertPendingOOMException();
    return false;
  }
  uint32_t arg[Instruction::kMaxVarArgRegs];  // only used in filled-new-array.
  uint32_t vregC = 0;   // only used in filled-new-array-range.
  if (is_range) {
    vregC = inst->VRegC_3rc();
  } else {
    inst->GetVarArgs(arg);
  }
  for (int32_t i = 0; i < length; ++i) {
    size_t src_reg = is_range ? vregC + i : arg[i];
    if (is_primitive_int_component) {
      new_array->AsIntArray()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVReg(src_reg));
    } else {
      new_array->AsObjectArray<mirror::Object>()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVRegReference(src_reg));
    }
  }

  result->SetL(new_array);
  return true;
}

// TODO: Use ObjPtr here.
// Records the current value of each of the first |count| elements of |array|
// in the active transaction so the writes can be rolled back on abort.
template<typename T>
static void RecordArrayElementsInTransactionImpl(mirror::PrimitiveArray<T>* array,
                                                 int32_t count)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  for (int32_t i = 0; i < count; ++i) {
    runtime->RecordWriteArray(array, i, array->GetWithoutChecks(i));
  }
}

void RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array, int32_t count)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(Runtime::Current()->IsActiveTransaction());
  DCHECK(array != nullptr);
  DCHECK_LE(count, array->GetLength());
  // Dispatch on the array's primitive component type.
  Primitive::Type primitive_component_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
  switch (primitive_component_type) {
    case Primitive::kPrimBoolean:
      RecordArrayElementsInTransactionImpl(array->AsBooleanArray(), count);
      break;
    case Primitive::kPrimByte:
      RecordArrayElementsInTransactionImpl(array->AsByteArray(), count);
      break;
    case Primitive::kPrimChar:
      RecordArrayElementsInTransactionImpl(array->AsCharArray(), count);
      break;
    case Primitive::kPrimShort:
      RecordArrayElementsInTransactionImpl(array->AsShortArray(), count);
      break;
    case Primitive::kPrimInt:
      RecordArrayElementsInTransactionImpl(array->AsIntArray(), count);
      break;
    case Primitive::kPrimFloat:
      RecordArrayElementsInTransactionImpl(array->AsFloatArray(), count);
      break;
    case Primitive::kPrimLong:
      RecordArrayElementsInTransactionImpl(array->AsLongArray(), count);
break; 1665 case Primitive::kPrimDouble: 1666 RecordArrayElementsInTransactionImpl(array->AsDoubleArray(), count); 1667 break; 1668 default: 1669 LOG(FATAL) << "Unsupported primitive type " << primitive_component_type 1670 << " in fill-array-data"; 1671 break; 1672 } 1673} 1674 1675// Explicit DoCall template function declarations. 1676#define EXPLICIT_DO_CALL_TEMPLATE_DECL(_is_range, _do_assignability_check) \ 1677 template REQUIRES_SHARED(Locks::mutator_lock_) \ 1678 bool DoCall<_is_range, _do_assignability_check>(ArtMethod* method, Thread* self, \ 1679 ShadowFrame& shadow_frame, \ 1680 const Instruction* inst, uint16_t inst_data, \ 1681 JValue* result) 1682EXPLICIT_DO_CALL_TEMPLATE_DECL(false, false); 1683EXPLICIT_DO_CALL_TEMPLATE_DECL(false, true); 1684EXPLICIT_DO_CALL_TEMPLATE_DECL(true, false); 1685EXPLICIT_DO_CALL_TEMPLATE_DECL(true, true); 1686#undef EXPLICIT_DO_CALL_TEMPLATE_DECL 1687 1688// Explicit DoInvokePolymorphic template function declarations. 1689#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range, _do_assignability_check) \ 1690 template REQUIRES_SHARED(Locks::mutator_lock_) \ 1691 bool DoInvokePolymorphic<_is_range, _do_assignability_check>( \ 1692 Thread* self, ShadowFrame& shadow_frame, const Instruction* inst, \ 1693 uint16_t inst_data, JValue* result) 1694 1695EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false, false); 1696EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false, true); 1697EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true, false); 1698EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true, true); 1699#undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL 1700 1701// Explicit DoFilledNewArray template function declarations. 
1702#define EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(_is_range_, _check, _transaction_active) \ 1703 template REQUIRES_SHARED(Locks::mutator_lock_) \ 1704 bool DoFilledNewArray<_is_range_, _check, _transaction_active>(const Instruction* inst, \ 1705 const ShadowFrame& shadow_frame, \ 1706 Thread* self, JValue* result) 1707#define EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(_transaction_active) \ 1708 EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, false, _transaction_active); \ 1709 EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, true, _transaction_active); \ 1710 EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, false, _transaction_active); \ 1711 EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, true, _transaction_active) 1712EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(false); 1713EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(true); 1714#undef EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL 1715#undef EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL 1716 1717} // namespace interpreter 1718} // namespace art 1719