entrypoint_utils.h revision bb8f0ab736b61db8f543e433859272e83f96ee9b
1/* 2 * Copyright (C) 2012 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_ 18#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_ 19 20#include "base/macros.h" 21#include "class_linker.h" 22#include "common_throws.h" 23#include "dex_file.h" 24#include "indirect_reference_table.h" 25#include "invoke_type.h" 26#include "jni_internal.h" 27#include "mirror/art_method.h" 28#include "mirror/array.h" 29#include "mirror/class-inl.h" 30#include "mirror/object-inl.h" 31#include "mirror/throwable.h" 32#include "locks.h" 33#include "object_utils.h" 34#include "sirt_ref.h" 35#include "thread.h" 36 37namespace art { 38 39namespace mirror { 40 class Class; 41 class ArtField; 42 class Object; 43} // namespace mirror 44 45// TODO: Fix no thread safety analysis when GCC can handle template specialization. 
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
// Resolves the class for |type_idx| via |method|'s dex cache and performs the checks
// needed before allocating an instance of it: resolution, instantiability and access
// (when kAccessCheck), and class initialization. Sets *slow_path = true whenever the
// caller must re-check the current allocator type and null-check the result (resolution,
// thrown errors, or class initialization may have suspended the thread). Returns nullptr
// on failure with an exception pending on |self|.
template <const bool kAccessCheck>
ALWAYS_INLINE static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                                            mirror::ArtMethod* method,
                                                            Thread* self, bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
  mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
  if (UNLIKELY(klass == NULL)) {
    // Not in the dex cache: resolve through the class linker (may allocate/throw).
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == NULL) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
  }
  if (kAccessCheck) {
    // Verifier couldn't prove these statically, so check at runtime.
    if (UNLIKELY(!klass->IsInstantiable())) {
      ThrowLocation throw_location = self->GetCurrentLocationForThrow();
      self->ThrowNewException(throw_location, "Ljava/lang/InstantiationError;",
                              PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    SirtRef<mirror::Class> sirt_klass(self, klass);
    // EnsureInitialized (running the class initializer) might cause a GC and
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(sirt_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    // Return through the SirtRef — presumably so a moved class reference is
    // re-read after possible GC. TODO(review): confirm against SirtRef semantics.
    return sirt_klass.get();
  }
  return klass;
}

// TODO: Fix no thread safety analysis when annotalysis is smarter.
96ALWAYS_INLINE static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass, 97 Thread* self, bool* slow_path) 98 NO_THREAD_SAFETY_ANALYSIS { 99 if (UNLIKELY(!klass->IsInitialized())) { 100 SirtRef<mirror::Class> sirt_class(self, klass); 101 // EnsureInitialized (the class initializer) might cause a GC. 102 // may cause us to suspend meaning that another thread may try to 103 // change the allocator while we are stuck in the entrypoints of 104 // an old allocator. Also, the class initialization may fail. To 105 // handle these cases we mark the slow path boolean as true so 106 // that the caller knows to check the allocator type to see if it 107 // has changed and to null-check the return value in case the 108 // initialization fails. 109 *slow_path = true; 110 if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(sirt_class, true, true)) { 111 DCHECK(self->IsExceptionPending()); 112 return nullptr; // Failure 113 } 114 return sirt_class.get(); 115 } 116 return klass; 117} 118 119// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it 120// cannot be resolved, throw an error. If it can, use it to create an instance. 121// When verification/compiler hasn't been able to verify access, optionally perform an access 122// check. 123// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter. 
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
                                                                mirror::ArtMethod* method,
                                                                Thread* self,
                                                                gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  bool slow_path = false;
  mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;  // Exception pending from the check.
    }
    // The check may have suspended us, so re-read the *current* allocator from
    // the heap instead of trusting the (possibly stale) |allocator_type| argument.
    gc::Heap* heap = Runtime::Current()->GetHeap();
    return klass->Alloc<kInstrumented>(self, heap->GetCurrentAllocator());
  }
  return klass->Alloc<kInstrumented>(self, allocator_type);
}

// Given the context of a calling Method and a resolved class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                                        mirror::ArtMethod* method,
                                                                        Thread* self,
                                                                        gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;  // Class initialization failed; exception pending.
    }
    // Initialization may have suspended us; re-read the current allocator.
    gc::Heap* heap = Runtime::Current()->GetHeap();
    return klass->Alloc<kInstrumented>(self, heap->GetCurrentAllocator());
  }
  return klass->Alloc<kInstrumented>(self, allocator_type);
}

// Given the context of a calling Method and an initialized class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                                           mirror::ArtMethod* method,
                                                                           Thread* self,
                                                                           gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  // Caller guarantees |klass| is resolved, accessible and initialized, so no
  // checks (and no possible suspension) are needed before allocating.
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type);
}


// TODO: Fix no thread safety analysis when GCC can handle template specialization.
// Resolves the array class for |type_idx| and performs the checks needed before an
// array allocation: non-negative |component_count|, resolution, and (when kAccessCheck)
// access from |method|'s declaring class. Sets *slow_path = true whenever the caller
// must null-check the result and re-check the allocator type; returns nullptr on
// failure with an exception pending.
template <bool kAccessCheck>
ALWAYS_INLINE static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                                           mirror::ArtMethod* method,
                                                           int32_t component_count,
                                                           bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    // A type_idx used for an array allocation must resolve to an array class.
    CHECK(klass->IsArrayClass()) << PrettyClass(klass);
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When verification/compiler hasn't been able to verify access, optionally perform an access
// check.
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
                                                              mirror::ArtMethod* method,
                                                              int32_t component_count,
                                                              Thread* self,
                                                              gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, method, component_count,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;  // Exception pending from the check.
    }
    // The check may have suspended us; re-read the current allocator from the heap.
    gc::Heap* heap = Runtime::Current()->GetHeap();
    return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                               heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count, allocator_type);
}

// Given a calling Method and an already-resolved array class, create an array of
// |component_count| elements after a negative-size check and an optional access check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                                      mirror::ArtMethod* method,
                                                                      int32_t component_count,
                                                                      Thread* self,
                                                                      gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      return nullptr;  // Failure
    }
  }
  // No need to retry a slow-path allocation as the above code won't
  // cause a GC or thread suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count, allocator_type);
}

// Slow-path array allocation entrypoints with full checks; defined out-of-line.
extern mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, mirror::ArtMethod* method,
                                                 int32_t component_count, Thread* self,
                                                 bool access_check,
                                                 gc::AllocatorType allocator_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

extern mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx,
                                                             mirror::ArtMethod* method,
                                                             int32_t component_count, Thread* self,
                                                             bool access_check,
                                                             gc::AllocatorType allocator_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

// Type of find field operation for fast and slow case.
enum FindFieldType {
  InstanceObjectRead,
  InstanceObjectWrite,
  InstancePrimitiveRead,
  InstancePrimitiveWrite,
  StaticObjectRead,
  StaticObjectWrite,
  StaticPrimitiveRead,
  StaticPrimitiveWrite,
};

// Slow-path field resolution from compiled code. Resolves |field_idx| relative to
// |referrer|, optionally performing access/static-ness/size checks, and for static
// fields ensures the declaring class is initialized. Returns nullptr with an
// exception pending on failure.
template<FindFieldType type, bool access_check>
static inline mirror::ArtField* FindFieldFromCode(uint32_t field_idx, const mirror::ArtMethod* referrer,
                                                  Thread* self, size_t expected_size) {
  // Decode the compile-time FindFieldType into its three orthogonal properties.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   // Keep GCC happy by having a default handler, fall-through.
    default:                     is_primitive = true;  is_set = true;  is_static = true;  break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    // Static-ness of the resolved field must match the instruction used.
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccessResolvedField<true>(fields_class, resolved_field,
                                                                field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    // Final fields may only be written by their declaring class (i.e. in <clinit>/<init>).
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      // Check that the field's type category and width match the access instruction.
      FieldHelper fh(resolved_field);
      if (UNLIKELY(fh.IsPrimitiveType() != is_primitive ||
                   fh.FieldSize() != expected_size)) {
        ThrowLocation throw_location = self->GetCurrentLocationForThrow();
        DCHECK(throw_location.GetMethod() == referrer);
        // NOTE(review): message always says "read" even when is_set is true —
        // consider wording based on is_set. expected_size * 8 converts bytes to bits.
        self->ThrowNewExceptionF(throw_location, "Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // instance fields must be being accessed on an initialized class
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      SirtRef<mirror::Class> sirt_class(self, fields_class);
      if (LIKELY(class_linker->EnsureInitialized(sirt_class, true, true))) {
        // Otherwise let's ensure the class is initialized before resolving the field.
        return resolved_field;
      } else {
        DCHECK(self->IsExceptionPending());  // Throw exception and unwind
        return nullptr;  // Failure.
      }
    }
  }
}

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
mirror::ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                          const mirror::ArtMethod* referrer, \
                                                          Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

// Slow-path method resolution from compiled code. Resolves |method_idx| relative to
// |referrer| for the given invoke |type|, optionally performing access and
// incompatible-class-change checks, then selects the actual target: the resolved
// method (static/direct), a vtable entry (virtual/super) or an IMT/interface lookup
// (interface). Returns nullptr with an exception pending on failure.
template<InvokeType type, bool access_check>
static inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object* this_object,
                                                    mirror::ArtMethod* referrer, Thread* self) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* resolved_method = class_linker->ResolveMethod(method_idx, referrer, type);
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    // Maintain interpreter-like semantics where NullPointerException is thrown
    // after potential NoSuchMethodError from class linker.
    ThrowLocation throw_location = self->GetCurrentLocationForThrow();
    DCHECK(referrer == throw_location.GetMethod());
    ThrowNullPointerExceptionForMethodAccess(throw_location, method_idx, type);
    return nullptr;  // Failure.
  } else if (access_check) {
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        referrer);
      return nullptr;  // Failure.
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    bool can_access_resolved_method =
        referring_class->CanAccessResolvedMethod<true, type>(methods_class, resolved_method,
                                                             method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      // Statically-bound invokes dispatch straight to the resolved method.
      return resolved_method;
    case kVirtual: {
      mirror::ObjectArray<mirror::ArtMethod>* vtable = this_object->GetClass()->GetVTable();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (vtable == nullptr || vtable_index >= static_cast<uint32_t>(vtable->GetLength()))) {
        // Behavior to agree with that of the verifier.
        MethodHelper mh(resolved_method);
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(), mh.GetName(),
                               mh.GetSignature());
        return nullptr;  // Failure.
      }
      // Without access checks a valid vtable is an invariant established by the verifier.
      DCHECK(vtable != nullptr);
      return vtable->GetWithoutChecks(vtable_index);
    }
    case kSuper: {
      mirror::Class* super_class = referrer->GetDeclaringClass()->GetSuperClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      mirror::ObjectArray<mirror::ArtMethod>* vtable;
      if (access_check) {
        // Check existence of super class.
        vtable = (super_class != nullptr) ? super_class->GetVTable() : nullptr;
        if (vtable == nullptr || vtable_index >= static_cast<uint32_t>(vtable->GetLength())) {
          // Behavior to agree with that of the verifier.
          MethodHelper mh(resolved_method);
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(), mh.GetName(),
                                 mh.GetSignature());
          return nullptr;  // Failure.
        }
      } else {
        // Super class must exist.
        DCHECK(super_class != nullptr);
        vtable = super_class->GetVTable();
      }
      DCHECK(vtable != nullptr);
      return vtable->GetWithoutChecks(vtable_index);
    }
    case kInterface: {
      // Try the interface method table (IMT) first; fall back to a search only
      // when the IMT slot holds the shared conflict method.
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % ClassLinker::kImtSize;
      mirror::ObjectArray<mirror::ArtMethod>* imt_table = this_object->GetClass()->GetImTable();
      mirror::ArtMethod* imt_method = imt_table->Get(imt_index);
      if (!imt_method->IsImtConflictMethod()) {
        return imt_method;
      } else {
        mirror::ArtMethod* interface_method =
            this_object->GetClass()->FindVirtualMethodForInterface(resolved_method);
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method, this_object,
                                                                     referrer);
          return nullptr;  // Failure.
        } else {
          return interface_method;
        }
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
  mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                              mirror::Object* this_object, \
                                                              mirror::ArtMethod* referrer, \
                                                              Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
  EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
  EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
// Returns nullptr (NULL) on any miss or mismatch so the caller can fall back to
// the throwing slow path (FindFieldFromCode).
static inline mirror::ArtField* FindFieldFast(uint32_t field_idx,
                                              const mirror::ArtMethod* referrer,
                                              FindFieldType type, size_t expected_size)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx);
  if (UNLIKELY(resolved_field == NULL)) {
    return NULL;  // Not in the dex cache; slow path must resolve.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  // Check class is initialized or initializing.
  if (UNLIKELY(!fields_class->IsInitializing())) {
    return NULL;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   is_primitive = true;  is_set = true;  is_static = true;  break;
    default:
      LOG(FATAL) << "UNREACHABLE";  // Assignment below to avoid GCC warnings.
      is_primitive = true;
      is_set = true;
      is_static = true;
      break;
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return NULL;
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class,
                                                 resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return NULL;
  }
  // Type category / width mismatch also bails to the slow path.
  FieldHelper fh(resolved_field);
  if (UNLIKELY(fh.IsPrimitiveType() != is_primitive ||
               fh.FieldSize() != expected_size)) {
    return NULL;
  }
  return resolved_field;
}

// Fast path method resolution that can't throw exceptions.
// Fast-path method resolution: dex-cache hit only, optional access checks, and
// dispatch selection. Returns nullptr (NULL) on any miss so the caller can fall
// back to the throwing slow path (FindMethodFromCode).
static inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx,
                                                mirror::Object* this_object,
                                                const mirror::ArtMethod* referrer,
                                                bool access_check, InvokeType type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  bool is_direct = type == kStatic || type == kDirect;
  if (UNLIKELY(this_object == NULL && !is_direct)) {
    return NULL;  // Null receiver for a receiver-based invoke; slow path throws NPE.
  }
  mirror::ArtMethod* resolved_method =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx);
  if (UNLIKELY(resolved_method == NULL)) {
    return NULL;  // Not in the dex cache; slow path must resolve.
  }
  if (access_check) {
    // Check for incompatible class change errors and access.
    bool icce = resolved_method->CheckIncompatibleClassChange(type);
    if (UNLIKELY(icce)) {
      return NULL;
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
                 !referring_class->CanAccessMember(methods_class,
                                                   resolved_method->GetAccessFlags()))) {
      // Potential illegal access, may need to refine the method's class.
      return NULL;
    }
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method);
  } else if (is_direct) {
    return resolved_method;
  } else if (type == kSuper) {
    // Dispatch through the superclass vtable at the resolved method's index.
    return referrer->GetDeclaringClass()->GetSuperClass()->GetVTable()->
        Get(resolved_method->GetMethodIndex());
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTable()->Get(resolved_method->GetMethodIndex());
  }
}

// Resolves |type_idx| to a class, optionally checks access from |referrer|, and
// (unless !can_run_clinit, e.g. const-class) ensures the class is initialized.
// Returns nullptr with an exception pending on failure.
static inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
                                                    const mirror::ArtMethod* referrer,
                                                    Thread* self, bool can_run_clinit,
                                                    bool verify_access)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  SirtRef<mirror::Class> sirt_class(self, klass);
  if (!class_linker->EnsureInitialized(sirt_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return sirt_class.get();
}

extern void ThrowStackOverflowError(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

// Resolves |string_idx| to a String via the class linker using |referrer|'s dex file.
static inline mirror::String* ResolveStringFromCode(const mirror::ArtMethod* referrer,
                                                    uint32_t string_idx)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveString(string_idx, referrer);
}

// Releases the implicit monitor held by a synchronized JNI method on return,
// preserving any exception the native code left pending across the MonitorExit.
static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
    UNLOCK_FUNCTION(monitor_lock_) {
  // Save any pending exception over monitor exit call.
  mirror::Throwable* saved_exception = NULL;
  ThrowLocation saved_throw_location;
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException(&saved_throw_location);
    self->ClearException();
  }
  // Decode locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    // A second exception during the implicit unlock is unrecoverable.
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
        << saved_exception->Dump()
        << "\nEncountered second exception during implicit MonitorExit:\n"
        << self->GetException(NULL)->Dump();
  }
  // Restore pending exception.
  if (saved_exception != NULL) {
    self->SetException(saved_throw_location, saved_exception);
  }
}

// Aborts (via JniAbortF) if a JNI method returned an invalid reference or an
// object that is not an instance of the method's declared return type.
static inline void CheckReferenceResult(mirror::Object* o, Thread* self)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  if (o == NULL) {
    return;  // Null is always a valid reference result.
  }
  mirror::ArtMethod* m = self->GetCurrentMethod(NULL);
  if (o == kInvalidIndirectRefObject) {
    JniAbortF(NULL, "invalid reference returned from %s", PrettyMethod(m).c_str());
  }
  // Make sure that the result is an instance of the type this method was expected to return.
  mirror::Class* return_type = MethodHelper(m).GetReturnType();

  if (!o->InstanceOf(return_type)) {
    JniAbortF(NULL, "attempt to return an instance of %s from %s",
              PrettyTypeOf(o).c_str(), PrettyMethod(m).c_str());
  }
}

// Services any pending checkpoint or suspend requests on |thread|, looping until
// neither flag is set.
static inline void CheckSuspend(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  for (;;) {
    if (thread->ReadFlag(kCheckpointRequest)) {
      thread->RunCheckpointFunction();
    } else if (thread->ReadFlag(kSuspendRequest)) {
      thread->FullSuspendCheck();
    } else {
      break;
    }
  }
}

JValue InvokeProxyInvocationHandler(ScopedObjectAccessUnchecked& soa, const char* shorty,
                                    jobject rcvr_jobj, jobject interface_art_method_jobj,
                                    std::vector<jvalue>& args)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

// Entry point for deoptimization.
extern "C" void art_quick_deoptimize();
static inline uintptr_t GetQuickDeoptimizationEntryPoint() {
  return reinterpret_cast<uintptr_t>(art_quick_deoptimize);
}

// Return address of instrumentation stub.
extern "C" void art_quick_instrumentation_entry(void*);
static inline void* GetQuickInstrumentationEntryPoint() {
  return reinterpret_cast<void*>(art_quick_instrumentation_entry);
}

// The return_pc of instrumentation exit stub.
711extern "C" void art_quick_instrumentation_exit(); 712static inline uintptr_t GetQuickInstrumentationExitPc() { 713 return reinterpret_cast<uintptr_t>(art_quick_instrumentation_exit); 714} 715 716extern "C" void art_portable_to_interpreter_bridge(mirror::ArtMethod*); 717static inline const void* GetPortableToInterpreterBridge() { 718 return reinterpret_cast<void*>(art_portable_to_interpreter_bridge); 719} 720 721extern "C" void art_quick_to_interpreter_bridge(mirror::ArtMethod*); 722static inline const void* GetQuickToInterpreterBridge() { 723 return reinterpret_cast<void*>(art_quick_to_interpreter_bridge); 724} 725 726// Return address of interpreter stub. 727static inline const void* GetCompiledCodeToInterpreterBridge() { 728#if defined(ART_USE_PORTABLE_COMPILER) 729 return GetPortableToInterpreterBridge(); 730#else 731 return GetQuickToInterpreterBridge(); 732#endif 733} 734 735 736static inline const void* GetPortableResolutionTrampoline(ClassLinker* class_linker) { 737 return class_linker->GetPortableResolutionTrampoline(); 738} 739 740static inline const void* GetQuickResolutionTrampoline(ClassLinker* class_linker) { 741 return class_linker->GetQuickResolutionTrampoline(); 742} 743 744// Return address of resolution trampoline stub for defined compiler. 745static inline const void* GetResolutionTrampoline(ClassLinker* class_linker) { 746#if defined(ART_USE_PORTABLE_COMPILER) 747 return GetPortableResolutionTrampoline(class_linker); 748#else 749 return GetQuickResolutionTrampoline(class_linker); 750#endif 751} 752 753static inline const void* GetPortableImtConflictTrampoline(ClassLinker* class_linker) { 754 return class_linker->GetPortableImtConflictTrampoline(); 755} 756 757static inline const void* GetQuickImtConflictTrampoline(ClassLinker* class_linker) { 758 return class_linker->GetQuickImtConflictTrampoline(); 759} 760 761// Return address of imt conflict trampoline stub for defined compiler. 
762static inline const void* GetImtConflictTrampoline(ClassLinker* class_linker) { 763#if defined(ART_USE_PORTABLE_COMPILER) 764 return GetPortableImtConflictTrampoline(class_linker); 765#else 766 return GetQuickImtConflictTrampoline(class_linker); 767#endif 768} 769 770extern "C" void art_portable_proxy_invoke_handler(); 771static inline const void* GetPortableProxyInvokeHandler() { 772 return reinterpret_cast<void*>(art_portable_proxy_invoke_handler); 773} 774 775extern "C" void art_quick_proxy_invoke_handler(); 776static inline const void* GetQuickProxyInvokeHandler() { 777 return reinterpret_cast<void*>(art_quick_proxy_invoke_handler); 778} 779 780static inline const void* GetProxyInvokeHandler() { 781#if defined(ART_USE_PORTABLE_COMPILER) 782 return GetPortableProxyInvokeHandler(); 783#else 784 return GetQuickProxyInvokeHandler(); 785#endif 786} 787 788extern "C" void* art_jni_dlsym_lookup_stub(JNIEnv*, jobject); 789static inline void* GetJniDlsymLookupStub() { 790 return reinterpret_cast<void*>(art_jni_dlsym_lookup_stub); 791} 792 793template <typename INT_TYPE, typename FLOAT_TYPE> 794static inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) { 795 const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max()); 796 const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min()); 797 const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt); 798 const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt); 799 if (LIKELY(f > kMinIntAsFloat)) { 800 if (LIKELY(f < kMaxIntAsFloat)) { 801 return static_cast<INT_TYPE>(f); 802 } else { 803 return kMaxInt; 804 } 805 } else { 806 return (f != f) ? 0 : kMinInt; // f != f implies NaN 807 } 808} 809 810} // namespace art 811 812#endif // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_ 813