compiler_driver.cc revision 808c7a57bb913b13c22884f57cdacd59bf1fdb3f
1/* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include "compiler_driver.h" 18 19#include <unistd.h> 20#include <unordered_set> 21#include <vector> 22 23#ifndef __APPLE__ 24#include <malloc.h> // For mallinfo 25#endif 26 27#include "android-base/strings.h" 28 29#include "art_field-inl.h" 30#include "art_method-inl.h" 31#include "base/arena_allocator.h" 32#include "base/array_ref.h" 33#include "base/bit_vector.h" 34#include "base/enums.h" 35#include "base/logging.h" // For VLOG 36#include "base/stl_util.h" 37#include "base/systrace.h" 38#include "base/time_utils.h" 39#include "base/timing_logger.h" 40#include "class_linker-inl.h" 41#include "compiled_method-inl.h" 42#include "compiler.h" 43#include "compiler_callbacks.h" 44#include "compiler_driver-inl.h" 45#include "dex/dex_to_dex_compiler.h" 46#include "dex/verification_results.h" 47#include "dex/verified_method.h" 48#include "dex_compilation_unit.h" 49#include "dex_file-inl.h" 50#include "dex_file_annotations.h" 51#include "dex_instruction-inl.h" 52#include "driver/compiler_options.h" 53#include "gc/accounting/card_table-inl.h" 54#include "gc/accounting/heap_bitmap.h" 55#include "gc/space/image_space.h" 56#include "gc/space/space.h" 57#include "handle_scope-inl.h" 58#include "intrinsics_enum.h" 59#include "jni_internal.h" 60#include "linker/linker_patch.h" 61#include "mirror/class-inl.h" 62#include "mirror/class_loader.h" 
63#include "mirror/dex_cache-inl.h" 64#include "mirror/object-inl.h" 65#include "mirror/object-refvisitor-inl.h" 66#include "mirror/object_array-inl.h" 67#include "mirror/throwable.h" 68#include "nativehelper/ScopedLocalRef.h" 69#include "object_lock.h" 70#include "runtime.h" 71#include "runtime_intrinsics.h" 72#include "scoped_thread_state_change-inl.h" 73#include "thread.h" 74#include "thread_list.h" 75#include "thread_pool.h" 76#include "trampolines/trampoline_compiler.h" 77#include "transaction.h" 78#include "utils/atomic_dex_ref_map-inl.h" 79#include "utils/dex_cache_arrays_layout-inl.h" 80#include "utils/swap_space.h" 81#include "vdex_file.h" 82#include "verifier/method_verifier-inl.h" 83#include "verifier/method_verifier.h" 84#include "verifier/verifier_deps.h" 85#include "verifier/verifier_enums.h" 86 87namespace art { 88 89static constexpr bool kTimeCompileMethod = !kIsDebugBuild; 90 91// Print additional info during profile guided compilation. 92static constexpr bool kDebugProfileGuidedCompilation = false; 93 94// Max encoded fields allowed for initializing app image. Hardcode the number for now 95// because 5000 should be large enough. 
static constexpr uint32_t kMaxEncodedFields = 5000;

// Returns x as a percentage of the total sample count (x + y).
// Callers (DumpStat) guarantee x + y != 0, so there is no division by zero here.
static double Percentage(size_t x, size_t y) {
  return 100.0 * (static_cast<double>(x)) / (static_cast<double>(x + y));
}

// Logs "<P>% of <str> for <N> cases" where P = x / (x + y).
// Stays silent when there were no samples at all (x == y == 0).
static void DumpStat(size_t x, size_t y, const char* str) {
  if (x == 0 && y == 0) {
    return;
  }
  LOG(INFO) << Percentage(x, y) << "% of " << str << " for " << (x + y) << " cases";
}

// Success/failure counters collected during AOT compilation and dumped at the
// end of CompileAll() when --dump-stats is requested. The counters are guarded
// by stats_lock_ in debug builds only (see STATS_LOCK below), so in release
// builds the statistics may be slightly lossy under concurrent updates.
class CompilerDriver::AOTCompilationStats {
 public:
  AOTCompilationStats()
      : stats_lock_("AOT compilation statistics lock"),
        resolved_types_(0), unresolved_types_(0),
        resolved_instance_fields_(0), unresolved_instance_fields_(0),
        resolved_local_static_fields_(0), resolved_static_fields_(0), unresolved_static_fields_(0),
        type_based_devirtualization_(0),
        safe_casts_(0), not_safe_casts_(0) {
    // The per-invoke-type arrays are sized kMaxInvokeType + 1, so the loop
    // bound is deliberately inclusive (<=) to zero every slot.
    for (size_t i = 0; i <= kMaxInvokeType; i++) {
      resolved_methods_[i] = 0;
      unresolved_methods_[i] = 0;
      virtual_made_direct_[i] = 0;
      direct_calls_to_boot_[i] = 0;
      direct_methods_to_boot_[i] = 0;
    }
  }

  // Logs every non-empty statistic via DumpStat(). Not synchronized; expected
  // to be called after compilation work has finished.
  void Dump() {
    DumpStat(resolved_types_, unresolved_types_, "types resolved");
    DumpStat(resolved_instance_fields_, unresolved_instance_fields_, "instance fields resolved");
    DumpStat(resolved_local_static_fields_ + resolved_static_fields_, unresolved_static_fields_,
             "static fields resolved");
    DumpStat(resolved_local_static_fields_, resolved_static_fields_ + unresolved_static_fields_,
             "static fields local to a class");
    DumpStat(safe_casts_, not_safe_casts_, "check-casts removed based on type information");
    // Note, the code below subtracts the stat value so that when added to the stat value we have
    // 100% of samples. TODO: clean this up.
    DumpStat(type_based_devirtualization_,
             resolved_methods_[kVirtual] + unresolved_methods_[kVirtual] +
             resolved_methods_[kInterface] + unresolved_methods_[kInterface] -
             type_based_devirtualization_,
             "virtual/interface calls made direct based on type information");

    for (size_t i = 0; i <= kMaxInvokeType; i++) {
      std::ostringstream oss;
      oss << static_cast<InvokeType>(i) << " methods were AOT resolved";
      DumpStat(resolved_methods_[i], unresolved_methods_[i], oss.str().c_str());
      if (virtual_made_direct_[i] > 0) {
        std::ostringstream oss2;
        oss2 << static_cast<InvokeType>(i) << " methods made direct";
        DumpStat(virtual_made_direct_[i],
                 resolved_methods_[i] + unresolved_methods_[i] - virtual_made_direct_[i],
                 oss2.str().c_str());
      }
      if (direct_calls_to_boot_[i] > 0) {
        std::ostringstream oss2;
        oss2 << static_cast<InvokeType>(i) << " method calls are direct into boot";
        DumpStat(direct_calls_to_boot_[i],
                 resolved_methods_[i] + unresolved_methods_[i] - direct_calls_to_boot_[i],
                 oss2.str().c_str());
      }
      if (direct_methods_to_boot_[i] > 0) {
        std::ostringstream oss2;
        oss2 << static_cast<InvokeType>(i) << " method calls have methods in boot";
        DumpStat(direct_methods_to_boot_[i],
                 resolved_methods_[i] + unresolved_methods_[i] - direct_methods_to_boot_[i],
                 oss2.str().c_str());
      }
    }
  }

// Allow lossy statistics in non-debug builds: the mutex is only taken when
// NDEBUG is not defined, so release builds update the counters unsynchronized.
#ifndef NDEBUG
#define STATS_LOCK() MutexLock mu(Thread::Current(), stats_lock_)
#else
#define STATS_LOCK()
#endif

  void TypeDoesntNeedAccessCheck() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    resolved_types_++;
  }

  void TypeNeedsAccessCheck() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    unresolved_types_++;
  }

  void ResolvedInstanceField() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    resolved_instance_fields_++;
  }

  void UnresolvedInstanceField() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    unresolved_instance_fields_++;
  }

  void ResolvedLocalStaticField() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    resolved_local_static_fields_++;
  }

  void ResolvedStaticField() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    resolved_static_fields_++;
  }

  void UnresolvedStaticField() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    unresolved_static_fields_++;
  }

  // Indicate that type information from the verifier led to devirtualization.
  void PreciseTypeDevirtualization() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    type_based_devirtualization_++;
  }

  // A check-cast could be eliminated due to verifier type analysis.
  void SafeCast() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    safe_casts_++;
  }

  // A check-cast couldn't be eliminated due to verifier type analysis.
  void NotASafeCast() REQUIRES(!stats_lock_) {
    STATS_LOCK();
    not_safe_casts_++;
  }

 private:
  Mutex stats_lock_;

  size_t resolved_types_;
  size_t unresolved_types_;

  size_t resolved_instance_fields_;
  size_t unresolved_instance_fields_;

  size_t resolved_local_static_fields_;
  size_t resolved_static_fields_;
  size_t unresolved_static_fields_;
  // Type based devirtualization for invoke interface and virtual.
  size_t type_based_devirtualization_;

  // Per-invoke-type counters, indexed by InvokeType (0..kMaxInvokeType).
  size_t resolved_methods_[kMaxInvokeType + 1];
  size_t unresolved_methods_[kMaxInvokeType + 1];
  size_t virtual_made_direct_[kMaxInvokeType + 1];
  size_t direct_calls_to_boot_[kMaxInvokeType + 1];
  size_t direct_methods_to_boot_[kMaxInvokeType + 1];

  size_t safe_casts_;
  size_t not_safe_casts_;

  DISALLOW_COPY_AND_ASSIGN(AOTCompilationStats);
};

// A per-dex-file bit vector of method indexes, used to record which methods
// were marked for a later dex-to-dex compilation pass. Move-only in practice
// (move ctor defaulted) so instances can be stored in a vector.
class CompilerDriver::DexFileMethodSet {
 public:
  explicit DexFileMethodSet(const DexFile& dex_file)
      : dex_file_(dex_file),
        method_indexes_(dex_file.NumMethodIds(), false, Allocator::GetMallocAllocator()) {
  }
  DexFileMethodSet(DexFileMethodSet&& other) = default;

  const DexFile& GetDexFile() const { return dex_file_; }

  BitVector& GetMethodIndexes() { return method_indexes_; }
  const BitVector& GetMethodIndexes() const { return method_indexes_; }

 private:
  const DexFile& dex_file_;
  BitVector method_indexes_;
};

CompilerDriver::CompilerDriver(
    const CompilerOptions* compiler_options,
    VerificationResults* verification_results,
    Compiler::Kind compiler_kind,
    InstructionSet instruction_set,
    const InstructionSetFeatures* instruction_set_features,
    std::unordered_set<std::string>* image_classes,
    std::unordered_set<std::string>* compiled_classes,
    std::unordered_set<std::string>* compiled_methods,
    size_t thread_count,
    int swap_fd,
    const ProfileCompilationInfo* profile_compilation_info)
    : compiler_options_(compiler_options),
      verification_results_(verification_results),
      compiler_(Compiler::Create(this, compiler_kind)),
      compiler_kind_(compiler_kind),
      // Plain ARM is always compiled as Thumb2; substitute the ISA here.
      instruction_set_(
          instruction_set == InstructionSet::kArm ? InstructionSet::kThumb2 : instruction_set),
      instruction_set_features_(instruction_set_features),
      requires_constructor_barrier_lock_("constructor barrier lock"),
      non_relative_linker_patch_count_(0u),
      // NOTE(review): image_classes_ is accessed via .get() below, so the
      // member appears to be a smart pointer taking ownership of the passed-in
      // set — confirm against the class declaration in the header.
      image_classes_(image_classes),
      classes_to_compile_(compiled_classes),
      methods_to_compile_(compiled_methods),
      number_of_soft_verifier_failures_(0),
      had_hard_verifier_failure_(false),
      parallel_thread_count_(thread_count),
      stats_(new AOTCompilationStats),
      compiler_context_(nullptr),
      support_boot_image_fixup_(true),
      compiled_method_storage_(swap_fd),
      profile_compilation_info_(profile_compilation_info),
      max_arena_alloc_(0),
      dex_to_dex_references_lock_("dex-to-dex references lock"),
      dex_to_dex_references_(),
      current_dex_to_dex_methods_(nullptr) {
  DCHECK(compiler_options_ != nullptr);

  compiler_->Init();

  // Boot image compilation requires an explicit image class set.
  if (GetCompilerOptions().IsBootImage()) {
    CHECK(image_classes_.get() != nullptr) << "Expected image classes for boot image";
  }

  compiled_method_storage_.SetDedupeEnabled(compiler_options_->DeduplicateCode());
}

CompilerDriver::~CompilerDriver() {
  // Release every swap-allocated compiled method before shutting down the
  // compiler backend.
  compiled_methods_.Visit([this](const DexFileReference& ref ATTRIBUTE_UNUSED,
                                 CompiledMethod* method) {
    if (method != nullptr) {
      CompiledMethod::ReleaseSwapAllocatedCompiledMethod(this, method);
    }
  });
  compiler_->UnInit();
}


// Expands to a return of the 32- or 64-bit trampoline for the driver's
// instruction set, selecting the matching entrypoint offset via token pasting.
#define CREATE_TRAMPOLINE(type, abi, offset) \
  if (Is64BitInstructionSet(instruction_set_)) { \
    return CreateTrampoline64(instruction_set_, abi, \
                              type ## _ENTRYPOINT_OFFSET(PointerSize::k64, offset)); \
  } else { \
    return CreateTrampoline32(instruction_set_, abi, \
                              type ## _ENTRYPOINT_OFFSET(PointerSize::k32, offset)); \
  }

std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateJniDlsymLookup() const {
  CREATE_TRAMPOLINE(JNI, kJniAbi, pDlsymLookup)
}

std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickGenericJniTrampoline()
    const {
  CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickGenericJniTrampoline)
}

std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickImtConflictTrampoline()
    const {
  CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickImtConflictTrampoline)
}

std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickResolutionTrampoline()
    const {
  CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickResolutionTrampoline)
}

std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickToInterpreterBridge()
    const {
  CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickToInterpreterBridge)
}
#undef CREATE_TRAMPOLINE

// Main entry point: precompiles (resolve/verify/initialize) and then compiles
// all classes in the given dex files. Thread pools are created on entry and
// freed on exit.
void CompilerDriver::CompileAll(jobject class_loader,
                                const std::vector<const DexFile*>& dex_files,
                                TimingLogger* timings) {
  DCHECK(!Runtime::Current()->IsStarted());

  InitializeThreadPools();

  VLOG(compiler) << "Before precompile " << GetMemoryUsageString(false);
  // Precompile:
  // 1) Load image classes
  // 2) Resolve all classes
  // 3) Attempt to verify all classes
  // 4) Attempt to initialize image classes, and trivially initialized classes
  PreCompile(class_loader, dex_files, timings);
  if (GetCompilerOptions().IsBootImage()) {
    // We don't need to setup the intrinsics for non boot image compilation, as
    // those compilations will pick up a boot image that have the ArtMethod already
    // set with the intrinsics flag.
    InitializeIntrinsics();
  }
  // Compile:
  // 1) Compile all classes and methods enabled for compilation. May fall back to dex-to-dex
  //    compilation.
  if (GetCompilerOptions().IsAnyCompilationEnabled()) {
    Compile(class_loader, dex_files, timings);
  }
  if (GetCompilerOptions().GetDumpStats()) {
    stats_->Dump();
  }

  FreeThreadPools();
}

// Determines how aggressively a class's methods may be dex-to-dex compiled:
// kOptimize when the class verified cleanly (and we are not debuggable and can
// encode quickened data), otherwise kDontDexToDexCompile.
static optimizer::DexToDexCompilationLevel GetDexToDexCompilationLevel(
    Thread* self, const CompilerDriver& driver, Handle<mirror::ClassLoader> class_loader,
    const DexFile& dex_file, const DexFile::ClassDef& class_def)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  auto* const runtime = Runtime::Current();
  DCHECK(driver.GetCompilerOptions().IsQuickeningCompilationEnabled());
  const char* descriptor = dex_file.GetClassDescriptor(class_def);
  ClassLinker* class_linker = runtime->GetClassLinker();
  mirror::Class* klass = class_linker->FindClass(self, descriptor, class_loader);
  if (klass == nullptr) {
    // Class failed to load/resolve; clear the pending exception and disable
    // dex-to-dex for it.
    CHECK(self->IsExceptionPending());
    self->ClearException();
    return optimizer::DexToDexCompilationLevel::kDontDexToDexCompile;
  }
  // DexToDex at the kOptimize level may introduce quickened opcodes, which replace symbolic
  // references with actual offsets. We cannot re-verify such instructions.
  //
  // We store the verification information in the class status in the oat file, which the linker
  // can validate (checksums) and use to skip load-time verification. It is thus safe to
  // optimize when a class has been fully verified before.
  optimizer::DexToDexCompilationLevel max_level = optimizer::DexToDexCompilationLevel::kOptimize;
  if (driver.GetCompilerOptions().GetDebuggable()) {
    // We are debuggable so definitions of classes might be changed. We don't want to do any
    // optimizations that could break that.
    max_level = optimizer::DexToDexCompilationLevel::kDontDexToDexCompile;
  }
  if (!VdexFile::CanEncodeQuickenedData(dex_file)) {
    // Don't do any dex level optimizations if we cannot encode the quickening.
    return optimizer::DexToDexCompilationLevel::kDontDexToDexCompile;
  }
  if (klass->IsVerified()) {
    // Class is verified so we can enable DEX-to-DEX compilation for performance.
    return max_level;
  } else {
    // Class verification has failed: do not run DEX-to-DEX optimizations.
    return optimizer::DexToDexCompilationLevel::kDontDexToDexCompile;
  }
}

// Convenience overload: decodes the jobject class loader into a Handle under a
// ScopedObjectAccess and forwards to the Handle-based overload above.
static optimizer::DexToDexCompilationLevel GetDexToDexCompilationLevel(
    Thread* self,
    const CompilerDriver& driver,
    jobject jclass_loader,
    const DexFile& dex_file,
    const DexFile::ClassDef& class_def) {
  ScopedObjectAccess soa(self);
  StackHandleScope<1> hs(soa.Self());
  Handle<mirror::ClassLoader> class_loader(
      hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
  return GetDexToDexCompilationLevel(self, driver, class_loader, dex_file, class_def);
}

// Does the runtime for the InstructionSet provide an implementation returned by
// GetQuickGenericJniStub allowing down calls that aren't compiled using a JNI compiler?
static bool InstructionSetHasGenericJniStub(InstructionSet isa) {
  switch (isa) {
    case InstructionSet::kArm:
    case InstructionSet::kArm64:
    case InstructionSet::kThumb2:
    case InstructionSet::kMips:
    case InstructionSet::kMips64:
    case InstructionSet::kX86:
    case InstructionSet::kX86_64: return true;
    default: return false;
  }
}

// Compiles a single method, dispatching to one of:
//  - the dex-to-dex compiler (second pass, when the driver has a current
//    dex-to-dex method set),
//  - the JNI compiler (native methods, unless the generic JNI stub suffices),
//  - the main backend via Compiler::Compile (regular bytecode methods).
// Regular methods that end up uncompiled may be marked for a later dex-to-dex
// pass. On success the CompiledMethod is registered with the driver.
static void CompileMethod(Thread* self,
                          CompilerDriver* driver,
                          const DexFile::CodeItem* code_item,
                          uint32_t access_flags,
                          InvokeType invoke_type,
                          uint16_t class_def_idx,
                          uint32_t method_idx,
                          Handle<mirror::ClassLoader> class_loader,
                          const DexFile& dex_file,
                          optimizer::DexToDexCompilationLevel dex_to_dex_compilation_level,
                          bool compilation_enabled,
                          Handle<mirror::DexCache> dex_cache) {
  DCHECK(driver != nullptr);
  CompiledMethod* compiled_method = nullptr;
  uint64_t start_ns = kTimeCompileMethod ? NanoTime() : 0;
  MethodReference method_ref(&dex_file, method_idx);

  if (driver->GetCurrentDexToDexMethods() != nullptr) {
    // This is the second pass when we dex-to-dex compile previously marked methods.
    // TODO: Refactor the compilation to avoid having to distinguish the two passes
    // here. That should be done on a higher level. http://b/29089975
    if (driver->GetCurrentDexToDexMethods()->IsBitSet(method_idx)) {
      VerificationResults* results = driver->GetVerificationResults();
      DCHECK(results != nullptr);
      const VerifiedMethod* verified_method = results->GetVerifiedMethod(method_ref);
      // Do not optimize if a VerifiedMethod is missing. SafeCast elision,
      // for example, relies on it.
      compiled_method = optimizer::ArtCompileDEX(
          driver,
          code_item,
          access_flags,
          invoke_type,
          class_def_idx,
          method_idx,
          class_loader,
          dex_file,
          (verified_method != nullptr)
              ? dex_to_dex_compilation_level
              : optimizer::DexToDexCompilationLevel::kDontDexToDexCompile);
    }
  } else if ((access_flags & kAccNative) != 0) {
    // Are we extracting only and have support for generic JNI down calls?
    if (!driver->GetCompilerOptions().IsJniCompilationEnabled() &&
        InstructionSetHasGenericJniStub(driver->GetInstructionSet())) {
      // Leaving this empty will trigger the generic JNI version
    } else {
      // Query any JNI optimization annotations such as @FastNative or @CriticalNative.
      access_flags |= annotations::GetNativeMethodAnnotationAccessFlags(
          dex_file, dex_file.GetClassDef(class_def_idx), method_idx);

      compiled_method = driver->GetCompiler()->JniCompile(
          access_flags, method_idx, dex_file, dex_cache);
      CHECK(compiled_method != nullptr);
    }
  } else if ((access_flags & kAccAbstract) != 0) {
    // Abstract methods don't have code.
  } else {
    VerificationResults* results = driver->GetVerificationResults();
    DCHECK(results != nullptr);
    const VerifiedMethod* verified_method = results->GetVerifiedMethod(method_ref);
    bool compile = compilation_enabled &&
        // Basic checks, e.g., not <clinit>.
        results->IsCandidateForCompilation(method_ref, access_flags) &&
        // Did not fail to create VerifiedMethod metadata.
        verified_method != nullptr &&
        // Do not have failures that should punt to the interpreter.
        !verified_method->HasRuntimeThrow() &&
        (verified_method->GetEncounteredVerificationFailures() &
            (verifier::VERIFY_ERROR_FORCE_INTERPRETER | verifier::VERIFY_ERROR_LOCKING)) == 0 &&
        // Is eligible for compilation by methods-to-compile filter.
        driver->IsMethodToCompile(method_ref) &&
        driver->ShouldCompileBasedOnProfile(method_ref);

    if (compile) {
      // NOTE: if compiler declines to compile this method, it will return null.
      compiled_method = driver->GetCompiler()->Compile(code_item,
                                                       access_flags,
                                                       invoke_type,
                                                       class_def_idx,
                                                       method_idx,
                                                       class_loader,
                                                       dex_file,
                                                       dex_cache);
    }
    if (compiled_method == nullptr &&
        dex_to_dex_compilation_level != optimizer::DexToDexCompilationLevel::kDontDexToDexCompile) {
      DCHECK(!Runtime::Current()->UseJitCompilation());
      // TODO: add a command-line option to disable DEX-to-DEX compilation ?
      driver->MarkForDexToDexCompilation(self, method_ref);
    }
  }
  if (kTimeCompileMethod) {
    // Warn about unusually long compile times (threshold comes from the backend).
    uint64_t duration_ns = NanoTime() - start_ns;
    if (duration_ns > MsToNs(driver->GetCompiler()->GetMaximumCompilationTimeBeforeWarning())) {
      LOG(WARNING) << "Compilation of " << dex_file.PrettyMethod(method_idx)
                   << " took " << PrettyDuration(duration_ns);
    }
  }

  if (compiled_method != nullptr) {
    // Count non-relative linker patches.
    size_t non_relative_linker_patch_count = 0u;
    for (const linker::LinkerPatch& patch : compiled_method->GetPatches()) {
      if (!patch.IsPcRelative()) {
        ++non_relative_linker_patch_count;
      }
    }
    bool compile_pic = driver->GetCompilerOptions().GetCompilePic();  // Off by default
    // When compiling with PIC, there should be zero non-relative linker patches
    CHECK(!compile_pic || non_relative_linker_patch_count == 0u);

    driver->AddCompiledMethod(method_ref, compiled_method, non_relative_linker_patch_count);
  }

  if (self->IsExceptionPending()) {
    ScopedObjectAccess soa(self);
    LOG(FATAL) << "Unexpected exception compiling: " << dex_file.PrettyMethod(method_idx) << "\n"
               << self->GetException()->Dump();
  }
}

// Compiles exactly one method (used by tests/gtests). Runs the full PreCompile
// pipeline for the method's dex file, compiles the method, and then runs the
// dex-to-dex second pass if the method was marked during the first pass.
void CompilerDriver::CompileOne(Thread* self, ArtMethod* method, TimingLogger* timings) {
  DCHECK(!Runtime::Current()->IsStarted());
  jobject jclass_loader;
  const DexFile* dex_file;
  uint16_t class_def_idx;
  uint32_t method_idx = method->GetDexMethodIndex();
  uint32_t access_flags = method->GetAccessFlags();
  InvokeType invoke_type = method->GetInvokeType();
  StackHandleScope<2> hs(self);
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache()));
  Handle<mirror::ClassLoader> class_loader(
      hs.NewHandle(method->GetDeclaringClass()->GetClassLoader()));
  {
    // A global ref is needed because jclass_loader outlives this scope; it is
    // released at the end of this function.
    ScopedObjectAccessUnchecked soa(self);
    ScopedLocalRef<jobject> local_class_loader(
        soa.Env(), soa.AddLocalReference<jobject>(class_loader.Get()));
    jclass_loader = soa.Env()->NewGlobalRef(local_class_loader.get());
    // Find the dex_file
    dex_file = method->GetDexFile();
    class_def_idx = method->GetClassDefIndex();
  }
  const DexFile::CodeItem* code_item = dex_file->GetCodeItem(method->GetCodeItemOffset());

  // Go to native so that we don't block GC during compilation.
  ScopedThreadSuspension sts(self, kNative);

  std::vector<const DexFile*> dex_files;
  dex_files.push_back(dex_file);

  InitializeThreadPools();

  PreCompile(jclass_loader, dex_files, timings);

  // Can we run DEX-to-DEX compiler on this class ?
  optimizer::DexToDexCompilationLevel dex_to_dex_compilation_level =
      GetDexToDexCompilationLevel(self,
                                  *this,
                                  jclass_loader,
                                  *dex_file,
                                  dex_file->GetClassDef(class_def_idx));

  DCHECK(current_dex_to_dex_methods_ == nullptr);
  CompileMethod(self,
                this,
                code_item,
                access_flags,
                invoke_type,
                class_def_idx,
                method_idx,
                class_loader,
                *dex_file,
                dex_to_dex_compilation_level,
                true,
                dex_cache);

  ArrayRef<DexFileMethodSet> dex_to_dex_references;
  {
    // From this point on, we shall not modify dex_to_dex_references_, so
    // just grab a reference to it that we use without holding the mutex.
    MutexLock lock(Thread::Current(), dex_to_dex_references_lock_);
    dex_to_dex_references = ArrayRef<DexFileMethodSet>(dex_to_dex_references_);
  }
  if (!dex_to_dex_references.empty()) {
    // Only this one method can have been marked; run the dex-to-dex pass on it.
    DCHECK_EQ(dex_to_dex_references.size(), 1u);
    DCHECK(&dex_to_dex_references[0].GetDexFile() == dex_file);
    current_dex_to_dex_methods_ = &dex_to_dex_references.front().GetMethodIndexes();
    DCHECK(current_dex_to_dex_methods_->IsBitSet(method_idx));
    DCHECK_EQ(current_dex_to_dex_methods_->NumSetBits(), 1u);
    CompileMethod(self,
                  this,
                  code_item,
                  access_flags,
                  invoke_type,
                  class_def_idx,
                  method_idx,
                  class_loader,
                  *dex_file,
                  dex_to_dex_compilation_level,
                  true,
                  dex_cache);
    current_dex_to_dex_methods_ = nullptr;
  }

  FreeThreadPools();

  self->GetJniEnv()->DeleteGlobalRef(jclass_loader);
}

// Resolves all classes in the given dex files, one dex file at a time.
void CompilerDriver::Resolve(jobject class_loader,
                             const std::vector<const DexFile*>& dex_files,
                             TimingLogger* timings) {
  // Resolution allocates classes and needs to run single-threaded to be deterministic.
  bool force_determinism = GetCompilerOptions().IsForceDeterminism();
  ThreadPool* resolve_thread_pool = force_determinism
      ? single_thread_pool_.get()
      : parallel_thread_pool_.get();
  size_t resolve_thread_count = force_determinism ? 1U : parallel_thread_count_;

  for (size_t i = 0; i != dex_files.size(); ++i) {
    const DexFile* dex_file = dex_files[i];
    CHECK(dex_file != nullptr);
    ResolveDexFile(class_loader,
                   *dex_file,
                   dex_files,
                   resolve_thread_pool,
                   resolve_thread_count,
                   timings);
  }
}

// Resolve const-strings in the code. Done to have deterministic allocation behavior. Right now
// this is single-threaded for simplicity.
// TODO: Collect the relevant string indices in parallel, then allocate them sequentially in a
// stable order.

// Eagerly resolves (allocates) every string referenced by a const-string or
// const-string-jumbo instruction in the given code item.
static void ResolveConstStrings(Handle<mirror::DexCache> dex_cache,
                                const DexFile::CodeItem* code_item)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (code_item == nullptr) {
    // Abstract or native method.
    return;
  }

  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  for (const DexInstructionPcPair& inst : code_item->Instructions()) {
    switch (inst->Opcode()) {
      case Instruction::CONST_STRING:
      case Instruction::CONST_STRING_JUMBO: {
        // The two opcodes encode the string index in different vreg fields.
        dex::StringIndex string_index((inst->Opcode() == Instruction::CONST_STRING)
            ? inst->VRegB_21c()
            : inst->VRegB_31c());
        ObjPtr<mirror::String> string = class_linker->ResolveString(string_index, dex_cache);
        CHECK(string != nullptr) << "Could not allocate a string when forcing determinism";
        break;
      }

      default:
        break;
    }
  }
}

// Walks every method of every to-be-compiled class in the given dex files and
// resolves its const-strings via the overload above.
static void ResolveConstStrings(CompilerDriver* driver,
                                const std::vector<const DexFile*>& dex_files,
                                TimingLogger* timings) {
  ScopedObjectAccess soa(Thread::Current());
  StackHandleScope<1> hs(soa.Self());
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  MutableHandle<mirror::DexCache> dex_cache(hs.NewHandle<mirror::DexCache>(nullptr));

  for (const DexFile* dex_file : dex_files) {
    dex_cache.Assign(class_linker->FindDexCache(soa.Self(), *dex_file));
    TimingLogger::ScopedTiming t("Resolve const-string Strings", timings);

    size_t class_def_count = dex_file->NumClassDefs();
    for (size_t class_def_index = 0; class_def_index < class_def_count; ++class_def_index) {
      const DexFile::ClassDef& class_def = dex_file->GetClassDef(class_def_index);

      const uint8_t* class_data = dex_file->GetClassData(class_def);
      if (class_data == nullptr) {
        // empty class, probably a marker interface
        continue;
      }

      ClassDataItemIterator it(*dex_file, class_data);
      it.SkipAllFields();

      bool compilation_enabled = driver->IsClassToCompile(
          dex_file->StringByTypeIdx(class_def.class_idx_));
      if (!compilation_enabled) {
        // Compilation is skipped, do not resolve const-string in code of this class.
        // TODO: Make sure that inlining honors this.
        continue;
      }

      // Direct and virtual methods.
      int64_t previous_method_idx = -1;
      while (it.HasNextMethod()) {
        uint32_t method_idx = it.GetMemberIndex();
        if (method_idx == previous_method_idx) {
          // smali can create dex files with two encoded_methods sharing the same method_idx
          // http://code.google.com/p/smali/issues/detail?id=119
          it.Next();
          continue;
        }
        previous_method_idx = method_idx;
        ResolveConstStrings(dex_cache, it.GetMethodCodeItem());
        it.Next();
      }
      DCHECK(!it.HasNext());
    }
  }
}

// Debug-checks that both thread pools were created by InitializeThreadPools().
inline void CompilerDriver::CheckThreadPools() {
  DCHECK(parallel_thread_pool_ != nullptr);
  DCHECK(single_thread_pool_ != nullptr);
}

// Debug-build sanity pass: every class defined in the given dex files must be
// erroneous, verified, or marked for runtime verification after Verify() ran.
static void EnsureVerifiedOrVerifyAtRuntime(jobject jclass_loader,
                                            const std::vector<const DexFile*>& dex_files) {
  ScopedObjectAccess soa(Thread::Current());
  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::ClassLoader> class_loader(
      hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
  MutableHandle<mirror::Class> cls(hs.NewHandle<mirror::Class>(nullptr));
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  for (const DexFile* dex_file : dex_files) {
    for (uint32_t i = 0; i < dex_file->NumClassDefs(); ++i) {
      const DexFile::ClassDef& class_def = dex_file->GetClassDef(i);
      const char* descriptor = dex_file->GetClassDescriptor(class_def);
      cls.Assign(class_linker->FindClass(soa.Self(), descriptor, class_loader));
      if (cls == nullptr) {
        // Lookup failure is tolerated here; just clear the pending exception.
        soa.Self()->ClearException();
      } else if (&cls->GetDexFile() == dex_file) {
        DCHECK(cls->IsErroneous() || cls->IsVerified() || cls->ShouldVerifyAtRuntime())
            << cls->PrettyClass()
            << " " << cls->GetStatus();
      }
    }
  }
}

// Pre-compilation pipeline: load image classes, resolve, verify (honoring the
// verification-related compiler options), initialize classes, and update the
// image class set. Aborts on hard/soft verifier failures when so configured.
void CompilerDriver::PreCompile(jobject class_loader,
                                const std::vector<const DexFile*>& dex_files,
                                TimingLogger* timings) {
  CheckThreadPools();

  LoadImageClasses(timings);
  VLOG(compiler) << "LoadImageClasses: " << GetMemoryUsageString(false);

  if (compiler_options_->IsAnyCompilationEnabled()) {
    // Avoid adding the dex files in the case where we aren't going to add compiled methods.
    // This reduces RAM usage for this case.
    for (const DexFile* dex_file : dex_files) {
      // Can be already inserted if the caller is CompileOne. This happens for gtests.
      if (!compiled_methods_.HaveDexFile(dex_file)) {
        compiled_methods_.AddDexFile(dex_file);
      }
    }
    // Resolve eagerly to prepare for compilation.
    Resolve(class_loader, dex_files, timings);
    VLOG(compiler) << "Resolve: " << GetMemoryUsageString(false);
  }

  if (compiler_options_->AssumeClassesAreVerified()) {
    VLOG(compiler) << "Verify none mode specified, skipping verification.";
    SetVerified(class_loader, dex_files, timings);
  }

  if (!compiler_options_->IsVerificationEnabled()) {
    return;
  }

  if (GetCompilerOptions().IsForceDeterminism() && GetCompilerOptions().IsBootImage()) {
    // Resolve strings from const-string. Do this now to have a deterministic image.
    ResolveConstStrings(this, dex_files, timings);
    VLOG(compiler) << "Resolve const-strings: " << GetMemoryUsageString(false);
  }

  Verify(class_loader, dex_files, timings);
  VLOG(compiler) << "Verify: " << GetMemoryUsageString(false);

  if (had_hard_verifier_failure_ && GetCompilerOptions().AbortOnHardVerifierFailure()) {
    // Avoid dumping threads. Even if we shut down the thread pools, there will still be three
    // instances of this thread's stack.
    LOG(FATAL_WITHOUT_ABORT) << "Had a hard failure verifying all classes, and was asked to abort "
                             << "in such situations. Please check the log.";
    abort();
  } else if (number_of_soft_verifier_failures_ > 0 &&
             GetCompilerOptions().AbortOnSoftVerifierFailure()) {
    LOG(FATAL_WITHOUT_ABORT) << "Had " << number_of_soft_verifier_failures_ << " soft failure(s) "
                             << "verifying all classes, and was asked to abort in such situations. "
                             << "Please check the log.";
    abort();
  }

  if (compiler_options_->IsAnyCompilationEnabled()) {
    if (kIsDebugBuild) {
      EnsureVerifiedOrVerifyAtRuntime(class_loader, dex_files);
    }
    InitializeClasses(class_loader, dex_files, timings);
    VLOG(compiler) << "InitializeClasses: " << GetMemoryUsageString(false);
  }

  UpdateImageClasses(timings);
  VLOG(compiler) << "UpdateImageClasses: " << GetMemoryUsageString(false);
}

// Returns whether the class named by |descriptor| belongs in the image.
bool CompilerDriver::IsImageClass(const char* descriptor) const {
  if (image_classes_ != nullptr) {
    // If we have a set of image classes, use those.
    return image_classes_->find(descriptor) != image_classes_->end();
  }
  // No set of image classes, assume we include all the classes.
  // NOTE: Currently only reachable from InitImageMethodVisitor for the app image case.
  return !GetCompilerOptions().IsBootImage();
}

// Returns whether the class passes the (optional) classes-to-compile filter.
bool CompilerDriver::IsClassToCompile(const char* descriptor) const {
  if (classes_to_compile_ == nullptr) {
    return true;
  }
  return classes_to_compile_->find(descriptor) != classes_to_compile_->end();
}

// Returns whether the method passes the (optional) methods-to-compile filter,
// which is keyed by the method's pretty-printed name.
bool CompilerDriver::IsMethodToCompile(const MethodReference& method_ref) const {
  if (methods_to_compile_ == nullptr) {
    return true;
  }

  std::string tmp = method_ref.PrettyMethod();
  return methods_to_compile_->find(tmp.c_str()) != methods_to_compile_->end();
}

// Decides whether a method should be compiled under a profile-guided filter:
// compile everything when the filter does not depend on a profile, nothing
// when the profile is missing, and otherwise only profile-hot methods.
bool CompilerDriver::ShouldCompileBasedOnProfile(const MethodReference& method_ref) const {
  // Profile compilation info may be null if no profile is passed.
  if (!CompilerFilter::DependsOnProfile(compiler_options_->GetCompilerFilter())) {
    // Use the compiler filter instead of the presence of profile_compilation_info_ since
    // we may want to have full speed compilation along with profile based layout optimizations.
    return true;
  }
  // If we are using a profile filter but do not have a profile compilation info, compile nothing.
  if (profile_compilation_info_ == nullptr) {
    return false;
  }
  // Compile only hot methods, it is the profile saver's job to decide what startup methods to mark
  // as hot.
  bool result = profile_compilation_info_->GetMethodHotness(method_ref).IsHot();

  if (kDebugProfileGuidedCompilation) {
    LOG(INFO) << "[ProfileGuidedCompilation] "
              << (result ? "Compiled" : "Skipped") << " method:" << method_ref.PrettyMethod(true);
  }
  return result;
}

// Class visitor that collects all loaded classes so that the exception types
// referenced by their methods' catch blocks can be resolved afterwards.
class ResolveCatchBlockExceptionsClassVisitor : public ClassVisitor {
 public:
  ResolveCatchBlockExceptionsClassVisitor() : classes_() {}

  virtual bool operator()(ObjPtr<mirror::Class> c) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    classes_.push_back(c);
    return true;
  }

  // Adds the catch-block exception types of every method of every collected
  // class to |exceptions_to_resolve|.
  void FindExceptionTypesToResolve(
      std::set<std::pair<dex::TypeIndex, const DexFile*>>* exceptions_to_resolve)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const auto pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
    for (ObjPtr<mirror::Class> klass : classes_) {
      for (ArtMethod& method : klass->GetMethods(pointer_size)) {
        FindExceptionTypesToResolveForMethod(&method, exceptions_to_resolve);
      }
    }
  }

 private:
  void FindExceptionTypesToResolveForMethod(
      ArtMethod* method,
      std::set<std::pair<dex::TypeIndex, const DexFile*>>* exceptions_to_resolve)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (method->GetCodeItem() == nullptr) {
      return;  // native or abstract method
    }
    CodeItemDataAccessor accessor(method);
if (accessor.TriesSize() == 0) { 957 return; // nothing to process 958 } 959 const uint8_t* encoded_catch_handler_list = accessor.GetCatchHandlerData(); 960 size_t num_encoded_catch_handlers = DecodeUnsignedLeb128(&encoded_catch_handler_list); 961 for (size_t i = 0; i < num_encoded_catch_handlers; i++) { 962 int32_t encoded_catch_handler_size = DecodeSignedLeb128(&encoded_catch_handler_list); 963 bool has_catch_all = false; 964 if (encoded_catch_handler_size <= 0) { 965 encoded_catch_handler_size = -encoded_catch_handler_size; 966 has_catch_all = true; 967 } 968 for (int32_t j = 0; j < encoded_catch_handler_size; j++) { 969 dex::TypeIndex encoded_catch_handler_handlers_type_idx = 970 dex::TypeIndex(DecodeUnsignedLeb128(&encoded_catch_handler_list)); 971 // Add to set of types to resolve if not already in the dex cache resolved types 972 if (!method->IsResolvedTypeIdx(encoded_catch_handler_handlers_type_idx)) { 973 exceptions_to_resolve->emplace(encoded_catch_handler_handlers_type_idx, 974 method->GetDexFile()); 975 } 976 // ignore address associated with catch handler 977 DecodeUnsignedLeb128(&encoded_catch_handler_list); 978 } 979 if (has_catch_all) { 980 // ignore catch all address 981 DecodeUnsignedLeb128(&encoded_catch_handler_list); 982 } 983 } 984 } 985 986 std::vector<ObjPtr<mirror::Class>> classes_; 987}; 988 989class RecordImageClassesVisitor : public ClassVisitor { 990 public: 991 explicit RecordImageClassesVisitor(std::unordered_set<std::string>* image_classes) 992 : image_classes_(image_classes) {} 993 994 bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) { 995 std::string temp; 996 image_classes_->insert(klass->GetDescriptor(&temp)); 997 return true; 998 } 999 1000 private: 1001 std::unordered_set<std::string>* const image_classes_; 1002}; 1003 1004// Make a list of descriptors for classes to include in the image 1005void CompilerDriver::LoadImageClasses(TimingLogger* timings) { 1006 CHECK(timings != nullptr); 
1007 if (!GetCompilerOptions().IsBootImage()) { 1008 return; 1009 } 1010 1011 TimingLogger::ScopedTiming t("LoadImageClasses", timings); 1012 // Make a first class to load all classes explicitly listed in the file 1013 Thread* self = Thread::Current(); 1014 ScopedObjectAccess soa(self); 1015 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 1016 CHECK(image_classes_.get() != nullptr); 1017 for (auto it = image_classes_->begin(), end = image_classes_->end(); it != end;) { 1018 const std::string& descriptor(*it); 1019 StackHandleScope<1> hs(self); 1020 Handle<mirror::Class> klass( 1021 hs.NewHandle(class_linker->FindSystemClass(self, descriptor.c_str()))); 1022 if (klass == nullptr) { 1023 VLOG(compiler) << "Failed to find class " << descriptor; 1024 image_classes_->erase(it++); 1025 self->ClearException(); 1026 } else { 1027 ++it; 1028 } 1029 } 1030 1031 // Resolve exception classes referenced by the loaded classes. The catch logic assumes 1032 // exceptions are resolved by the verifier when there is a catch block in an interested method. 1033 // Do this here so that exception classes appear to have been specified image classes. 1034 std::set<std::pair<dex::TypeIndex, const DexFile*>> unresolved_exception_types; 1035 StackHandleScope<1> hs(self); 1036 Handle<mirror::Class> java_lang_Throwable( 1037 hs.NewHandle(class_linker->FindSystemClass(self, "Ljava/lang/Throwable;"))); 1038 do { 1039 unresolved_exception_types.clear(); 1040 { 1041 // Thread suspension is not allowed while ResolveCatchBlockExceptionsClassVisitor 1042 // is using a std::vector<ObjPtr<mirror::Class>>. 
1043 ScopedAssertNoThreadSuspension ants(__FUNCTION__); 1044 ResolveCatchBlockExceptionsClassVisitor visitor; 1045 class_linker->VisitClasses(&visitor); 1046 visitor.FindExceptionTypesToResolve(&unresolved_exception_types); 1047 } 1048 for (const auto& exception_type : unresolved_exception_types) { 1049 dex::TypeIndex exception_type_idx = exception_type.first; 1050 const DexFile* dex_file = exception_type.second; 1051 StackHandleScope<1> hs2(self); 1052 Handle<mirror::DexCache> dex_cache(hs2.NewHandle(class_linker->RegisterDexFile(*dex_file, 1053 nullptr))); 1054 ObjPtr<mirror::Class> klass = 1055 (dex_cache != nullptr) 1056 ? class_linker->ResolveType(exception_type_idx, 1057 dex_cache, 1058 ScopedNullHandle<mirror::ClassLoader>()) 1059 : nullptr; 1060 if (klass == nullptr) { 1061 const DexFile::TypeId& type_id = dex_file->GetTypeId(exception_type_idx); 1062 const char* descriptor = dex_file->GetTypeDescriptor(type_id); 1063 LOG(FATAL) << "Failed to resolve class " << descriptor; 1064 } 1065 DCHECK(java_lang_Throwable->IsAssignableFrom(klass)); 1066 } 1067 // Resolving exceptions may load classes that reference more exceptions, iterate until no 1068 // more are found 1069 } while (!unresolved_exception_types.empty()); 1070 1071 // We walk the roots looking for classes so that we'll pick up the 1072 // above classes plus any classes them depend on such super 1073 // classes, interfaces, and the required ClassLinker roots. 
1074 RecordImageClassesVisitor visitor(image_classes_.get()); 1075 class_linker->VisitClasses(&visitor); 1076 1077 CHECK_NE(image_classes_->size(), 0U); 1078} 1079 1080static void MaybeAddToImageClasses(Thread* self, 1081 ObjPtr<mirror::Class> klass, 1082 std::unordered_set<std::string>* image_classes) 1083 REQUIRES_SHARED(Locks::mutator_lock_) { 1084 DCHECK_EQ(self, Thread::Current()); 1085 StackHandleScope<1> hs(self); 1086 std::string temp; 1087 const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); 1088 while (!klass->IsObjectClass()) { 1089 const char* descriptor = klass->GetDescriptor(&temp); 1090 std::pair<std::unordered_set<std::string>::iterator, bool> result = 1091 image_classes->insert(descriptor); 1092 if (!result.second) { // Previously inserted. 1093 break; 1094 } 1095 VLOG(compiler) << "Adding " << descriptor << " to image classes"; 1096 for (size_t i = 0, num_interfaces = klass->NumDirectInterfaces(); i != num_interfaces; ++i) { 1097 ObjPtr<mirror::Class> interface = mirror::Class::GetDirectInterface(self, klass, i); 1098 DCHECK(interface != nullptr); 1099 MaybeAddToImageClasses(self, interface, image_classes); 1100 } 1101 for (auto& m : klass->GetVirtualMethods(pointer_size)) { 1102 MaybeAddToImageClasses(self, m.GetDeclaringClass(), image_classes); 1103 } 1104 if (klass->IsArrayClass()) { 1105 MaybeAddToImageClasses(self, klass->GetComponentType(), image_classes); 1106 } 1107 klass.Assign(klass->GetSuperClass()); 1108 } 1109} 1110 1111// Keeps all the data for the update together. Also doubles as the reference visitor. 1112// Note: we can use object pointers because we suspend all threads. 
class ClinitImageUpdate {
 public:
  // Factory. The returned object holds a no-thread-suspension assertion for its
  // whole lifetime (released in the destructor); ownership passes to the caller.
  static ClinitImageUpdate* Create(VariableSizedHandleScope& hs,
                                   std::unordered_set<std::string>* image_class_descriptors,
                                   Thread* self,
                                   ClassLinker* linker) {
    std::unique_ptr<ClinitImageUpdate> res(new ClinitImageUpdate(hs,
                                                                 image_class_descriptors,
                                                                 self,
                                                                 linker));
    return res.release();
  }

  ~ClinitImageUpdate() {
    // Allow others to suspend again.
    self_->EndAssertNoThreadSuspension(old_cause_);
  }

  // Visitor for VisitReferences.
  void operator()(ObjPtr<mirror::Object> object,
                  MemberOffset field_offset,
                  bool /* is_static */) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    mirror::Object* ref = object->GetFieldObject<mirror::Object>(field_offset);
    if (ref != nullptr) {
      VisitClinitClassesObject(ref);
    }
  }

  // java.lang.ref.Reference visitor for VisitReferences.
  void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                  ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const {}

  // Ignore class native roots.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
      const {}
  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}

  // Transitively marks objects reachable from the initial image classes, then adds
  // every class discovered during the walk to the image-class descriptor set.
  void Walk() REQUIRES_SHARED(Locks::mutator_lock_) {
    // Use the initial classes as roots for a search.
    for (Handle<mirror::Class> klass_root : image_classes_) {
      VisitClinitClassesObject(klass_root.Get());
    }
    Thread* self = Thread::Current();
    ScopedAssertNoThreadSuspension ants(__FUNCTION__);
    for (Handle<mirror::Class> h_klass : to_insert_) {
      MaybeAddToImageClasses(self, h_klass.Get(), image_class_descriptors_);
    }
  }

 private:
  // Selects the walk roots: classes already in the descriptor set, plus initialized
  // classes that have a <clinit> (those must be kept, too).
  class FindImageClassesVisitor : public ClassVisitor {
   public:
    explicit FindImageClassesVisitor(VariableSizedHandleScope& hs,
                                     ClinitImageUpdate* data)
        : data_(data),
          hs_(hs) {}

    bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
      std::string temp;
      const char* name = klass->GetDescriptor(&temp);
      if (data_->image_class_descriptors_->find(name) != data_->image_class_descriptors_->end()) {
        data_->image_classes_.push_back(hs_.NewHandle(klass));
      } else {
        // Check whether it is initialized and has a clinit. They must be kept, too.
        if (klass->IsInitialized() && klass->FindClassInitializer(
            Runtime::Current()->GetClassLinker()->GetImagePointerSize()) != nullptr) {
          data_->image_classes_.push_back(hs_.NewHandle(klass));
        }
      }
      return true;
    }

   private:
    ClinitImageUpdate* const data_;
    VariableSizedHandleScope& hs_;
  };

  ClinitImageUpdate(VariableSizedHandleScope& hs,
                    std::unordered_set<std::string>* image_class_descriptors,
                    Thread* self,
                    ClassLinker* linker) REQUIRES_SHARED(Locks::mutator_lock_)
      : hs_(hs),
        image_class_descriptors_(image_class_descriptors),
        self_(self) {
    CHECK(linker != nullptr);
    CHECK(image_class_descriptors != nullptr);

    // Make sure nobody interferes with us.
    old_cause_ = self->StartAssertNoThreadSuspension("Boot image closure");

    // Find all the already-marked classes.
    WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
    FindImageClassesVisitor visitor(hs_, this);
    linker->VisitClasses(&visitor);
  }

  // Depth-first walk over the object graph; classes encountered are queued in
  // to_insert_ for later insertion, other objects are traversed through their class
  // and references.
  void VisitClinitClassesObject(mirror::Object* object) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(object != nullptr);
    if (marked_objects_.find(object) != marked_objects_.end()) {
      // Already processed.
      return;
    }

    // Mark it.
    marked_objects_.insert(object);

    if (object->IsClass()) {
      // Add to the TODO list since MaybeAddToImageClasses may cause thread suspension. Thread
      // suspension is not safe to do in VisitObjects or VisitReferences.
      to_insert_.push_back(hs_.NewHandle(object->AsClass()));
    } else {
      // Else visit the object's class.
      VisitClinitClassesObject(object->GetClass());
    }

    // If it is not a DexCache, visit all references.
    if (!object->IsDexCache()) {
      object->VisitReferences(*this, *this);
    }
  }

  VariableSizedHandleScope& hs_;
  mutable std::vector<Handle<mirror::Class>> to_insert_;
  mutable std::unordered_set<mirror::Object*> marked_objects_;
  std::unordered_set<std::string>* const image_class_descriptors_;
  std::vector<Handle<mirror::Class>> image_classes_;
  Thread* const self_;
  const char* old_cause_;

  DISALLOW_COPY_AND_ASSIGN(ClinitImageUpdate);
};

// Boot-image only: suspends all threads and expands the image-class set with the
// closure of classes reachable from already-marked or initialized-with-<clinit> classes.
void CompilerDriver::UpdateImageClasses(TimingLogger* timings) {
  if (GetCompilerOptions().IsBootImage()) {
    TimingLogger::ScopedTiming t("UpdateImageClasses", timings);

    Runtime* runtime = Runtime::Current();

    // Suspend all threads.
    ScopedSuspendAll ssa(__FUNCTION__);

    VariableSizedHandleScope hs(Thread::Current());
    std::string error_msg;
    std::unique_ptr<ClinitImageUpdate> update(ClinitImageUpdate::Create(hs,
                                                                        image_classes_.get(),
                                                                        Thread::Current(),
                                                                        runtime->GetClassLinker()));

    // Do the marking.
    update->Walk();
  }
}

// Whether the compiler may assume `klass` is loaded at runtime (e.g. to elide checks).
bool CompilerDriver::CanAssumeClassIsLoaded(mirror::Class* klass) {
  Runtime* runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {
    DCHECK(runtime->UseJitCompilation());
    // Having the klass reference here implies that the klass is already loaded.
    return true;
  }
  if (!GetCompilerOptions().IsBootImage()) {
    // Assume loaded only if klass is in the boot image. App classes cannot be assumed
    // loaded because we don't even know what class loader will be used to load them.
    bool class_in_image = runtime->GetHeap()->FindSpaceFromObject(klass, false)->IsImageSpace();
    return class_in_image;
  }
  std::string temp;
  const char* descriptor = klass->GetDescriptor(&temp);
  return IsImageClass(descriptor);
}

// Records a method for later dex-to-dex compilation; guarded by dex_to_dex_references_lock_.
void CompilerDriver::MarkForDexToDexCompilation(Thread* self, const MethodReference& method_ref) {
  MutexLock lock(self, dex_to_dex_references_lock_);
  // Since we're compiling one dex file at a time, we need to look for the
  // current dex file entry only at the end of dex_to_dex_references_.
  if (dex_to_dex_references_.empty() ||
      &dex_to_dex_references_.back().GetDexFile() != method_ref.dex_file) {
    dex_to_dex_references_.emplace_back(*method_ref.dex_file);
  }
  dex_to_dex_references_.back().GetMethodIndexes().SetBit(method_ref.index);
}

// Returns true if `referrer_class` can statically access `resolved_class`
// (no runtime access check needed). Updates access-check statistics either way.
bool CompilerDriver::CanAccessTypeWithoutChecks(ObjPtr<mirror::Class> referrer_class,
                                                ObjPtr<mirror::Class> resolved_class) {
  if (resolved_class == nullptr) {
    stats_->TypeNeedsAccessCheck();
    return false;  // Unknown class needs access checks.
  }
  bool is_accessible = resolved_class->IsPublic();  // Public classes are always accessible.
  if (!is_accessible) {
    if (referrer_class == nullptr) {
      stats_->TypeNeedsAccessCheck();
      return false;  // Incomplete referrer knowledge needs access check.
    }
    // Perform access check, will return true if access is ok or false if we're going to have to
    // check this at runtime (for example for class loaders).
    is_accessible = referrer_class->CanAccess(resolved_class);
  }
  if (is_accessible) {
    stats_->TypeDoesntNeedAccessCheck();
  } else {
    stats_->TypeNeedsAccessCheck();
  }
  return is_accessible;
}

// Like CanAccessTypeWithoutChecks, but additionally requires that the class is
// instantiable; also reports (conservatively) whether the class is finalizable.
bool CompilerDriver::CanAccessInstantiableTypeWithoutChecks(ObjPtr<mirror::Class> referrer_class,
                                                            ObjPtr<mirror::Class> resolved_class,
                                                            bool* finalizable) {
  if (resolved_class == nullptr) {
    stats_->TypeNeedsAccessCheck();
    // Be conservative.
    *finalizable = true;
    return false;  // Unknown class needs access checks.
  }
  *finalizable = resolved_class->IsFinalizable();
  bool is_accessible = resolved_class->IsPublic();  // Public classes are always accessible.
  if (!is_accessible) {
    if (referrer_class == nullptr) {
      stats_->TypeNeedsAccessCheck();
      return false;  // Incomplete referrer knowledge needs access check.
    }
    // Perform access and instantiable checks, will return true if access is ok or false if we're
    // going to have to check this at runtime (for example for class loaders).
    is_accessible = referrer_class->CanAccess(resolved_class);
  }
  bool result = is_accessible && resolved_class->IsInstantiable();
  if (result) {
    stats_->TypeDoesntNeedAccessCheck();
  } else {
    stats_->TypeNeedsAccessCheck();
  }
  return result;
}

// Statistics hook for instance-field resolution outcomes.
void CompilerDriver::ProcessedInstanceField(bool resolved) {
  if (!resolved) {
    stats_->UnresolvedInstanceField();
  } else {
    stats_->ResolvedInstanceField();
  }
}

// Statistics hook for static-field resolution outcomes ("local" = field of the
// compiling class itself).
void CompilerDriver::ProcessedStaticField(bool resolved, bool local) {
  if (!resolved) {
    stats_->UnresolvedStaticField();
  } else if (local) {
    stats_->ResolvedLocalStaticField();
  } else {
    stats_->ResolvedStaticField();
  }
}

// Resolves an instance field and checks whether the compiling method can access it
// via the fast path for the given get/put direction. Returns the field on success,
// nullptr if a runtime check would be required.
ArtField* CompilerDriver::ComputeInstanceFieldInfo(uint32_t field_idx,
                                                   const DexCompilationUnit* mUnit,
                                                   bool is_put,
                                                   const ScopedObjectAccess& soa) {
  // Try to resolve the field and compiling method's class.
  ArtField* resolved_field;
  ObjPtr<mirror::Class> referrer_class;
  Handle<mirror::DexCache> dex_cache(mUnit->GetDexCache());
  {
    Handle<mirror::ClassLoader> class_loader = mUnit->GetClassLoader();
    resolved_field = ResolveField(soa, dex_cache, class_loader, field_idx, /* is_static */ false);
    referrer_class = resolved_field != nullptr
        ? ResolveCompilingMethodsClass(soa, dex_cache, class_loader, mUnit) : nullptr;
  }
  bool can_link = false;
  if (resolved_field != nullptr && referrer_class != nullptr) {
    // fast_path is (fast-get, fast-put); pick the one matching the access direction.
    std::pair<bool, bool> fast_path = IsFastInstanceField(
        dex_cache.Get(), referrer_class, resolved_field, field_idx);
    can_link = is_put ? fast_path.second : fast_path.first;
  }
  ProcessedInstanceField(can_link);
  return can_link ? resolved_field : nullptr;
}

// Convenience overload: reports the field's offset and volatility through out-params.
// On failure, conservative defaults are returned (volatile, invalid offset).
bool CompilerDriver::ComputeInstanceFieldInfo(uint32_t field_idx, const DexCompilationUnit* mUnit,
                                              bool is_put, MemberOffset* field_offset,
                                              bool* is_volatile) {
  ScopedObjectAccess soa(Thread::Current());
  ArtField* resolved_field = ComputeInstanceFieldInfo(field_idx, mUnit, is_put, soa);

  if (resolved_field == nullptr) {
    // Conservative defaults.
    *is_volatile = true;
    *field_offset = MemberOffset(static_cast<size_t>(-1));
    return false;
  } else {
    *is_volatile = resolved_field->IsVolatile();
    *field_offset = resolved_field->GetOffset();
    return true;
  }
}

// Looks up the verification result recorded for the given method, or null if none.
const VerifiedMethod* CompilerDriver::GetVerifiedMethod(const DexFile* dex_file,
                                                        uint32_t method_idx) const {
  MethodReference ref(dex_file, method_idx);
  return verification_results_->GetVerifiedMethod(ref);
}

// Whether the verifier proved the check-cast at `dex_pc` can never fail.
bool CompilerDriver::IsSafeCast(const DexCompilationUnit* mUnit, uint32_t dex_pc) {
  if (!compiler_options_->IsVerificationEnabled()) {
    // If we didn't verify, every cast has to be treated as non-safe.
    return false;
  }
  DCHECK(mUnit->GetVerifiedMethod() != nullptr);
  bool result = mUnit->GetVerifiedMethod()->IsSafeCast(dex_pc);
  if (result) {
    stats_->SafeCast();
  } else {
    stats_->NotASafeCast();
  }
  return result;
}

// Interface for per-index work items run by ParallelCompilationManager::ForAll.
class CompilationVisitor {
 public:
  virtual ~CompilationVisitor() {}
  virtual void Visit(size_t index) = 0;
};

// Fans a range of indices out over a thread pool, with workers pulling the next
// index from a shared atomic counter.
class ParallelCompilationManager {
 public:
  ParallelCompilationManager(ClassLinker* class_linker,
                             jobject class_loader,
                             CompilerDriver* compiler,
                             const DexFile* dex_file,
                             const std::vector<const DexFile*>& dex_files,
                             ThreadPool* thread_pool)
    : index_(0),
      class_linker_(class_linker),
      class_loader_(class_loader),
      compiler_(compiler),
      dex_file_(dex_file),
      dex_files_(dex_files),
      thread_pool_(thread_pool) {}

  ClassLinker* GetClassLinker() const {
    CHECK(class_linker_ != nullptr);
    return class_linker_;
  }

  jobject GetClassLoader() const {
    return class_loader_;
  }

  CompilerDriver* GetCompiler() const {
    CHECK(compiler_ != nullptr);
    return compiler_;
  }

  const DexFile* GetDexFile() const {
    CHECK(dex_file_ != nullptr);
    return dex_file_;
  }

  const std::vector<const DexFile*>& GetDexFiles() const {
    return dex_files_;
  }

  // Runs visitor->Visit(i) for every i in [begin, end) across `work_units` pool tasks,
  // blocking until all of them complete.
  void ForAll(size_t begin, size_t end, CompilationVisitor* visitor, size_t work_units)
      REQUIRES(!*Locks::mutator_lock_) {
    Thread* self = Thread::Current();
    self->AssertNoPendingException();
    CHECK_GT(work_units, 0U);

    index_.StoreRelaxed(begin);
    for (size_t i = 0; i < work_units; ++i) {
      thread_pool_->AddTask(self, new ForAllClosure(this, end, visitor));
    }
    thread_pool_->StartWorkers(self);

    // Ensure we're suspended while we're blocked waiting for the other threads to finish
    // (the worker thread destructors, invoked below, perform the join).
    CHECK_NE(self->GetState(), kRunnable);

    // Wait for all the worker threads to finish.
    thread_pool_->Wait(self, true, false);

    // And stop the workers accepting jobs.
    thread_pool_->StopWorkers(self);
  }

  // Claims the next work index (shared across all worker tasks).
  size_t NextIndex() {
    return index_.FetchAndAddSequentiallyConsistent(1);
  }

 private:
  // A pool task that loops pulling indices until the range is exhausted.
  class ForAllClosure : public Task {
   public:
    ForAllClosure(ParallelCompilationManager* manager, size_t end, CompilationVisitor* visitor)
        : manager_(manager),
          end_(end),
          visitor_(visitor) {}

    virtual void Run(Thread* self) {
      while (true) {
        const size_t index = manager_->NextIndex();
        if (UNLIKELY(index >= end_)) {
          break;
        }
        visitor_->Visit(index);
        self->AssertNoPendingException();
      }
    }

    virtual void Finalize() {
      delete this;
    }

   private:
    ParallelCompilationManager* const manager_;
    const size_t end_;
    CompilationVisitor* const visitor_;
  };

  AtomicInteger index_;
  ClassLinker* const class_linker_;
  const jobject class_loader_;
  CompilerDriver* const compiler_;
  const DexFile* const dex_file_;
  const std::vector<const DexFile*>& dex_files_;
  ThreadPool* const thread_pool_;

  DISALLOW_COPY_AND_ASSIGN(ParallelCompilationManager);
};

// A fast version of SkipClass above if the class pointer is available
// that avoids the expensive FindInClassPath search.
static bool SkipClass(jobject class_loader, const DexFile& dex_file, ObjPtr<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(klass != nullptr);
  // Skip when the class was originally defined by a different dex file (duplicate
  // class definition earlier on the classpath wins).
  const DexFile& original_dex_file = *klass->GetDexCache()->GetDexFile();
  if (&dex_file != &original_dex_file) {
    if (class_loader == nullptr) {
      LOG(WARNING) << "Skipping class " << klass->PrettyDescriptor() << " from "
                   << dex_file.GetLocation() << " previously found in "
                   << original_dex_file.GetLocation();
    }
    return true;
  }
  return false;
}

// Clears the pending exception after a failed resolution, but aborts if it is not
// one of the linkage-error types that resolution is expected to produce.
static void CheckAndClearResolveException(Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  CHECK(self->IsExceptionPending());
  mirror::Throwable* exception = self->GetException();
  std::string temp;
  const char* descriptor = exception->GetClass()->GetDescriptor(&temp);
  const char* expected_exceptions[] = {
      "Ljava/lang/IllegalAccessError;",
      "Ljava/lang/IncompatibleClassChangeError;",
      "Ljava/lang/InstantiationError;",
      "Ljava/lang/LinkageError;",
      "Ljava/lang/NoClassDefFoundError;",
      "Ljava/lang/NoSuchFieldError;",
      "Ljava/lang/NoSuchMethodError;"
  };
  bool found = false;
  for (size_t i = 0; (found == false) && (i < arraysize(expected_exceptions)); ++i) {
    if (strcmp(descriptor, expected_exceptions[i]) == 0) {
      found = true;
    }
  }
  if (!found) {
    LOG(FATAL) << "Unexpected exception " << exception->Dump();
  }
  self->ClearException();
}

// Returns true if constructors of this class need a memory barrier on return,
// i.e. if the class declares any final instance fields.
bool CompilerDriver::RequiresConstructorBarrier(const DexFile& dex_file,
                                                uint16_t class_def_idx) const {
  const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_idx);
  const uint8_t* class_data = dex_file.GetClassData(class_def);
  if (class_data == nullptr) {
    // Empty class such as a marker interface.
    return false;
  }
  ClassDataItemIterator it(dex_file, class_data);
  it.SkipStaticFields();
  // We require a constructor barrier if there are final instance fields.
  while (it.HasNextInstanceField()) {
    if (it.MemberIsFinal()) {
      return true;
    }
    it.Next();
  }
  return false;
}

// Per-class-def work item: eagerly resolves the class, its fields, and its methods,
// and records whether its constructors need a barrier.
class ResolveClassFieldsAndMethodsVisitor : public CompilationVisitor {
 public:
  explicit ResolveClassFieldsAndMethodsVisitor(const ParallelCompilationManager* manager)
      : manager_(manager) {}

  void Visit(size_t class_def_index) OVERRIDE REQUIRES(!Locks::mutator_lock_) {
    ScopedTrace trace(__FUNCTION__);
    Thread* const self = Thread::Current();
    jobject jclass_loader = manager_->GetClassLoader();
    const DexFile& dex_file = *manager_->GetDexFile();
    ClassLinker* class_linker = manager_->GetClassLinker();

    // If an instance field is final then we need to have a barrier on the return, static final
    // fields are assigned within the lock held for class initialization. Conservatively assume
    // constructor barriers are always required.
    bool requires_constructor_barrier = true;

    // Method and Field are the worst. We can't resolve without either
    // context from the code use (to disambiguate virtual vs direct
    // method and instance vs static field) or from class
    // definitions. While the compiler will resolve what it can as it
    // needs it, here we try to resolve fields and methods used in class
    // definitions, since many of them may never be referenced by
    // generated code.
    const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
    ScopedObjectAccess soa(self);
    StackHandleScope<2> hs(soa.Self());
    Handle<mirror::ClassLoader> class_loader(
        hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache(
        soa.Self(), dex_file)));
    // Resolve the class.
    ObjPtr<mirror::Class> klass =
        class_linker->ResolveType(class_def.class_idx_, dex_cache, class_loader);
    bool resolve_fields_and_methods;
    if (klass == nullptr) {
      // Class couldn't be resolved, for example, super-class is in a different dex file. Don't
      // attempt to resolve methods and fields when there is no declaring class.
      CheckAndClearResolveException(soa.Self());
      resolve_fields_and_methods = false;
    } else {
      // We successfully resolved a class, should we skip it?
      if (SkipClass(jclass_loader, dex_file, klass)) {
        return;
      }
      // We want to resolve the methods and fields eagerly.
      resolve_fields_and_methods = true;
    }
    // Note the class_data pointer advances through the headers,
    // static fields, instance fields, direct methods, and virtual
    // methods.
    const uint8_t* class_data = dex_file.GetClassData(class_def);
    if (class_data == nullptr) {
      // Empty class such as a marker interface.
      requires_constructor_barrier = false;
    } else {
      ClassDataItemIterator it(dex_file, class_data);
      while (it.HasNextStaticField()) {
        if (resolve_fields_and_methods) {
          ArtField* field = class_linker->ResolveField(
              it.GetMemberIndex(), dex_cache, class_loader, /* is_static */ true);
          if (field == nullptr) {
            CheckAndClearResolveException(soa.Self());
          }
        }
        it.Next();
      }
      // We require a constructor barrier if there are final instance fields.
      requires_constructor_barrier = false;
      while (it.HasNextInstanceField()) {
        if (it.MemberIsFinal()) {
          requires_constructor_barrier = true;
        }
        if (resolve_fields_and_methods) {
          ArtField* field = class_linker->ResolveField(
              it.GetMemberIndex(), dex_cache, class_loader, /* is_static */ false);
          if (field == nullptr) {
            CheckAndClearResolveException(soa.Self());
          }
        }
        it.Next();
      }
      if (resolve_fields_and_methods) {
        while (it.HasNextMethod()) {
          ArtMethod* method = class_linker->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
              it.GetMemberIndex(),
              dex_cache,
              class_loader,
              /* referrer */ nullptr,
              it.GetMethodInvokeType(class_def));
          if (method == nullptr) {
            CheckAndClearResolveException(soa.Self());
          }
          it.Next();
        }
        DCHECK(!it.HasNext());
      }
    }
    manager_->GetCompiler()->SetRequiresConstructorBarrier(self,
                                                           &dex_file,
                                                           class_def_index,
                                                           requires_constructor_barrier);
  }

 private:
  const ParallelCompilationManager* const manager_;
};

// Per-type-id work item: resolves a single type; failures are tolerated (exception
// cleared) except for OutOfMemoryError, which aborts compilation.
class ResolveTypeVisitor : public CompilationVisitor {
 public:
  explicit ResolveTypeVisitor(const ParallelCompilationManager* manager) : manager_(manager) {
  }
  void Visit(size_t type_idx) OVERRIDE REQUIRES(!Locks::mutator_lock_) {
    // Class derived values are more complicated, they require the linker and loader.
    ScopedObjectAccess soa(Thread::Current());
    ClassLinker* class_linker = manager_->GetClassLinker();
    const DexFile& dex_file = *manager_->GetDexFile();
    StackHandleScope<2> hs(soa.Self());
    Handle<mirror::ClassLoader> class_loader(
        hs.NewHandle(soa.Decode<mirror::ClassLoader>(manager_->GetClassLoader())));
    Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->RegisterDexFile(
        dex_file,
        class_loader.Get())));
    ObjPtr<mirror::Class> klass = (dex_cache != nullptr)
        ? class_linker->ResolveType(dex::TypeIndex(type_idx), dex_cache, class_loader)
        : nullptr;

    if (klass == nullptr) {
      soa.Self()->AssertPendingException();
      mirror::Throwable* exception = soa.Self()->GetException();
      VLOG(compiler) << "Exception during type resolution: " << exception->Dump();
      if (exception->GetClass()->DescriptorEquals("Ljava/lang/OutOfMemoryError;")) {
        // There's little point continuing compilation if the heap is exhausted.
        LOG(FATAL) << "Out of memory during type resolution for compilation";
      }
      soa.Self()->ClearException();
    }
  }

 private:
  const ParallelCompilationManager* const manager_;
};

// Eagerly resolves one dex file in parallel: all type ids when building a boot
// image, then the fields and methods of every class def.
void CompilerDriver::ResolveDexFile(jobject class_loader,
                                    const DexFile& dex_file,
                                    const std::vector<const DexFile*>& dex_files,
                                    ThreadPool* thread_pool,
                                    size_t thread_count,
                                    TimingLogger* timings) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  // TODO: we could resolve strings here, although the string table is largely filled with class
  // and method names.

  ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files,
                                     thread_pool);
  if (GetCompilerOptions().IsBootImage()) {
    // For images we resolve all types, such as array, whereas for applications just those with
    // classdefs are resolved by ResolveClassFieldsAndMethods.
    TimingLogger::ScopedTiming t("Resolve Types", timings);
    ResolveTypeVisitor visitor(&context);
    context.ForAll(0, dex_file.NumTypeIds(), &visitor, thread_count);
  }

  TimingLogger::ScopedTiming t("Resolve MethodsAndFields", timings);
  ResolveClassFieldsAndMethodsVisitor visitor(&context);
  context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
}

// Marks every dex file's classes as verified without running the verifier
// (used for verify-none / assume-verified modes).
void CompilerDriver::SetVerified(jobject class_loader,
                                 const std::vector<const DexFile*>& dex_files,
                                 TimingLogger* timings) {
  // This can be run in parallel.
  for (const DexFile* dex_file : dex_files) {
    CHECK(dex_file != nullptr);
    SetVerifiedDexFile(class_loader,
                       *dex_file,
                       dex_files,
                       parallel_thread_pool_.get(),
                       parallel_thread_count_,
                       timings);
  }
}

// Creates a VerifiedMethod entry for every method of the given class def.
static void PopulateVerifiedMethods(const DexFile& dex_file,
                                    uint32_t class_def_index,
                                    VerificationResults* verification_results) {
  const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
  const uint8_t* class_data = dex_file.GetClassData(class_def);
  if (class_data == nullptr) {
    // Empty class such as a marker interface: no methods to record.
    return;
  }
  ClassDataItemIterator it(dex_file, class_data);
  it.SkipAllFields();

  while (it.HasNextMethod()) {
    verification_results->CreateVerifiedMethodFor(MethodReference(&dex_file, it.GetMemberIndex()));
    it.Next();
  }
  DCHECK(!it.HasNext());
}

static void LoadAndUpdateStatus(const DexFile& dex_file,
                                const DexFile::ClassDef& class_def,
                                mirror::Class::Status status,
                                Handle<mirror::ClassLoader> class_loader,
                                Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  const char* descriptor = dex_file.GetClassDescriptor(class_def);
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::Class> cls(hs.NewHandle<mirror::Class>(
      class_linker->FindClass(self, descriptor,
class_loader))); 1820 if (cls != nullptr) { 1821 // Check that the class is resolved with the current dex file. We might get 1822 // a boot image class, or a class in a different dex file for multidex, and 1823 // we should not update the status in that case. 1824 if (&cls->GetDexFile() == &dex_file) { 1825 ObjectLock<mirror::Class> lock(self, cls); 1826 mirror::Class::SetStatus(cls, status, self); 1827 } 1828 } else { 1829 DCHECK(self->IsExceptionPending()); 1830 self->ClearException(); 1831 } 1832} 1833 1834bool CompilerDriver::FastVerify(jobject jclass_loader, 1835 const std::vector<const DexFile*>& dex_files, 1836 TimingLogger* timings) { 1837 verifier::VerifierDeps* verifier_deps = 1838 Runtime::Current()->GetCompilerCallbacks()->GetVerifierDeps(); 1839 // If there exist VerifierDeps that aren't the ones we just created to output, use them to verify. 1840 if (verifier_deps == nullptr || verifier_deps->OutputOnly()) { 1841 return false; 1842 } 1843 TimingLogger::ScopedTiming t("Fast Verify", timings); 1844 ScopedObjectAccess soa(Thread::Current()); 1845 StackHandleScope<2> hs(soa.Self()); 1846 Handle<mirror::ClassLoader> class_loader( 1847 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 1848 if (!verifier_deps->ValidateDependencies(class_loader, soa.Self())) { 1849 return false; 1850 } 1851 1852 bool compiler_only_verifies = !GetCompilerOptions().IsAnyCompilationEnabled(); 1853 1854 // We successfully validated the dependencies, now update class status 1855 // of verified classes. Note that the dependencies also record which classes 1856 // could not be fully verified; we could try again, but that would hurt verification 1857 // time. So instead we assume these classes still need to be verified at 1858 // runtime. 1859 for (const DexFile* dex_file : dex_files) { 1860 // Fetch the list of unverified classes. 
1861 const std::set<dex::TypeIndex>& unverified_classes = 1862 verifier_deps->GetUnverifiedClasses(*dex_file); 1863 for (uint32_t i = 0; i < dex_file->NumClassDefs(); ++i) { 1864 const DexFile::ClassDef& class_def = dex_file->GetClassDef(i); 1865 if (unverified_classes.find(class_def.class_idx_) == unverified_classes.end()) { 1866 if (compiler_only_verifies) { 1867 // Just update the compiled_classes_ map. The compiler doesn't need to resolve 1868 // the type. 1869 ClassReference ref(dex_file, i); 1870 mirror::Class::Status existing = mirror::Class::kStatusNotReady; 1871 DCHECK(compiled_classes_.Get(ref, &existing)) << ref.dex_file->GetLocation(); 1872 ClassStateTable::InsertResult result = 1873 compiled_classes_.Insert(ref, existing, mirror::Class::kStatusVerified); 1874 CHECK_EQ(result, ClassStateTable::kInsertResultSuccess); 1875 } else { 1876 // Update the class status, so later compilation stages know they don't need to verify 1877 // the class. 1878 LoadAndUpdateStatus( 1879 *dex_file, class_def, mirror::Class::kStatusVerified, class_loader, soa.Self()); 1880 // Create `VerifiedMethod`s for each methods, the compiler expects one for 1881 // quickening or compiling. 1882 // Note that this means: 1883 // - We're only going to compile methods that did verify. 1884 // - Quickening will not do checkcast ellision. 1885 // TODO(ngeoffray): Reconsider this once we refactor compiler filters. 1886 PopulateVerifiedMethods(*dex_file, i, verification_results_); 1887 } 1888 } else if (!compiler_only_verifies) { 1889 // Make sure later compilation stages know they should not try to verify 1890 // this class again. 
1891 LoadAndUpdateStatus(*dex_file, 1892 class_def, 1893 mirror::Class::kStatusRetryVerificationAtRuntime, 1894 class_loader, 1895 soa.Self()); 1896 } 1897 } 1898 } 1899 return true; 1900} 1901 1902void CompilerDriver::Verify(jobject jclass_loader, 1903 const std::vector<const DexFile*>& dex_files, 1904 TimingLogger* timings) { 1905 if (FastVerify(jclass_loader, dex_files, timings)) { 1906 return; 1907 } 1908 1909 // If there is no existing `verifier_deps` (because of non-existing vdex), or 1910 // the existing `verifier_deps` is not valid anymore, create a new one for 1911 // non boot image compilation. The verifier will need it to record the new dependencies. 1912 // Then dex2oat can update the vdex file with these new dependencies. 1913 if (!GetCompilerOptions().IsBootImage()) { 1914 // Dex2oat creates the verifier deps. 1915 // Create the main VerifierDeps, and set it to this thread. 1916 verifier::VerifierDeps* verifier_deps = 1917 Runtime::Current()->GetCompilerCallbacks()->GetVerifierDeps(); 1918 CHECK(verifier_deps != nullptr); 1919 Thread::Current()->SetVerifierDeps(verifier_deps); 1920 // Create per-thread VerifierDeps to avoid contention on the main one. 1921 // We will merge them after verification. 1922 for (ThreadPoolWorker* worker : parallel_thread_pool_->GetWorkers()) { 1923 worker->GetThread()->SetVerifierDeps(new verifier::VerifierDeps(dex_files_for_oat_file_)); 1924 } 1925 } 1926 1927 // Verification updates VerifierDeps and needs to run single-threaded to be deterministic. 1928 bool force_determinism = GetCompilerOptions().IsForceDeterminism(); 1929 ThreadPool* verify_thread_pool = 1930 force_determinism ? single_thread_pool_.get() : parallel_thread_pool_.get(); 1931 size_t verify_thread_count = force_determinism ? 
1U : parallel_thread_count_; 1932 for (const DexFile* dex_file : dex_files) { 1933 CHECK(dex_file != nullptr); 1934 VerifyDexFile(jclass_loader, 1935 *dex_file, 1936 dex_files, 1937 verify_thread_pool, 1938 verify_thread_count, 1939 timings); 1940 } 1941 1942 if (!GetCompilerOptions().IsBootImage()) { 1943 // Merge all VerifierDeps into the main one. 1944 verifier::VerifierDeps* verifier_deps = Thread::Current()->GetVerifierDeps(); 1945 for (ThreadPoolWorker* worker : parallel_thread_pool_->GetWorkers()) { 1946 verifier::VerifierDeps* thread_deps = worker->GetThread()->GetVerifierDeps(); 1947 worker->GetThread()->SetVerifierDeps(nullptr); 1948 verifier_deps->MergeWith(*thread_deps, dex_files_for_oat_file_); 1949 delete thread_deps; 1950 } 1951 Thread::Current()->SetVerifierDeps(nullptr); 1952 } 1953} 1954 1955class VerifyClassVisitor : public CompilationVisitor { 1956 public: 1957 VerifyClassVisitor(const ParallelCompilationManager* manager, verifier::HardFailLogMode log_level) 1958 : manager_(manager), log_level_(log_level) {} 1959 1960 virtual void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) OVERRIDE { 1961 ScopedTrace trace(__FUNCTION__); 1962 ScopedObjectAccess soa(Thread::Current()); 1963 const DexFile& dex_file = *manager_->GetDexFile(); 1964 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index); 1965 const char* descriptor = dex_file.GetClassDescriptor(class_def); 1966 ClassLinker* class_linker = manager_->GetClassLinker(); 1967 jobject jclass_loader = manager_->GetClassLoader(); 1968 StackHandleScope<3> hs(soa.Self()); 1969 Handle<mirror::ClassLoader> class_loader( 1970 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 1971 Handle<mirror::Class> klass( 1972 hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader))); 1973 verifier::FailureKind failure_kind; 1974 if (klass == nullptr) { 1975 CHECK(soa.Self()->IsExceptionPending()); 1976 soa.Self()->ClearException(); 1977 1978 /* 1979 * At 
compile time, we can still structurally verify the class even if FindClass fails. 1980 * This is to ensure the class is structurally sound for compilation. An unsound class 1981 * will be rejected by the verifier and later skipped during compilation in the compiler. 1982 */ 1983 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache( 1984 soa.Self(), dex_file))); 1985 std::string error_msg; 1986 failure_kind = 1987 verifier::MethodVerifier::VerifyClass(soa.Self(), 1988 &dex_file, 1989 dex_cache, 1990 class_loader, 1991 class_def, 1992 Runtime::Current()->GetCompilerCallbacks(), 1993 true /* allow soft failures */, 1994 log_level_, 1995 &error_msg); 1996 if (failure_kind == verifier::FailureKind::kHardFailure) { 1997 LOG(ERROR) << "Verification failed on class " << PrettyDescriptor(descriptor) 1998 << " because: " << error_msg; 1999 manager_->GetCompiler()->SetHadHardVerifierFailure(); 2000 } else if (failure_kind == verifier::FailureKind::kSoftFailure) { 2001 manager_->GetCompiler()->AddSoftVerifierFailure(); 2002 } else { 2003 // Force a soft failure for the VerifierDeps. This is a sanity measure, as 2004 // the vdex file already records that the class hasn't been resolved. It avoids 2005 // trying to do future verification optimizations when processing the vdex file. 2006 DCHECK(failure_kind == verifier::FailureKind::kNoFailure) << failure_kind; 2007 failure_kind = verifier::FailureKind::kSoftFailure; 2008 } 2009 } else if (!SkipClass(jclass_loader, dex_file, klass.Get())) { 2010 CHECK(klass->IsResolved()) << klass->PrettyClass(); 2011 failure_kind = class_linker->VerifyClass(soa.Self(), klass, log_level_); 2012 2013 if (klass->IsErroneous()) { 2014 // ClassLinker::VerifyClass throws, which isn't useful in the compiler. 
2015 CHECK(soa.Self()->IsExceptionPending()); 2016 soa.Self()->ClearException(); 2017 manager_->GetCompiler()->SetHadHardVerifierFailure(); 2018 } else if (failure_kind == verifier::FailureKind::kSoftFailure) { 2019 manager_->GetCompiler()->AddSoftVerifierFailure(); 2020 } 2021 2022 CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerified() || klass->IsErroneous()) 2023 << klass->PrettyDescriptor() << ": state=" << klass->GetStatus(); 2024 2025 // Class has a meaningful status for the compiler now, record it. 2026 ClassReference ref(manager_->GetDexFile(), class_def_index); 2027 manager_->GetCompiler()->RecordClassStatus(ref, klass->GetStatus()); 2028 2029 // It is *very* problematic if there are resolution errors in the boot classpath. 2030 // 2031 // It is also bad if classes fail verification. For example, we rely on things working 2032 // OK without verification when the decryption dialog is brought up. It is thus highly 2033 // recommended to compile the boot classpath with 2034 // --abort-on-hard-verifier-error --abort-on-soft-verifier-error 2035 // which is the default build system configuration. 2036 if (kIsDebugBuild) { 2037 if (manager_->GetCompiler()->GetCompilerOptions().IsBootImage()) { 2038 if (!klass->IsResolved() || klass->IsErroneous()) { 2039 LOG(FATAL) << "Boot classpath class " << klass->PrettyClass() 2040 << " failed to resolve/is erroneous: state= " << klass->GetStatus(); 2041 UNREACHABLE(); 2042 } 2043 } 2044 if (klass->IsVerified()) { 2045 DCHECK_EQ(failure_kind, verifier::FailureKind::kNoFailure); 2046 } else if (klass->ShouldVerifyAtRuntime()) { 2047 DCHECK_EQ(failure_kind, verifier::FailureKind::kSoftFailure); 2048 } else { 2049 DCHECK_EQ(failure_kind, verifier::FailureKind::kHardFailure); 2050 } 2051 } 2052 } else { 2053 // Make the skip a soft failure, essentially being considered as verify at runtime. 
2054 failure_kind = verifier::FailureKind::kSoftFailure; 2055 } 2056 verifier::VerifierDeps::MaybeRecordVerificationStatus( 2057 dex_file, class_def.class_idx_, failure_kind); 2058 soa.Self()->AssertNoPendingException(); 2059 } 2060 2061 private: 2062 const ParallelCompilationManager* const manager_; 2063 const verifier::HardFailLogMode log_level_; 2064}; 2065 2066void CompilerDriver::VerifyDexFile(jobject class_loader, 2067 const DexFile& dex_file, 2068 const std::vector<const DexFile*>& dex_files, 2069 ThreadPool* thread_pool, 2070 size_t thread_count, 2071 TimingLogger* timings) { 2072 TimingLogger::ScopedTiming t("Verify Dex File", timings); 2073 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 2074 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files, 2075 thread_pool); 2076 bool abort_on_verifier_failures = GetCompilerOptions().AbortOnHardVerifierFailure() 2077 || GetCompilerOptions().AbortOnSoftVerifierFailure(); 2078 verifier::HardFailLogMode log_level = abort_on_verifier_failures 2079 ? 
verifier::HardFailLogMode::kLogInternalFatal 2080 : verifier::HardFailLogMode::kLogWarning; 2081 VerifyClassVisitor visitor(&context, log_level); 2082 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count); 2083} 2084 2085class SetVerifiedClassVisitor : public CompilationVisitor { 2086 public: 2087 explicit SetVerifiedClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {} 2088 2089 virtual void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) OVERRIDE { 2090 ScopedTrace trace(__FUNCTION__); 2091 ScopedObjectAccess soa(Thread::Current()); 2092 const DexFile& dex_file = *manager_->GetDexFile(); 2093 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index); 2094 const char* descriptor = dex_file.GetClassDescriptor(class_def); 2095 ClassLinker* class_linker = manager_->GetClassLinker(); 2096 jobject jclass_loader = manager_->GetClassLoader(); 2097 StackHandleScope<3> hs(soa.Self()); 2098 Handle<mirror::ClassLoader> class_loader( 2099 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 2100 Handle<mirror::Class> klass( 2101 hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader))); 2102 // Class might have failed resolution. Then don't set it to verified. 2103 if (klass != nullptr) { 2104 // Only do this if the class is resolved. If even resolution fails, quickening will go very, 2105 // very wrong. 2106 if (klass->IsResolved() && !klass->IsErroneousResolved()) { 2107 if (klass->GetStatus() < mirror::Class::kStatusVerified) { 2108 ObjectLock<mirror::Class> lock(soa.Self(), klass); 2109 // Set class status to verified. 2110 mirror::Class::SetStatus(klass, mirror::Class::kStatusVerified, soa.Self()); 2111 // Mark methods as pre-verified. If we don't do this, the interpreter will run with 2112 // access checks. 
2113 klass->SetSkipAccessChecksFlagOnAllMethods( 2114 GetInstructionSetPointerSize(manager_->GetCompiler()->GetInstructionSet())); 2115 klass->SetVerificationAttempted(); 2116 } 2117 // Record the final class status if necessary. 2118 ClassReference ref(manager_->GetDexFile(), class_def_index); 2119 manager_->GetCompiler()->RecordClassStatus(ref, klass->GetStatus()); 2120 } 2121 } else { 2122 Thread* self = soa.Self(); 2123 DCHECK(self->IsExceptionPending()); 2124 self->ClearException(); 2125 } 2126 } 2127 2128 private: 2129 const ParallelCompilationManager* const manager_; 2130}; 2131 2132void CompilerDriver::SetVerifiedDexFile(jobject class_loader, 2133 const DexFile& dex_file, 2134 const std::vector<const DexFile*>& dex_files, 2135 ThreadPool* thread_pool, 2136 size_t thread_count, 2137 TimingLogger* timings) { 2138 TimingLogger::ScopedTiming t("Verify Dex File", timings); 2139 if (!compiled_classes_.HaveDexFile(&dex_file)) { 2140 compiled_classes_.AddDexFile(&dex_file); 2141 } 2142 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 2143 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files, 2144 thread_pool); 2145 SetVerifiedClassVisitor visitor(&context); 2146 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count); 2147} 2148 2149class InitializeClassVisitor : public CompilationVisitor { 2150 public: 2151 explicit InitializeClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {} 2152 2153 void Visit(size_t class_def_index) OVERRIDE { 2154 ScopedTrace trace(__FUNCTION__); 2155 jobject jclass_loader = manager_->GetClassLoader(); 2156 const DexFile& dex_file = *manager_->GetDexFile(); 2157 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index); 2158 const DexFile::TypeId& class_type_id = dex_file.GetTypeId(class_def.class_idx_); 2159 const char* descriptor = dex_file.StringDataByIdx(class_type_id.descriptor_idx_); 2160 2161 ScopedObjectAccess 
soa(Thread::Current()); 2162 StackHandleScope<3> hs(soa.Self()); 2163 Handle<mirror::ClassLoader> class_loader( 2164 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 2165 Handle<mirror::Class> klass( 2166 hs.NewHandle(manager_->GetClassLinker()->FindClass(soa.Self(), descriptor, class_loader))); 2167 2168 if (klass != nullptr && !SkipClass(manager_->GetClassLoader(), dex_file, klass.Get())) { 2169 TryInitializeClass(klass, class_loader); 2170 } 2171 // Clear any class not found or verification exceptions. 2172 soa.Self()->ClearException(); 2173 } 2174 2175 // A helper function for initializing klass. 2176 void TryInitializeClass(Handle<mirror::Class> klass, Handle<mirror::ClassLoader>& class_loader) 2177 REQUIRES_SHARED(Locks::mutator_lock_) { 2178 const DexFile& dex_file = klass->GetDexFile(); 2179 const DexFile::ClassDef* class_def = klass->GetClassDef(); 2180 const DexFile::TypeId& class_type_id = dex_file.GetTypeId(class_def->class_idx_); 2181 const char* descriptor = dex_file.StringDataByIdx(class_type_id.descriptor_idx_); 2182 ScopedObjectAccessUnchecked soa(Thread::Current()); 2183 StackHandleScope<3> hs(soa.Self()); 2184 const bool is_boot_image = manager_->GetCompiler()->GetCompilerOptions().IsBootImage(); 2185 const bool is_app_image = manager_->GetCompiler()->GetCompilerOptions().IsAppImage(); 2186 2187 mirror::Class::Status old_status = klass->GetStatus(); 2188 // Don't initialize classes in boot space when compiling app image 2189 if (is_app_image && klass->IsBootStrapClassLoaded()) { 2190 // Also return early and don't store the class status in the recorded class status. 2191 return; 2192 } 2193 // Only try to initialize classes that were successfully verified. 2194 if (klass->IsVerified()) { 2195 // Attempt to initialize the class but bail if we either need to initialize the super-class 2196 // or static fields. 
2197 manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, false, false); 2198 old_status = klass->GetStatus(); 2199 if (!klass->IsInitialized()) { 2200 // We don't want non-trivial class initialization occurring on multiple threads due to 2201 // deadlock problems. For example, a parent class is initialized (holding its lock) that 2202 // refers to a sub-class in its static/class initializer causing it to try to acquire the 2203 // sub-class' lock. While on a second thread the sub-class is initialized (holding its lock) 2204 // after first initializing its parents, whose locks are acquired. This leads to a 2205 // parent-to-child and a child-to-parent lock ordering and consequent potential deadlock. 2206 // We need to use an ObjectLock due to potential suspension in the interpreting code. Rather 2207 // than use a special Object for the purpose we use the Class of java.lang.Class. 2208 Handle<mirror::Class> h_klass(hs.NewHandle(klass->GetClass())); 2209 ObjectLock<mirror::Class> lock(soa.Self(), h_klass); 2210 // Attempt to initialize allowing initialization of parent classes but still not static 2211 // fields. 2212 // Initialize dependencies first only for app image, to make TryInitialize recursive. 2213 bool is_superclass_initialized = !is_app_image ? true : 2214 InitializeDependencies(klass, class_loader, soa.Self()); 2215 if (!is_app_image || (is_app_image && is_superclass_initialized)) { 2216 manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, false, true); 2217 } 2218 // Otherwise it's in app image but superclasses can't be initialized, no need to proceed. 2219 old_status = klass->GetStatus(); 2220 2221 bool too_many_encoded_fields = false; 2222 if (!is_boot_image && klass->NumStaticFields() > kMaxEncodedFields) { 2223 too_many_encoded_fields = true; 2224 } 2225 // If the class was not initialized, we can proceed to see if we can initialize static 2226 // fields. Limit the max number of encoded fields. 
2227 if (!klass->IsInitialized() && 2228 (is_app_image || is_boot_image) && 2229 is_superclass_initialized && 2230 !too_many_encoded_fields && 2231 manager_->GetCompiler()->IsImageClass(descriptor)) { 2232 bool can_init_static_fields = false; 2233 if (is_boot_image) { 2234 // We need to initialize static fields, we only do this for image classes that aren't 2235 // marked with the $NoPreloadHolder (which implies this should not be initialized 2236 // early). 2237 can_init_static_fields = !StringPiece(descriptor).ends_with("$NoPreloadHolder;"); 2238 } else { 2239 CHECK(is_app_image); 2240 // The boot image case doesn't need to recursively initialize the dependencies with 2241 // special logic since the class linker already does this. 2242 can_init_static_fields = 2243 !soa.Self()->IsExceptionPending() && 2244 is_superclass_initialized && 2245 NoClinitInDependency(klass, soa.Self(), &class_loader); 2246 // TODO The checking for clinit can be removed since it's already 2247 // checked when init superclass. Currently keep it because it contains 2248 // processing of intern strings. Will be removed later when intern strings 2249 // and clinit are both initialized. 2250 } 2251 2252 if (can_init_static_fields) { 2253 VLOG(compiler) << "Initializing: " << descriptor; 2254 // TODO multithreading support. We should ensure the current compilation thread has 2255 // exclusive access to the runtime and the transaction. To achieve this, we could use 2256 // a ReaderWriterMutex but we're holding the mutator lock so we fail mutex sanity 2257 // checks in Thread::AssertThreadSuspensionIsAllowable. 2258 Runtime* const runtime = Runtime::Current(); 2259 // Run the class initializer in transaction mode. 
2260 runtime->EnterTransactionMode(is_app_image, klass.Get()); 2261 bool success = manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, true, 2262 true); 2263 // TODO we detach transaction from runtime to indicate we quit the transactional 2264 // mode which prevents the GC from visiting objects modified during the transaction. 2265 // Ensure GC is not run so don't access freed objects when aborting transaction. 2266 2267 { 2268 ScopedAssertNoThreadSuspension ants("Transaction end"); 2269 2270 if (success) { 2271 runtime->ExitTransactionMode(); 2272 DCHECK(!runtime->IsActiveTransaction()); 2273 } 2274 2275 if (!success) { 2276 CHECK(soa.Self()->IsExceptionPending()); 2277 mirror::Throwable* exception = soa.Self()->GetException(); 2278 VLOG(compiler) << "Initialization of " << descriptor << " aborted because of " 2279 << exception->Dump(); 2280 std::ostream* file_log = manager_->GetCompiler()-> 2281 GetCompilerOptions().GetInitFailureOutput(); 2282 if (file_log != nullptr) { 2283 *file_log << descriptor << "\n"; 2284 *file_log << exception->Dump() << "\n"; 2285 } 2286 soa.Self()->ClearException(); 2287 runtime->RollbackAllTransactions(); 2288 CHECK_EQ(old_status, klass->GetStatus()) << "Previous class status not restored"; 2289 } else if (is_boot_image) { 2290 // For boot image, we want to put the updated status in the oat class since we can't 2291 // reject the image anyways. 2292 old_status = klass->GetStatus(); 2293 } 2294 } 2295 2296 if (!success) { 2297 // On failure, still intern strings of static fields and seen in <clinit>, as these 2298 // will be created in the zygote. This is separated from the transaction code just 2299 // above as we will allocate strings, so must be allowed to suspend. 
2300 if (&klass->GetDexFile() == manager_->GetDexFile()) { 2301 InternStrings(klass, class_loader); 2302 } else { 2303 DCHECK(!is_boot_image) << "Boot image must have equal dex files"; 2304 } 2305 } 2306 } 2307 } 2308 // If the class still isn't initialized, at least try some checks that initialization 2309 // would do so they can be skipped at runtime. 2310 if (!klass->IsInitialized() && 2311 manager_->GetClassLinker()->ValidateSuperClassDescriptors(klass)) { 2312 old_status = mirror::Class::kStatusSuperclassValidated; 2313 } else { 2314 soa.Self()->ClearException(); 2315 } 2316 soa.Self()->AssertNoPendingException(); 2317 } 2318 } 2319 // Record the final class status if necessary. 2320 ClassReference ref(&dex_file, klass->GetDexClassDefIndex()); 2321 // Back up the status before doing initialization for static encoded fields, 2322 // because the static encoded branch wants to keep the status to uninitialized. 2323 manager_->GetCompiler()->RecordClassStatus(ref, old_status); 2324 } 2325 2326 private: 2327 void InternStrings(Handle<mirror::Class> klass, Handle<mirror::ClassLoader> class_loader) 2328 REQUIRES_SHARED(Locks::mutator_lock_) { 2329 DCHECK(manager_->GetCompiler()->GetCompilerOptions().IsBootImage()); 2330 DCHECK(klass->IsVerified()); 2331 DCHECK(!klass->IsInitialized()); 2332 2333 StackHandleScope<1> hs(Thread::Current()); 2334 Handle<mirror::DexCache> dex_cache = hs.NewHandle(klass->GetDexCache()); 2335 const DexFile::ClassDef* class_def = klass->GetClassDef(); 2336 ClassLinker* class_linker = manager_->GetClassLinker(); 2337 2338 // Check encoded final field values for strings and intern. 2339 annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache, 2340 class_loader, 2341 manager_->GetClassLinker(), 2342 *class_def); 2343 for ( ; value_it.HasNext(); value_it.Next()) { 2344 if (value_it.GetValueType() == annotations::RuntimeEncodedStaticFieldValueIterator::kString) { 2345 // Resolve the string. This will intern the string. 
2346 art::ObjPtr<mirror::String> resolved = class_linker->ResolveString( 2347 dex::StringIndex(value_it.GetJavaValue().i), dex_cache); 2348 CHECK(resolved != nullptr); 2349 } 2350 } 2351 2352 // Intern strings seen in <clinit>. 2353 ArtMethod* clinit = klass->FindClassInitializer(class_linker->GetImagePointerSize()); 2354 if (clinit != nullptr) { 2355 const DexFile::CodeItem* code_item = clinit->GetCodeItem(); 2356 DCHECK(code_item != nullptr); 2357 for (const DexInstructionPcPair& inst : code_item->Instructions()) { 2358 if (inst->Opcode() == Instruction::CONST_STRING) { 2359 ObjPtr<mirror::String> s = class_linker->ResolveString( 2360 dex::StringIndex(inst->VRegB_21c()), dex_cache); 2361 CHECK(s != nullptr); 2362 } else if (inst->Opcode() == Instruction::CONST_STRING_JUMBO) { 2363 ObjPtr<mirror::String> s = class_linker->ResolveString( 2364 dex::StringIndex(inst->VRegB_31c()), dex_cache); 2365 CHECK(s != nullptr); 2366 } 2367 } 2368 } 2369 } 2370 2371 bool ResolveTypesOfMethods(Thread* self, ArtMethod* m) 2372 REQUIRES_SHARED(Locks::mutator_lock_) { 2373 // Return value of ResolveReturnType() is discarded because resolve will be done internally. 2374 ObjPtr<mirror::Class> rtn_type = m->ResolveReturnType(); 2375 if (rtn_type == nullptr) { 2376 self->ClearException(); 2377 return false; 2378 } 2379 const DexFile::TypeList* types = m->GetParameterTypeList(); 2380 if (types != nullptr) { 2381 for (uint32_t i = 0; i < types->Size(); ++i) { 2382 dex::TypeIndex param_type_idx = types->GetTypeItem(i).type_idx_; 2383 ObjPtr<mirror::Class> param_type = m->ResolveClassFromTypeIndex(param_type_idx); 2384 if (param_type == nullptr) { 2385 self->ClearException(); 2386 return false; 2387 } 2388 } 2389 } 2390 return true; 2391 } 2392 2393 // Pre resolve types mentioned in all method signatures before start a transaction 2394 // since ResolveType doesn't work in transaction mode. 
2395 bool PreResolveTypes(Thread* self, const Handle<mirror::Class>& klass) 2396 REQUIRES_SHARED(Locks::mutator_lock_) { 2397 PointerSize pointer_size = manager_->GetClassLinker()->GetImagePointerSize(); 2398 for (ArtMethod& m : klass->GetMethods(pointer_size)) { 2399 if (!ResolveTypesOfMethods(self, &m)) { 2400 return false; 2401 } 2402 } 2403 if (klass->IsInterface()) { 2404 return true; 2405 } else if (klass->HasSuperClass()) { 2406 StackHandleScope<1> hs(self); 2407 MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(klass->GetSuperClass())); 2408 for (int i = super_klass->GetVTableLength() - 1; i >= 0; --i) { 2409 ArtMethod* m = klass->GetVTableEntry(i, pointer_size); 2410 ArtMethod* super_m = super_klass->GetVTableEntry(i, pointer_size); 2411 if (!ResolveTypesOfMethods(self, m) || !ResolveTypesOfMethods(self, super_m)) { 2412 return false; 2413 } 2414 } 2415 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) { 2416 super_klass.Assign(klass->GetIfTable()->GetInterface(i)); 2417 if (klass->GetClassLoader() != super_klass->GetClassLoader()) { 2418 uint32_t num_methods = super_klass->NumVirtualMethods(); 2419 for (uint32_t j = 0; j < num_methods; ++j) { 2420 ArtMethod* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>( 2421 j, pointer_size); 2422 ArtMethod* super_m = super_klass->GetVirtualMethod(j, pointer_size); 2423 if (!ResolveTypesOfMethods(self, m) || !ResolveTypesOfMethods(self, super_m)) { 2424 return false; 2425 } 2426 } 2427 } 2428 } 2429 } 2430 return true; 2431 } 2432 2433 // Initialize the klass's dependencies recursively before initializing itself. 2434 // Checking for interfaces is also necessary since interfaces can contain 2435 // both default methods and static encoded fields. 
  // Recursively initializes |klass|'s superclass and direct interfaces, then
  // pre-resolves the types in its method signatures. Returns false if any
  // dependency could not be brought to the initialized state.
  bool InitializeDependencies(const Handle<mirror::Class>& klass,
                              Handle<mirror::ClassLoader> class_loader,
                              Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (klass->HasSuperClass()) {
      ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
      if (!handle_scope_super->IsInitialized()) {
        this->TryInitializeClass(handle_scope_super, class_loader);
        // TryInitializeClass is best-effort; re-check whether it succeeded.
        if (!handle_scope_super->IsInitialized()) {
          return false;
        }
      }
    }

    uint32_t num_if = klass->NumDirectInterfaces();
    for (size_t i = 0; i < num_if; i++) {
      ObjPtr<mirror::Class>
          interface = mirror::Class::GetDirectInterface(self, klass.Get(), i);
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> handle_interface(hs.NewHandle(interface));

      TryInitializeClass(handle_interface, class_loader);

      if (!handle_interface->IsInitialized()) {
        return false;
      }
    }

    // Dependencies are initialized; now pre-resolve signature types (required
    // because ResolveType does not work inside a transaction).
    return PreResolveTypes(self, klass);
  }

  // In this phase the classes containing class initializers are ignored. Make sure no
  // clinit appears in klass's super class chain and interfaces.
  // Returns true iff neither |klass| nor anything in its superclass chain or
  // (transitively) its direct interfaces declares a <clinit>. Classes with a
  // class initializer anywhere in their dependency graph are skipped in this
  // phase (see the comment above).
  bool NoClinitInDependency(const Handle<mirror::Class>& klass,
                            Thread* self,
                            Handle<mirror::ClassLoader>* class_loader)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* clinit =
        klass->FindClassInitializer(manager_->GetClassLinker()->GetImagePointerSize());
    if (clinit != nullptr) {
      // Log which class/initializer caused the rejection, then bail out.
      VLOG(compiler) << klass->PrettyClass() << ' ' << clinit->PrettyMethod(true);
      return false;
    }
    if (klass->HasSuperClass()) {
      ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
      if (!NoClinitInDependency(handle_scope_super, self, class_loader)) {
        return false;
      }
    }

    uint32_t num_if = klass->NumDirectInterfaces();
    for (size_t i = 0; i < num_if; i++) {
      ObjPtr<mirror::Class>
          interface = mirror::Class::GetDirectInterface(self, klass.Get(), i);
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> handle_interface(hs.NewHandle(interface));
      if (!NoClinitInDependency(handle_interface, self, class_loader)) {
        return false;
      }
    }

    return true;
  }

  const ParallelCompilationManager* const manager_;
};

// Runs class initialization ("InitializeNoClinit") over every class def of
// |dex_file|, possibly in parallel; forced to a single thread when determinism
// is required or when compiling an image (see below).
void CompilerDriver::InitializeClasses(jobject jni_class_loader,
                                       const DexFile& dex_file,
                                       const std::vector<const DexFile*>& dex_files,
                                       TimingLogger* timings) {
  TimingLogger::ScopedTiming t("InitializeNoClinit", timings);

  // Initialization allocates objects and needs to run single-threaded to be deterministic.
  bool force_determinism = GetCompilerOptions().IsForceDeterminism();
  ThreadPool* init_thread_pool = force_determinism
                                     ? single_thread_pool_.get()
                                     : parallel_thread_pool_.get();
  size_t init_thread_count = force_determinism ?
      1U : parallel_thread_count_;

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ParallelCompilationManager context(class_linker, jni_class_loader, this, &dex_file, dex_files,
                                     init_thread_pool);

  if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsAppImage()) {
    // Set the concurrency thread to 1 to support initialization for App Images since transaction
    // doesn't support multithreading now.
    // TODO: remove this when transactional mode supports multithreading.
    init_thread_count = 1U;
  }
  InitializeClassVisitor visitor(&context);
  context.ForAll(0, dex_file.NumClassDefs(), &visitor, init_thread_count);
}

// Visits loaded classes to (a) eagerly initialize array classes and
// (b) collect handles so IMT/conflict tables can be filled for all visited
// classes afterwards (see FillAllIMTAndConflictTables).
class InitializeArrayClassesAndCreateConflictTablesVisitor : public ClassVisitor {
 public:
  explicit InitializeArrayClassesAndCreateConflictTablesVisitor(VariableSizedHandleScope& hs)
      : hs_(hs) {}

  // Called once per loaded class; always returns true to keep visiting.
  virtual bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
      // Classes already in the boot image need no work here.
      return true;
    }
    if (klass->IsArrayClass()) {
      // HandleWrapper keeps |klass| valid across possible GC in EnsureInitialized.
      StackHandleScope<1> hs(Thread::Current());
      auto h_klass = hs.NewHandleWrapper(&klass);
      Runtime::Current()->GetClassLinker()->EnsureInitialized(hs.Self(), h_klass, true, true);
    }
    // Collect handles since there may be thread suspension in future EnsureInitialized.
    to_visit_.push_back(hs_.NewHandle(klass));
    return true;
  }

  // Fills IMT and conflict tables for every class collected by operator().
  void FillAllIMTAndConflictTables() REQUIRES_SHARED(Locks::mutator_lock_) {
    for (Handle<mirror::Class> c : to_visit_) {
      // Create the conflict tables.
      FillIMTAndConflictTables(c.Get());
    }
  }

 private:
  // Fills the IMT and conflict tables for |klass|, processing its superclass
  // chain first; each class is handled at most once (tracked in visited_classes_).
  void FillIMTAndConflictTables(ObjPtr<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!klass->ShouldHaveImt()) {
      return;
    }
    if (visited_classes_.find(klass) != visited_classes_.end()) {
      return;  // Already processed.
    }
    if (klass->HasSuperClass()) {
      // Superclass tables must exist before the subclass's are filled.
      FillIMTAndConflictTables(klass->GetSuperClass());
    }
    if (!klass->IsTemp()) {
      Runtime::Current()->GetClassLinker()->FillIMTAndConflictTables(klass);
    }
    visited_classes_.insert(klass);
  }

  VariableSizedHandleScope& hs_;                                       // Owns the collected handles.
  std::vector<Handle<mirror::Class>> to_visit_;                        // Classes pending table fill.
  std::unordered_set<ObjPtr<mirror::Class>, HashObjPtr> visited_classes_;  // De-dup set.
};

// Initializes classes for all dex files, then (when producing an image)
// initializes array classes and creates IMT/conflict tables.
void CompilerDriver::InitializeClasses(jobject class_loader,
                                       const std::vector<const DexFile*>& dex_files,
                                       TimingLogger* timings) {
  for (size_t i = 0; i != dex_files.size(); ++i) {
    const DexFile* dex_file = dex_files[i];
    CHECK(dex_file != nullptr);
    InitializeClasses(class_loader, *dex_file, dex_files, timings);
  }
  if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsAppImage()) {
    // Make sure that we call EnsureInitialized on all the array classes to call
    // SetVerificationAttempted so that the access flags are set. If we do not do this they get
    // changed at runtime resulting in more dirty image pages.
    // Also create conflict tables.
    // Only useful if we are compiling an image (image_classes_ is not null).
    ScopedObjectAccess soa(Thread::Current());
    VariableSizedHandleScope hs(soa.Self());
    InitializeArrayClassesAndCreateConflictTablesVisitor visitor(hs);
    Runtime::Current()->GetClassLinker()->VisitClassesWithoutClassesLock(&visitor);
    visitor.FillAllIMTAndConflictTables();
  }
  if (GetCompilerOptions().IsBootImage()) {
    // Prune garbage objects created during aborted transactions.
    Runtime::Current()->GetHeap()->CollectGarbage(true);
  }
}

// Top-level compile phase: first pass compiles each dex file and records
// dex-to-dex candidates; a second pass (below) compiles those candidates.
void CompilerDriver::Compile(jobject class_loader,
                             const std::vector<const DexFile*>& dex_files,
                             TimingLogger* timings) {
  if (kDebugProfileGuidedCompilation) {
    LOG(INFO) << "[ProfileGuidedCompilation] " <<
        ((profile_compilation_info_ == nullptr)
            ? "null"
            : profile_compilation_info_->DumpInfo(&dex_files));
  }

  current_dex_to_dex_methods_ = nullptr;
  Thread* const self = Thread::Current();
  {
    // Clear in case we aren't the first call to Compile.
    MutexLock mu(self, dex_to_dex_references_lock_);
    dex_to_dex_references_.clear();
  }

  for (const DexFile* dex_file : dex_files) {
    CHECK(dex_file != nullptr);
    CompileDexFile(class_loader,
                   *dex_file,
                   dex_files,
                   parallel_thread_pool_.get(),
                   parallel_thread_count_,
                   timings);
    // Track peak arena usage, then release arena memory between dex files to
    // keep the compiler's footprint down.
    const ArenaPool* const arena_pool = Runtime::Current()->GetArenaPool();
    const size_t arena_alloc = arena_pool->GetBytesAllocated();
    max_arena_alloc_ = std::max(arena_alloc, max_arena_alloc_);
    Runtime::Current()->ReclaimArenaPoolMemory();
  }

  ArrayRef<DexFileMethodSet> dex_to_dex_references;
  {
    // From this point on, we shall not modify dex_to_dex_references_, so
    // just grab a reference to it that we use without holding the mutex.
    MutexLock lock(self, dex_to_dex_references_lock_);
    dex_to_dex_references = ArrayRef<DexFileMethodSet>(dex_to_dex_references_);
  }
  // Second pass: dex-to-dex compile the methods recorded during the first pass.
  for (const auto& method_set : dex_to_dex_references) {
    current_dex_to_dex_methods_ = &method_set.GetMethodIndexes();
    CompileDexFile(class_loader,
                   method_set.GetDexFile(),
                   dex_files,
                   parallel_thread_pool_.get(),
                   parallel_thread_count_,
                   timings);
  }
  current_dex_to_dex_methods_ = nullptr;

  VLOG(compiler) << "Compile: " << GetMemoryUsageString(false);
}

// Per-class compilation visitor dispatched by ParallelCompilationManager:
// compiles all direct and virtual methods of one class def.
class CompileClassVisitor : public CompilationVisitor {
 public:
  explicit CompileClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {}

  virtual void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) OVERRIDE {
    ScopedTrace trace(__FUNCTION__);
    const DexFile& dex_file = *manager_->GetDexFile();
    const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
    ClassLinker* class_linker = manager_->GetClassLinker();
    jobject jclass_loader = manager_->GetClassLoader();
    ClassReference ref(&dex_file, class_def_index);
    // Skip compiling classes with generic verifier failures since they will still fail at runtime
    if (manager_->GetCompiler()->verification_results_->IsClassRejected(ref)) {
      return;
    }
    // Use a scoped object access to perform the quick SkipClass check.
    const char* descriptor = dex_file.GetClassDescriptor(class_def);
    ScopedObjectAccess soa(Thread::Current());
    StackHandleScope<3> hs(soa.Self());
    Handle<mirror::ClassLoader> class_loader(
        hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
    Handle<mirror::Class> klass(
        hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader)));
    Handle<mirror::DexCache> dex_cache;
    if (klass == nullptr) {
      // Class failed to load/resolve: clear the exception and compile against
      // the dex cache obtained from the dex file instead of the class.
      soa.Self()->AssertPendingException();
      soa.Self()->ClearException();
      dex_cache = hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file));
    } else if (SkipClass(jclass_loader, dex_file, klass.Get())) {
      return;
    } else {
      dex_cache = hs.NewHandle(klass->GetDexCache());
    }

    const uint8_t* class_data = dex_file.GetClassData(class_def);
    if (class_data == nullptr) {
      // Empty class, probably a marker interface — nothing to compile.
      return;
    }

    // Go to native so that we don't block GC during compilation.
    ScopedThreadSuspension sts(soa.Self(), kNative);

    CompilerDriver* const driver = manager_->GetCompiler();

    // Can we run DEX-to-DEX compiler on this class ?
    optimizer::DexToDexCompilationLevel dex_to_dex_compilation_level =
        GetDexToDexCompilationLevel(soa.Self(), *driver, jclass_loader, dex_file, class_def);

    ClassDataItemIterator it(dex_file, class_data);
    it.SkipAllFields();  // Fields precede methods in class_data; skip them.

    bool compilation_enabled = driver->IsClassToCompile(
        dex_file.StringByTypeIdx(class_def.class_idx_));

    // Compile direct and virtual methods.
    // -1 sentinel: no method compiled yet (method indices are unsigned).
    int64_t previous_method_idx = -1;
    while (it.HasNextMethod()) {
      uint32_t method_idx = it.GetMemberIndex();
      if (method_idx == previous_method_idx) {
        // smali can create dex files with two encoded_methods sharing the same method_idx
        // http://code.google.com/p/smali/issues/detail?id=119
        it.Next();
        continue;
      }
      previous_method_idx = method_idx;
      CompileMethod(soa.Self(),
                    driver,
                    it.GetMethodCodeItem(),
                    it.GetMethodAccessFlags(),
                    it.GetMethodInvokeType(class_def),
                    class_def_index,
                    method_idx,
                    class_loader,
                    dex_file,
                    dex_to_dex_compilation_level,
                    compilation_enabled,
                    dex_cache);
      it.Next();
    }
    DCHECK(!it.HasNext());  // All items after methods should have been consumed.
  }

 private:
  const ParallelCompilationManager* const manager_;
};

// Compiles all class defs of one dex file using |thread_pool| with
// |thread_count| workers.
void CompilerDriver::CompileDexFile(jobject class_loader,
                                    const DexFile& dex_file,
                                    const std::vector<const DexFile*>& dex_files,
                                    ThreadPool* thread_pool,
                                    size_t thread_count,
                                    TimingLogger* timings) {
  TimingLogger::ScopedTiming t("Compile Dex File", timings);
  ParallelCompilationManager context(Runtime::Current()->GetClassLinker(), class_loader, this,
                                     &dex_file, dex_files, thread_pool);
  CompileClassVisitor visitor(&context);
  context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
}

// Records a finished CompiledMethod for |method_ref|; a method must be added
// at most once.
void CompilerDriver::AddCompiledMethod(const MethodReference& method_ref,
                                       CompiledMethod* const compiled_method,
                                       size_t non_relative_linker_patch_count) {
  DCHECK(GetCompiledMethod(method_ref) == nullptr) << method_ref.PrettyMethod();
  MethodTable::InsertResult result = compiled_methods_.Insert(method_ref,
                                                              /*expected*/ nullptr,
                                                              compiled_method);
  CHECK(result == MethodTable::kInsertResultSuccess);
  non_relative_linker_patch_count_.FetchAndAddRelaxed(non_relative_linker_patch_count);
  DCHECK(GetCompiledMethod(method_ref) != nullptr) <<
      method_ref.PrettyMethod();
}

// Looks up the recorded status of |ref| among the classes we compiled.
// Returns false when the class is unknown or its status never progressed past
// (retry-)verification.
bool CompilerDriver::GetCompiledClass(const ClassReference& ref,
                                      mirror::Class::Status* status) const {
  DCHECK(status != nullptr);
  // The table doesn't know if something wasn't inserted. For this case it will return
  // kStatusNotReady. To handle this, just assume anything we didn't try to verify is not compiled.
  if (!compiled_classes_.Get(ref, status) ||
      *status < mirror::Class::kStatusRetryVerificationAtRuntime) {
    return false;
  }
  return true;
}

// Returns the best known status for |ref|: compiled-classes table first,
// falling back to the classpath table, else kStatusNotReady.
mirror::Class::Status CompilerDriver::GetClassStatus(const ClassReference& ref) const {
  mirror::Class::Status status = ClassStatus::kStatusNotReady;
  if (!GetCompiledClass(ref, &status)) {
    // Best-effort: leaves kStatusNotReady when the classpath table misses too.
    classpath_classes_.Get(ref, &status);
  }
  return status;
}

// Records |status| for |ref|, monotonically: an existing better status is
// never downgraded. Unexpected statuses are fatal.
void CompilerDriver::RecordClassStatus(const ClassReference& ref, mirror::Class::Status status) {
  switch (status) {
    case mirror::Class::kStatusErrorResolved:
    case mirror::Class::kStatusErrorUnresolved:
    case mirror::Class::kStatusNotReady:
    case mirror::Class::kStatusResolved:
    case mirror::Class::kStatusRetryVerificationAtRuntime:
    case mirror::Class::kStatusVerified:
    case mirror::Class::kStatusSuperclassValidated:
    case mirror::Class::kStatusInitialized:
      break;  // Expected states.
    default:
      LOG(FATAL) << "Unexpected class status for class "
          << PrettyDescriptor(
              ref.dex_file->GetClassDescriptor(ref.dex_file->GetClassDef(ref.index)))
          << " of " << status;
  }

  ClassStateTable::InsertResult result;
  ClassStateTable* table = &compiled_classes_;
  do {
    mirror::Class::Status existing = mirror::Class::kStatusNotReady;
    if (!table->Get(ref, &existing)) {
      // A classpath class.
      if (kIsDebugBuild) {
        // Check to make sure it's not a dex file for an oat file we are compiling since these
        // should always succeed.
        // These do not include classes from used libraries.
        for (const DexFile* dex_file : GetDexFilesForOatFile()) {
          CHECK_NE(ref.dex_file, dex_file) << ref.dex_file->GetLocation();
        }
      }
      if (!classpath_classes_.HaveDexFile(ref.dex_file)) {
        // Boot classpath dex file — not tracked; nothing to record.
        return;
      }
      table = &classpath_classes_;
      table->Get(ref, &existing);
    }
    if (existing >= status) {
      // Existing status is already better than we expect, break.
      break;
    }
    // Update the status if we now have a greater one. This happens with vdex,
    // which records a class is verified, but does not resolve it.
    // Insert() is compare-and-swap style; loop until it succeeds or the
    // existing status catches up.
    result = table->Insert(ref, existing, status);
    CHECK(result != ClassStateTable::kInsertResultInvalidDexFile) << ref.dex_file->GetLocation();
  } while (result != ClassStateTable::kInsertResultSuccess);
}

// Returns the CompiledMethod recorded for |ref|, or nullptr if none.
CompiledMethod* CompilerDriver::GetCompiledMethod(MethodReference ref) const {
  CompiledMethod* compiled_method = nullptr;
  compiled_methods_.Get(ref, &compiled_method);
  return compiled_method;
}

// Returns true iff the method verified cleanly. Without verification metadata,
// falls back to "is it a system class?" (see comment below).
bool CompilerDriver::IsMethodVerifiedWithoutFailures(uint32_t method_idx,
                                                     uint16_t class_def_idx,
                                                     const DexFile& dex_file) const {
  const VerifiedMethod* verified_method = GetVerifiedMethod(&dex_file, method_idx);
  if (verified_method != nullptr) {
    return !verified_method->HasVerificationFailures();
  }

  // If we can't find verification metadata, check if this is a system class (we trust that system
  // classes have their methods verified). If it's not, be conservative and assume the method
  // has not been verified successfully.

  // TODO: When compiling the boot image it should be safe to assume that everything is verified,
  // even if methods are not found in the verification cache.
  const char* descriptor = dex_file.GetClassDescriptor(dex_file.GetClassDef(class_def_idx));
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  bool is_system_class = class_linker->FindSystemClass(self, descriptor) != nullptr;
  if (!is_system_class) {
    // FindSystemClass leaves an exception on failure; clear it.
    self->ClearException();
  }
  return is_system_class;
}

// Returns the running count of non-PC-relative linker patches recorded via
// AddCompiledMethod.
size_t CompilerDriver::GetNonRelativeLinkerPatchCount() const {
  return non_relative_linker_patch_count_.LoadRelaxed();
}

// Records whether the class identified by (dex_file, class_def_index) needs a
// constructor memory barrier (e.g. for final fields — TODO(review): confirm
// the exact criterion lives with the callers).
void CompilerDriver::SetRequiresConstructorBarrier(Thread* self,
                                                   const DexFile* dex_file,
                                                   uint16_t class_def_index,
                                                   bool requires) {
  WriterMutexLock mu(self, requires_constructor_barrier_lock_);
  requires_constructor_barrier_.emplace(ClassReference(dex_file, class_def_index), requires);
}

// Returns (computing and caching on a miss) whether the class needs a
// constructor barrier. Fast path takes the reader lock only.
bool CompilerDriver::RequiresConstructorBarrier(Thread* self,
                                                const DexFile* dex_file,
                                                uint16_t class_def_index) {
  ClassReference class_ref(dex_file, class_def_index);
  {
    ReaderMutexLock mu(self, requires_constructor_barrier_lock_);
    auto it = requires_constructor_barrier_.find(class_ref);
    if (it != requires_constructor_barrier_.end()) {
      return it->second;
    }
  }
  // Cache miss: compute under the writer lock and memoize the answer.
  WriterMutexLock mu(self, requires_constructor_barrier_lock_);
  const bool requires = RequiresConstructorBarrier(*dex_file, class_def_index);
  requires_constructor_barrier_.emplace(class_ref, requires);
  return requires;
}

// Builds a human-readable summary of the compiler's memory usage (arena, Java
// heap, and — where mallinfo is available — native heap).
std::string CompilerDriver::GetMemoryUsageString(bool extended) const {
  std::ostringstream oss;
  const gc::Heap* const heap = Runtime::Current()->GetHeap();
  const size_t java_alloc = heap->GetBytesAllocated();
  oss << "arena alloc=" << PrettySize(max_arena_alloc_) << " (" << max_arena_alloc_ << "B)";
  oss << " java alloc=" << PrettySize(java_alloc) << " (" << java_alloc << "B)";
#if defined(__BIONIC__) \
    || defined(__GLIBC__)
  // mallinfo() is only available/meaningful with the bionic or glibc allocators.
  const struct mallinfo info = mallinfo();
  const size_t allocated_space = static_cast<size_t>(info.uordblks);
  const size_t free_space = static_cast<size_t>(info.fordblks);
  oss << " native alloc=" << PrettySize(allocated_space) << " (" << allocated_space << "B)"
      << " free=" << PrettySize(free_space) << " (" << free_space << "B)";
#endif
  compiled_method_storage_.DumpMemoryUsage(oss, extended);
  return oss.str();
}

// Returns false only for cross-dex-file inlining out of a dex file listed in
// the --no-inline-from option; everything else may inline.
bool CompilerDriver::MayInlineInternal(const DexFile* inlined_from,
                                       const DexFile* inlined_into) const {
  // We're not allowed to inline across dex files if we're the no-inline-from dex file.
  if (inlined_from != inlined_into &&
      compiler_options_->GetNoInlineFromDexFile() != nullptr &&
      ContainsElement(*compiler_options_->GetNoInlineFromDexFile(), inlined_from)) {
    return false;
  }

  return true;
}

// Creates the parallel pool (thread_count - 1 workers: the caller thread also
// participates) and a zero-worker single-threaded pool for deterministic runs.
void CompilerDriver::InitializeThreadPools() {
  size_t parallel_count = parallel_thread_count_ > 0 ? parallel_thread_count_ - 1 : 0;
  parallel_thread_pool_.reset(
      new ThreadPool("Compiler driver thread pool", parallel_count));
  single_thread_pool_.reset(new ThreadPool("Single-threaded Compiler driver thread pool", 0));
}

// Releases both thread pools.
void CompilerDriver::FreeThreadPools() {
  parallel_thread_pool_.reset();
  single_thread_pool_.reset();
}

// Declares which dex files belong to the oat file being compiled and registers
// them with the compiled-classes status table.
void CompilerDriver::SetDexFilesForOatFile(const std::vector<const DexFile*>& dex_files) {
  dex_files_for_oat_file_ = dex_files;
  compiled_classes_.AddDexFiles(dex_files);
}

// Registers classpath (non-oat) dex files with the classpath status table.
void CompilerDriver::SetClasspathDexFiles(const std::vector<const DexFile*>& dex_files) {
  classpath_classes_.AddDexFiles(dex_files);
}

}  // namespace art