/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>

#include <memory>
#include <numeric>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "globals.h"
#include "image.h"
#include "intern_table.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/abstract_method.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "handle_scope-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;
static constexpr bool kComputeEagerResolvedStrings = false;

static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Class* klass = obj->GetClass();
  CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");
}

static void CheckNoDexObjects() {
  ScopedObjectAccess soa(Thread::Current());
  Runtime::Current()->GetHeap()->VisitObjects(CheckNoDexObjectsCallback, nullptr);
}

bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  {
    Thread::Current()->TransitionFromSuspendedToRunnable();
    PruneNonImageClasses();  // Remove junk
    ComputeLazyFieldsForImageClasses();  // Add useful information

    // Calling this can in theory fill in some resolved strings. However, in practice it seems to
    // never resolve any.
    if (kComputeEagerResolvedStrings) {
      ComputeEagerResolvedStrings();
    }
    Thread::Current()->TransitionFromRunnableToSuspended(kNative);
  }
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->CollectGarbage(false);  // Remove garbage.

  // Dex caches must not have their dex fields set in the image. These are memory buffers of mapped
  // dex files.
  //
  // We may open them in the unstarted-runtime code for class metadata. Their fields should all be
  // reset in PruneNonImageClasses and the objects reclaimed in the GC. Make sure that's actually
  // true.
  if (kIsDebugBuild) {
    CheckNoDexObjects();
  }

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  Thread::Current()->TransitionFromSuspendedToRunnable();
  CalculateNewObjectOffsets();
  Thread::Current()->TransitionFromRunnableToSuspended(kNative);

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_
  // and bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}

bool ImageWriter::Write(const std::string& image_filename,
                        const std::string& oat_filename,
                        const std::string& oat_location) {
  CHECK(!image_filename.empty());

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  std::unique_ptr<File> oat_file(OS::OpenFileReadWrite(oat_filename.c_str()));
  if (oat_file.get() == nullptr) {
    PLOG(ERROR) << "Failed to open oat file " << oat_filename << " for " << oat_location;
    return false;
  }
  std::string error_msg;
  oat_file_ = OatFile::OpenReadable(oat_file.get(), oat_location, nullptr, &error_msg);
  if (oat_file_ == nullptr) {
    PLOG(ERROR) << "Failed to open writable oat file " << oat_filename << " for " << oat_location
                << ": " << error_msg;
    oat_file->Erase();
    return false;
  }
  CHECK_EQ(class_linker->RegisterOatFile(oat_file_), oat_file_);

  interpreter_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToInterpreterBridgeOffset();
  interpreter_to_compiled_code_bridge_offset_ =
      oat_file_->GetOatHeader().GetInterpreterToCompiledCodeBridgeOffset();

  jni_dlsym_lookup_offset_ = oat_file_->GetOatHeader().GetJniDlsymLookupOffset();

  quick_generic_jni_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickGenericJniTrampolineOffset();
  quick_imt_conflict_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickImtConflictTrampolineOffset();
  quick_resolution_trampoline_offset_ =
      oat_file_->GetOatHeader().GetQuickResolutionTrampolineOffset();
  quick_to_interpreter_bridge_offset_ =
      oat_file_->GetOatHeader().GetQuickToInterpreterBridgeOffset();

  size_t oat_loaded_size = 0;
  size_t oat_data_offset = 0;
  ElfWriter::GetOatElfInformation(oat_file.get(), &oat_loaded_size, &oat_data_offset);

  Thread::Current()->TransitionFromSuspendedToRunnable();

  CreateHeader(oat_loaded_size, oat_data_offset);
  CopyAndFixupNativeData();
  // TODO: heap validation can't handle these fix up passes.
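  // (The fixup passes rewrite references to their eventual image addresses, which lie outside the
  // current runtime heap spaces, so object validation would flag them as invalid.)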
  Runtime::Current()->GetHeap()->DisableObjectValidation();
  CopyAndFixupObjects();
  Thread::Current()->TransitionFromRunnableToSuspended(kNative);

  SetOatChecksumFromElfFile(oat_file.get());

  if (oat_file->FlushCloseOrErase() != 0) {
    LOG(ERROR) << "Failed to flush and close oat file " << oat_filename << " for " << oat_location;
    return false;
  }

  std::unique_ptr<File> image_file(OS::CreateEmptyFile(image_filename.c_str()));
  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  if (image_file.get() == nullptr) {
    LOG(ERROR) << "Failed to open image file " << image_filename;
    return false;
  }
  if (fchmod(image_file->Fd(), 0644) != 0) {
    PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image + fields + methods.
  const auto write_count = image_header->GetImageSize();
  if (!image_file->WriteFully(image_->Begin(), write_count)) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  // Write out the image bitmap at the page aligned start of the image end.
  const ImageSection& bitmap_section =
      image_header->GetImageSection(ImageHeader::kSectionImageBitmap);
  CHECK_ALIGNED(bitmap_section.Offset(), kPageSize);
  if (!image_file->Write(reinterpret_cast<char*>(image_bitmap_->Begin()),
                         bitmap_section.Size(), bitmap_section.Offset())) {
    PLOG(ERROR) << "Failed to write image file " << image_filename;
    image_file->Erase();
    return false;
  }

  CHECK_EQ(bitmap_section.End(), static_cast<size_t>(image_file->GetLength()));
  if (image_file->FlushCloseOrErase() != 0) {
    PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
    return false;
  }
  return true;
}

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t previous_bin_sizes = bin_slot_previous_sizes_[bin_slot.GetBin()];
  size_t new_offset = image_objects_offset_begin_ + previous_bin_sizes + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
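  // (Bin slots and final image offsets share the kForwardingAddress lock word state; the bin slot
  // written by SetImageBinSlot() is later overwritten with the real offset in SetImageOffset().)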
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  DCHECK_LT(offset, image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  Monitor::Deflate(Thread::Current(), object);
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

void ImageWriter::PrepareDexCacheArraySlots() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ReaderMutexLock mu(Thread::Current(), *class_linker->DexLock());
  size_t dex_cache_count = class_linker->GetDexCacheCount();
  uint32_t size = 0u;
  for (size_t idx = 0; idx < dex_cache_count; ++idx) {
    DexCache* dex_cache = class_linker->GetDexCache(idx);
    const DexFile* dex_file = dex_cache->GetDexFile();
    dex_cache_array_starts_.Put(dex_file, size);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    auto types_size = layout.TypesSize(dex_file->NumTypeIds());
    auto methods_size = layout.MethodsSize(dex_file->NumMethodIds());
    auto fields_size = layout.FieldsSize(dex_file->NumFieldIds());
    auto strings_size = layout.StringsSize(dex_file->NumStringIds());
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedTypes(),
        DexCacheArrayLocation {size + layout.TypesOffset(), types_size, kBinRegular});
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedMethods(),
        DexCacheArrayLocation {size + layout.MethodsOffset(), methods_size, kBinArtMethodClean});
    AddMethodPointerArray(dex_cache->GetResolvedMethods());
    dex_cache_array_indexes_.Put(
        dex_cache->GetResolvedFields(),
        DexCacheArrayLocation {size + layout.FieldsOffset(), fields_size, kBinArtField});
    pointer_arrays_.emplace(dex_cache->GetResolvedFields(), kBinArtField);
    dex_cache_array_indexes_.Put(
        dex_cache->GetStrings(),
        DexCacheArrayLocation {size + layout.StringsOffset(), strings_size, kBinRegular});
    size += layout.Size();
    CHECK_EQ(layout.Size(), types_size + methods_size + fields_size + strings_size);
  }
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  bin_slot_sizes_[kBinDexCacheArray] = size;
}

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      auto* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        auto* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || IsImageClass(klass)) << PrettyClass(klass)
            << " should be an image class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the beginning.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //     becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    // * Strings (they are immutable)
    // * Art methods that aren't native and have initialized declared classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
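      // (These arrays hold native ArtMethod* values, so they are registered here to be relocated
      // entry by entry in FixupPointerArray() during the copy phase.)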
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    } else if (object->IsArrayInstance()) {
      mirror::Class* klass = object->GetClass<kVerifyNone>();
      if (klass->IsObjectArrayClass() || klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
        auto it = dex_cache_array_indexes_.find(object);
        if (it != dex_cache_array_indexes_.end()) {
          bin = kBinDexCacheArray;
          // Use prepared offset defined by the DexCacheLayout.
          current_offset = it->second.offset_;
          // Override in case of cross-compilation.
          object_size = it->second.length_;
        }  // else bin = kBinRegular
      }
    }  // else bin = kBinRegular
  }

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  if (bin != kBinDexCacheArray) {
    DCHECK(dex_cache_array_indexes_.find(object) == dex_cache_array_indexes_.end()) << object;
    current_offset = bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
    // Move the current bin size up to accommodate the object we just assigned a bin slot.
    bin_slot_sizes_[bin] += offset_delta;
  }

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_end_ += offset_delta;
}

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // Initialized classes are highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  DCHECK_LT(bin_slot.GetIndex(), bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  const size_t length = RoundUp(image_objects_offset_begin_ + GetBinSizeSum() +
                                intern_table_bytes_, kPageSize);
  std::string error_msg;
  image_.reset(MemMap::MapAnonymous("image writer image", nullptr, length, PROT_READ | PROT_WRITE,
                                    false, false, &error_msg));
  if (UNLIKELY(image_.get() == nullptr)) {
    LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
    return false;
  }

  // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
  CHECK_LE(image_end_, length);
  image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
      "image bitmap", image_->Begin(), RoundUp(image_end_, kPageSize)));
  if (image_bitmap_.get() == nullptr) {
    LOG(ERROR) << "Failed to allocate memory for image bitmap";
    return false;
  }
  return true;
}

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  class_linker->VisitClassesWithoutClassesLock(ComputeLazyFieldsForClassesVisitor, nullptr);
}

bool ImageWriter::ComputeLazyFieldsForClassesVisitor(Class* c, void* /*arg*/) {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  mirror::Class::ComputeName(hs.NewHandle(c));
  return true;
}

void ImageWriter::ComputeEagerResolvedStringsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED) {
  if (!obj->GetClass()->IsStringClass()) {
    return;
  }
  mirror::String* string = obj->AsString();
  const uint16_t* utf16_string = string->GetValue();
  size_t utf16_length = static_cast<size_t>(string->GetLength());
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ReaderMutexLock mu(Thread::Current(), *class_linker->DexLock());
  size_t dex_cache_count = class_linker->GetDexCacheCount();
  for (size_t i = 0; i < dex_cache_count; ++i) {
    DexCache* dex_cache = class_linker->GetDexCache(i);
    const DexFile& dex_file = *dex_cache->GetDexFile();
    const DexFile::StringId* string_id;
    if (UNLIKELY(utf16_length == 0)) {
      string_id = dex_file.FindStringId("");
    } else {
      string_id = dex_file.FindStringId(utf16_string, utf16_length);
    }
    if (string_id != nullptr) {
      // This string occurs in this dex file, assign the dex cache entry.
      uint32_t string_idx = dex_file.GetIndexForStringId(*string_id);
      if (dex_cache->GetResolvedString(string_idx) == nullptr) {
        dex_cache->SetResolvedString(string_idx, string);
      }
    }
  }
}

void ImageWriter::ComputeEagerResolvedStrings() {
  Runtime::Current()->GetHeap()->VisitObjects(ComputeEagerResolvedStringsCallback, this);
}

bool ImageWriter::IsImageClass(Class* klass) {
  if (klass == nullptr) {
    return false;
  }
  std::string temp;
  return compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
}

struct NonImageClasses {
  ImageWriter* image_writer;
  std::set<std::string>* non_image_classes;
};

void ImageWriter::PruneNonImageClasses() {
  if (compiler_driver_.GetImageClasses() == nullptr) {
    return;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();

  // Make a list of classes we would like to prune.
  std::set<std::string> non_image_classes;
  NonImageClasses context;
  context.image_writer = this;
  context.non_image_classes = &non_image_classes;
  class_linker->VisitClasses(NonImageClassesVisitor, &context);

  // Remove the undesired classes from the class roots.
  for (const std::string& it : non_image_classes) {
    bool result = class_linker->RemoveClass(it.c_str(), nullptr);
    DCHECK(result);
  }

  // Clear references to removed classes from the DexCaches.
  const ArtMethod* resolution_method = runtime->GetResolutionMethod();
  size_t dex_cache_count;
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    dex_cache_count = class_linker->GetDexCacheCount();
  }
  for (size_t idx = 0; idx < dex_cache_count; ++idx) {
    DexCache* dex_cache;
    {
      ReaderMutexLock mu(self, *class_linker->DexLock());
      dex_cache = class_linker->GetDexCache(idx);
    }
    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
      Class* klass = dex_cache->GetResolvedType(i);
      if (klass != nullptr && !IsImageClass(klass)) {
        dex_cache->SetResolvedType(i, nullptr);
      }
    }
    auto* resolved_methods = down_cast<mirror::PointerArray*>(dex_cache->GetResolvedMethods());
    for (size_t i = 0, len = resolved_methods->GetLength(); i < len; i++) {
      auto* method = resolved_methods->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr) {
        auto* declaring_class = method->GetDeclaringClass();
        // Miranda methods may be held live by a class which was not an image class but have a
        // declaring class which is an image class. Set it to the resolution method to be safe and
        // prevent dangling pointers.
        if (method->IsMiranda() || !IsImageClass(declaring_class)) {
          resolved_methods->SetElementPtrSize(i, resolution_method, target_ptr_size_);
        } else {
          // Check that the class is still in the classes table.
          DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
              << PrettyClass(declaring_class) << " not in class linker table";
        }
      }
    }
    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
      ArtField* field = dex_cache->GetResolvedField(i, target_ptr_size_);
      if (field != nullptr && !IsImageClass(field->GetDeclaringClass())) {
        dex_cache->SetResolvedField(i, nullptr, target_ptr_size_);
      }
    }
    // Clean the dex field. It might have been populated during the initialization phase, but
    // contains data only valid during a real run.
    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();
}

bool ImageWriter::NonImageClassesVisitor(Class* klass, void* arg) {
  NonImageClasses* context = reinterpret_cast<NonImageClasses*>(arg);
  if (!context->image_writer->IsImageClass(klass)) {
    std::string temp;
    context->non_image_classes->insert(klass->GetDescriptor(&temp));
  }
  return true;
}

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
  }
}

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass()) {
    Class* klass = obj->AsClass();
    if (!image_writer->IsImageClass(klass)) {
      image_writer->DumpImageClasses();
      std::string temp;
      CHECK(image_writer->IsImageClass(klass)) << klass->GetDescriptor(&temp)
          << " " << PrettyDescriptor(klass);
    }
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

void ImageWriter::CalculateObjectBinSlots(Object* obj) {
  DCHECK(obj != nullptr);
  // If it is a string, we want to intern it if it's not already interned.
  if (obj->GetClass()->IsStringClass()) {
    // We must be an interned string that was forward referenced and already assigned.
    if (IsImageBinSlotAssigned(obj)) {
      DCHECK_EQ(obj, obj->AsString()->Intern());
      return;
    }
    mirror::String* const interned = Runtime::Current()->GetInternTable()->InternStrong(
        obj->AsString()->Intern());
    if (obj != interned) {
      if (!IsImageBinSlotAssigned(interned)) {
        // The interned string is after us, allocate its location early.
        AssignImageBinSlot(interned);
      }
      // Point those looking for this object to the interned version.
      SetImageBinSlot(obj, GetImageBinSlot(interned));
      return;
    }
    // else (obj == interned), nothing to do but fall through to the normal case
  }

  AssignImageBinSlot(obj);
}

ObjectArray<Object>* ImageWriter::CreateImageRoots() const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches
  // ObjectArray, we lock the dex lock twice, first to get the number
  // of dex caches and then again to copy the dex caches. We check
  // that the number of dex caches does not change.
  size_t dex_cache_count;
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    dex_cache_count = class_linker->GetDexCacheCount();
  }
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(),
                                              dex_cache_count)));
  CHECK(dex_caches.Get() != nullptr) << "Failed to allocate a dex cache array.";
  {
    ReaderMutexLock mu(self, *class_linker->DexLock());
    CHECK_EQ(dex_cache_count, class_linker->GetDexCacheCount())
        << "The number of dex caches changed.";
    for (size_t i = 0; i < dex_cache_count; ++i) {
      dex_caches->Set<false>(i, class_linker->GetDexCache(i));
    }
  }

  // Build an Object[] of the roots needed to restore the runtime.
  auto image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), ImageHeader::kImageRootsMax)));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  for (int i = 0; i < ImageHeader::kImageRootsMax; i++) {
    CHECK(image_roots->Get(i) != nullptr);
  }
  return image_roots.Get();
}

// Walk instance fields of the given Class. Separate function to allow recursion on the super
// class.
void ImageWriter::WalkInstanceFields(mirror::Object* obj, mirror::Class* klass) {
  // Visit fields of parent classes first.
  StackHandleScope<1> hs(Thread::Current());
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  mirror::Class* super = h_class->GetSuperClass();
  if (super != nullptr) {
    WalkInstanceFields(obj, super);
  }
  size_t num_reference_fields = h_class->NumReferenceInstanceFields();
  MemberOffset field_offset = h_class->GetFirstReferenceInstanceFieldOffset();
  for (size_t i = 0; i < num_reference_fields; ++i) {
    mirror::Object* value = obj->GetFieldObject<mirror::Object>(field_offset);
    if (value != nullptr) {
      WalkFieldsInOrder(value);
    }
    field_offset = MemberOffset(field_offset.Uint32Value() +
                                sizeof(mirror::HeapReference<mirror::Object>));
  }
}

// For an unvisited object, visit it then all its children found via fields.
void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
  // Use our own visitor routine (instead of GC visitor) to get better locality between
  // an object and its fields.
  if (!IsImageBinSlotAssigned(obj)) {
    // Walk instance fields of all objects.
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Object> h_obj(hs.NewHandle(obj));
    Handle<mirror::Class> klass(hs.NewHandle(obj->GetClass()));
    // Visit the object itself.
    CalculateObjectBinSlots(h_obj.Get());
    WalkInstanceFields(h_obj.Get(), klass.Get());
    // Walk static fields of a Class.
    if (h_obj->IsClass()) {
      size_t num_reference_static_fields = klass->NumReferenceStaticFields();
      MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(target_ptr_size_);
      for (size_t i = 0; i < num_reference_static_fields; ++i) {
        mirror::Object* value = h_obj->GetFieldObject<mirror::Object>(field_offset);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
        field_offset = MemberOffset(field_offset.Uint32Value() +
                                    sizeof(mirror::HeapReference<mirror::Object>));
      }
      // Visit and assign offsets for fields.
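      // (ArtFields live outside the mirror object section, so a native relocation is recorded
      // for each one; they are copied into the kBinArtField bin by CopyAndFixupNativeData().)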
      auto* as_klass = h_obj->AsClass();
      ArtField* fields[] = { as_klass->GetSFields(), as_klass->GetIFields() };
      size_t num_fields[] = { as_klass->NumStaticFields(), as_klass->NumInstanceFields() };
      for (size_t i = 0; i < 2; ++i) {
        for (size_t j = 0; j < num_fields[i]; ++j) {
          auto* field = fields[i] + j;
          auto it = native_object_reloc_.find(field);
          CHECK(it == native_object_reloc_.end()) << "Field at index " << i << ":" << j
              << " already assigned " << PrettyField(field);
          native_object_reloc_.emplace(
              field, NativeObjectReloc { bin_slot_sizes_[kBinArtField], kBinArtField });
          bin_slot_sizes_[kBinArtField] += sizeof(ArtField);
        }
      }
      // Visit and assign offsets for methods.
      IterationRange<StrideIterator<ArtMethod>> method_arrays[] = {
        as_klass->GetDirectMethods(target_ptr_size_),
        as_klass->GetVirtualMethods(target_ptr_size_)
      };
      for (auto& array : method_arrays) {
        bool any_dirty = false;
        size_t count = 0;
        for (auto& m : array) {
          any_dirty = any_dirty || WillMethodBeDirty(&m);
          ++count;
        }
        for (auto& m : array) {
          AssignMethodOffset(&m, any_dirty ? kBinArtMethodDirty : kBinArtMethodClean);
        }
        (any_dirty ? dirty_methods_ : clean_methods_) += count;
      }
    } else if (h_obj->IsObjectArray()) {
      // Walk elements of an object array.
      int32_t length = h_obj->AsObjectArray<mirror::Object>()->GetLength();
      for (int32_t i = 0; i < length; i++) {
        mirror::ObjectArray<mirror::Object>* obj_array = h_obj->AsObjectArray<mirror::Object>();
        mirror::Object* value = obj_array->Get(i);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        }
      }
    }
  }
}

void ImageWriter::AssignMethodOffset(ArtMethod* method, Bin bin) {
  auto it = native_object_reloc_.find(method);
  CHECK(it == native_object_reloc_.end()) << "Method " << method << " already assigned "
      << PrettyMethod(method);
  native_object_reloc_.emplace(method, NativeObjectReloc { bin_slot_sizes_[bin], bin });
  bin_slot_sizes_[bin] += ArtMethod::ObjectSize(target_ptr_size_);
}

void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->WalkFieldsInOrder(obj);
}

void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->UnbinObjectsIntoOffset(obj);
}

void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
  CHECK(obj != nullptr);

  // We know the bin slot, and the total bin sizes for all objects by now,
  // so calculate the object's final image offset.
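  // (The final offset is image_objects_offset_begin_ plus the summed sizes of all earlier bins
  // plus the object's index within its own bin; see AssignImageOffset().)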

  DCHECK(IsImageBinSlotAssigned(obj));
  BinSlot bin_slot = GetImageBinSlot(obj);
  // Change the lockword from a bin slot into an offset.
  AssignImageOffset(obj, bin_slot);
}

void ImageWriter::CalculateNewObjectOffsets() {
  Thread* const self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<ObjectArray<Object>> image_roots(hs.NewHandle(CreateImageRoots()));

  auto* runtime = Runtime::Current();
  auto* heap = runtime->GetHeap();
  DCHECK_EQ(0U, image_end_);

  // Leave space for the header, but do not write it yet, we need to
  // know where image_roots is going to end up.
  image_end_ += RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit alignment

  image_objects_offset_begin_ = image_end_;
  // Prepare bin slots for dex cache arrays.
  PrepareDexCacheArraySlots();
  // Clear any pre-existing monitors which may have been in the monitor words, assign bin slots.
  heap->VisitObjects(WalkFieldsCallback, this);
  // Write the image runtime methods.
  image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
  image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
  image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
  image_methods_[ImageHeader::kCalleeSaveMethod] = runtime->GetCalleeSaveMethod(Runtime::kSaveAll);
  image_methods_[ImageHeader::kRefsOnlySaveMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kRefsOnly);
  image_methods_[ImageHeader::kRefsAndArgsSaveMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs);
  for (auto* m : image_methods_) {
    CHECK(m != nullptr);
    CHECK(m->IsRuntimeMethod());
    AssignMethodOffset(m, kBinArtMethodDirty);
  }

  // Calculate cumulative bin slot sizes.
  size_t previous_sizes = 0u;
  for (size_t i = 0; i != kBinSize; ++i) {
    bin_slot_previous_sizes_[i] = previous_sizes;
    previous_sizes += bin_slot_sizes_[i];
  }
  DCHECK_EQ(previous_sizes, GetBinSizeSum());
  DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

  // Transform each object's bin slot into an offset which will be used to do the final copy.
  heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);

  DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

  image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Get()));

  // Update the native relocations by adding their bin sums.
  for (auto& pair : native_object_reloc_) {
    auto& native_reloc = pair.second;
    native_reloc.offset += image_objects_offset_begin_ +
        bin_slot_previous_sizes_[native_reloc.bin_type];
  }

  // Calculate how big the intern table will be after being serialized.
  auto* const intern_table = Runtime::Current()->GetInternTable();
  CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
  intern_table_bytes_ = intern_table->WriteToMemory(nullptr);

  // Note that image_end_ is left at end of used mirror object section.
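  // (ArtFields, ArtMethods, and the serialized intern table are laid out past image_end_;
  // CreateHeader() records their sections.)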
}

void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
  CHECK_NE(0U, oat_loaded_size);
  const uint8_t* oat_file_begin = GetOatFileBegin();
  const uint8_t* oat_file_end = oat_file_begin + oat_loaded_size;
  oat_data_begin_ = oat_file_begin + oat_data_offset;
  const uint8_t* oat_data_end = oat_data_begin_ + oat_file_->Size();

  // Create the image sections.
  ImageSection sections[ImageHeader::kSectionCount];
  // Objects section.
  auto* objects_section = &sections[ImageHeader::kSectionObjects];
  *objects_section = ImageSection(0u, image_end_);
  size_t cur_pos = objects_section->End();
  // Add field section.
  auto* field_section = &sections[ImageHeader::kSectionArtFields];
  *field_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtField]);
  CHECK_EQ(image_objects_offset_begin_ + bin_slot_previous_sizes_[kBinArtField],
           field_section->Offset());
  cur_pos = field_section->End();
  // Add method section.
  auto* methods_section = &sections[ImageHeader::kSectionArtMethods];
  *methods_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtMethodClean] +
                                  bin_slot_sizes_[kBinArtMethodDirty]);
  CHECK_EQ(image_objects_offset_begin_ + bin_slot_previous_sizes_[kBinArtMethodClean],
           methods_section->Offset());
  cur_pos = methods_section->End();
  // Calculate the size of the interned strings.
  auto* interned_strings_section = &sections[ImageHeader::kSectionInternedStrings];
  *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
  cur_pos = interned_strings_section->End();
  // Finally bitmap section.
  const size_t bitmap_bytes = image_bitmap_->Size();
  auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
  *bitmap_section = ImageSection(RoundUp(cur_pos, kPageSize), RoundUp(bitmap_bytes, kPageSize));
  cur_pos = bitmap_section->End();
  if (kIsDebugBuild) {
    size_t idx = 0;
    for (const ImageSection& section : sections) {
      LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
      ++idx;
    }
    LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
  }
  const size_t image_end = static_cast<uint32_t>(interned_strings_section->End());
  CHECK_EQ(AlignUp(image_begin_ + image_end, kPageSize), oat_file_begin) <<
      "Oat file should be right after the image.";
  // Create the header.
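  // (Placement new writes the header at the start of the image buffer; the addresses stored in
  // it are runtime addresses based on image_begin_, not buffer-local offsets.)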
  new (image_->Begin()) ImageHeader(
      PointerToLowMemUInt32(image_begin_), image_end,
      sections, image_roots_address_, oat_file_->GetOatHeader().GetChecksum(),
      PointerToLowMemUInt32(oat_file_begin), PointerToLowMemUInt32(oat_data_begin_),
      PointerToLowMemUInt32(oat_data_end), PointerToLowMemUInt32(oat_file_end), target_ptr_size_,
      compile_pic_);
}

ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
  auto it = native_object_reloc_.find(method);
  CHECK(it != native_object_reloc_.end()) << PrettyMethod(method) << " @ " << method;
  CHECK_GE(it->second.offset, image_end_) << "ArtMethods should be after Objects";
  return reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
}

class FixupRootVisitor : public RootVisitor {
 public:
  explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
  }

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      *roots[i] = ImageAddress(*roots[i]);
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      roots[i]->Assign(ImageAddress(roots[i]->AsMirrorPtr()));
    }
  }

 private:
  ImageWriter* const image_writer_;

  mirror::Object* ImageAddress(mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    const size_t offset = image_writer_->GetImageOffset(obj);
    auto* const dest = reinterpret_cast<Object*>(image_writer_->image_begin_ + offset);
    VLOG(compiler) << "Update root from " << obj << " to " << dest;
    return dest;
  }
};

void ImageWriter::CopyAndFixupNativeData() {
  // Copy ArtFields and methods to their locations and update the array for convenience.
  for (auto& pair : native_object_reloc_) {
    auto& native_reloc = pair.second;
    if (native_reloc.bin_type == kBinArtField) {
      auto* dest = image_->Begin() + native_reloc.offset;
      DCHECK_GE(dest, image_->Begin() + image_end_);
      memcpy(dest, pair.first, sizeof(ArtField));
      reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
          GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
    } else {
      CHECK(IsArtMethodBin(native_reloc.bin_type)) << native_reloc.bin_type;
      auto* dest = image_->Begin() + native_reloc.offset;
      DCHECK_GE(dest, image_->Begin() + image_end_);
      CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
                         reinterpret_cast<ArtMethod*>(dest));
    }
  }
  // Fixup the image method roots.
  auto* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  const ImageSection& methods_section = image_header->GetMethodsSection();
  for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
    auto* m = image_methods_[i];
    CHECK(m != nullptr);
    auto it = native_object_reloc_.find(m);
    CHECK(it != native_object_reloc_.end()) << "No forwarding for " << PrettyMethod(m);
    auto& native_reloc = it->second;
    CHECK(methods_section.Contains(native_reloc.offset)) << native_reloc.offset << " not in "
        << methods_section;
    CHECK(IsArtMethodBin(native_reloc.bin_type)) << native_reloc.bin_type;
    auto* dest = reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
    image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), dest);
  }
  // Write the intern table into the image.
  const ImageSection& intern_table_section = image_header->GetImageSection(
      ImageHeader::kSectionInternedStrings);
  InternTable* const intern_table = Runtime::Current()->GetInternTable();
  uint8_t* const memory_ptr = image_->Begin() + intern_table_section.Offset();
  const size_t intern_table_bytes = intern_table->WriteToMemory(memory_ptr);
  // Fixup the pointers in the newly written intern table to contain image addresses.
  InternTable temp_table;
  // Note that we require that ReadFromMemory does not make an internal copy of the elements so that
  // the VisitRoots() will update the memory directly rather than the copies.
  // This also relies on visit roots not doing any verification which could fail after we update
  // the roots to be the image addresses.
  temp_table.ReadFromMemory(memory_ptr);
  CHECK_EQ(temp_table.Size(), intern_table->Size());
  FixupRootVisitor visitor(this);
  temp_table.VisitRoots(&visitor, kVisitRootFlagAllRoots);
  CHECK_EQ(intern_table_bytes, intern_table_bytes_);
}

void ImageWriter::CopyAndFixupObjects() {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  heap->VisitObjects(CopyAndFixupObjectsCallback, this);
  // Fix up the objects that previously had hash codes.
  for (const auto& hash_pair : saved_hashcode_map_) {
    Object* const obj = hash_pair.first;
    DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0U);
    obj->SetLockWord(LockWord::FromHashCode(hash_pair.second, 0U), false);
  }
  saved_hashcode_map_.clear();
}

void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
  DCHECK(obj != nullptr);
  DCHECK(arg != nullptr);
  reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
}

void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
                                    mirror::Class* klass, Bin array_type) {
  CHECK(klass->IsArrayClass());
  CHECK(arr->IsIntArray() || arr->IsLongArray()) << PrettyClass(klass) << " " << arr;
  // Fixup int and long pointers for the ArtMethod or ArtField arrays.
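  // (The elements are native ArtMethod*/ArtField* values stored as ints or longs depending on
  // target_ptr_size_, so each non-null element is remapped through native_object_reloc_.)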
  const size_t num_elements = arr->GetLength();
  dst->SetClass(GetImageAddress(arr->GetClass()));
  auto* dest_array = down_cast<mirror::PointerArray*>(dst);
  for (size_t i = 0, count = num_elements; i < count; ++i) {
    auto* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
    if (elem != nullptr) {
      auto it = native_object_reloc_.find(elem);
      if (it == native_object_reloc_.end()) {
        if (IsArtMethodBin(array_type)) {
          auto* method = reinterpret_cast<ArtMethod*>(elem);
          LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
              << method << " idx=" << i << "/" << num_elements << " with declaring class "
              << PrettyClass(method->GetDeclaringClass());
        } else {
          CHECK_EQ(array_type, kBinArtField);
          auto* field = reinterpret_cast<ArtField*>(elem);
          LOG(FATAL) << "No relocation entry for ArtField " << PrettyField(field) << " @ "
              << field << " idx=" << i << "/" << num_elements << " with declaring class "
              << PrettyClass(field->GetDeclaringClass());
        }
      } else {
        elem = image_begin_ + it->second.offset;
      }
    }
    dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
  }
}

void ImageWriter::CopyAndFixupObject(Object* obj) {
  size_t offset = GetImageOffset(obj);
  auto* dst = reinterpret_cast<Object*>(image_->Begin() + offset);
  DCHECK_LT(offset, image_end_);
  const auto* src = reinterpret_cast<const uint8_t*>(obj);

  image_bitmap_->Set(dst);  // Mark the obj as live.

  const size_t n = obj->SizeOf();
  DCHECK_LE(offset + n, image_->Size());
  memcpy(dst, src, n);

  // Write in a hash code of objects which have inflated monitors or a hash code in their monitor
  // word.
  const auto it = saved_hashcode_map_.find(obj);
  dst->SetLockWord(it != saved_hashcode_map_.end() ?
      LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
  FixupObject(obj, dst);
}

// Rewrite all the references in the copied object to point to their image address equivalent.
class FixupVisitor {
 public:
  FixupVisitor(ImageWriter* image_writer, Object* copy)
      : image_writer_(image_writer), copy_(copy) {
  }

  void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
    // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
    // image.
    copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
        offset, image_writer_->GetImageAddress(ref));
  }

  // java.lang.ref.Reference visitor.
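  // Rewrites the referent field of a java.lang.ref.Reference in the copy, again without a write
  // barrier since the destination is the image buffer rather than the live heap.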
  void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
    copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
        mirror::Reference::ReferentOffset(), image_writer_->GetImageAddress(ref->GetReferent()));
  }

 protected:
  ImageWriter* const image_writer_;
  mirror::Object* const copy_;
};

class FixupClassVisitor FINAL : public FixupVisitor {
 public:
  FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
  }

  void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    DCHECK(obj->IsClass());
    FixupVisitor::operator()(obj, offset, /*is_static*/false);
  }

  void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
                  mirror::Reference* ref ATTRIBUTE_UNUSED) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
    LOG(FATAL) << "Reference not expected here.";
  }
};

void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
  // Copy and fix up ArtFields in the class.
  ArtField* fields[2] = { orig->GetSFields(), orig->GetIFields() };
  size_t num_fields[2] = { orig->NumStaticFields(), orig->NumInstanceFields() };
  // Update the field arrays.
  for (size_t i = 0; i < 2; ++i) {
    if (num_fields[i] == 0) {
      CHECK(fields[i] == nullptr);
      continue;
    }
    auto it = native_object_reloc_.find(fields[i]);
    CHECK(it != native_object_reloc_.end()) << PrettyClass(orig) << " : "
        << PrettyField(fields[i]);
    auto* image_fields = reinterpret_cast<ArtField*>(image_begin_ + it->second.offset);
    if (i == 0) {
      copy->SetSFieldsUnchecked(image_fields);
    } else {
      copy->SetIFieldsUnchecked(image_fields);
    }
  }
  // Update direct / virtual method arrays.
  auto* direct_methods = orig->GetDirectMethodsPtr();
  if (direct_methods != nullptr) {
    auto it = native_object_reloc_.find(direct_methods);
    CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
    copy->SetDirectMethodsPtrUnchecked(
        reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
  }
  auto* virtual_methods = orig->GetVirtualMethodsPtr();
  if (virtual_methods != nullptr) {
    auto it = native_object_reloc_.find(virtual_methods);
    CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
    copy->SetVirtualMethodsPtr(
        reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
  }
  // Fix up embedded tables.
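  // (Classes with an embedded IMT and vtable store ArtMethod* entries directly in the class
  // object, so those entries are relocated here as well.)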
  if (orig->ShouldHaveEmbeddedImtAndVTable()) {
    for (int32_t i = 0; i < orig->GetEmbeddedVTableLength(); ++i) {
      auto it = native_object_reloc_.find(orig->GetEmbeddedVTableEntry(i, target_ptr_size_));
      CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
      copy->SetEmbeddedVTableEntryUnchecked(
          i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
    }
    for (size_t i = 0; i < mirror::Class::kImtSize; ++i) {
      auto it = native_object_reloc_.find(orig->GetEmbeddedImTableEntry(i, target_ptr_size_));
      CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
      copy->SetEmbeddedImTableEntry(
          i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
    }
  }
  FixupClassVisitor visitor(this, copy);
  static_cast<mirror::Object*>(orig)->VisitReferences<true /*visit class*/>(visitor, visitor);
}

void ImageWriter::FixupObject(Object* orig, Object* copy) {
  DCHECK(orig != nullptr);
  DCHECK(copy != nullptr);
  if (kUseBakerOrBrooksReadBarrier) {
    orig->AssertReadBarrierPointer();
    if (kUseBrooksReadBarrier) {
      // Note the address 'copy' isn't the same as the image address of 'orig'.
      copy->SetReadBarrierPointer(GetImageAddress(orig));
      DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig));
    }
  }
  auto* klass = orig->GetClass();
  if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
    // Is this a native dex cache array?
    auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
    if (it != pointer_arrays_.end()) {
      // Should only need to fixup every pointer array exactly once.
      FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
      pointer_arrays_.erase(it);
      return;
    }
    CHECK(dex_cache_array_indexes_.find(orig) == dex_cache_array_indexes_.end())
        << "Should have been pointer array.";
  }
  if (orig->IsClass()) {
    FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
  } else {
    if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
      // Need to go update the ArtMethod.
      auto* dest = down_cast<mirror::AbstractMethod*>(copy);
      auto* src = down_cast<mirror::AbstractMethod*>(orig);
      ArtMethod* src_method = src->GetArtMethod();
      auto it = native_object_reloc_.find(src_method);
      CHECK(it != native_object_reloc_.end()) << "Missing relocation for AbstractMethod.artMethod "
          << PrettyMethod(src_method);
      dest->SetArtMethod(
          reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
    }
    FixupVisitor visitor(this, copy);
    orig->VisitReferences<true /*visit class*/>(visitor, visitor);
  }
}

const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method, bool* quick_is_interpreted) {
  DCHECK(!method->IsResolutionMethod() && !method->IsImtConflictMethod() &&
         !method->IsImtUnimplementedMethod() && !method->IsAbstract()) << PrettyMethod(method);

  // Use original code if it exists. Otherwise, set the code pointer to the resolution
  // trampoline.
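  // Four cases are handled below: usable compiled code, a native method without code (generic
  // JNI trampoline), a non-native method without code (interpreter bridge), and a static method
  // of an uninitialized class (resolution trampoline).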

  // Quick entrypoint:
  uint32_t quick_oat_code_offset = PointerToLowMemUInt32(
      method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_));
  const uint8_t* quick_code = GetOatAddress(quick_oat_code_offset);
  *quick_is_interpreted = false;
  if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
      method->GetDeclaringClass()->IsInitialized())) {
    // We have code for a non-static or initialized method, just use the code.
    DCHECK_GE(quick_code, oat_data_begin_);
  } else if (quick_code == nullptr && method->IsNative() &&
      (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
    // Non-static or initialized native method missing compiled code, use generic JNI version.
    quick_code = GetOatAddress(quick_generic_jni_trampoline_offset_);
    DCHECK_GE(quick_code, oat_data_begin_);
  } else if (quick_code == nullptr && !method->IsNative()) {
    // We don't have code at all for a non-native method, use the interpreter.
    quick_code = GetOatAddress(quick_to_interpreter_bridge_offset_);
    *quick_is_interpreted = true;
    DCHECK_GE(quick_code, oat_data_begin_);
  } else {
    CHECK(!method->GetDeclaringClass()->IsInitialized());
    // We have code for a static method, but need to go through the resolution stub for class
    // initialization.
    quick_code = GetOatAddress(quick_resolution_trampoline_offset_);
    DCHECK_GE(quick_code, oat_data_begin_);
  }
  return quick_code;
}

const uint8_t* ImageWriter::GetQuickEntryPoint(ArtMethod* method) {
  // Calculate the quick entry point following the same logic as CopyAndFixupMethod() below.
  // The resolution method has a special trampoline to call.
  Runtime* runtime = Runtime::Current();
  if (UNLIKELY(method == runtime->GetResolutionMethod())) {
    return GetOatAddress(quick_resolution_trampoline_offset_);
  } else if (UNLIKELY(method == runtime->GetImtConflictMethod() ||
      method == runtime->GetImtUnimplementedMethod())) {
    return GetOatAddress(quick_imt_conflict_trampoline_offset_);
  } else {
    // We assume all methods have code. If they don't currently then we set them to use the
    // resolution trampoline. Abstract methods never have code and so we need to make sure their
    // use results in an AbstractMethodError. We use the interpreter to achieve this.
    if (UNLIKELY(method->IsAbstract())) {
      return GetOatAddress(quick_to_interpreter_bridge_offset_);
    } else {
      bool quick_is_interpreted;
      return GetQuickCode(method, &quick_is_interpreted);
    }
  }
}

void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) {
  memcpy(copy, orig, ArtMethod::ObjectSize(target_ptr_size_));

  copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
  copy->SetDexCacheResolvedMethods(GetImageAddress(orig->GetDexCacheResolvedMethods()));
  copy->SetDexCacheResolvedTypes(GetImageAddress(orig->GetDexCacheResolvedTypes()));

  // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
  // oat_begin_.

  // The resolution method has a special trampoline to call.
  // The resolution method has a special trampoline to call.
  Runtime* runtime = Runtime::Current();
  if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
    copy->SetEntryPointFromQuickCompiledCodePtrSize(
        GetOatAddress(quick_resolution_trampoline_offset_), target_ptr_size_);
  } else if (UNLIKELY(orig == runtime->GetImtConflictMethod() ||
                      orig == runtime->GetImtUnimplementedMethod())) {
    copy->SetEntryPointFromQuickCompiledCodePtrSize(
        GetOatAddress(quick_imt_conflict_trampoline_offset_), target_ptr_size_);
  } else if (UNLIKELY(orig->IsRuntimeMethod())) {
    bool found_one = false;
    for (size_t i = 0; i < static_cast<size_t>(Runtime::kLastCalleeSaveType); ++i) {
      auto idx = static_cast<Runtime::CalleeSaveType>(i);
      if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
        found_one = true;
        break;
      }
    }
    CHECK(found_one) << "Expected to find callee save method but got " << PrettyMethod(orig);
    CHECK(copy->IsRuntimeMethod());
  } else {
    // We assume all methods have code. If they don't currently, we set them to use the
    // resolution trampoline. Abstract methods never have code, so we need to make sure their
    // use results in an AbstractMethodError. We use the interpreter to achieve this.
    if (UNLIKELY(orig->IsAbstract())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(quick_to_interpreter_bridge_offset_), target_ptr_size_);
      copy->SetEntryPointFromInterpreterPtrSize(
          reinterpret_cast<EntryPointFromInterpreter*>(const_cast<uint8_t*>(
              GetOatAddress(interpreter_to_interpreter_bridge_offset_))), target_ptr_size_);
    } else {
      bool quick_is_interpreted;
      const uint8_t* quick_code = GetQuickCode(orig, &quick_is_interpreted);
      copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);

      // JNI entrypoint:
      if (orig->IsNative()) {
        // The native method's pointer is set to a stub that does the lookup via dlsym.
        // Note this is not the code_ pointer, which is handled above.
        copy->SetEntryPointFromJniPtrSize(
            GetOatAddress(jni_dlsym_lookup_offset_), target_ptr_size_);
      }

      // Interpreter entrypoint:
      // Set the interpreter entrypoint depending on whether there is compiled code or not.
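      // If quick_is_interpreted, the quick entrypoint above is the quick-to-interpreter bridge,
      // so there is no compiled code to transfer to; the interpreter must re-enter itself via
      // the interpreter-to-interpreter bridge. Otherwise, calls from the interpreter transfer
      // into the compiled code via the interpreter-to-compiled-code bridge.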
      uint32_t interpreter_code = (quick_is_interpreted)
          ? interpreter_to_interpreter_bridge_offset_
          : interpreter_to_compiled_code_bridge_offset_;
      EntryPointFromInterpreter* interpreter_entrypoint =
          reinterpret_cast<EntryPointFromInterpreter*>(
              const_cast<uint8_t*>(GetOatAddress(interpreter_code)));
      copy->SetEntryPointFromInterpreterPtrSize(interpreter_entrypoint, target_ptr_size_);
    }
  }
}

static OatHeader* GetOatHeaderFromElf(ElfFile* elf) {
  uint64_t data_sec_offset;
  bool has_data_sec = elf->GetSectionOffsetAndSize(".rodata", &data_sec_offset, nullptr);
  if (!has_data_sec) {
    return nullptr;
  }
  return reinterpret_cast<OatHeader*>(elf->Begin() + data_sec_offset);
}

void ImageWriter::SetOatChecksumFromElfFile(File* elf_file) {
  std::string error_msg;
  std::unique_ptr<ElfFile> elf(ElfFile::Open(elf_file, PROT_READ | PROT_WRITE,
                                             MAP_SHARED, &error_msg));
  if (elf.get() == nullptr) {
    LOG(FATAL) << "Unable to open oat file: " << error_msg;
    return;
  }
  OatHeader* oat_header = GetOatHeaderFromElf(elf.get());
  CHECK(oat_header != nullptr);
  CHECK(oat_header->IsValid());

  ImageHeader* image_header = reinterpret_cast<ImageHeader*>(image_->Begin());
  image_header->SetOatChecksum(oat_header->GetChecksum());
}

size_t ImageWriter::GetBinSizeSum(ImageWriter::Bin up_to) const {
  DCHECK_LE(up_to, kBinSize);
  return std::accumulate(&bin_slot_sizes_[0], &bin_slot_sizes_[up_to], /*init*/0);
}

ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
  // These values may need to get updated if more bins are added to the enum Bin.
  static_assert(kBinBits == 3, "wrong number of bin bits");
  static_assert(kBinShift == 27, "wrong bin shift");
  static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");

  DCHECK_LT(GetBin(), kBinSize);
  DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
}

ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
    : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
  DCHECK_EQ(index, GetIndex());
}

ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
  return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
}

uint32_t ImageWriter::BinSlot::GetIndex() const {
  return lockword_ & ~kBinMask;
}

uint8_t* ImageWriter::GetOatFileBegin() const {
  DCHECK_GT(intern_table_bytes_, 0u);
  // The oat file starts at the page-aligned end of the image: the objects (image_end_), the
  // native ArtField and ArtMethod sections, and the intern table.
  return image_begin_ + RoundUp(
      image_end_ + bin_slot_sizes_[kBinArtField] + bin_slot_sizes_[kBinArtMethodDirty] +
      bin_slot_sizes_[kBinArtMethodClean] + intern_table_bytes_, kPageSize);
}

}  // namespace art