builder.cc revision fedd91d50930e160c021d65b3740264f6ffec260
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include "builder.h" 18 19#include "art_field-inl.h" 20#include "base/arena_bit_vector.h" 21#include "base/bit_vector-inl.h" 22#include "base/logging.h" 23#include "class_linker.h" 24#include "dex/verified_method.h" 25#include "dex_file-inl.h" 26#include "dex_instruction-inl.h" 27#include "dex/verified_method.h" 28#include "driver/compiler_driver-inl.h" 29#include "driver/compiler_options.h" 30#include "mirror/class_loader.h" 31#include "mirror/dex_cache.h" 32#include "nodes.h" 33#include "primitive.h" 34#include "scoped_thread_state_change.h" 35#include "thread.h" 36#include "utils/dex_cache_arrays_layout-inl.h" 37 38namespace art { 39 40/** 41 * Helper class to add HTemporary instructions. This class is used when 42 * converting a DEX instruction to multiple HInstruction, and where those 43 * instructions do not die at the following instruction, but instead spans 44 * multiple instructions. 45 */ 46class Temporaries : public ValueObject { 47 public: 48 explicit Temporaries(HGraph* graph) : graph_(graph), index_(0) {} 49 50 void Add(HInstruction* instruction) { 51 HInstruction* temp = new (graph_->GetArena()) HTemporary(index_, instruction->GetDexPc()); 52 instruction->GetBlock()->AddInstruction(temp); 53 54 DCHECK(temp->GetPrevious() == instruction); 55 56 size_t offset; 57 if (instruction->GetType() == Primitive::kPrimLong 58 || instruction->GetType() == Primitive::kPrimDouble) { 59 offset = 2; 60 } else { 61 offset = 1; 62 } 63 index_ += offset; 64 65 graph_->UpdateTemporariesVRegSlots(index_); 66 } 67 68 private: 69 HGraph* const graph_; 70 71 // Current index in the temporary stack, updated by `Add`. 72 size_t index_; 73}; 74 75class SwitchTable : public ValueObject { 76 public: 77 SwitchTable(const Instruction& instruction, uint32_t dex_pc, bool sparse) 78 : instruction_(instruction), dex_pc_(dex_pc), sparse_(sparse) { 79 int32_t table_offset = instruction.VRegB_31t(); 80 const uint16_t* table = reinterpret_cast<const uint16_t*>(&instruction) + table_offset; 81 if (sparse) { 82 CHECK_EQ(table[0], static_cast<uint16_t>(Instruction::kSparseSwitchSignature)); 83 } else { 84 CHECK_EQ(table[0], static_cast<uint16_t>(Instruction::kPackedSwitchSignature)); 85 } 86 num_entries_ = table[1]; 87 values_ = reinterpret_cast<const int32_t*>(&table[2]); 88 } 89 90 uint16_t GetNumEntries() const { 91 return num_entries_; 92 } 93 94 void CheckIndex(size_t index) const { 95 if (sparse_) { 96 // In a sparse table, we have num_entries_ keys and num_entries_ values, in that order. 97 DCHECK_LT(index, 2 * static_cast<size_t>(num_entries_)); 98 } else { 99 // In a packed table, we have the starting key and num_entries_ values. 100 DCHECK_LT(index, 1 + static_cast<size_t>(num_entries_)); 101 } 102 } 103 104 int32_t GetEntryAt(size_t index) const { 105 CheckIndex(index); 106 return values_[index]; 107 } 108 109 uint32_t GetDexPcForIndex(size_t index) const { 110 CheckIndex(index); 111 return dex_pc_ + 112 (reinterpret_cast<const int16_t*>(values_ + index) - 113 reinterpret_cast<const int16_t*>(&instruction_)); 114 } 115 116 // Index of the first value in the table. 117 size_t GetFirstValueIndex() const { 118 if (sparse_) { 119 // In a sparse table, we have num_entries_ keys and num_entries_ values, in that order. 120 return num_entries_; 121 } else { 122 // In a packed table, we have the starting key and num_entries_ values. 123 return 1; 124 } 125 } 126 127 private: 128 const Instruction& instruction_; 129 const uint32_t dex_pc_; 130 131 // Whether this is a sparse-switch table (or a packed-switch one). 132 const bool sparse_; 133 134 // This can't be const as it needs to be computed off of the given instruction, and complicated 135 // expressions in the initializer list seemed very ugly. 136 uint16_t num_entries_; 137 138 const int32_t* values_; 139 140 DISALLOW_COPY_AND_ASSIGN(SwitchTable); 141}; 142 143void HGraphBuilder::InitializeLocals(uint16_t count) { 144 graph_->SetNumberOfVRegs(count); 145 locals_.resize(count); 146 for (int i = 0; i < count; i++) { 147 HLocal* local = new (arena_) HLocal(i); 148 entry_block_->AddInstruction(local); 149 locals_[i] = local; 150 } 151} 152 153void HGraphBuilder::InitializeParameters(uint16_t number_of_parameters) { 154 // dex_compilation_unit_ is null only when unit testing. 155 if (dex_compilation_unit_ == nullptr) { 156 return; 157 } 158 159 graph_->SetNumberOfInVRegs(number_of_parameters); 160 const char* shorty = dex_compilation_unit_->GetShorty(); 161 int locals_index = locals_.size() - number_of_parameters; 162 int parameter_index = 0; 163 164 const DexFile::MethodId& referrer_method_id = 165 dex_file_->GetMethodId(dex_compilation_unit_->GetDexMethodIndex()); 166 if (!dex_compilation_unit_->IsStatic()) { 167 // Add the implicit 'this' argument, not expressed in the signature. 168 HParameterValue* parameter = new (arena_) HParameterValue(*dex_file_, 169 referrer_method_id.class_idx_, 170 parameter_index++, 171 Primitive::kPrimNot, 172 true); 173 entry_block_->AddInstruction(parameter); 174 HLocal* local = GetLocalAt(locals_index++); 175 entry_block_->AddInstruction(new (arena_) HStoreLocal(local, parameter, local->GetDexPc())); 176 number_of_parameters--; 177 } 178 179 const DexFile::ProtoId& proto = dex_file_->GetMethodPrototype(referrer_method_id); 180 const DexFile::TypeList* arg_types = dex_file_->GetProtoParameters(proto); 181 for (int i = 0, shorty_pos = 1; i < number_of_parameters; i++) { 182 HParameterValue* parameter = new (arena_) HParameterValue( 183 *dex_file_, 184 arg_types->GetTypeItem(shorty_pos - 1).type_idx_, 185 parameter_index++, 186 Primitive::GetType(shorty[shorty_pos]), 187 false); 188 ++shorty_pos; 189 entry_block_->AddInstruction(parameter); 190 HLocal* local = GetLocalAt(locals_index++); 191 // Store the parameter value in the local that the dex code will use 192 // to reference that parameter. 193 entry_block_->AddInstruction(new (arena_) HStoreLocal(local, parameter, local->GetDexPc())); 194 bool is_wide = (parameter->GetType() == Primitive::kPrimLong) 195 || (parameter->GetType() == Primitive::kPrimDouble); 196 if (is_wide) { 197 i++; 198 locals_index++; 199 parameter_index++; 200 } 201 } 202} 203 204template<typename T> 205void HGraphBuilder::If_22t(const Instruction& instruction, uint32_t dex_pc) { 206 int32_t target_offset = instruction.GetTargetOffset(); 207 HBasicBlock* branch_target = FindBlockStartingAt(dex_pc + target_offset); 208 HBasicBlock* fallthrough_target = FindBlockStartingAt(dex_pc + instruction.SizeInCodeUnits()); 209 DCHECK(branch_target != nullptr); 210 DCHECK(fallthrough_target != nullptr); 211 PotentiallyAddSuspendCheck(branch_target, dex_pc); 212 HInstruction* first = LoadLocal(instruction.VRegA(), Primitive::kPrimInt, dex_pc); 213 HInstruction* second = LoadLocal(instruction.VRegB(), Primitive::kPrimInt, dex_pc); 214 T* comparison = new (arena_) T(first, second, dex_pc); 215 current_block_->AddInstruction(comparison); 216 HInstruction* ifinst = new (arena_) HIf(comparison, dex_pc); 217 current_block_->AddInstruction(ifinst); 218 current_block_->AddSuccessor(branch_target); 219 current_block_->AddSuccessor(fallthrough_target); 220 current_block_ = nullptr; 221} 222 223template<typename T> 224void HGraphBuilder::If_21t(const Instruction& instruction, uint32_t dex_pc) { 225 int32_t target_offset = instruction.GetTargetOffset(); 226 HBasicBlock* branch_target = FindBlockStartingAt(dex_pc + target_offset); 227 HBasicBlock* fallthrough_target = FindBlockStartingAt(dex_pc + instruction.SizeInCodeUnits()); 228 DCHECK(branch_target != nullptr); 229 DCHECK(fallthrough_target != nullptr); 230 PotentiallyAddSuspendCheck(branch_target, dex_pc); 231 HInstruction* value = LoadLocal(instruction.VRegA(), Primitive::kPrimInt, dex_pc); 232 T* comparison = new (arena_) T(value, graph_->GetIntConstant(0, dex_pc), dex_pc); 233 current_block_->AddInstruction(comparison); 234 HInstruction* ifinst = new (arena_) HIf(comparison, dex_pc); 235 current_block_->AddInstruction(ifinst); 236 current_block_->AddSuccessor(branch_target); 237 current_block_->AddSuccessor(fallthrough_target); 238 current_block_ = nullptr; 239} 240 241void HGraphBuilder::MaybeRecordStat(MethodCompilationStat compilation_stat) { 242 if (compilation_stats_ != nullptr) { 243 compilation_stats_->RecordStat(compilation_stat); 244 } 245} 246 247bool HGraphBuilder::SkipCompilation(const DexFile::CodeItem& code_item, 248 size_t number_of_branches) { 249 const CompilerOptions& compiler_options = compiler_driver_->GetCompilerOptions(); 250 CompilerOptions::CompilerFilter compiler_filter = compiler_options.GetCompilerFilter(); 251 if (compiler_filter == CompilerOptions::kEverything) { 252 return false; 253 } 254 255 if (compiler_options.IsHugeMethod(code_item.insns_size_in_code_units_)) { 256 VLOG(compiler) << "Skip compilation of huge method " 257 << PrettyMethod(dex_compilation_unit_->GetDexMethodIndex(), *dex_file_) 258 << ": " << code_item.insns_size_in_code_units_ << " code units"; 259 MaybeRecordStat(MethodCompilationStat::kNotCompiledHugeMethod); 260 return true; 261 } 262 263 // If it's large and contains no branches, it's likely to be machine generated initialization. 264 if (compiler_options.IsLargeMethod(code_item.insns_size_in_code_units_) 265 && (number_of_branches == 0)) { 266 VLOG(compiler) << "Skip compilation of large method with no branch " 267 << PrettyMethod(dex_compilation_unit_->GetDexMethodIndex(), *dex_file_) 268 << ": " << code_item.insns_size_in_code_units_ << " code units"; 269 MaybeRecordStat(MethodCompilationStat::kNotCompiledLargeMethodNoBranches); 270 return true; 271 } 272 273 return false; 274} 275 276void HGraphBuilder::CreateBlocksForTryCatch(const DexFile::CodeItem& code_item) { 277 if (code_item.tries_size_ == 0) { 278 return; 279 } 280 281 // Create branch targets at the start/end of the TryItem range. These are 282 // places where the program might fall through into/out of the a block and 283 // where TryBoundary instructions will be inserted later. Other edges which 284 // enter/exit the try blocks are a result of branches/switches. 285 for (size_t idx = 0; idx < code_item.tries_size_; ++idx) { 286 const DexFile::TryItem* try_item = DexFile::GetTryItems(code_item, idx); 287 uint32_t dex_pc_start = try_item->start_addr_; 288 uint32_t dex_pc_end = dex_pc_start + try_item->insn_count_; 289 FindOrCreateBlockStartingAt(dex_pc_start); 290 if (dex_pc_end < code_item.insns_size_in_code_units_) { 291 // TODO: Do not create block if the last instruction cannot fall through. 292 FindOrCreateBlockStartingAt(dex_pc_end); 293 } else { 294 // The TryItem spans until the very end of the CodeItem (or beyond if 295 // invalid) and therefore cannot have any code afterwards. 296 } 297 } 298 299 // Create branch targets for exception handlers. 300 const uint8_t* handlers_ptr = DexFile::GetCatchHandlerData(code_item, 0); 301 uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr); 302 for (uint32_t idx = 0; idx < handlers_size; ++idx) { 303 CatchHandlerIterator iterator(handlers_ptr); 304 for (; iterator.HasNext(); iterator.Next()) { 305 uint32_t address = iterator.GetHandlerAddress(); 306 HBasicBlock* block = FindOrCreateBlockStartingAt(address); 307 block->SetTryCatchInformation( 308 new (arena_) TryCatchInformation(iterator.GetHandlerTypeIndex(), *dex_file_)); 309 } 310 handlers_ptr = iterator.EndDataPointer(); 311 } 312} 313 314// Returns the TryItem stored for `block` or nullptr if there is no info for it. 315static const DexFile::TryItem* GetTryItem( 316 HBasicBlock* block, 317 const ArenaSafeMap<uint32_t, const DexFile::TryItem*>& try_block_info) { 318 auto iterator = try_block_info.find(block->GetBlockId()); 319 return (iterator == try_block_info.end()) ? nullptr : iterator->second; 320} 321 322void HGraphBuilder::LinkToCatchBlocks(HTryBoundary* try_boundary, 323 const DexFile::CodeItem& code_item, 324 const DexFile::TryItem* try_item) { 325 for (CatchHandlerIterator it(code_item, *try_item); it.HasNext(); it.Next()) { 326 try_boundary->AddExceptionHandler(FindBlockStartingAt(it.GetHandlerAddress())); 327 } 328} 329 330void HGraphBuilder::InsertTryBoundaryBlocks(const DexFile::CodeItem& code_item) { 331 if (code_item.tries_size_ == 0) { 332 return; 333 } 334 335 // Keep a map of all try blocks and their respective TryItems. We do not use 336 // the block's pointer but rather its id to ensure deterministic iteration. 337 ArenaSafeMap<uint32_t, const DexFile::TryItem*> try_block_info( 338 std::less<uint32_t>(), arena_->Adapter(kArenaAllocGraphBuilder)); 339 340 // Obtain TryItem information for blocks with throwing instructions, and split 341 // blocks which are both try & catch to simplify the graph. 342 // NOTE: We are appending new blocks inside the loop, so we need to use index 343 // because iterators can be invalidated. We remember the initial size to avoid 344 // iterating over the new blocks which cannot throw. 345 for (size_t i = 0, e = graph_->GetBlocks().size(); i < e; ++i) { 346 HBasicBlock* block = graph_->GetBlocks()[i]; 347 348 // Do not bother creating exceptional edges for try blocks which have no 349 // throwing instructions. In that case we simply assume that the block is 350 // not covered by a TryItem. This prevents us from creating a throw-catch 351 // loop for synchronized blocks. 352 if (block->HasThrowingInstructions()) { 353 // Try to find a TryItem covering the block. 354 DCHECK_NE(block->GetDexPc(), kNoDexPc) << "Block must have a dec_pc to find its TryItem."; 355 const int32_t try_item_idx = DexFile::FindTryItem(code_item, block->GetDexPc()); 356 if (try_item_idx != -1) { 357 // Block throwing and in a TryItem. Store the try block information. 358 HBasicBlock* throwing_block = block; 359 if (block->IsCatchBlock()) { 360 // Simplify blocks which are both try and catch, otherwise we would 361 // need a strategy for splitting exceptional edges. We split the block 362 // after the move-exception (if present) and mark the first part not 363 // throwing. The normal-flow edge between them will be split later. 364 throwing_block = block->SplitCatchBlockAfterMoveException(); 365 // Move-exception does not throw and the block has throwing insructions 366 // so it must have been possible to split it. 367 DCHECK(throwing_block != nullptr); 368 } 369 370 try_block_info.Put(throwing_block->GetBlockId(), 371 DexFile::GetTryItems(code_item, try_item_idx)); 372 } 373 } 374 } 375 376 // Do a pass over the try blocks and insert entering TryBoundaries where at 377 // least one predecessor is not covered by the same TryItem as the try block. 378 // We do not split each edge separately, but rather create one boundary block 379 // that all predecessors are relinked to. This preserves loop headers (b/23895756). 380 for (auto entry : try_block_info) { 381 HBasicBlock* try_block = graph_->GetBlocks()[entry.first]; 382 for (HBasicBlock* predecessor : try_block->GetPredecessors()) { 383 if (GetTryItem(predecessor, try_block_info) != entry.second) { 384 // Found a predecessor not covered by the same TryItem. Insert entering 385 // boundary block. 386 HTryBoundary* try_entry = 387 new (arena_) HTryBoundary(HTryBoundary::kEntry, try_block->GetDexPc()); 388 try_block->CreateImmediateDominator()->AddInstruction(try_entry); 389 LinkToCatchBlocks(try_entry, code_item, entry.second); 390 break; 391 } 392 } 393 } 394 395 // Do a second pass over the try blocks and insert exit TryBoundaries where 396 // the successor is not in the same TryItem. 397 for (auto entry : try_block_info) { 398 HBasicBlock* try_block = graph_->GetBlocks()[entry.first]; 399 // NOTE: Do not use iterators because SplitEdge would invalidate them. 400 for (size_t i = 0, e = try_block->GetSuccessors().size(); i < e; ++i) { 401 HBasicBlock* successor = try_block->GetSuccessors()[i]; 402 403 // If the successor is a try block, all of its predecessors must be 404 // covered by the same TryItem. Otherwise the previous pass would have 405 // created a non-throwing boundary block. 406 if (GetTryItem(successor, try_block_info) != nullptr) { 407 DCHECK_EQ(entry.second, GetTryItem(successor, try_block_info)); 408 continue; 409 } 410 411 // Preserve the invariant that Return(Void) always jumps to Exit by moving 412 // it outside the try block if necessary. 413 HInstruction* last_instruction = try_block->GetLastInstruction(); 414 if (last_instruction->IsReturn() || last_instruction->IsReturnVoid()) { 415 DCHECK_EQ(successor, exit_block_); 416 successor = try_block->SplitBefore(last_instruction); 417 } 418 419 // Insert TryBoundary and link to catch blocks. 420 HTryBoundary* try_exit = 421 new (arena_) HTryBoundary(HTryBoundary::kExit, successor->GetDexPc()); 422 graph_->SplitEdge(try_block, successor)->AddInstruction(try_exit); 423 LinkToCatchBlocks(try_exit, code_item, entry.second); 424 } 425 } 426} 427 428bool HGraphBuilder::BuildGraph(const DexFile::CodeItem& code_item) { 429 DCHECK(graph_->GetBlocks().empty()); 430 431 const uint16_t* code_ptr = code_item.insns_; 432 const uint16_t* code_end = code_item.insns_ + code_item.insns_size_in_code_units_; 433 code_start_ = code_ptr; 434 435 // Setup the graph with the entry block and exit block. 436 entry_block_ = new (arena_) HBasicBlock(graph_, 0); 437 graph_->AddBlock(entry_block_); 438 exit_block_ = new (arena_) HBasicBlock(graph_, kNoDexPc); 439 graph_->SetEntryBlock(entry_block_); 440 graph_->SetExitBlock(exit_block_); 441 442 graph_->SetHasTryCatch(code_item.tries_size_ != 0); 443 444 InitializeLocals(code_item.registers_size_); 445 graph_->SetMaximumNumberOfOutVRegs(code_item.outs_size_); 446 447 // Compute the number of dex instructions, blocks, and branches. We will 448 // check these values against limits given to the compiler. 449 size_t number_of_branches = 0; 450 451 // To avoid splitting blocks, we compute ahead of time the instructions that 452 // start a new block, and create these blocks. 453 if (!ComputeBranchTargets(code_ptr, code_end, &number_of_branches)) { 454 MaybeRecordStat(MethodCompilationStat::kNotCompiledBranchOutsideMethodCode); 455 return false; 456 } 457 458 // Note that the compiler driver is null when unit testing. 459 if ((compiler_driver_ != nullptr) && SkipCompilation(code_item, number_of_branches)) { 460 return false; 461 } 462 463 // Find locations where we want to generate extra stackmaps for native debugging. 464 // This allows us to generate the info only at interesting points (for example, 465 // at start of java statement) rather than before every dex instruction. 466 const bool native_debuggable = compiler_driver_ != nullptr && 467 compiler_driver_->GetCompilerOptions().GetNativeDebuggable(); 468 ArenaBitVector* native_debug_info_locations; 469 if (native_debuggable) { 470 const uint32_t num_instructions = code_item.insns_size_in_code_units_; 471 native_debug_info_locations = new (arena_) ArenaBitVector (arena_, num_instructions, false); 472 native_debug_info_locations->ClearAllBits(); 473 FindNativeDebugInfoLocations(code_item, native_debug_info_locations); 474 } 475 476 CreateBlocksForTryCatch(code_item); 477 478 InitializeParameters(code_item.ins_size_); 479 480 size_t dex_pc = 0; 481 while (code_ptr < code_end) { 482 // Update the current block if dex_pc starts a new block. 483 MaybeUpdateCurrentBlock(dex_pc); 484 const Instruction& instruction = *Instruction::At(code_ptr); 485 if (native_debuggable && native_debug_info_locations->IsBitSet(dex_pc)) { 486 if (current_block_ != nullptr) { 487 current_block_->AddInstruction(new (arena_) HNativeDebugInfo(dex_pc)); 488 } 489 } 490 if (!AnalyzeDexInstruction(instruction, dex_pc)) { 491 return false; 492 } 493 dex_pc += instruction.SizeInCodeUnits(); 494 code_ptr += instruction.SizeInCodeUnits(); 495 } 496 497 // Add Exit to the exit block. 498 exit_block_->AddInstruction(new (arena_) HExit()); 499 // Add the suspend check to the entry block. 500 entry_block_->AddInstruction(new (arena_) HSuspendCheck(0)); 501 entry_block_->AddInstruction(new (arena_) HGoto()); 502 // Add the exit block at the end. 503 graph_->AddBlock(exit_block_); 504 505 // Iterate over blocks covered by TryItems and insert TryBoundaries at entry 506 // and exit points. This requires all control-flow instructions and 507 // non-exceptional edges to have been created. 508 InsertTryBoundaryBlocks(code_item); 509 510 return true; 511} 512 513void HGraphBuilder::MaybeUpdateCurrentBlock(size_t dex_pc) { 514 HBasicBlock* block = FindBlockStartingAt(dex_pc); 515 if (block == nullptr) { 516 return; 517 } 518 519 if (current_block_ != nullptr) { 520 // Branching instructions clear current_block, so we know 521 // the last instruction of the current block is not a branching 522 // instruction. We add an unconditional goto to the found block. 523 current_block_->AddInstruction(new (arena_) HGoto(dex_pc)); 524 current_block_->AddSuccessor(block); 525 } 526 graph_->AddBlock(block); 527 current_block_ = block; 528} 529 530void HGraphBuilder::FindNativeDebugInfoLocations(const DexFile::CodeItem& code_item, 531 ArenaBitVector* locations) { 532 // The callback gets called when the line number changes. 533 // In other words, it marks the start of new java statement. 534 struct Callback { 535 static bool Position(void* ctx, const DexFile::PositionInfo& entry) { 536 static_cast<ArenaBitVector*>(ctx)->SetBit(entry.address_); 537 return false; 538 } 539 }; 540 dex_file_->DecodeDebugPositionInfo(&code_item, Callback::Position, locations); 541 // Add native debug info at the start of every basic block. 542 for (uint32_t pc = 0; pc < code_item.insns_size_in_code_units_; pc++) { 543 if (FindBlockStartingAt(pc) != nullptr) { 544 locations->SetBit(pc); 545 } 546 } 547 // Instruction-specific tweaks. 548 const Instruction* const begin = Instruction::At(code_item.insns_); 549 const Instruction* const end = begin->RelativeAt(code_item.insns_size_in_code_units_); 550 for (const Instruction* inst = begin; inst < end; inst = inst->Next()) { 551 switch (inst->Opcode()) { 552 case Instruction::MOVE_EXCEPTION: 553 case Instruction::MOVE_RESULT: 554 case Instruction::MOVE_RESULT_WIDE: 555 case Instruction::MOVE_RESULT_OBJECT: { 556 // The compiler checks that there are no instructions before those. 557 // So generate HNativeDebugInfo after them instead. 558 locations->ClearBit(inst->GetDexPc(code_item.insns_)); 559 const Instruction* next = inst->Next(); 560 if (next < end) { 561 locations->SetBit(next->GetDexPc(code_item.insns_)); 562 } 563 break; 564 } 565 default: 566 break; 567 } 568 } 569} 570 571bool HGraphBuilder::ComputeBranchTargets(const uint16_t* code_ptr, 572 const uint16_t* code_end, 573 size_t* number_of_branches) { 574 branch_targets_.resize(code_end - code_ptr, nullptr); 575 576 // Create the first block for the dex instructions, single successor of the entry block. 577 HBasicBlock* block = new (arena_) HBasicBlock(graph_, 0); 578 branch_targets_[0] = block; 579 entry_block_->AddSuccessor(block); 580 581 // Iterate over all instructions and find branching instructions. Create blocks for 582 // the locations these instructions branch to. 583 uint32_t dex_pc = 0; 584 while (code_ptr < code_end) { 585 const Instruction& instruction = *Instruction::At(code_ptr); 586 if (instruction.IsBranch()) { 587 (*number_of_branches)++; 588 int32_t target = instruction.GetTargetOffset() + dex_pc; 589 // Create a block for the target instruction. 590 FindOrCreateBlockStartingAt(target); 591 592 dex_pc += instruction.SizeInCodeUnits(); 593 code_ptr += instruction.SizeInCodeUnits(); 594 595 if (instruction.CanFlowThrough()) { 596 if (code_ptr >= code_end) { 597 // In the normal case we should never hit this but someone can artificially forge a dex 598 // file to fall-through out the method code. In this case we bail out compilation. 599 return false; 600 } else { 601 FindOrCreateBlockStartingAt(dex_pc); 602 } 603 } 604 } else if (instruction.IsSwitch()) { 605 SwitchTable table(instruction, dex_pc, instruction.Opcode() == Instruction::SPARSE_SWITCH); 606 607 uint16_t num_entries = table.GetNumEntries(); 608 609 // In a packed-switch, the entry at index 0 is the starting key. In a sparse-switch, the 610 // entry at index 0 is the first key, and values are after *all* keys. 611 size_t offset = table.GetFirstValueIndex(); 612 613 // Use a larger loop counter type to avoid overflow issues. 614 for (size_t i = 0; i < num_entries; ++i) { 615 // The target of the case. 616 uint32_t target = dex_pc + table.GetEntryAt(i + offset); 617 FindOrCreateBlockStartingAt(target); 618 619 // Create a block for the switch-case logic. The block gets the dex_pc 620 // of the SWITCH instruction because it is part of its semantics. 621 block = new (arena_) HBasicBlock(graph_, dex_pc); 622 branch_targets_[table.GetDexPcForIndex(i)] = block; 623 } 624 625 // Fall-through. Add a block if there is more code afterwards. 626 dex_pc += instruction.SizeInCodeUnits(); 627 code_ptr += instruction.SizeInCodeUnits(); 628 if (code_ptr >= code_end) { 629 // In the normal case we should never hit this but someone can artificially forge a dex 630 // file to fall-through out the method code. In this case we bail out compilation. 631 // (A switch can fall-through so we don't need to check CanFlowThrough().) 632 return false; 633 } else { 634 FindOrCreateBlockStartingAt(dex_pc); 635 } 636 } else { 637 code_ptr += instruction.SizeInCodeUnits(); 638 dex_pc += instruction.SizeInCodeUnits(); 639 } 640 } 641 return true; 642} 643 644HBasicBlock* HGraphBuilder::FindBlockStartingAt(int32_t dex_pc) const { 645 DCHECK_GE(dex_pc, 0); 646 return branch_targets_[dex_pc]; 647} 648 649HBasicBlock* HGraphBuilder::FindOrCreateBlockStartingAt(int32_t dex_pc) { 650 HBasicBlock* block = FindBlockStartingAt(dex_pc); 651 if (block == nullptr) { 652 block = new (arena_) HBasicBlock(graph_, dex_pc); 653 branch_targets_[dex_pc] = block; 654 } 655 return block; 656} 657 658template<typename T> 659void HGraphBuilder::Unop_12x(const Instruction& instruction, 660 Primitive::Type type, 661 uint32_t dex_pc) { 662 HInstruction* first = LoadLocal(instruction.VRegB(), type, dex_pc); 663 current_block_->AddInstruction(new (arena_) T(type, first, dex_pc)); 664 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction(), dex_pc); 665} 666 667void HGraphBuilder::Conversion_12x(const Instruction& instruction, 668 Primitive::Type input_type, 669 Primitive::Type result_type, 670 uint32_t dex_pc) { 671 HInstruction* first = LoadLocal(instruction.VRegB(), input_type, dex_pc); 672 current_block_->AddInstruction(new (arena_) HTypeConversion(result_type, first, dex_pc)); 673 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction(), dex_pc); 674} 675 676template<typename T> 677void HGraphBuilder::Binop_23x(const Instruction& instruction, 678 Primitive::Type type, 679 uint32_t dex_pc) { 680 HInstruction* first = LoadLocal(instruction.VRegB(), type, dex_pc); 681 HInstruction* second = LoadLocal(instruction.VRegC(), type, dex_pc); 682 current_block_->AddInstruction(new (arena_) T(type, first, second, dex_pc)); 683 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction(), dex_pc); 684} 685 686template<typename T> 687void HGraphBuilder::Binop_23x_shift(const Instruction& instruction, 688 Primitive::Type type, 689 uint32_t dex_pc) { 690 HInstruction* first = LoadLocal(instruction.VRegB(), type, dex_pc); 691 HInstruction* second = LoadLocal(instruction.VRegC(), Primitive::kPrimInt, dex_pc); 692 current_block_->AddInstruction(new (arena_) T(type, first, second, dex_pc)); 693 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction(), dex_pc); 694} 695 696void HGraphBuilder::Binop_23x_cmp(const Instruction& instruction, 697 Primitive::Type type, 698 ComparisonBias bias, 699 uint32_t dex_pc) { 700 HInstruction* first = LoadLocal(instruction.VRegB(), type, dex_pc); 701 HInstruction* second = LoadLocal(instruction.VRegC(), type, dex_pc); 702 current_block_->AddInstruction(new (arena_) HCompare(type, first, second, bias, dex_pc)); 703 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction(), dex_pc); 704} 705 706template<typename T> 707void HGraphBuilder::Binop_12x_shift(const Instruction& instruction, Primitive::Type type, 708 uint32_t dex_pc) { 709 HInstruction* first = LoadLocal(instruction.VRegA(), type, dex_pc); 710 HInstruction* second = LoadLocal(instruction.VRegB(), Primitive::kPrimInt, dex_pc); 711 current_block_->AddInstruction(new (arena_) T(type, first, second, dex_pc)); 712 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction(), dex_pc); 713} 714 715template<typename T> 716void HGraphBuilder::Binop_12x(const Instruction& instruction, 717 Primitive::Type type, 718 uint32_t dex_pc) { 719 HInstruction* first = LoadLocal(instruction.VRegA(), type, dex_pc); 720 HInstruction* second = LoadLocal(instruction.VRegB(), type, dex_pc); 721 current_block_->AddInstruction(new (arena_) T(type, first, second, dex_pc)); 722 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction(), dex_pc); 723} 724 725template<typename T> 726void HGraphBuilder::Binop_22s(const Instruction& instruction, bool reverse, uint32_t dex_pc) { 727 HInstruction* first = LoadLocal(instruction.VRegB(), Primitive::kPrimInt, dex_pc); 728 HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22s(), dex_pc); 729 if (reverse) { 730 std::swap(first, second); 731 } 732 current_block_->AddInstruction(new (arena_) T(Primitive::kPrimInt, first, second, dex_pc)); 733 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction(), dex_pc); 734} 735 736template<typename T> 737void HGraphBuilder::Binop_22b(const Instruction& instruction, bool reverse, uint32_t dex_pc) { 738 HInstruction* first = LoadLocal(instruction.VRegB(), Primitive::kPrimInt, dex_pc); 739 HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22b(), dex_pc); 740 if (reverse) { 741 std::swap(first, second); 742 } 743 current_block_->AddInstruction(new (arena_) T(Primitive::kPrimInt, first, second, dex_pc)); 744 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction(), dex_pc); 745} 746 747static bool RequiresConstructorBarrier(const DexCompilationUnit* cu, const CompilerDriver& driver) { 748 Thread* self = Thread::Current(); 749 return cu->IsConstructor() 750 && driver.RequiresConstructorBarrier(self, cu->GetDexFile(), cu->GetClassDefIndex()); 751} 752 753void HGraphBuilder::BuildReturn(const Instruction& instruction, 754 Primitive::Type type, 755 uint32_t dex_pc) { 756 if (type == Primitive::kPrimVoid) { 757 if (graph_->ShouldGenerateConstructorBarrier()) { 758 // The compilation unit is null during testing. 759 if (dex_compilation_unit_ != nullptr) { 760 DCHECK(RequiresConstructorBarrier(dex_compilation_unit_, *compiler_driver_)) 761 << "Inconsistent use of ShouldGenerateConstructorBarrier. Should not generate a barrier."; 762 } 763 current_block_->AddInstruction(new (arena_) HMemoryBarrier(kStoreStore, dex_pc)); 764 } 765 current_block_->AddInstruction(new (arena_) HReturnVoid(dex_pc)); 766 } else { 767 HInstruction* value = LoadLocal(instruction.VRegA(), type, dex_pc); 768 current_block_->AddInstruction(new (arena_) HReturn(value, dex_pc)); 769 } 770 current_block_->AddSuccessor(exit_block_); 771 current_block_ = nullptr; 772} 773 774static InvokeType GetInvokeTypeFromOpCode(Instruction::Code opcode) { 775 switch (opcode) { 776 case Instruction::INVOKE_STATIC: 777 case Instruction::INVOKE_STATIC_RANGE: 778 return kStatic; 779 case Instruction::INVOKE_DIRECT: 780 case Instruction::INVOKE_DIRECT_RANGE: 781 return kDirect; 782 case Instruction::INVOKE_VIRTUAL: 783 case Instruction::INVOKE_VIRTUAL_QUICK: 784 case Instruction::INVOKE_VIRTUAL_RANGE: 785 case Instruction::INVOKE_VIRTUAL_RANGE_QUICK: 786 return kVirtual; 787 case Instruction::INVOKE_INTERFACE: 788 case Instruction::INVOKE_INTERFACE_RANGE: 789 return kInterface; 790 case Instruction::INVOKE_SUPER_RANGE: 791 case Instruction::INVOKE_SUPER: 792 return kSuper; 793 default: 794 LOG(FATAL) << "Unexpected invoke opcode: " << opcode; 795 UNREACHABLE(); 796 } 797} 798 799ArtMethod* HGraphBuilder::ResolveMethod(uint16_t method_idx, InvokeType invoke_type) { 800 ScopedObjectAccess soa(Thread::Current()); 801 StackHandleScope<3> hs(soa.Self()); 802 803 ClassLinker* class_linker = dex_compilation_unit_->GetClassLinker(); 804 Handle<mirror::ClassLoader> class_loader(hs.NewHandle( 805 soa.Decode<mirror::ClassLoader*>(dex_compilation_unit_->GetClassLoader()))); 806 Handle<mirror::Class> compiling_class(hs.NewHandle(GetCompilingClass())); 807 808 ArtMethod* resolved_method = class_linker->ResolveMethod<ClassLinker::kForceICCECheck>( 809 *dex_compilation_unit_->GetDexFile(), 810 method_idx, 811 dex_compilation_unit_->GetDexCache(), 812 class_loader, 813 /* referrer */ nullptr, 814 invoke_type); 815 816 if (UNLIKELY(resolved_method == nullptr)) { 817 // Clean up any exception left by type resolution. 818 soa.Self()->ClearException(); 819 return nullptr; 820 } 821 822 // Check access. The class linker has a fast path for looking into the dex cache 823 // and does not check the access if it hits it. 824 if (compiling_class.Get() == nullptr) { 825 if (!resolved_method->IsPublic()) { 826 return nullptr; 827 } 828 } else if (!compiling_class->CanAccessResolvedMethod(resolved_method->GetDeclaringClass(), 829 resolved_method, 830 dex_compilation_unit_->GetDexCache().Get(), 831 method_idx)) { 832 return nullptr; 833 } 834 835 // We have to special case the invoke-super case, as ClassLinker::ResolveMethod does not. 836 // We need to look at the referrer's super class vtable. We need to do this to know if we need to 837 // make this an invoke-unresolved to handle cross-dex invokes or abstract super methods, both of 838 // which require runtime handling. 839 if (invoke_type == kSuper) { 840 if (compiling_class.Get() == nullptr) { 841 // We could not determine the method's class we need to wait until runtime. 842 DCHECK(Runtime::Current()->IsAotCompiler()); 843 return nullptr; 844 } 845 ArtMethod* current_method = graph_->GetArtMethod(); 846 DCHECK(current_method != nullptr); 847 Handle<mirror::Class> methods_class(hs.NewHandle( 848 dex_compilation_unit_->GetClassLinker()->ResolveReferencedClassOfMethod(Thread::Current(), 849 method_idx, 850 current_method))); 851 if (methods_class.Get() == nullptr) { 852 // Invoking a super method requires knowing the actual super class. If we did not resolve 853 // the compiling method's declaring class (which only happens for ahead of time 854 // compilation), bail out. 855 DCHECK(Runtime::Current()->IsAotCompiler()); 856 return nullptr; 857 } else { 858 ArtMethod* actual_method; 859 if (methods_class->IsInterface()) { 860 actual_method = methods_class->FindVirtualMethodForInterfaceSuper( 861 resolved_method, class_linker->GetImagePointerSize()); 862 } else { 863 uint16_t vtable_index = resolved_method->GetMethodIndex(); 864 actual_method = compiling_class->GetSuperClass()->GetVTableEntry( 865 vtable_index, class_linker->GetImagePointerSize()); 866 } 867 if (actual_method != resolved_method && 868 !IsSameDexFile(*actual_method->GetDexFile(), *dex_compilation_unit_->GetDexFile())) { 869 // The back-end code generator relies on this check in order to ensure that it will not 870 // attempt to read the dex_cache with a dex_method_index that is not from the correct 871 // dex_file. If we didn't do this check then the dex_method_index will not be updated in the 872 // builder, which means that the code-generator (and compiler driver during sharpening and 873 // inliner, maybe) might invoke an incorrect method. 874 // TODO: The actual method could still be referenced in the current dex file, so we 875 // could try locating it. 876 // TODO: Remove the dex_file restriction. 877 return nullptr; 878 } 879 if (!actual_method->IsInvokable()) { 880 // Fail if the actual method cannot be invoked. Otherwise, the runtime resolution stub 881 // could resolve the callee to the wrong method. 882 return nullptr; 883 } 884 resolved_method = actual_method; 885 } 886 } 887 888 // Check for incompatible class changes. The class linker has a fast path for 889 // looking into the dex cache and does not check incompatible class changes if it hits it. 890 if (resolved_method->CheckIncompatibleClassChange(invoke_type)) { 891 return nullptr; 892 } 893 894 return resolved_method; 895} 896 897bool HGraphBuilder::BuildInvoke(const Instruction& instruction, 898 uint32_t dex_pc, 899 uint32_t method_idx, 900 uint32_t number_of_vreg_arguments, 901 bool is_range, 902 uint32_t* args, 903 uint32_t register_index) { 904 InvokeType invoke_type = GetInvokeTypeFromOpCode(instruction.Opcode()); 905 const char* descriptor = dex_file_->GetMethodShorty(method_idx); 906 Primitive::Type return_type = Primitive::GetType(descriptor[0]); 907 908 // Remove the return type from the 'proto'. 909 size_t number_of_arguments = strlen(descriptor) - 1; 910 if (invoke_type != kStatic) { // instance call 911 // One extra argument for 'this'. 912 number_of_arguments++; 913 } 914 915 MethodReference target_method(dex_file_, method_idx); 916 917 // Special handling for string init. 918 int32_t string_init_offset = 0; 919 bool is_string_init = compiler_driver_->IsStringInit(method_idx, 920 dex_file_, 921 &string_init_offset); 922 // Replace calls to String.<init> with StringFactory. 923 if (is_string_init) { 924 HInvokeStaticOrDirect::DispatchInfo dispatch_info = { 925 HInvokeStaticOrDirect::MethodLoadKind::kStringInit, 926 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod, 927 dchecked_integral_cast<uint64_t>(string_init_offset), 928 0U 929 }; 930 HInvoke* invoke = new (arena_) HInvokeStaticOrDirect( 931 arena_, 932 number_of_arguments - 1, 933 Primitive::kPrimNot /*return_type */, 934 dex_pc, 935 method_idx, 936 target_method, 937 dispatch_info, 938 invoke_type, 939 kStatic /* optimized_invoke_type */, 940 HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit); 941 return HandleStringInit(invoke, 942 number_of_vreg_arguments, 943 args, 944 register_index, 945 is_range, 946 descriptor); 947 } 948 949 ArtMethod* resolved_method = ResolveMethod(method_idx, invoke_type); 950 951 if (UNLIKELY(resolved_method == nullptr)) { 952 MaybeRecordStat(MethodCompilationStat::kUnresolvedMethod); 953 HInvoke* invoke = new (arena_) HInvokeUnresolved(arena_, 954 number_of_arguments, 955 return_type, 956 dex_pc, 957 method_idx, 958 invoke_type); 959 return HandleInvoke(invoke, 960 number_of_vreg_arguments, 961 args, 962 register_index, 963 is_range, 964 descriptor, 965 nullptr /* clinit_check */); 966 } 967 968 // Potential class initialization check, in the case of a static method call. 969 HClinitCheck* clinit_check = nullptr; 970 HInvoke* invoke = nullptr; 971 if (invoke_type == kDirect || invoke_type == kStatic || invoke_type == kSuper) { 972 // By default, consider that the called method implicitly requires 973 // an initialization check of its declaring method. 974 HInvokeStaticOrDirect::ClinitCheckRequirement clinit_check_requirement 975 = HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit; 976 ScopedObjectAccess soa(Thread::Current()); 977 if (invoke_type == kStatic) { 978 clinit_check = ProcessClinitCheckForInvoke( 979 dex_pc, resolved_method, method_idx, &clinit_check_requirement); 980 } else if (invoke_type == kSuper) { 981 if (IsSameDexFile(*resolved_method->GetDexFile(), *dex_compilation_unit_->GetDexFile())) { 982 // Update the target method to the one resolved. Note that this may be a no-op if 983 // we resolved to the method referenced by the instruction. 984 method_idx = resolved_method->GetDexMethodIndex(); 985 target_method = MethodReference(dex_file_, method_idx); 986 } 987 } 988 989 HInvokeStaticOrDirect::DispatchInfo dispatch_info = { 990 HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod, 991 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod, 992 0u, 993 0U 994 }; 995 invoke = new (arena_) HInvokeStaticOrDirect(arena_, 996 number_of_arguments, 997 return_type, 998 dex_pc, 999 method_idx, 1000 target_method, 1001 dispatch_info, 1002 invoke_type, 1003 invoke_type, 1004 clinit_check_requirement); 1005 } else if (invoke_type == kVirtual) { 1006 ScopedObjectAccess soa(Thread::Current()); // Needed for the method index 1007 invoke = new (arena_) HInvokeVirtual(arena_, 1008 number_of_arguments, 1009 return_type, 1010 dex_pc, 1011 method_idx, 1012 resolved_method->GetMethodIndex()); 1013 } else { 1014 DCHECK_EQ(invoke_type, kInterface); 1015 ScopedObjectAccess soa(Thread::Current()); // Needed for the method index 1016 invoke = new (arena_) HInvokeInterface(arena_, 1017 number_of_arguments, 1018 return_type, 1019 dex_pc, 1020 method_idx, 1021 resolved_method->GetDexMethodIndex()); 1022 } 1023 1024 return HandleInvoke(invoke, 1025 number_of_vreg_arguments, 1026 args, 1027 register_index, 1028 is_range, 1029 descriptor, 1030 clinit_check); 1031} 1032 1033bool HGraphBuilder::BuildNewInstance(uint16_t type_index, uint32_t dex_pc) { 1034 bool finalizable; 1035 bool can_throw = NeedsAccessCheck(type_index, &finalizable); 1036 1037 // Only the non-resolved entrypoint handles the finalizable class case. If we 1038 // need access checks, then we haven't resolved the method and the class may 1039 // again be finalizable. 1040 QuickEntrypointEnum entrypoint = (finalizable || can_throw) 1041 ? kQuickAllocObject 1042 : kQuickAllocObjectInitialized; 1043 1044 ScopedObjectAccess soa(Thread::Current()); 1045 StackHandleScope<3> hs(soa.Self()); 1046 Handle<mirror::DexCache> dex_cache(hs.NewHandle( 1047 dex_compilation_unit_->GetClassLinker()->FindDexCache( 1048 soa.Self(), *dex_compilation_unit_->GetDexFile()))); 1049 Handle<mirror::Class> resolved_class(hs.NewHandle(dex_cache->GetResolvedType(type_index))); 1050 const DexFile& outer_dex_file = *outer_compilation_unit_->GetDexFile(); 1051 Handle<mirror::DexCache> outer_dex_cache(hs.NewHandle( 1052 outer_compilation_unit_->GetClassLinker()->FindDexCache(soa.Self(), outer_dex_file))); 1053 1054 if (outer_dex_cache.Get() != dex_cache.Get()) { 1055 // We currently do not support inlining allocations across dex files. 1056 return false; 1057 } 1058 1059 HLoadClass* load_class = new (arena_) HLoadClass( 1060 graph_->GetCurrentMethod(), 1061 type_index, 1062 outer_dex_file, 1063 IsOutermostCompilingClass(type_index), 1064 dex_pc, 1065 /*needs_access_check*/ can_throw, 1066 compiler_driver_->CanAssumeTypeIsPresentInDexCache(outer_dex_file, type_index)); 1067 1068 current_block_->AddInstruction(load_class); 1069 HInstruction* cls = load_class; 1070 if (!IsInitialized(resolved_class)) { 1071 cls = new (arena_) HClinitCheck(load_class, dex_pc); 1072 current_block_->AddInstruction(cls); 1073 } 1074 1075 current_block_->AddInstruction(new (arena_) HNewInstance( 1076 cls, 1077 graph_->GetCurrentMethod(), 1078 dex_pc, 1079 type_index, 1080 *dex_compilation_unit_->GetDexFile(), 1081 can_throw, 1082 finalizable, 1083 entrypoint)); 1084 return true; 1085} 1086 1087static bool IsSubClass(mirror::Class* to_test, mirror::Class* super_class) 1088 SHARED_REQUIRES(Locks::mutator_lock_) { 1089 return to_test != nullptr && !to_test->IsInterface() && to_test->IsSubClass(super_class); 1090} 1091 1092bool HGraphBuilder::IsInitialized(Handle<mirror::Class> cls) const { 1093 if (cls.Get() == nullptr) { 1094 return false; 1095 } 1096 1097 // `CanAssumeClassIsLoaded` will return true if we're JITting, or will 1098 // check whether the class is in an image for the AOT compilation. 1099 if (cls->IsInitialized() && 1100 compiler_driver_->CanAssumeClassIsLoaded(cls.Get())) { 1101 return true; 1102 } 1103 1104 if (IsSubClass(GetOutermostCompilingClass(), cls.Get())) { 1105 return true; 1106 } 1107 1108 // TODO: We should walk over the inlined methods, but we don't pass 1109 // that information to the builder. 1110 if (IsSubClass(GetCompilingClass(), cls.Get())) { 1111 return true; 1112 } 1113 1114 return false; 1115} 1116 1117HClinitCheck* HGraphBuilder::ProcessClinitCheckForInvoke( 1118 uint32_t dex_pc, 1119 ArtMethod* resolved_method, 1120 uint32_t method_idx, 1121 HInvokeStaticOrDirect::ClinitCheckRequirement* clinit_check_requirement) { 1122 const DexFile& outer_dex_file = *outer_compilation_unit_->GetDexFile(); 1123 Thread* self = Thread::Current(); 1124 StackHandleScope<4> hs(self); 1125 Handle<mirror::DexCache> dex_cache(hs.NewHandle( 1126 dex_compilation_unit_->GetClassLinker()->FindDexCache( 1127 self, *dex_compilation_unit_->GetDexFile()))); 1128 Handle<mirror::DexCache> outer_dex_cache(hs.NewHandle( 1129 outer_compilation_unit_->GetClassLinker()->FindDexCache( 1130 self, outer_dex_file))); 1131 Handle<mirror::Class> outer_class(hs.NewHandle(GetOutermostCompilingClass())); 1132 Handle<mirror::Class> resolved_method_class(hs.NewHandle(resolved_method->GetDeclaringClass())); 1133 1134 // The index at which the method's class is stored in the DexCache's type array. 1135 uint32_t storage_index = DexFile::kDexNoIndex; 1136 bool is_outer_class = (resolved_method->GetDeclaringClass() == outer_class.Get()); 1137 if (is_outer_class) { 1138 storage_index = outer_class->GetDexTypeIndex(); 1139 } else if (outer_dex_cache.Get() == dex_cache.Get()) { 1140 // Get `storage_index` from IsClassOfStaticMethodAvailableToReferrer. 1141 compiler_driver_->IsClassOfStaticMethodAvailableToReferrer(outer_dex_cache.Get(), 1142 GetCompilingClass(), 1143 resolved_method, 1144 method_idx, 1145 &storage_index); 1146 } 1147 1148 HClinitCheck* clinit_check = nullptr; 1149 1150 if (IsInitialized(resolved_method_class)) { 1151 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kNone; 1152 } else if (storage_index != DexFile::kDexNoIndex) { 1153 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit; 1154 HLoadClass* load_class = new (arena_) HLoadClass( 1155 graph_->GetCurrentMethod(), 1156 storage_index, 1157 outer_dex_file, 1158 is_outer_class, 1159 dex_pc, 1160 /*needs_access_check*/ false, 1161 compiler_driver_->CanAssumeTypeIsPresentInDexCache(outer_dex_file, storage_index)); 1162 current_block_->AddInstruction(load_class); 1163 clinit_check = new (arena_) HClinitCheck(load_class, dex_pc); 1164 current_block_->AddInstruction(clinit_check); 1165 } 1166 return clinit_check; 1167} 1168 1169bool HGraphBuilder::SetupInvokeArguments(HInvoke* invoke, 1170 uint32_t number_of_vreg_arguments, 1171 uint32_t* args, 1172 uint32_t register_index, 1173 bool is_range, 1174 const char* descriptor, 1175 size_t start_index, 1176 size_t* argument_index) { 1177 uint32_t descriptor_index = 1; // Skip the return type. 1178 uint32_t dex_pc = invoke->GetDexPc(); 1179 1180 for (size_t i = start_index; 1181 // Make sure we don't go over the expected arguments or over the number of 1182 // dex registers given. If the instruction was seen as dead by the verifier, 1183 // it hasn't been properly checked. 1184 (i < number_of_vreg_arguments) && (*argument_index < invoke->GetNumberOfArguments()); 1185 i++, (*argument_index)++) { 1186 Primitive::Type type = Primitive::GetType(descriptor[descriptor_index++]); 1187 bool is_wide = (type == Primitive::kPrimLong) || (type == Primitive::kPrimDouble); 1188 if (!is_range 1189 && is_wide 1190 && ((i + 1 == number_of_vreg_arguments) || (args[i] + 1 != args[i + 1]))) { 1191 // Longs and doubles should be in pairs, that is, sequential registers. The verifier should 1192 // reject any class where this is violated. However, the verifier only does these checks 1193 // on non trivially dead instructions, so we just bailout the compilation. 1194 VLOG(compiler) << "Did not compile " 1195 << PrettyMethod(dex_compilation_unit_->GetDexMethodIndex(), *dex_file_) 1196 << " because of non-sequential dex register pair in wide argument"; 1197 MaybeRecordStat(MethodCompilationStat::kNotCompiledMalformedOpcode); 1198 return false; 1199 } 1200 HInstruction* arg = LoadLocal(is_range ? register_index + i : args[i], type, dex_pc); 1201 invoke->SetArgumentAt(*argument_index, arg); 1202 if (is_wide) { 1203 i++; 1204 } 1205 } 1206 1207 if (*argument_index != invoke->GetNumberOfArguments()) { 1208 VLOG(compiler) << "Did not compile " 1209 << PrettyMethod(dex_compilation_unit_->GetDexMethodIndex(), *dex_file_) 1210 << " because of wrong number of arguments in invoke instruction"; 1211 MaybeRecordStat(MethodCompilationStat::kNotCompiledMalformedOpcode); 1212 return false; 1213 } 1214 1215 if (invoke->IsInvokeStaticOrDirect() && 1216 HInvokeStaticOrDirect::NeedsCurrentMethodInput( 1217 invoke->AsInvokeStaticOrDirect()->GetMethodLoadKind())) { 1218 invoke->SetArgumentAt(*argument_index, graph_->GetCurrentMethod()); 1219 (*argument_index)++; 1220 } 1221 1222 return true; 1223} 1224 1225bool HGraphBuilder::HandleInvoke(HInvoke* invoke, 1226 uint32_t number_of_vreg_arguments, 1227 uint32_t* args, 1228 uint32_t register_index, 1229 bool is_range, 1230 const char* descriptor, 1231 HClinitCheck* clinit_check) { 1232 DCHECK(!invoke->IsInvokeStaticOrDirect() || !invoke->AsInvokeStaticOrDirect()->IsStringInit()); 1233 1234 size_t start_index = 0; 1235 size_t argument_index = 0; 1236 if (invoke->GetOriginalInvokeType() != InvokeType::kStatic) { // Instance call. 1237 Temporaries temps(graph_); 1238 HInstruction* arg = LoadLocal( 1239 is_range ? register_index : args[0], Primitive::kPrimNot, invoke->GetDexPc()); 1240 HNullCheck* null_check = new (arena_) HNullCheck(arg, invoke->GetDexPc()); 1241 current_block_->AddInstruction(null_check); 1242 temps.Add(null_check); 1243 invoke->SetArgumentAt(0, null_check); 1244 start_index = 1; 1245 argument_index = 1; 1246 } 1247 1248 if (!SetupInvokeArguments(invoke, 1249 number_of_vreg_arguments, 1250 args, 1251 register_index, 1252 is_range, 1253 descriptor, 1254 start_index, 1255 &argument_index)) { 1256 return false; 1257 } 1258 1259 if (clinit_check != nullptr) { 1260 // Add the class initialization check as last input of `invoke`. 1261 DCHECK(invoke->IsInvokeStaticOrDirect()); 1262 DCHECK(invoke->AsInvokeStaticOrDirect()->GetClinitCheckRequirement() 1263 == HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit); 1264 invoke->SetArgumentAt(argument_index, clinit_check); 1265 argument_index++; 1266 } 1267 1268 current_block_->AddInstruction(invoke); 1269 latest_result_ = invoke; 1270 1271 return true; 1272} 1273 1274bool HGraphBuilder::HandleStringInit(HInvoke* invoke, 1275 uint32_t number_of_vreg_arguments, 1276 uint32_t* args, 1277 uint32_t register_index, 1278 bool is_range, 1279 const char* descriptor) { 1280 DCHECK(invoke->IsInvokeStaticOrDirect()); 1281 DCHECK(invoke->AsInvokeStaticOrDirect()->IsStringInit()); 1282 1283 size_t start_index = 1; 1284 size_t argument_index = 0; 1285 if (!SetupInvokeArguments(invoke, 1286 number_of_vreg_arguments, 1287 args, 1288 register_index, 1289 is_range, 1290 descriptor, 1291 start_index, 1292 &argument_index)) { 1293 return false; 1294 } 1295 1296 // Add move-result for StringFactory method. 1297 uint32_t orig_this_reg = is_range ? register_index : args[0]; 1298 HInstruction* new_instance = LoadLocal(orig_this_reg, Primitive::kPrimNot, invoke->GetDexPc()); 1299 invoke->SetArgumentAt(argument_index, new_instance); 1300 current_block_->AddInstruction(invoke); 1301 1302 latest_result_ = invoke; 1303 return true; 1304} 1305 1306static Primitive::Type GetFieldAccessType(const DexFile& dex_file, uint16_t field_index) { 1307 const DexFile::FieldId& field_id = dex_file.GetFieldId(field_index); 1308 const char* type = dex_file.GetFieldTypeDescriptor(field_id); 1309 return Primitive::GetType(type[0]); 1310} 1311 1312bool HGraphBuilder::BuildInstanceFieldAccess(const Instruction& instruction, 1313 uint32_t dex_pc, 1314 bool is_put) { 1315 uint32_t source_or_dest_reg = instruction.VRegA_22c(); 1316 uint32_t obj_reg = instruction.VRegB_22c(); 1317 uint16_t field_index; 1318 if (instruction.IsQuickened()) { 1319 if (!CanDecodeQuickenedInfo()) { 1320 return false; 1321 } 1322 field_index = LookupQuickenedInfo(dex_pc); 1323 } else { 1324 field_index = instruction.VRegC_22c(); 1325 } 1326 1327 ScopedObjectAccess soa(Thread::Current()); 1328 ArtField* resolved_field = 1329 compiler_driver_->ComputeInstanceFieldInfo(field_index, dex_compilation_unit_, is_put, soa); 1330 1331 1332 HInstruction* object = LoadLocal(obj_reg, Primitive::kPrimNot, dex_pc); 1333 HInstruction* null_check = new (arena_) HNullCheck(object, dex_pc); 1334 current_block_->AddInstruction(null_check); 1335 1336 Primitive::Type field_type = (resolved_field == nullptr) 1337 ? GetFieldAccessType(*dex_file_, field_index) 1338 : resolved_field->GetTypeAsPrimitiveType(); 1339 if (is_put) { 1340 Temporaries temps(graph_); 1341 // We need one temporary for the null check. 1342 temps.Add(null_check); 1343 HInstruction* value = LoadLocal(source_or_dest_reg, field_type, dex_pc); 1344 HInstruction* field_set = nullptr; 1345 if (resolved_field == nullptr) { 1346 MaybeRecordStat(MethodCompilationStat::kUnresolvedField); 1347 field_set = new (arena_) HUnresolvedInstanceFieldSet(null_check, 1348 value, 1349 field_type, 1350 field_index, 1351 dex_pc); 1352 } else { 1353 uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex(); 1354 field_set = new (arena_) HInstanceFieldSet(null_check, 1355 value, 1356 field_type, 1357 resolved_field->GetOffset(), 1358 resolved_field->IsVolatile(), 1359 field_index, 1360 class_def_index, 1361 *dex_file_, 1362 dex_compilation_unit_->GetDexCache(), 1363 dex_pc); 1364 } 1365 current_block_->AddInstruction(field_set); 1366 } else { 1367 HInstruction* field_get = nullptr; 1368 if (resolved_field == nullptr) { 1369 MaybeRecordStat(MethodCompilationStat::kUnresolvedField); 1370 field_get = new (arena_) HUnresolvedInstanceFieldGet(null_check, 1371 field_type, 1372 field_index, 1373 dex_pc); 1374 } else { 1375 uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex(); 1376 field_get = new (arena_) HInstanceFieldGet(null_check, 1377 field_type, 1378 resolved_field->GetOffset(), 1379 resolved_field->IsVolatile(), 1380 field_index, 1381 class_def_index, 1382 *dex_file_, 1383 dex_compilation_unit_->GetDexCache(), 1384 dex_pc); 1385 } 1386 current_block_->AddInstruction(field_get); 1387 UpdateLocal(source_or_dest_reg, field_get, dex_pc); 1388 } 1389 1390 return true; 1391} 1392 1393static mirror::Class* GetClassFrom(CompilerDriver* driver, 1394 const DexCompilationUnit& compilation_unit) { 1395 ScopedObjectAccess soa(Thread::Current()); 1396 StackHandleScope<2> hs(soa.Self()); 1397 const DexFile& dex_file = *compilation_unit.GetDexFile(); 1398 Handle<mirror::ClassLoader> class_loader(hs.NewHandle( 1399 soa.Decode<mirror::ClassLoader*>(compilation_unit.GetClassLoader()))); 1400 Handle<mirror::DexCache> dex_cache(hs.NewHandle( 1401 compilation_unit.GetClassLinker()->FindDexCache(soa.Self(), dex_file))); 1402 1403 return driver->ResolveCompilingMethodsClass(soa, dex_cache, class_loader, &compilation_unit); 1404} 1405 1406mirror::Class* HGraphBuilder::GetOutermostCompilingClass() const { 1407 return GetClassFrom(compiler_driver_, *outer_compilation_unit_); 1408} 1409 1410mirror::Class* HGraphBuilder::GetCompilingClass() const { 1411 return GetClassFrom(compiler_driver_, *dex_compilation_unit_); 1412} 1413 1414bool HGraphBuilder::IsOutermostCompilingClass(uint16_t type_index) const { 1415 ScopedObjectAccess soa(Thread::Current()); 1416 StackHandleScope<4> hs(soa.Self()); 1417 Handle<mirror::DexCache> dex_cache(hs.NewHandle( 1418 dex_compilation_unit_->GetClassLinker()->FindDexCache( 1419 soa.Self(), *dex_compilation_unit_->GetDexFile()))); 1420 Handle<mirror::ClassLoader> class_loader(hs.NewHandle( 1421 soa.Decode<mirror::ClassLoader*>(dex_compilation_unit_->GetClassLoader()))); 1422 Handle<mirror::Class> cls(hs.NewHandle(compiler_driver_->ResolveClass( 1423 soa, dex_cache, class_loader, type_index, dex_compilation_unit_))); 1424 Handle<mirror::Class> outer_class(hs.NewHandle(GetOutermostCompilingClass())); 1425 1426 // GetOutermostCompilingClass returns null when the class is unresolved 1427 // (e.g. if it derives from an unresolved class). This is bogus knowing that 1428 // we are compiling it. 1429 // When this happens we cannot establish a direct relation between the current 1430 // class and the outer class, so we return false. 1431 // (Note that this is only used for optimizing invokes and field accesses) 1432 return (cls.Get() != nullptr) && (outer_class.Get() == cls.Get()); 1433} 1434 1435void HGraphBuilder::BuildUnresolvedStaticFieldAccess(const Instruction& instruction, 1436 uint32_t dex_pc, 1437 bool is_put, 1438 Primitive::Type field_type) { 1439 uint32_t source_or_dest_reg = instruction.VRegA_21c(); 1440 uint16_t field_index = instruction.VRegB_21c(); 1441 1442 if (is_put) { 1443 HInstruction* value = LoadLocal(source_or_dest_reg, field_type, dex_pc); 1444 current_block_->AddInstruction( 1445 new (arena_) HUnresolvedStaticFieldSet(value, field_type, field_index, dex_pc)); 1446 } else { 1447 current_block_->AddInstruction( 1448 new (arena_) HUnresolvedStaticFieldGet(field_type, field_index, dex_pc)); 1449 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction(), dex_pc); 1450 } 1451} 1452bool HGraphBuilder::BuildStaticFieldAccess(const Instruction& instruction, 1453 uint32_t dex_pc, 1454 bool is_put) { 1455 uint32_t source_or_dest_reg = instruction.VRegA_21c(); 1456 uint16_t field_index = instruction.VRegB_21c(); 1457 1458 ScopedObjectAccess soa(Thread::Current()); 1459 StackHandleScope<5> hs(soa.Self()); 1460 Handle<mirror::DexCache> dex_cache(hs.NewHandle( 1461 dex_compilation_unit_->GetClassLinker()->FindDexCache( 1462 soa.Self(), *dex_compilation_unit_->GetDexFile()))); 1463 Handle<mirror::ClassLoader> class_loader(hs.NewHandle( 1464 soa.Decode<mirror::ClassLoader*>(dex_compilation_unit_->GetClassLoader()))); 1465 ArtField* resolved_field = compiler_driver_->ResolveField( 1466 soa, dex_cache, class_loader, dex_compilation_unit_, field_index, true); 1467 1468 if (resolved_field == nullptr) { 1469 MaybeRecordStat(MethodCompilationStat::kUnresolvedField); 1470 Primitive::Type field_type = GetFieldAccessType(*dex_file_, field_index); 1471 BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type); 1472 return true; 1473 } 1474 1475 Primitive::Type field_type = resolved_field->GetTypeAsPrimitiveType(); 1476 const DexFile& outer_dex_file = *outer_compilation_unit_->GetDexFile(); 1477 Handle<mirror::DexCache> outer_dex_cache(hs.NewHandle( 1478 outer_compilation_unit_->GetClassLinker()->FindDexCache(soa.Self(), outer_dex_file))); 1479 Handle<mirror::Class> outer_class(hs.NewHandle(GetOutermostCompilingClass())); 1480 1481 // The index at which the field's class is stored in the DexCache's type array. 1482 uint32_t storage_index; 1483 bool is_outer_class = (outer_class.Get() == resolved_field->GetDeclaringClass()); 1484 if (is_outer_class) { 1485 storage_index = outer_class->GetDexTypeIndex(); 1486 } else if (outer_dex_cache.Get() != dex_cache.Get()) { 1487 // The compiler driver cannot currently understand multiple dex caches involved. Just bailout. 1488 return false; 1489 } else { 1490 // TODO: This is rather expensive. Perf it and cache the results if needed. 1491 std::pair<bool, bool> pair = compiler_driver_->IsFastStaticField( 1492 outer_dex_cache.Get(), 1493 GetCompilingClass(), 1494 resolved_field, 1495 field_index, 1496 &storage_index); 1497 bool can_easily_access = is_put ? pair.second : pair.first; 1498 if (!can_easily_access) { 1499 MaybeRecordStat(MethodCompilationStat::kUnresolvedFieldNotAFastAccess); 1500 BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type); 1501 return true; 1502 } 1503 } 1504 1505 bool is_in_cache = 1506 compiler_driver_->CanAssumeTypeIsPresentInDexCache(outer_dex_file, storage_index); 1507 HLoadClass* constant = new (arena_) HLoadClass(graph_->GetCurrentMethod(), 1508 storage_index, 1509 outer_dex_file, 1510 is_outer_class, 1511 dex_pc, 1512 /*needs_access_check*/ false, 1513 is_in_cache); 1514 current_block_->AddInstruction(constant); 1515 1516 HInstruction* cls = constant; 1517 1518 Handle<mirror::Class> klass(hs.NewHandle(resolved_field->GetDeclaringClass())); 1519 if (!IsInitialized(klass)) { 1520 cls = new (arena_) HClinitCheck(constant, dex_pc); 1521 current_block_->AddInstruction(cls); 1522 } 1523 1524 uint16_t class_def_index = klass->GetDexClassDefIndex(); 1525 if (is_put) { 1526 // We need to keep the class alive before loading the value. 1527 Temporaries temps(graph_); 1528 temps.Add(cls); 1529 HInstruction* value = LoadLocal(source_or_dest_reg, field_type, dex_pc); 1530 DCHECK_EQ(value->GetType(), field_type); 1531 current_block_->AddInstruction(new (arena_) HStaticFieldSet(cls, 1532 value, 1533 field_type, 1534 resolved_field->GetOffset(), 1535 resolved_field->IsVolatile(), 1536 field_index, 1537 class_def_index, 1538 *dex_file_, 1539 dex_cache_, 1540 dex_pc)); 1541 } else { 1542 current_block_->AddInstruction(new (arena_) HStaticFieldGet(cls, 1543 field_type, 1544 resolved_field->GetOffset(), 1545 resolved_field->IsVolatile(), 1546 field_index, 1547 class_def_index, 1548 *dex_file_, 1549 dex_cache_, 1550 dex_pc)); 1551 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction(), dex_pc); 1552 } 1553 return true; 1554} 1555 1556void HGraphBuilder::BuildCheckedDivRem(uint16_t out_vreg, 1557 uint16_t first_vreg, 1558 int64_t second_vreg_or_constant, 1559 uint32_t dex_pc, 1560 Primitive::Type type, 1561 bool second_is_constant, 1562 bool isDiv) { 1563 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong); 1564 1565 HInstruction* first = LoadLocal(first_vreg, type, dex_pc); 1566 HInstruction* second = nullptr; 1567 if (second_is_constant) { 1568 if (type == Primitive::kPrimInt) { 1569 second = graph_->GetIntConstant(second_vreg_or_constant, dex_pc); 1570 } else { 1571 second = graph_->GetLongConstant(second_vreg_or_constant, dex_pc); 1572 } 1573 } else { 1574 second = LoadLocal(second_vreg_or_constant, type, dex_pc); 1575 } 1576 1577 if (!second_is_constant 1578 || (type == Primitive::kPrimInt && second->AsIntConstant()->GetValue() == 0) 1579 || (type == Primitive::kPrimLong && second->AsLongConstant()->GetValue() == 0)) { 1580 second = new (arena_) HDivZeroCheck(second, dex_pc); 1581 Temporaries temps(graph_); 1582 current_block_->AddInstruction(second); 1583 temps.Add(current_block_->GetLastInstruction()); 1584 } 1585 1586 if (isDiv) { 1587 current_block_->AddInstruction(new (arena_) HDiv(type, first, second, dex_pc)); 1588 } else { 1589 current_block_->AddInstruction(new (arena_) HRem(type, first, second, dex_pc)); 1590 } 1591 UpdateLocal(out_vreg, current_block_->GetLastInstruction(), dex_pc); 1592} 1593 1594void HGraphBuilder::BuildArrayAccess(const Instruction& instruction, 1595 uint32_t dex_pc, 1596 bool is_put, 1597 Primitive::Type anticipated_type) { 1598 uint8_t source_or_dest_reg = instruction.VRegA_23x(); 1599 uint8_t array_reg = instruction.VRegB_23x(); 1600 uint8_t index_reg = instruction.VRegC_23x(); 1601 1602 // We need one temporary for the null check, one for the index, and one for the length. 1603 Temporaries temps(graph_); 1604 1605 HInstruction* object = LoadLocal(array_reg, Primitive::kPrimNot, dex_pc); 1606 object = new (arena_) HNullCheck(object, dex_pc); 1607 current_block_->AddInstruction(object); 1608 temps.Add(object); 1609 1610 HInstruction* length = new (arena_) HArrayLength(object, dex_pc); 1611 current_block_->AddInstruction(length); 1612 temps.Add(length); 1613 HInstruction* index = LoadLocal(index_reg, Primitive::kPrimInt, dex_pc); 1614 index = new (arena_) HBoundsCheck(index, length, dex_pc); 1615 current_block_->AddInstruction(index); 1616 temps.Add(index); 1617 if (is_put) { 1618 HInstruction* value = LoadLocal(source_or_dest_reg, anticipated_type, dex_pc); 1619 // TODO: Insert a type check node if the type is Object. 1620 current_block_->AddInstruction(new (arena_) HArraySet( 1621 object, index, value, anticipated_type, dex_pc)); 1622 } else { 1623 current_block_->AddInstruction(new (arena_) HArrayGet(object, index, anticipated_type, dex_pc)); 1624 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction(), dex_pc); 1625 } 1626 graph_->SetHasBoundsChecks(true); 1627} 1628 1629void HGraphBuilder::BuildFilledNewArray(uint32_t dex_pc, 1630 uint32_t type_index, 1631 uint32_t number_of_vreg_arguments, 1632 bool is_range, 1633 uint32_t* args, 1634 uint32_t register_index) { 1635 HInstruction* length = graph_->GetIntConstant(number_of_vreg_arguments, dex_pc); 1636 bool finalizable; 1637 QuickEntrypointEnum entrypoint = NeedsAccessCheck(type_index, &finalizable) 1638 ? kQuickAllocArrayWithAccessCheck 1639 : kQuickAllocArray; 1640 HInstruction* object = new (arena_) HNewArray(length, 1641 graph_->GetCurrentMethod(), 1642 dex_pc, 1643 type_index, 1644 *dex_compilation_unit_->GetDexFile(), 1645 entrypoint); 1646 current_block_->AddInstruction(object); 1647 1648 const char* descriptor = dex_file_->StringByTypeIdx(type_index); 1649 DCHECK_EQ(descriptor[0], '[') << descriptor; 1650 char primitive = descriptor[1]; 1651 DCHECK(primitive == 'I' 1652 || primitive == 'L' 1653 || primitive == '[') << descriptor; 1654 bool is_reference_array = (primitive == 'L') || (primitive == '['); 1655 Primitive::Type type = is_reference_array ? Primitive::kPrimNot : Primitive::kPrimInt; 1656 1657 Temporaries temps(graph_); 1658 temps.Add(object); 1659 for (size_t i = 0; i < number_of_vreg_arguments; ++i) { 1660 HInstruction* value = LoadLocal(is_range ? register_index + i : args[i], type, dex_pc); 1661 HInstruction* index = graph_->GetIntConstant(i, dex_pc); 1662 current_block_->AddInstruction( 1663 new (arena_) HArraySet(object, index, value, type, dex_pc)); 1664 } 1665 latest_result_ = object; 1666} 1667 1668template <typename T> 1669void HGraphBuilder::BuildFillArrayData(HInstruction* object, 1670 const T* data, 1671 uint32_t element_count, 1672 Primitive::Type anticipated_type, 1673 uint32_t dex_pc) { 1674 for (uint32_t i = 0; i < element_count; ++i) { 1675 HInstruction* index = graph_->GetIntConstant(i, dex_pc); 1676 HInstruction* value = graph_->GetIntConstant(data[i], dex_pc); 1677 current_block_->AddInstruction(new (arena_) HArraySet( 1678 object, index, value, anticipated_type, dex_pc)); 1679 } 1680} 1681 1682void HGraphBuilder::BuildFillArrayData(const Instruction& instruction, uint32_t dex_pc) { 1683 Temporaries temps(graph_); 1684 HInstruction* array = LoadLocal(instruction.VRegA_31t(), Primitive::kPrimNot, dex_pc); 1685 HNullCheck* null_check = new (arena_) HNullCheck(array, dex_pc); 1686 current_block_->AddInstruction(null_check); 1687 temps.Add(null_check); 1688 1689 HInstruction* length = new (arena_) HArrayLength(null_check, dex_pc); 1690 current_block_->AddInstruction(length); 1691 1692 int32_t payload_offset = instruction.VRegB_31t() + dex_pc; 1693 const Instruction::ArrayDataPayload* payload = 1694 reinterpret_cast<const Instruction::ArrayDataPayload*>(code_start_ + payload_offset); 1695 const uint8_t* data = payload->data; 1696 uint32_t element_count = payload->element_count; 1697 1698 // Implementation of this DEX instruction seems to be that the bounds check is 1699 // done before doing any stores. 1700 HInstruction* last_index = graph_->GetIntConstant(payload->element_count - 1, dex_pc); 1701 current_block_->AddInstruction(new (arena_) HBoundsCheck(last_index, length, dex_pc)); 1702 1703 switch (payload->element_width) { 1704 case 1: 1705 BuildFillArrayData(null_check, 1706 reinterpret_cast<const int8_t*>(data), 1707 element_count, 1708 Primitive::kPrimByte, 1709 dex_pc); 1710 break; 1711 case 2: 1712 BuildFillArrayData(null_check, 1713 reinterpret_cast<const int16_t*>(data), 1714 element_count, 1715 Primitive::kPrimShort, 1716 dex_pc); 1717 break; 1718 case 4: 1719 BuildFillArrayData(null_check, 1720 reinterpret_cast<const int32_t*>(data), 1721 element_count, 1722 Primitive::kPrimInt, 1723 dex_pc); 1724 break; 1725 case 8: 1726 BuildFillWideArrayData(null_check, 1727 reinterpret_cast<const int64_t*>(data), 1728 element_count, 1729 dex_pc); 1730 break; 1731 default: 1732 LOG(FATAL) << "Unknown element width for " << payload->element_width; 1733 } 1734 graph_->SetHasBoundsChecks(true); 1735} 1736 1737void HGraphBuilder::BuildFillWideArrayData(HInstruction* object, 1738 const int64_t* data, 1739 uint32_t element_count, 1740 uint32_t dex_pc) { 1741 for (uint32_t i = 0; i < element_count; ++i) { 1742 HInstruction* index = graph_->GetIntConstant(i, dex_pc); 1743 HInstruction* value = graph_->GetLongConstant(data[i], dex_pc); 1744 current_block_->AddInstruction(new (arena_) HArraySet( 1745 object, index, value, Primitive::kPrimLong, dex_pc)); 1746 } 1747} 1748 1749static TypeCheckKind ComputeTypeCheckKind(Handle<mirror::Class> cls) 1750 SHARED_REQUIRES(Locks::mutator_lock_) { 1751 if (cls.Get() == nullptr) { 1752 return TypeCheckKind::kUnresolvedCheck; 1753 } else if (cls->IsInterface()) { 1754 return TypeCheckKind::kInterfaceCheck; 1755 } else if (cls->IsArrayClass()) { 1756 if (cls->GetComponentType()->IsObjectClass()) { 1757 return TypeCheckKind::kArrayObjectCheck; 1758 } else if (cls->CannotBeAssignedFromOtherTypes()) { 1759 return TypeCheckKind::kExactCheck; 1760 } else { 1761 return TypeCheckKind::kArrayCheck; 1762 } 1763 } else if (cls->IsFinal()) { 1764 return TypeCheckKind::kExactCheck; 1765 } else if (cls->IsAbstract()) { 1766 return TypeCheckKind::kAbstractClassCheck; 1767 } else { 1768 return TypeCheckKind::kClassHierarchyCheck; 1769 } 1770} 1771 1772void HGraphBuilder::BuildTypeCheck(const Instruction& instruction, 1773 uint8_t destination, 1774 uint8_t reference, 1775 uint16_t type_index, 1776 uint32_t dex_pc) { 1777 bool type_known_final, type_known_abstract, use_declaring_class; 1778 bool can_access = compiler_driver_->CanAccessTypeWithoutChecks( 1779 dex_compilation_unit_->GetDexMethodIndex(), 1780 *dex_compilation_unit_->GetDexFile(), 1781 type_index, 1782 &type_known_final, 1783 &type_known_abstract, 1784 &use_declaring_class); 1785 1786 ScopedObjectAccess soa(Thread::Current()); 1787 StackHandleScope<2> hs(soa.Self()); 1788 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile(); 1789 Handle<mirror::DexCache> dex_cache(hs.NewHandle( 1790 dex_compilation_unit_->GetClassLinker()->FindDexCache(soa.Self(), dex_file))); 1791 Handle<mirror::Class> resolved_class(hs.NewHandle(dex_cache->GetResolvedType(type_index))); 1792 1793 HInstruction* object = LoadLocal(reference, Primitive::kPrimNot, dex_pc); 1794 HLoadClass* cls = new (arena_) HLoadClass( 1795 graph_->GetCurrentMethod(), 1796 type_index, 1797 dex_file, 1798 IsOutermostCompilingClass(type_index), 1799 dex_pc, 1800 !can_access, 1801 compiler_driver_->CanAssumeTypeIsPresentInDexCache(dex_file, type_index)); 1802 current_block_->AddInstruction(cls); 1803 1804 // The class needs a temporary before being used by the type check. 1805 Temporaries temps(graph_); 1806 temps.Add(cls); 1807 1808 TypeCheckKind check_kind = ComputeTypeCheckKind(resolved_class); 1809 if (instruction.Opcode() == Instruction::INSTANCE_OF) { 1810 current_block_->AddInstruction(new (arena_) HInstanceOf(object, cls, check_kind, dex_pc)); 1811 UpdateLocal(destination, current_block_->GetLastInstruction(), dex_pc); 1812 } else { 1813 DCHECK_EQ(instruction.Opcode(), Instruction::CHECK_CAST); 1814 // We emit a CheckCast followed by a BoundType. CheckCast is a statement 1815 // which may throw. If it succeeds BoundType sets the new type of `object` 1816 // for all subsequent uses. 1817 current_block_->AddInstruction(new (arena_) HCheckCast(object, cls, check_kind, dex_pc)); 1818 current_block_->AddInstruction(new (arena_) HBoundType(object, dex_pc)); 1819 UpdateLocal(reference, current_block_->GetLastInstruction(), dex_pc); 1820 } 1821} 1822 1823bool HGraphBuilder::NeedsAccessCheck(uint32_t type_index, bool* finalizable) const { 1824 return !compiler_driver_->CanAccessInstantiableTypeWithoutChecks( 1825 dex_compilation_unit_->GetDexMethodIndex(), *dex_file_, type_index, finalizable); 1826} 1827 1828void HGraphBuilder::BuildSwitchJumpTable(const SwitchTable& table, 1829 const Instruction& instruction, 1830 HInstruction* value, 1831 uint32_t dex_pc) { 1832 // Add the successor blocks to the current block. 1833 uint16_t num_entries = table.GetNumEntries(); 1834 for (size_t i = 1; i <= num_entries; i++) { 1835 int32_t target_offset = table.GetEntryAt(i); 1836 HBasicBlock* case_target = FindBlockStartingAt(dex_pc + target_offset); 1837 DCHECK(case_target != nullptr); 1838 1839 // Add the target block as a successor. 1840 current_block_->AddSuccessor(case_target); 1841 } 1842 1843 // Add the default target block as the last successor. 1844 HBasicBlock* default_target = FindBlockStartingAt(dex_pc + instruction.SizeInCodeUnits()); 1845 DCHECK(default_target != nullptr); 1846 current_block_->AddSuccessor(default_target); 1847 1848 // Now add the Switch instruction. 1849 int32_t starting_key = table.GetEntryAt(0); 1850 current_block_->AddInstruction( 1851 new (arena_) HPackedSwitch(starting_key, num_entries, value, dex_pc)); 1852 // This block ends with control flow. 1853 current_block_ = nullptr; 1854} 1855 1856void HGraphBuilder::BuildPackedSwitch(const Instruction& instruction, uint32_t dex_pc) { 1857 // Verifier guarantees that the payload for PackedSwitch contains: 1858 // (a) number of entries (may be zero) 1859 // (b) first and lowest switch case value (entry 0, always present) 1860 // (c) list of target pcs (entries 1 <= i <= N) 1861 SwitchTable table(instruction, dex_pc, false); 1862 1863 // Value to test against. 1864 HInstruction* value = LoadLocal(instruction.VRegA(), Primitive::kPrimInt, dex_pc); 1865 1866 // Starting key value. 1867 int32_t starting_key = table.GetEntryAt(0); 1868 1869 // Retrieve number of entries. 1870 uint16_t num_entries = table.GetNumEntries(); 1871 if (num_entries == 0) { 1872 return; 1873 } 1874 1875 // Don't use a packed switch if there are very few entries. 1876 if (num_entries > kSmallSwitchThreshold) { 1877 BuildSwitchJumpTable(table, instruction, value, dex_pc); 1878 } else { 1879 // Chained cmp-and-branch, starting from starting_key. 1880 for (size_t i = 1; i <= num_entries; i++) { 1881 BuildSwitchCaseHelper(instruction, 1882 i, 1883 i == num_entries, 1884 table, 1885 value, 1886 starting_key + i - 1, 1887 table.GetEntryAt(i), 1888 dex_pc); 1889 } 1890 } 1891} 1892 1893void HGraphBuilder::BuildSparseSwitch(const Instruction& instruction, uint32_t dex_pc) { 1894 // Verifier guarantees that the payload for SparseSwitch contains: 1895 // (a) number of entries (may be zero) 1896 // (b) sorted key values (entries 0 <= i < N) 1897 // (c) target pcs corresponding to the switch values (entries N <= i < 2*N) 1898 SwitchTable table(instruction, dex_pc, true); 1899 1900 // Value to test against. 1901 HInstruction* value = LoadLocal(instruction.VRegA(), Primitive::kPrimInt, dex_pc); 1902 1903 uint16_t num_entries = table.GetNumEntries(); 1904 1905 for (size_t i = 0; i < num_entries; i++) { 1906 BuildSwitchCaseHelper(instruction, i, i == static_cast<size_t>(num_entries) - 1, table, value, 1907 table.GetEntryAt(i), table.GetEntryAt(i + num_entries), dex_pc); 1908 } 1909} 1910 1911void HGraphBuilder::BuildSwitchCaseHelper(const Instruction& instruction, size_t index, 1912 bool is_last_case, const SwitchTable& table, 1913 HInstruction* value, int32_t case_value_int, 1914 int32_t target_offset, uint32_t dex_pc) { 1915 HBasicBlock* case_target = FindBlockStartingAt(dex_pc + target_offset); 1916 DCHECK(case_target != nullptr); 1917 PotentiallyAddSuspendCheck(case_target, dex_pc); 1918 1919 // The current case's value. 1920 HInstruction* this_case_value = graph_->GetIntConstant(case_value_int, dex_pc); 1921 1922 // Compare value and this_case_value. 1923 HEqual* comparison = new (arena_) HEqual(value, this_case_value, dex_pc); 1924 current_block_->AddInstruction(comparison); 1925 HInstruction* ifinst = new (arena_) HIf(comparison, dex_pc); 1926 current_block_->AddInstruction(ifinst); 1927 1928 // Case hit: use the target offset to determine where to go. 1929 current_block_->AddSuccessor(case_target); 1930 1931 // Case miss: go to the next case (or default fall-through). 1932 // When there is a next case, we use the block stored with the table offset representing this 1933 // case (that is where we registered them in ComputeBranchTargets). 1934 // When there is no next case, we use the following instruction. 1935 // TODO: Find a good way to peel the last iteration to avoid conditional, but still have re-use. 1936 if (!is_last_case) { 1937 HBasicBlock* next_case_target = FindBlockStartingAt(table.GetDexPcForIndex(index)); 1938 DCHECK(next_case_target != nullptr); 1939 current_block_->AddSuccessor(next_case_target); 1940 1941 // Need to manually add the block, as there is no dex-pc transition for the cases. 1942 graph_->AddBlock(next_case_target); 1943 1944 current_block_ = next_case_target; 1945 } else { 1946 HBasicBlock* default_target = FindBlockStartingAt(dex_pc + instruction.SizeInCodeUnits()); 1947 DCHECK(default_target != nullptr); 1948 current_block_->AddSuccessor(default_target); 1949 current_block_ = nullptr; 1950 } 1951} 1952 1953void HGraphBuilder::PotentiallyAddSuspendCheck(HBasicBlock* target, uint32_t dex_pc) { 1954 int32_t target_offset = target->GetDexPc() - dex_pc; 1955 if (target_offset <= 0) { 1956 // DX generates back edges to the first encountered return. We can save 1957 // time of later passes by not adding redundant suspend checks. 1958 HInstruction* last_in_target = target->GetLastInstruction(); 1959 if (last_in_target != nullptr && 1960 (last_in_target->IsReturn() || last_in_target->IsReturnVoid())) { 1961 return; 1962 } 1963 1964 // Add a suspend check to backward branches which may potentially loop. We 1965 // can remove them after we recognize loops in the graph. 1966 current_block_->AddInstruction(new (arena_) HSuspendCheck(dex_pc)); 1967 } 1968} 1969 1970bool HGraphBuilder::CanDecodeQuickenedInfo() const { 1971 return interpreter_metadata_ != nullptr; 1972} 1973 1974uint16_t HGraphBuilder::LookupQuickenedInfo(uint32_t dex_pc) { 1975 DCHECK(interpreter_metadata_ != nullptr); 1976 uint32_t dex_pc_in_map = DecodeUnsignedLeb128(&interpreter_metadata_); 1977 DCHECK_EQ(dex_pc, dex_pc_in_map); 1978 return DecodeUnsignedLeb128(&interpreter_metadata_); 1979} 1980 1981bool HGraphBuilder::AnalyzeDexInstruction(const Instruction& instruction, uint32_t dex_pc) { 1982 if (current_block_ == nullptr) { 1983 return true; // Dead code 1984 } 1985 1986 switch (instruction.Opcode()) { 1987 case Instruction::CONST_4: { 1988 int32_t register_index = instruction.VRegA(); 1989 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_11n(), dex_pc); 1990 UpdateLocal(register_index, constant, dex_pc); 1991 break; 1992 } 1993 1994 case Instruction::CONST_16: { 1995 int32_t register_index = instruction.VRegA(); 1996 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21s(), dex_pc); 1997 UpdateLocal(register_index, constant, dex_pc); 1998 break; 1999 } 2000 2001 case Instruction::CONST: { 2002 int32_t register_index = instruction.VRegA(); 2003 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_31i(), dex_pc); 2004 UpdateLocal(register_index, constant, dex_pc); 2005 break; 2006 } 2007 2008 case Instruction::CONST_HIGH16: { 2009 int32_t register_index = instruction.VRegA(); 2010 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21h() << 16, dex_pc); 2011 UpdateLocal(register_index, constant, dex_pc); 2012 break; 2013 } 2014 2015 case Instruction::CONST_WIDE_16: { 2016 int32_t register_index = instruction.VRegA(); 2017 // Get 16 bits of constant value, sign extended to 64 bits. 2018 int64_t value = instruction.VRegB_21s(); 2019 value <<= 48; 2020 value >>= 48; 2021 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc); 2022 UpdateLocal(register_index, constant, dex_pc); 2023 break; 2024 } 2025 2026 case Instruction::CONST_WIDE_32: { 2027 int32_t register_index = instruction.VRegA(); 2028 // Get 32 bits of constant value, sign extended to 64 bits. 2029 int64_t value = instruction.VRegB_31i(); 2030 value <<= 32; 2031 value >>= 32; 2032 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc); 2033 UpdateLocal(register_index, constant, dex_pc); 2034 break; 2035 } 2036 2037 case Instruction::CONST_WIDE: { 2038 int32_t register_index = instruction.VRegA(); 2039 HLongConstant* constant = graph_->GetLongConstant(instruction.VRegB_51l(), dex_pc); 2040 UpdateLocal(register_index, constant, dex_pc); 2041 break; 2042 } 2043 2044 case Instruction::CONST_WIDE_HIGH16: { 2045 int32_t register_index = instruction.VRegA(); 2046 int64_t value = static_cast<int64_t>(instruction.VRegB_21h()) << 48; 2047 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc); 2048 UpdateLocal(register_index, constant, dex_pc); 2049 break; 2050 } 2051 2052 // Note that the SSA building will refine the types. 2053 case Instruction::MOVE: 2054 case Instruction::MOVE_FROM16: 2055 case Instruction::MOVE_16: { 2056 HInstruction* value = LoadLocal(instruction.VRegB(), Primitive::kPrimInt, dex_pc); 2057 UpdateLocal(instruction.VRegA(), value, dex_pc); 2058 break; 2059 } 2060 2061 // Note that the SSA building will refine the types. 2062 case Instruction::MOVE_WIDE: 2063 case Instruction::MOVE_WIDE_FROM16: 2064 case Instruction::MOVE_WIDE_16: { 2065 HInstruction* value = LoadLocal(instruction.VRegB(), Primitive::kPrimLong, dex_pc); 2066 UpdateLocal(instruction.VRegA(), value, dex_pc); 2067 break; 2068 } 2069 2070 case Instruction::MOVE_OBJECT: 2071 case Instruction::MOVE_OBJECT_16: 2072 case Instruction::MOVE_OBJECT_FROM16: { 2073 HInstruction* value = LoadLocal(instruction.VRegB(), Primitive::kPrimNot, dex_pc); 2074 UpdateLocal(instruction.VRegA(), value, dex_pc); 2075 break; 2076 } 2077 2078 case Instruction::RETURN_VOID_NO_BARRIER: 2079 case Instruction::RETURN_VOID: { 2080 BuildReturn(instruction, Primitive::kPrimVoid, dex_pc); 2081 break; 2082 } 2083 2084#define IF_XX(comparison, cond) \ 2085 case Instruction::IF_##cond: If_22t<comparison>(instruction, dex_pc); break; \ 2086 case Instruction::IF_##cond##Z: If_21t<comparison>(instruction, dex_pc); break 2087 2088 IF_XX(HEqual, EQ); 2089 IF_XX(HNotEqual, NE); 2090 IF_XX(HLessThan, LT); 2091 IF_XX(HLessThanOrEqual, LE); 2092 IF_XX(HGreaterThan, GT); 2093 IF_XX(HGreaterThanOrEqual, GE); 2094 2095 case Instruction::GOTO: 2096 case Instruction::GOTO_16: 2097 case Instruction::GOTO_32: { 2098 int32_t offset = instruction.GetTargetOffset(); 2099 HBasicBlock* target = FindBlockStartingAt(offset + dex_pc); 2100 DCHECK(target != nullptr); 2101 PotentiallyAddSuspendCheck(target, dex_pc); 2102 current_block_->AddInstruction(new (arena_) HGoto(dex_pc)); 2103 current_block_->AddSuccessor(target); 2104 current_block_ = nullptr; 2105 break; 2106 } 2107 2108 case Instruction::RETURN: { 2109 BuildReturn(instruction, return_type_, dex_pc); 2110 break; 2111 } 2112 2113 case Instruction::RETURN_OBJECT: { 2114 BuildReturn(instruction, return_type_, dex_pc); 2115 break; 2116 } 2117 2118 case Instruction::RETURN_WIDE: { 2119 BuildReturn(instruction, return_type_, dex_pc); 2120 break; 2121 } 2122 2123 case Instruction::INVOKE_DIRECT: 2124 case Instruction::INVOKE_INTERFACE: 2125 case Instruction::INVOKE_STATIC: 2126 case Instruction::INVOKE_SUPER: 2127 case Instruction::INVOKE_VIRTUAL: 2128 case Instruction::INVOKE_VIRTUAL_QUICK: { 2129 uint16_t method_idx; 2130 if (instruction.Opcode() == Instruction::INVOKE_VIRTUAL_QUICK) { 2131 if (!CanDecodeQuickenedInfo()) { 2132 return false; 2133 } 2134 method_idx = LookupQuickenedInfo(dex_pc); 2135 } else { 2136 method_idx = instruction.VRegB_35c(); 2137 } 2138 uint32_t number_of_vreg_arguments = instruction.VRegA_35c(); 2139 uint32_t args[5]; 2140 instruction.GetVarArgs(args); 2141 if (!BuildInvoke(instruction, dex_pc, method_idx, 2142 number_of_vreg_arguments, false, args, -1)) { 2143 return false; 2144 } 2145 break; 2146 } 2147 2148 case Instruction::INVOKE_DIRECT_RANGE: 2149 case Instruction::INVOKE_INTERFACE_RANGE: 2150 case Instruction::INVOKE_STATIC_RANGE: 2151 case Instruction::INVOKE_SUPER_RANGE: 2152 case Instruction::INVOKE_VIRTUAL_RANGE: 2153 case Instruction::INVOKE_VIRTUAL_RANGE_QUICK: { 2154 uint16_t method_idx; 2155 if (instruction.Opcode() == Instruction::INVOKE_VIRTUAL_RANGE_QUICK) { 2156 if (!CanDecodeQuickenedInfo()) { 2157 return false; 2158 } 2159 method_idx = LookupQuickenedInfo(dex_pc); 2160 } else { 2161 method_idx = instruction.VRegB_3rc(); 2162 } 2163 uint32_t number_of_vreg_arguments = instruction.VRegA_3rc(); 2164 uint32_t register_index = instruction.VRegC(); 2165 if (!BuildInvoke(instruction, dex_pc, method_idx, 2166 number_of_vreg_arguments, true, nullptr, register_index)) { 2167 return false; 2168 } 2169 break; 2170 } 2171 2172 case Instruction::NEG_INT: { 2173 Unop_12x<HNeg>(instruction, Primitive::kPrimInt, dex_pc); 2174 break; 2175 } 2176 2177 case Instruction::NEG_LONG: { 2178 Unop_12x<HNeg>(instruction, Primitive::kPrimLong, dex_pc); 2179 break; 2180 } 2181 2182 case Instruction::NEG_FLOAT: { 2183 Unop_12x<HNeg>(instruction, Primitive::kPrimFloat, dex_pc); 2184 break; 2185 } 2186 2187 case Instruction::NEG_DOUBLE: { 2188 Unop_12x<HNeg>(instruction, Primitive::kPrimDouble, dex_pc); 2189 break; 2190 } 2191 2192 case Instruction::NOT_INT: { 2193 Unop_12x<HNot>(instruction, Primitive::kPrimInt, dex_pc); 2194 break; 2195 } 2196 2197 case Instruction::NOT_LONG: { 2198 Unop_12x<HNot>(instruction, Primitive::kPrimLong, dex_pc); 2199 break; 2200 } 2201 2202 case Instruction::INT_TO_LONG: { 2203 Conversion_12x(instruction, Primitive::kPrimInt, Primitive::kPrimLong, dex_pc); 2204 break; 2205 } 2206 2207 case Instruction::INT_TO_FLOAT: { 2208 Conversion_12x(instruction, Primitive::kPrimInt, Primitive::kPrimFloat, dex_pc); 2209 break; 2210 } 2211 2212 case Instruction::INT_TO_DOUBLE: { 2213 Conversion_12x(instruction, Primitive::kPrimInt, Primitive::kPrimDouble, dex_pc); 2214 break; 2215 } 2216 2217 case Instruction::LONG_TO_INT: { 2218 Conversion_12x(instruction, Primitive::kPrimLong, Primitive::kPrimInt, dex_pc); 2219 break; 2220 } 2221 2222 case Instruction::LONG_TO_FLOAT: { 2223 Conversion_12x(instruction, Primitive::kPrimLong, Primitive::kPrimFloat, dex_pc); 2224 break; 2225 } 2226 2227 case Instruction::LONG_TO_DOUBLE: { 2228 Conversion_12x(instruction, Primitive::kPrimLong, Primitive::kPrimDouble, dex_pc); 2229 break; 2230 } 2231 2232 case Instruction::FLOAT_TO_INT: { 2233 Conversion_12x(instruction, Primitive::kPrimFloat, Primitive::kPrimInt, dex_pc); 2234 break; 2235 } 2236 2237 case Instruction::FLOAT_TO_LONG: { 2238 Conversion_12x(instruction, Primitive::kPrimFloat, Primitive::kPrimLong, dex_pc); 2239 break; 2240 } 2241 2242 case Instruction::FLOAT_TO_DOUBLE: { 2243 Conversion_12x(instruction, Primitive::kPrimFloat, Primitive::kPrimDouble, dex_pc); 2244 break; 2245 } 2246 2247 case Instruction::DOUBLE_TO_INT: { 2248 Conversion_12x(instruction, Primitive::kPrimDouble, Primitive::kPrimInt, dex_pc); 2249 break; 2250 } 2251 2252 case Instruction::DOUBLE_TO_LONG: { 2253 Conversion_12x(instruction, Primitive::kPrimDouble, Primitive::kPrimLong, dex_pc); 2254 break; 2255 } 2256 2257 case Instruction::DOUBLE_TO_FLOAT: { 2258 Conversion_12x(instruction, Primitive::kPrimDouble, Primitive::kPrimFloat, dex_pc); 2259 break; 2260 } 2261 2262 case Instruction::INT_TO_BYTE: { 2263 Conversion_12x(instruction, Primitive::kPrimInt, Primitive::kPrimByte, dex_pc); 2264 break; 2265 } 2266 2267 case Instruction::INT_TO_SHORT: { 2268 Conversion_12x(instruction, Primitive::kPrimInt, Primitive::kPrimShort, dex_pc); 2269 break; 2270 } 2271 2272 case Instruction::INT_TO_CHAR: { 2273 Conversion_12x(instruction, Primitive::kPrimInt, Primitive::kPrimChar, dex_pc); 2274 break; 2275 } 2276 2277 case Instruction::ADD_INT: { 2278 Binop_23x<HAdd>(instruction, Primitive::kPrimInt, dex_pc); 2279 break; 2280 } 2281 2282 case Instruction::ADD_LONG: { 2283 Binop_23x<HAdd>(instruction, Primitive::kPrimLong, dex_pc); 2284 break; 2285 } 2286 2287 case Instruction::ADD_DOUBLE: { 2288 Binop_23x<HAdd>(instruction, Primitive::kPrimDouble, dex_pc); 2289 break; 2290 } 2291 2292 case Instruction::ADD_FLOAT: { 2293 Binop_23x<HAdd>(instruction, Primitive::kPrimFloat, dex_pc); 2294 break; 2295 } 2296 2297 case Instruction::SUB_INT: { 2298 Binop_23x<HSub>(instruction, Primitive::kPrimInt, dex_pc); 2299 break; 2300 } 2301 2302 case Instruction::SUB_LONG: { 2303 Binop_23x<HSub>(instruction, Primitive::kPrimLong, dex_pc); 2304 break; 2305 } 2306 2307 case Instruction::SUB_FLOAT: { 2308 Binop_23x<HSub>(instruction, Primitive::kPrimFloat, dex_pc); 2309 break; 2310 } 2311 2312 case Instruction::SUB_DOUBLE: { 2313 Binop_23x<HSub>(instruction, Primitive::kPrimDouble, dex_pc); 2314 break; 2315 } 2316 2317 case Instruction::ADD_INT_2ADDR: { 2318 Binop_12x<HAdd>(instruction, Primitive::kPrimInt, dex_pc); 2319 break; 2320 } 2321 2322 case Instruction::MUL_INT: { 2323 Binop_23x<HMul>(instruction, Primitive::kPrimInt, dex_pc); 2324 break; 2325 } 2326 2327 case Instruction::MUL_LONG: { 2328 Binop_23x<HMul>(instruction, Primitive::kPrimLong, dex_pc); 2329 break; 2330 } 2331 2332 case Instruction::MUL_FLOAT: { 2333 Binop_23x<HMul>(instruction, Primitive::kPrimFloat, dex_pc); 2334 break; 2335 } 2336 2337 case Instruction::MUL_DOUBLE: { 2338 Binop_23x<HMul>(instruction, Primitive::kPrimDouble, dex_pc); 2339 break; 2340 } 2341 2342 case Instruction::DIV_INT: { 2343 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(), 2344 dex_pc, Primitive::kPrimInt, false, true); 2345 break; 2346 } 2347 2348 case Instruction::DIV_LONG: { 2349 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(), 2350 dex_pc, Primitive::kPrimLong, false, true); 2351 break; 2352 } 2353 2354 case Instruction::DIV_FLOAT: { 2355 Binop_23x<HDiv>(instruction, Primitive::kPrimFloat, dex_pc); 2356 break; 2357 } 2358 2359 case Instruction::DIV_DOUBLE: { 2360 Binop_23x<HDiv>(instruction, Primitive::kPrimDouble, dex_pc); 2361 break; 2362 } 2363 2364 case Instruction::REM_INT: { 2365 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(), 2366 dex_pc, Primitive::kPrimInt, false, false); 2367 break; 2368 } 2369 2370 case Instruction::REM_LONG: { 2371 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(), 2372 dex_pc, Primitive::kPrimLong, false, false); 2373 break; 2374 } 2375 2376 case Instruction::REM_FLOAT: { 2377 Binop_23x<HRem>(instruction, Primitive::kPrimFloat, dex_pc); 2378 break; 2379 } 2380 2381 case Instruction::REM_DOUBLE: { 2382 Binop_23x<HRem>(instruction, Primitive::kPrimDouble, dex_pc); 2383 break; 2384 } 2385 2386 case Instruction::AND_INT: { 2387 Binop_23x<HAnd>(instruction, Primitive::kPrimInt, dex_pc); 2388 break; 2389 } 2390 2391 case Instruction::AND_LONG: { 2392 Binop_23x<HAnd>(instruction, Primitive::kPrimLong, dex_pc); 2393 break; 2394 } 2395 2396 case Instruction::SHL_INT: { 2397 Binop_23x_shift<HShl>(instruction, Primitive::kPrimInt, dex_pc); 2398 break; 2399 } 2400 2401 case Instruction::SHL_LONG: { 2402 Binop_23x_shift<HShl>(instruction, Primitive::kPrimLong, dex_pc); 2403 break; 2404 } 2405 2406 case Instruction::SHR_INT: { 2407 Binop_23x_shift<HShr>(instruction, Primitive::kPrimInt, dex_pc); 2408 break; 2409 } 2410 2411 case Instruction::SHR_LONG: { 2412 Binop_23x_shift<HShr>(instruction, Primitive::kPrimLong, dex_pc); 2413 break; 2414 } 2415 2416 case Instruction::USHR_INT: { 2417 Binop_23x_shift<HUShr>(instruction, Primitive::kPrimInt, dex_pc); 2418 break; 2419 } 2420 2421 case Instruction::USHR_LONG: { 2422 Binop_23x_shift<HUShr>(instruction, Primitive::kPrimLong, dex_pc); 2423 break; 2424 } 2425 2426 case Instruction::OR_INT: { 2427 Binop_23x<HOr>(instruction, Primitive::kPrimInt, dex_pc); 2428 break; 2429 } 2430 2431 case Instruction::OR_LONG: { 2432 Binop_23x<HOr>(instruction, Primitive::kPrimLong, dex_pc); 2433 break; 2434 } 2435 2436 case Instruction::XOR_INT: { 2437 Binop_23x<HXor>(instruction, Primitive::kPrimInt, dex_pc); 2438 break; 2439 } 2440 2441 case Instruction::XOR_LONG: { 2442 Binop_23x<HXor>(instruction, Primitive::kPrimLong, dex_pc); 2443 break; 2444 } 2445 2446 case Instruction::ADD_LONG_2ADDR: { 2447 Binop_12x<HAdd>(instruction, Primitive::kPrimLong, dex_pc); 2448 break; 2449 } 2450 2451 case Instruction::ADD_DOUBLE_2ADDR: { 2452 Binop_12x<HAdd>(instruction, Primitive::kPrimDouble, dex_pc); 2453 break; 2454 } 2455 2456 case Instruction::ADD_FLOAT_2ADDR: { 2457 Binop_12x<HAdd>(instruction, Primitive::kPrimFloat, dex_pc); 2458 break; 2459 } 2460 2461 case Instruction::SUB_INT_2ADDR: { 2462 Binop_12x<HSub>(instruction, Primitive::kPrimInt, dex_pc); 2463 break; 2464 } 2465 2466 case Instruction::SUB_LONG_2ADDR: { 2467 Binop_12x<HSub>(instruction, Primitive::kPrimLong, dex_pc); 2468 break; 2469 } 2470 2471 case Instruction::SUB_FLOAT_2ADDR: { 2472 Binop_12x<HSub>(instruction, Primitive::kPrimFloat, dex_pc); 2473 break; 2474 } 2475 2476 case Instruction::SUB_DOUBLE_2ADDR: { 2477 Binop_12x<HSub>(instruction, Primitive::kPrimDouble, dex_pc); 2478 break; 2479 } 2480 2481 case Instruction::MUL_INT_2ADDR: { 2482 Binop_12x<HMul>(instruction, Primitive::kPrimInt, dex_pc); 2483 break; 2484 } 2485 2486 case Instruction::MUL_LONG_2ADDR: { 2487 Binop_12x<HMul>(instruction, Primitive::kPrimLong, dex_pc); 2488 break; 2489 } 2490 2491 case Instruction::MUL_FLOAT_2ADDR: { 2492 Binop_12x<HMul>(instruction, Primitive::kPrimFloat, dex_pc); 2493 break; 2494 } 2495 2496 case Instruction::MUL_DOUBLE_2ADDR: { 2497 Binop_12x<HMul>(instruction, Primitive::kPrimDouble, dex_pc); 2498 break; 2499 } 2500 2501 case Instruction::DIV_INT_2ADDR: { 2502 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(), 2503 dex_pc, Primitive::kPrimInt, false, true); 2504 break; 2505 } 2506 2507 case Instruction::DIV_LONG_2ADDR: { 2508 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(), 2509 dex_pc, Primitive::kPrimLong, false, true); 2510 break; 2511 } 2512 2513 case Instruction::REM_INT_2ADDR: { 2514 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(), 2515 dex_pc, Primitive::kPrimInt, false, false); 2516 break; 2517 } 2518 2519 case Instruction::REM_LONG_2ADDR: { 2520 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(), 2521 dex_pc, Primitive::kPrimLong, false, false); 2522 break; 2523 } 2524 2525 case Instruction::REM_FLOAT_2ADDR: { 2526 Binop_12x<HRem>(instruction, Primitive::kPrimFloat, dex_pc); 2527 break; 2528 } 2529 2530 case Instruction::REM_DOUBLE_2ADDR: { 2531 Binop_12x<HRem>(instruction, Primitive::kPrimDouble, dex_pc); 2532 break; 2533 } 2534 2535 case Instruction::SHL_INT_2ADDR: { 2536 Binop_12x_shift<HShl>(instruction, Primitive::kPrimInt, dex_pc); 2537 break; 2538 } 2539 2540 case Instruction::SHL_LONG_2ADDR: { 2541 Binop_12x_shift<HShl>(instruction, Primitive::kPrimLong, dex_pc); 2542 break; 2543 } 2544 2545 case Instruction::SHR_INT_2ADDR: { 2546 Binop_12x_shift<HShr>(instruction, Primitive::kPrimInt, dex_pc); 2547 break; 2548 } 2549 2550 case Instruction::SHR_LONG_2ADDR: { 2551 Binop_12x_shift<HShr>(instruction, Primitive::kPrimLong, dex_pc); 2552 break; 2553 } 2554 2555 case Instruction::USHR_INT_2ADDR: { 2556 Binop_12x_shift<HUShr>(instruction, Primitive::kPrimInt, dex_pc); 2557 break; 2558 } 2559 2560 case Instruction::USHR_LONG_2ADDR: { 2561 Binop_12x_shift<HUShr>(instruction, Primitive::kPrimLong, dex_pc); 2562 break; 2563 } 2564 2565 case Instruction::DIV_FLOAT_2ADDR: { 2566 Binop_12x<HDiv>(instruction, Primitive::kPrimFloat, dex_pc); 2567 break; 2568 } 2569 2570 case Instruction::DIV_DOUBLE_2ADDR: { 2571 Binop_12x<HDiv>(instruction, Primitive::kPrimDouble, dex_pc); 2572 break; 2573 } 2574 2575 case Instruction::AND_INT_2ADDR: { 2576 Binop_12x<HAnd>(instruction, Primitive::kPrimInt, dex_pc); 2577 break; 2578 } 2579 2580 case Instruction::AND_LONG_2ADDR: { 2581 Binop_12x<HAnd>(instruction, Primitive::kPrimLong, dex_pc); 2582 break; 2583 } 2584 2585 case Instruction::OR_INT_2ADDR: { 2586 Binop_12x<HOr>(instruction, Primitive::kPrimInt, dex_pc); 2587 break; 2588 } 2589 2590 case Instruction::OR_LONG_2ADDR: { 2591 Binop_12x<HOr>(instruction, Primitive::kPrimLong, dex_pc); 2592 break; 2593 } 2594 2595 case Instruction::XOR_INT_2ADDR: { 2596 Binop_12x<HXor>(instruction, Primitive::kPrimInt, dex_pc); 2597 break; 2598 } 2599 2600 case Instruction::XOR_LONG_2ADDR: { 2601 Binop_12x<HXor>(instruction, Primitive::kPrimLong, dex_pc); 2602 break; 2603 } 2604 2605 case Instruction::ADD_INT_LIT16: { 2606 Binop_22s<HAdd>(instruction, false, dex_pc); 2607 break; 2608 } 2609 2610 case Instruction::AND_INT_LIT16: { 2611 Binop_22s<HAnd>(instruction, false, dex_pc); 2612 break; 2613 } 2614 2615 case Instruction::OR_INT_LIT16: { 2616 Binop_22s<HOr>(instruction, false, dex_pc); 2617 break; 2618 } 2619 2620 case Instruction::XOR_INT_LIT16: { 2621 Binop_22s<HXor>(instruction, false, dex_pc); 2622 break; 2623 } 2624 2625 case Instruction::RSUB_INT: { 2626 Binop_22s<HSub>(instruction, true, dex_pc); 2627 break; 2628 } 2629 2630 case Instruction::MUL_INT_LIT16: { 2631 Binop_22s<HMul>(instruction, false, dex_pc); 2632 break; 2633 } 2634 2635 case Instruction::ADD_INT_LIT8: { 2636 Binop_22b<HAdd>(instruction, false, dex_pc); 2637 break; 2638 } 2639 2640 case Instruction::AND_INT_LIT8: { 2641 Binop_22b<HAnd>(instruction, false, dex_pc); 2642 break; 2643 } 2644 2645 case Instruction::OR_INT_LIT8: { 2646 Binop_22b<HOr>(instruction, false, dex_pc); 2647 break; 2648 } 2649 2650 case Instruction::XOR_INT_LIT8: { 2651 Binop_22b<HXor>(instruction, false, dex_pc); 2652 break; 2653 } 2654 2655 case Instruction::RSUB_INT_LIT8: { 2656 Binop_22b<HSub>(instruction, true, dex_pc); 2657 break; 2658 } 2659 2660 case Instruction::MUL_INT_LIT8: { 2661 Binop_22b<HMul>(instruction, false, dex_pc); 2662 break; 2663 } 2664 2665 case Instruction::DIV_INT_LIT16: 2666 case Instruction::DIV_INT_LIT8: { 2667 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(), 2668 dex_pc, Primitive::kPrimInt, true, true); 2669 break; 2670 } 2671 2672 case Instruction::REM_INT_LIT16: 2673 case Instruction::REM_INT_LIT8: { 2674 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(), 2675 dex_pc, Primitive::kPrimInt, true, false); 2676 break; 2677 } 2678 2679 case Instruction::SHL_INT_LIT8: { 2680 Binop_22b<HShl>(instruction, false, dex_pc); 2681 break; 2682 } 2683 2684 case Instruction::SHR_INT_LIT8: { 2685 Binop_22b<HShr>(instruction, false, dex_pc); 2686 break; 2687 } 2688 2689 case Instruction::USHR_INT_LIT8: { 2690 Binop_22b<HUShr>(instruction, false, dex_pc); 2691 break; 2692 } 2693 2694 case Instruction::NEW_INSTANCE: { 2695 if (!BuildNewInstance(instruction.VRegB_21c(), dex_pc)) { 2696 return false; 2697 } 2698 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction(), dex_pc); 2699 break; 2700 } 2701 2702 case Instruction::NEW_ARRAY: { 2703 uint16_t type_index = instruction.VRegC_22c(); 2704 HInstruction* length = LoadLocal(instruction.VRegB_22c(), Primitive::kPrimInt, dex_pc); 2705 bool finalizable; 2706 QuickEntrypointEnum entrypoint = NeedsAccessCheck(type_index, &finalizable) 2707 ? kQuickAllocArrayWithAccessCheck 2708 : kQuickAllocArray; 2709 current_block_->AddInstruction(new (arena_) HNewArray(length, 2710 graph_->GetCurrentMethod(), 2711 dex_pc, 2712 type_index, 2713 *dex_compilation_unit_->GetDexFile(), 2714 entrypoint)); 2715 UpdateLocal(instruction.VRegA_22c(), current_block_->GetLastInstruction(), dex_pc); 2716 break; 2717 } 2718 2719 case Instruction::FILLED_NEW_ARRAY: { 2720 uint32_t number_of_vreg_arguments = instruction.VRegA_35c(); 2721 uint32_t type_index = instruction.VRegB_35c(); 2722 uint32_t args[5]; 2723 instruction.GetVarArgs(args); 2724 BuildFilledNewArray(dex_pc, type_index, number_of_vreg_arguments, false, args, 0); 2725 break; 2726 } 2727 2728 case Instruction::FILLED_NEW_ARRAY_RANGE: { 2729 uint32_t number_of_vreg_arguments = instruction.VRegA_3rc(); 2730 uint32_t type_index = instruction.VRegB_3rc(); 2731 uint32_t register_index = instruction.VRegC_3rc(); 2732 BuildFilledNewArray( 2733 dex_pc, type_index, number_of_vreg_arguments, true, nullptr, register_index); 2734 break; 2735 } 2736 2737 case Instruction::FILL_ARRAY_DATA: { 2738 BuildFillArrayData(instruction, dex_pc); 2739 break; 2740 } 2741 2742 case Instruction::MOVE_RESULT: 2743 case Instruction::MOVE_RESULT_WIDE: 2744 case Instruction::MOVE_RESULT_OBJECT: { 2745 if (latest_result_ == nullptr) { 2746 // Only dead code can lead to this situation, where the verifier 2747 // does not reject the method. 2748 } else { 2749 // An Invoke/FilledNewArray and its MoveResult could have landed in 2750 // different blocks if there was a try/catch block boundary between 2751 // them. For Invoke, we insert a StoreLocal after the instruction. For 2752 // FilledNewArray, the local needs to be updated after the array was 2753 // filled, otherwise we might overwrite an input vreg. 2754 HStoreLocal* update_local = 2755 new (arena_) HStoreLocal(GetLocalAt(instruction.VRegA()), latest_result_, dex_pc); 2756 HBasicBlock* block = latest_result_->GetBlock(); 2757 if (block == current_block_) { 2758 // MoveResult and the previous instruction are in the same block. 2759 current_block_->AddInstruction(update_local); 2760 } else { 2761 // The two instructions are in different blocks. Insert the MoveResult 2762 // before the final control-flow instruction of the previous block. 2763 DCHECK(block->EndsWithControlFlowInstruction()); 2764 DCHECK(current_block_->GetInstructions().IsEmpty()); 2765 block->InsertInstructionBefore(update_local, block->GetLastInstruction()); 2766 } 2767 latest_result_ = nullptr; 2768 } 2769 break; 2770 } 2771 2772 case Instruction::CMP_LONG: { 2773 Binop_23x_cmp(instruction, Primitive::kPrimLong, ComparisonBias::kNoBias, dex_pc); 2774 break; 2775 } 2776 2777 case Instruction::CMPG_FLOAT: { 2778 Binop_23x_cmp(instruction, Primitive::kPrimFloat, ComparisonBias::kGtBias, dex_pc); 2779 break; 2780 } 2781 2782 case Instruction::CMPG_DOUBLE: { 2783 Binop_23x_cmp(instruction, Primitive::kPrimDouble, ComparisonBias::kGtBias, dex_pc); 2784 break; 2785 } 2786 2787 case Instruction::CMPL_FLOAT: { 2788 Binop_23x_cmp(instruction, Primitive::kPrimFloat, ComparisonBias::kLtBias, dex_pc); 2789 break; 2790 } 2791 2792 case Instruction::CMPL_DOUBLE: { 2793 Binop_23x_cmp(instruction, Primitive::kPrimDouble, ComparisonBias::kLtBias, dex_pc); 2794 break; 2795 } 2796 2797 case Instruction::NOP: 2798 break; 2799 2800 case Instruction::IGET: 2801 case Instruction::IGET_QUICK: 2802 case Instruction::IGET_WIDE: 2803 case Instruction::IGET_WIDE_QUICK: 2804 case Instruction::IGET_OBJECT: 2805 case Instruction::IGET_OBJECT_QUICK: 2806 case Instruction::IGET_BOOLEAN: 2807 case Instruction::IGET_BOOLEAN_QUICK: 2808 case Instruction::IGET_BYTE: 2809 case Instruction::IGET_BYTE_QUICK: 2810 case Instruction::IGET_CHAR: 2811 case Instruction::IGET_CHAR_QUICK: 2812 case Instruction::IGET_SHORT: 2813 case Instruction::IGET_SHORT_QUICK: { 2814 if (!BuildInstanceFieldAccess(instruction, dex_pc, false)) { 2815 return false; 2816 } 2817 break; 2818 } 2819 2820 case Instruction::IPUT: 2821 case Instruction::IPUT_QUICK: 2822 case Instruction::IPUT_WIDE: 2823 case Instruction::IPUT_WIDE_QUICK: 2824 case Instruction::IPUT_OBJECT: 2825 case Instruction::IPUT_OBJECT_QUICK: 2826 case Instruction::IPUT_BOOLEAN: 2827 case Instruction::IPUT_BOOLEAN_QUICK: 2828 case Instruction::IPUT_BYTE: 2829 case Instruction::IPUT_BYTE_QUICK: 2830 case Instruction::IPUT_CHAR: 2831 case Instruction::IPUT_CHAR_QUICK: 2832 case Instruction::IPUT_SHORT: 2833 case Instruction::IPUT_SHORT_QUICK: { 2834 if (!BuildInstanceFieldAccess(instruction, dex_pc, true)) { 2835 return false; 2836 } 2837 break; 2838 } 2839 2840 case Instruction::SGET: 2841 case Instruction::SGET_WIDE: 2842 case Instruction::SGET_OBJECT: 2843 case Instruction::SGET_BOOLEAN: 2844 case Instruction::SGET_BYTE: 2845 case Instruction::SGET_CHAR: 2846 case Instruction::SGET_SHORT: { 2847 if (!BuildStaticFieldAccess(instruction, dex_pc, false)) { 2848 return false; 2849 } 2850 break; 2851 } 2852 2853 case Instruction::SPUT: 2854 case Instruction::SPUT_WIDE: 2855 case Instruction::SPUT_OBJECT: 2856 case Instruction::SPUT_BOOLEAN: 2857 case Instruction::SPUT_BYTE: 2858 case Instruction::SPUT_CHAR: 2859 case Instruction::SPUT_SHORT: { 2860 if (!BuildStaticFieldAccess(instruction, dex_pc, true)) { 2861 return false; 2862 } 2863 break; 2864 } 2865 2866#define ARRAY_XX(kind, anticipated_type) \ 2867 case Instruction::AGET##kind: { \ 2868 BuildArrayAccess(instruction, dex_pc, false, anticipated_type); \ 2869 break; \ 2870 } \ 2871 case Instruction::APUT##kind: { \ 2872 BuildArrayAccess(instruction, dex_pc, true, anticipated_type); \ 2873 break; \ 2874 } 2875 2876 ARRAY_XX(, Primitive::kPrimInt); 2877 ARRAY_XX(_WIDE, Primitive::kPrimLong); 2878 ARRAY_XX(_OBJECT, Primitive::kPrimNot); 2879 ARRAY_XX(_BOOLEAN, Primitive::kPrimBoolean); 2880 ARRAY_XX(_BYTE, Primitive::kPrimByte); 2881 ARRAY_XX(_CHAR, Primitive::kPrimChar); 2882 ARRAY_XX(_SHORT, Primitive::kPrimShort); 2883 2884 case Instruction::ARRAY_LENGTH: { 2885 HInstruction* object = LoadLocal(instruction.VRegB_12x(), Primitive::kPrimNot, dex_pc); 2886 // No need for a temporary for the null check, it is the only input of the following 2887 // instruction. 2888 object = new (arena_) HNullCheck(object, dex_pc); 2889 current_block_->AddInstruction(object); 2890 current_block_->AddInstruction(new (arena_) HArrayLength(object, dex_pc)); 2891 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction(), dex_pc); 2892 break; 2893 } 2894 2895 case Instruction::CONST_STRING: { 2896 uint32_t string_index = instruction.VRegB_21c(); 2897 bool in_dex_cache = compiler_driver_->CanAssumeStringIsPresentInDexCache( 2898 *dex_file_, string_index); 2899 current_block_->AddInstruction( 2900 new (arena_) HLoadString(graph_->GetCurrentMethod(), string_index, dex_pc, in_dex_cache)); 2901 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction(), dex_pc); 2902 break; 2903 } 2904 2905 case Instruction::CONST_STRING_JUMBO: { 2906 uint32_t string_index = instruction.VRegB_31c(); 2907 bool in_dex_cache = compiler_driver_->CanAssumeStringIsPresentInDexCache( 2908 *dex_file_, string_index); 2909 current_block_->AddInstruction( 2910 new (arena_) HLoadString(graph_->GetCurrentMethod(), string_index, dex_pc, in_dex_cache)); 2911 UpdateLocal(instruction.VRegA_31c(), current_block_->GetLastInstruction(), dex_pc); 2912 break; 2913 } 2914 2915 case Instruction::CONST_CLASS: { 2916 uint16_t type_index = instruction.VRegB_21c(); 2917 bool type_known_final; 2918 bool type_known_abstract; 2919 bool dont_use_is_referrers_class; 2920 // `CanAccessTypeWithoutChecks` will tell whether the method being 2921 // built is trying to access its own class, so that the generated 2922 // code can optimize for this case. However, the optimization does not 2923 // work for inlining, so we use `IsOutermostCompilingClass` instead. 2924 bool can_access = compiler_driver_->CanAccessTypeWithoutChecks( 2925 dex_compilation_unit_->GetDexMethodIndex(), *dex_file_, type_index, 2926 &type_known_final, &type_known_abstract, &dont_use_is_referrers_class); 2927 current_block_->AddInstruction(new (arena_) HLoadClass( 2928 graph_->GetCurrentMethod(), 2929 type_index, 2930 *dex_file_, 2931 IsOutermostCompilingClass(type_index), 2932 dex_pc, 2933 !can_access, 2934 compiler_driver_->CanAssumeTypeIsPresentInDexCache(*dex_file_, type_index))); 2935 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction(), dex_pc); 2936 break; 2937 } 2938 2939 case Instruction::MOVE_EXCEPTION: { 2940 current_block_->AddInstruction(new (arena_) HLoadException(dex_pc)); 2941 UpdateLocal(instruction.VRegA_11x(), current_block_->GetLastInstruction(), dex_pc); 2942 current_block_->AddInstruction(new (arena_) HClearException(dex_pc)); 2943 break; 2944 } 2945 2946 case Instruction::THROW: { 2947 HInstruction* exception = LoadLocal(instruction.VRegA_11x(), Primitive::kPrimNot, dex_pc); 2948 current_block_->AddInstruction(new (arena_) HThrow(exception, dex_pc)); 2949 // A throw instruction must branch to the exit block. 2950 current_block_->AddSuccessor(exit_block_); 2951 // We finished building this block. Set the current block to null to avoid 2952 // adding dead instructions to it. 2953 current_block_ = nullptr; 2954 break; 2955 } 2956 2957 case Instruction::INSTANCE_OF: { 2958 uint8_t destination = instruction.VRegA_22c(); 2959 uint8_t reference = instruction.VRegB_22c(); 2960 uint16_t type_index = instruction.VRegC_22c(); 2961 BuildTypeCheck(instruction, destination, reference, type_index, dex_pc); 2962 break; 2963 } 2964 2965 case Instruction::CHECK_CAST: { 2966 uint8_t reference = instruction.VRegA_21c(); 2967 uint16_t type_index = instruction.VRegB_21c(); 2968 BuildTypeCheck(instruction, -1, reference, type_index, dex_pc); 2969 break; 2970 } 2971 2972 case Instruction::MONITOR_ENTER: { 2973 current_block_->AddInstruction(new (arena_) HMonitorOperation( 2974 LoadLocal(instruction.VRegA_11x(), Primitive::kPrimNot, dex_pc), 2975 HMonitorOperation::kEnter, 2976 dex_pc)); 2977 break; 2978 } 2979 2980 case Instruction::MONITOR_EXIT: { 2981 current_block_->AddInstruction(new (arena_) HMonitorOperation( 2982 LoadLocal(instruction.VRegA_11x(), Primitive::kPrimNot, dex_pc), 2983 HMonitorOperation::kExit, 2984 dex_pc)); 2985 break; 2986 } 2987 2988 case Instruction::PACKED_SWITCH: { 2989 BuildPackedSwitch(instruction, dex_pc); 2990 break; 2991 } 2992 2993 case Instruction::SPARSE_SWITCH: { 2994 BuildSparseSwitch(instruction, dex_pc); 2995 break; 2996 } 2997 2998 default: 2999 VLOG(compiler) << "Did not compile " 3000 << PrettyMethod(dex_compilation_unit_->GetDexMethodIndex(), *dex_file_) 3001 << " because of unhandled instruction " 3002 << instruction.Name(); 3003 MaybeRecordStat(MethodCompilationStat::kNotCompiledUnhandledInstruction); 3004 return false; 3005 } 3006 return true; 3007} // NOLINT(readability/fn_size) 3008 3009HLocal* HGraphBuilder::GetLocalAt(uint32_t register_index) const { 3010 return locals_[register_index]; 3011} 3012 3013void HGraphBuilder::UpdateLocal(uint32_t register_index, 3014 HInstruction* instruction, 3015 uint32_t dex_pc) const { 3016 HLocal* local = GetLocalAt(register_index); 3017 current_block_->AddInstruction(new (arena_) HStoreLocal(local, instruction, dex_pc)); 3018} 3019 3020HInstruction* HGraphBuilder::LoadLocal(uint32_t register_index, 3021 Primitive::Type type, 3022 uint32_t dex_pc) const { 3023 HLocal* local = GetLocalAt(register_index); 3024 current_block_->AddInstruction(new (arena_) HLoadLocal(local, type, dex_pc)); 3025 return current_block_->GetLastInstruction(); 3026} 3027 3028} // namespace art 3029