Frontend.cpp revision ab35b50311951feea3782151dd5422ee944685c2
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Dalvik.h"
#include "libdex/DexOpcodes.h"
#include "libdex/DexCatch.h"
#include "interp/Jit.h"
#include "CompilerInternals.h"
#include "Dataflow.h"

/*
 * Return true if the 16-bit code unit at codePtr is a real instruction
 * rather than embedded data (switch tables / fill-array payloads).
 */
static inline bool contentIsInsn(const u2 *codePtr) {
    u2 instr = *codePtr;
    Opcode opcode = (Opcode)(instr & 0xff);

    /*
     * Since the low 8-bit in metadata may look like OP_NOP, we need to check
     * both the low and whole sub-word to determine whether it is code or data.
     * Data payload signatures have a non-zero high byte; a true OP_NOP is the
     * full code unit 0x0000.
     */
    return (opcode != OP_NOP || instr == 0);
}

/*
 * Parse an instruction, return the length of the instruction in 16-bit code
 * units (0 if codePtr points at data rather than code).  The decoded form is
 * written to *decInsn; when printMe is set the disassembly is logged.
 */
static inline int parseInsn(const u2 *codePtr, DecodedInstruction *decInsn,
                            bool printMe)
{
    // Don't parse instruction data
    if (!contentIsInsn(codePtr)) {
        return 0;
    }

    u2 instr = *codePtr;
    Opcode opcode = dexOpcodeFromCodeUnit(instr);

    dexDecodeInstruction(codePtr, decInsn);
    if (printMe) {
        char *decodedString = dvmCompilerGetDalvikDisassembly(decInsn, NULL);
        ALOGD("%p: %#06x %s", codePtr, opcode, decodedString);
    }
    return dexGetWidthFromOpcode(opcode);
}

/* Sentinel for branch/return targets that are not compile-time constants */
#define UNKNOWN_TARGET 0xffffffff

/*
 * Identify block-ending instructions and collect supplemental information
 * regarding the following instructions.
 */
static inline bool findBlockBoundary(const Method *caller, MIR *insn,
                                     unsigned int curOffset,
                                     unsigned int *target, bool *isInvoke,
                                     const Method **callee)
{
    switch (insn->dalvikInsn.opcode) {
        /* Target is not compile-time constant */
        case OP_RETURN_VOID:
        case OP_RETURN:
        case OP_RETURN_WIDE:
        case OP_RETURN_OBJECT:
        case OP_THROW:
          *target = UNKNOWN_TARGET;
          break;
        /* Virtually-dispatched invokes: callee unknown until runtime */
        case OP_INVOKE_VIRTUAL:
        case OP_INVOKE_VIRTUAL_RANGE:
        case OP_INVOKE_INTERFACE:
        case OP_INVOKE_INTERFACE_RANGE:
        case OP_INVOKE_VIRTUAL_QUICK:
        case OP_INVOKE_VIRTUAL_QUICK_RANGE:
            *isInvoke = true;
            break;
        /* invoke-super: resolve through the superclass vtable */
        case OP_INVOKE_SUPER:
        case OP_INVOKE_SUPER_RANGE: {
            int mIndex = caller->clazz->pDvmDex->
                pResMethods[insn->dalvikInsn.vB]->methodIndex;
            const Method *calleeMethod =
                caller->clazz->super->vtable[mIndex];

            if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
                *target = (unsigned int) calleeMethod->insns;
            }
            *isInvoke = true;
            *callee = calleeMethod;
            break;
        }
        case OP_INVOKE_STATIC:
        case OP_INVOKE_STATIC_RANGE: {
            const Method *calleeMethod =
                caller->clazz->pDvmDex->pResMethods[insn->dalvikInsn.vB];

            if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
                *target = (unsigned int) calleeMethod->insns;
            }
            *isInvoke = true;
            *callee = calleeMethod;
            break;
        }
        /* quickened invoke-super: vB is the vtable index directly */
        case OP_INVOKE_SUPER_QUICK:
        case OP_INVOKE_SUPER_QUICK_RANGE: {
            const Method *calleeMethod =
                caller->clazz->super->vtable[insn->dalvikInsn.vB];

            if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
                *target = (unsigned int) calleeMethod->insns;
            }
            *isInvoke = true;
            *callee = calleeMethod;
            break;
        }
        case OP_INVOKE_DIRECT:
        case OP_INVOKE_DIRECT_RANGE: {
            const Method *calleeMethod =
                caller->clazz->pDvmDex->pResMethods[insn->dalvikInsn.vB];
            if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
                *target = (unsigned int) calleeMethod->insns;
            }
            *isInvoke = true;
            *callee = calleeMethod;
            break;
        }
        /* Relative branches: the signed displacement lives in different
         * operands depending on the instruction format. */
        case OP_GOTO:
        case OP_GOTO_16:
        case OP_GOTO_32:
            *target = curOffset + (int) insn->dalvikInsn.vA;
            break;

        case OP_IF_EQ:
        case OP_IF_NE:
        case OP_IF_LT:
        case OP_IF_GE:
        case OP_IF_GT:
        case OP_IF_LE:
            *target = curOffset + (int) insn->dalvikInsn.vC;
            break;

        case OP_IF_EQZ:
        case OP_IF_NEZ:
        case OP_IF_LTZ:
        case OP_IF_GEZ:
        case OP_IF_GTZ:
        case OP_IF_LEZ:
            *target = curOffset + (int) insn->dalvikInsn.vB;
            break;

        default:
            return false;
    }
    return true;
}

/* Return true for the three unconditional goto variants */
static inline bool isGoto(MIR *insn)
{
    switch (insn->dalvikInsn.opcode) {
        case OP_GOTO:
        case OP_GOTO_16:
        case OP_GOTO_32:
            return true;
        default:
            return false;
    }
}

/*
 * Identify unconditional branch instructions (returns and gotos)
 */
static inline bool isUnconditionalBranch(MIR *insn)
{
    switch (insn->dalvikInsn.opcode) {
        case OP_RETURN_VOID:
        case OP_RETURN:
        case OP_RETURN_WIDE:
        case OP_RETURN_OBJECT:
            return true;
        default:
            return isGoto(insn);
    }
}

/*
 * dvmHashTableLookup() callback - compare entries by Method pointer identity.
 */
static int compareMethod(const CompilerMethodStats *m1,
                         const CompilerMethodStats *m2)
{
    return (int) m1->method - (int) m2->method;
}

/*
 * Analyze the body of the method to collect high-level information regarding
 * inlining:
 * - is empty method?
 * - is getter/setter?
 * - can throw exception?
 *
 * Called once per instruction; the attributes word starts fully set and bits
 * are cleared as disqualifying instructions are seen.
 *
 * Currently the inliner only handles getters and setters. When its capability
 * becomes more sophisticated more information will be retrieved here.
 */
static int analyzeInlineTarget(DecodedInstruction *dalvikInsn, int attributes,
                               int offset)
{
    int flags = dexGetFlagsFromOpcode(dalvikInsn->opcode);
    int dalvikOpcode = dalvikInsn->opcode;

    /* Any invoke disqualifies the method from being a leaf */
    if (flags & kInstrInvoke) {
        attributes &= ~METHOD_IS_LEAF;
    }

    /* Non-return instructions must match the getter/setter dataflow
     * pattern or the corresponding attribute is cleared. */
    if (!(flags & kInstrCanReturn)) {
        if (!(dvmCompilerDataFlowAttributes[dalvikOpcode] &
              DF_IS_GETTER)) {
            attributes &= ~METHOD_IS_GETTER;
        }
        if (!(dvmCompilerDataFlowAttributes[dalvikOpcode] &
              DF_IS_SETTER)) {
            attributes &= ~METHOD_IS_SETTER;
        }
    }

    /*
     * A setter is expected to end with return-void, and a getter with a
     * value-returning return. Clear the corresponding bit when the opposite
     * return form is observed.
     */
    if (flags & kInstrCanReturn) {
        if (dalvikOpcode == OP_RETURN_VOID) {
            attributes &= ~METHOD_IS_GETTER;
        }
        else {
            attributes &= ~METHOD_IS_SETTER;
        }
    }

    if (flags & kInstrCanThrow) {
        attributes &= ~METHOD_IS_THROW_FREE;
    }

    /* return-void as the very first instruction => empty method */
    if (offset == 0 && dalvikOpcode == OP_RETURN_VOID) {
        attributes |= METHOD_IS_EMPTY;
    }

    /*
     * Check if this opcode is selected for single stepping.
     * If so, don't inline the callee as there is no stack frame for the
     * interpreter to single-step through the instruction.
     */
    if (SINGLE_STEP_OP(dalvikOpcode)) {
        attributes &= ~(METHOD_IS_GETTER | METHOD_IS_SETTER);
    }

    return attributes;
}

/*
 * Analyze each method whose traces are ever compiled. Collect a variety of
 * statistics like the ratio of exercised vs overall code and code bloat
 * ratios. If isCallee is true, also analyze each instruction in more details
 * to see if it is suitable for inlining.
271 */ 272CompilerMethodStats *dvmCompilerAnalyzeMethodBody(const Method *method, 273 bool isCallee) 274{ 275 const DexCode *dexCode = dvmGetMethodCode(method); 276 const u2 *codePtr = dexCode->insns; 277 const u2 *codeEnd = dexCode->insns + dexCode->insnsSize; 278 int insnSize = 0; 279 int hashValue = dvmComputeUtf8Hash(method->name); 280 281 CompilerMethodStats dummyMethodEntry; // For hash table lookup 282 CompilerMethodStats *realMethodEntry; // For hash table storage 283 284 /* For lookup only */ 285 dummyMethodEntry.method = method; 286 realMethodEntry = (CompilerMethodStats *) 287 dvmHashTableLookup(gDvmJit.methodStatsTable, 288 hashValue, 289 &dummyMethodEntry, 290 (HashCompareFunc) compareMethod, 291 false); 292 293 /* This method has never been analyzed before - create an entry */ 294 if (realMethodEntry == NULL) { 295 realMethodEntry = 296 (CompilerMethodStats *) calloc(1, sizeof(CompilerMethodStats)); 297 realMethodEntry->method = method; 298 299 dvmHashTableLookup(gDvmJit.methodStatsTable, hashValue, 300 realMethodEntry, 301 (HashCompareFunc) compareMethod, 302 true); 303 } 304 305 /* This method is invoked as a callee and has been analyzed - just return */ 306 if ((isCallee == true) && (realMethodEntry->attributes & METHOD_IS_CALLEE)) 307 return realMethodEntry; 308 309 /* 310 * Similarly, return if this method has been compiled before as a hot 311 * method already. 
312 */ 313 if ((isCallee == false) && 314 (realMethodEntry->attributes & METHOD_IS_HOT)) 315 return realMethodEntry; 316 317 int attributes; 318 319 /* Method hasn't been analyzed for the desired purpose yet */ 320 if (isCallee) { 321 /* Aggressively set the attributes until proven otherwise */ 322 attributes = METHOD_IS_LEAF | METHOD_IS_THROW_FREE | METHOD_IS_CALLEE | 323 METHOD_IS_GETTER | METHOD_IS_SETTER; 324 } else { 325 attributes = METHOD_IS_HOT; 326 } 327 328 /* Count the number of instructions */ 329 while (codePtr < codeEnd) { 330 DecodedInstruction dalvikInsn; 331 int width = parseInsn(codePtr, &dalvikInsn, false); 332 333 /* Terminate when the data section is seen */ 334 if (width == 0) 335 break; 336 337 if (isCallee) { 338 attributes = analyzeInlineTarget(&dalvikInsn, attributes, insnSize); 339 } 340 341 insnSize += width; 342 codePtr += width; 343 } 344 345 /* 346 * Only handle simple getters/setters with one instruction followed by 347 * return 348 */ 349 if ((attributes & (METHOD_IS_GETTER | METHOD_IS_SETTER)) && 350 (insnSize != 3)) { 351 attributes &= ~(METHOD_IS_GETTER | METHOD_IS_SETTER); 352 } 353 354 realMethodEntry->dalvikSize = insnSize * 2; 355 realMethodEntry->attributes |= attributes; 356 357#if 0 358 /* Uncomment the following to explore various callee patterns */ 359 if (attributes & METHOD_IS_THROW_FREE) { 360 LOGE("%s%s is inlinable%s", method->clazz->descriptor, method->name, 361 (attributes & METHOD_IS_EMPTY) ? " empty" : ""); 362 } 363 364 if (attributes & METHOD_IS_LEAF) { 365 LOGE("%s%s is leaf %d%s", method->clazz->descriptor, method->name, 366 insnSize, insnSize < 5 ? " (small)" : ""); 367 } 368 369 if (attributes & (METHOD_IS_GETTER | METHOD_IS_SETTER)) { 370 LOGE("%s%s is %s", method->clazz->descriptor, method->name, 371 attributes & METHOD_IS_GETTER ? 
"getter": "setter"); 372 } 373 if (attributes == 374 (METHOD_IS_LEAF | METHOD_IS_THROW_FREE | METHOD_IS_CALLEE)) { 375 LOGE("%s%s is inlinable non setter/getter", method->clazz->descriptor, 376 method->name); 377 } 378#endif 379 380 return realMethodEntry; 381} 382 383/* 384 * Crawl the stack of the thread that requesed compilation to see if any of the 385 * ancestors are on the blacklist. 386 */ 387static bool filterMethodByCallGraph(Thread *thread, const char *curMethodName) 388{ 389 /* Crawl the Dalvik stack frames and compare the method name*/ 390 StackSaveArea *ssaPtr = ((StackSaveArea *) thread->interpSave.curFrame) - 1; 391 while (ssaPtr != ((StackSaveArea *) NULL) - 1) { 392 const Method *method = ssaPtr->method; 393 if (method) { 394 int hashValue = dvmComputeUtf8Hash(method->name); 395 bool found = 396 dvmHashTableLookup(gDvmJit.methodTable, hashValue, 397 (char *) method->name, 398 (HashCompareFunc) strcmp, false) != 399 NULL; 400 if (found) { 401 ALOGD("Method %s (--> %s) found on the JIT %s list", 402 method->name, curMethodName, 403 gDvmJit.includeSelectedMethod ? "white" : "black"); 404 return true; 405 } 406 407 } 408 ssaPtr = ((StackSaveArea *) ssaPtr->prevFrame) - 1; 409 }; 410 return false; 411} 412 413/* 414 * Since we are including instructions from possibly a cold method into the 415 * current trace, we need to make sure that all the associated information 416 * with the callee is properly initialized. If not, we punt on this inline 417 * target. 418 * 419 * TODO: volatile instructions will be handled later. 
420 */ 421bool dvmCompilerCanIncludeThisInstruction(const Method *method, 422 const DecodedInstruction *insn) 423{ 424 switch (insn->opcode) { 425 case OP_NEW_INSTANCE: 426 case OP_CHECK_CAST: { 427 ClassObject *classPtr = (ClassObject *)(void*) 428 (method->clazz->pDvmDex->pResClasses[insn->vB]); 429 430 /* Class hasn't been initialized yet */ 431 if (classPtr == NULL) { 432 return false; 433 } 434 return true; 435 } 436 case OP_SGET: 437 case OP_SGET_WIDE: 438 case OP_SGET_OBJECT: 439 case OP_SGET_BOOLEAN: 440 case OP_SGET_BYTE: 441 case OP_SGET_CHAR: 442 case OP_SGET_SHORT: 443 case OP_SPUT: 444 case OP_SPUT_WIDE: 445 case OP_SPUT_OBJECT: 446 case OP_SPUT_BOOLEAN: 447 case OP_SPUT_BYTE: 448 case OP_SPUT_CHAR: 449 case OP_SPUT_SHORT: { 450 void *fieldPtr = (void*) 451 (method->clazz->pDvmDex->pResFields[insn->vB]); 452 453 if (fieldPtr == NULL) { 454 return false; 455 } 456 return true; 457 } 458 case OP_INVOKE_SUPER: 459 case OP_INVOKE_SUPER_RANGE: { 460 int mIndex = method->clazz->pDvmDex-> 461 pResMethods[insn->vB]->methodIndex; 462 const Method *calleeMethod = method->clazz->super->vtable[mIndex]; 463 if (calleeMethod == NULL) { 464 return false; 465 } 466 return true; 467 } 468 case OP_INVOKE_SUPER_QUICK: 469 case OP_INVOKE_SUPER_QUICK_RANGE: { 470 const Method *calleeMethod = method->clazz->super->vtable[insn->vB]; 471 if (calleeMethod == NULL) { 472 return false; 473 } 474 return true; 475 } 476 case OP_INVOKE_STATIC: 477 case OP_INVOKE_STATIC_RANGE: 478 case OP_INVOKE_DIRECT: 479 case OP_INVOKE_DIRECT_RANGE: { 480 const Method *calleeMethod = 481 method->clazz->pDvmDex->pResMethods[insn->vB]; 482 if (calleeMethod == NULL) { 483 return false; 484 } 485 return true; 486 } 487 case OP_CONST_CLASS: { 488 void *classPtr = (void*) 489 (method->clazz->pDvmDex->pResClasses[insn->vB]); 490 491 if (classPtr == NULL) { 492 return false; 493 } 494 return true; 495 } 496 case OP_CONST_STRING_JUMBO: 497 case OP_CONST_STRING: { 498 void *strPtr = (void*) 499 
(method->clazz->pDvmDex->pResStrings[insn->vB]); 500 501 if (strPtr == NULL) { 502 return false; 503 } 504 return true; 505 } 506 default: 507 return true; 508 } 509} 510 511/* Split an existing block from the specified code offset into two */ 512static BasicBlock *splitBlock(CompilationUnit *cUnit, 513 unsigned int codeOffset, 514 BasicBlock *origBlock) 515{ 516 MIR *insn = origBlock->firstMIRInsn; 517 while (insn) { 518 if (insn->offset == codeOffset) break; 519 insn = insn->next; 520 } 521 if (insn == NULL) { 522 LOGE("Break split failed"); 523 dvmAbort(); 524 } 525 BasicBlock *bottomBlock = dvmCompilerNewBB(kDalvikByteCode, 526 cUnit->numBlocks++); 527 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bottomBlock); 528 529 bottomBlock->startOffset = codeOffset; 530 bottomBlock->firstMIRInsn = insn; 531 bottomBlock->lastMIRInsn = origBlock->lastMIRInsn; 532 533 /* Handle the taken path */ 534 bottomBlock->taken = origBlock->taken; 535 if (bottomBlock->taken) { 536 origBlock->taken = NULL; 537 dvmCompilerClearBit(bottomBlock->taken->predecessors, origBlock->id); 538 dvmCompilerSetBit(bottomBlock->taken->predecessors, bottomBlock->id); 539 } 540 541 /* Handle the fallthrough path */ 542 bottomBlock->needFallThroughBranch = origBlock->needFallThroughBranch; 543 bottomBlock->fallThrough = origBlock->fallThrough; 544 origBlock->fallThrough = bottomBlock; 545 origBlock->needFallThroughBranch = true; 546 dvmCompilerSetBit(bottomBlock->predecessors, origBlock->id); 547 if (bottomBlock->fallThrough) { 548 dvmCompilerClearBit(bottomBlock->fallThrough->predecessors, 549 origBlock->id); 550 dvmCompilerSetBit(bottomBlock->fallThrough->predecessors, 551 bottomBlock->id); 552 } 553 554 /* Handle the successor list */ 555 if (origBlock->successorBlockList.blockListType != kNotUsed) { 556 bottomBlock->successorBlockList = origBlock->successorBlockList; 557 origBlock->successorBlockList.blockListType = kNotUsed; 558 GrowableListIterator iterator; 559 560 
dvmGrowableListIteratorInit(&bottomBlock->successorBlockList.blocks, 561 &iterator); 562 while (true) { 563 SuccessorBlockInfo *successorBlockInfo = 564 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator); 565 if (successorBlockInfo == NULL) break; 566 BasicBlock *bb = successorBlockInfo->block; 567 dvmCompilerClearBit(bb->predecessors, origBlock->id); 568 dvmCompilerSetBit(bb->predecessors, bottomBlock->id); 569 } 570 } 571 572 origBlock->lastMIRInsn = insn->prev; 573 574 insn->prev->next = NULL; 575 insn->prev = NULL; 576 return bottomBlock; 577} 578 579/* 580 * Given a code offset, find out the block that starts with it. If the offset 581 * is in the middle of an existing block, split it into two. 582 */ 583static BasicBlock *findBlock(CompilationUnit *cUnit, 584 unsigned int codeOffset, 585 bool split, bool create) 586{ 587 GrowableList *blockList = &cUnit->blockList; 588 BasicBlock *bb; 589 unsigned int i; 590 591 for (i = 0; i < blockList->numUsed; i++) { 592 bb = (BasicBlock *) blockList->elemList[i]; 593 if (bb->blockType != kDalvikByteCode) continue; 594 if (bb->startOffset == codeOffset) return bb; 595 /* Check if a branch jumps into the middle of an existing block */ 596 if ((split == true) && (codeOffset > bb->startOffset) && 597 (bb->lastMIRInsn != NULL) && 598 (codeOffset <= bb->lastMIRInsn->offset)) { 599 BasicBlock *newBB = splitBlock(cUnit, codeOffset, bb); 600 return newBB; 601 } 602 } 603 if (create) { 604 bb = dvmCompilerNewBB(kDalvikByteCode, cUnit->numBlocks++); 605 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb); 606 bb->startOffset = codeOffset; 607 return bb; 608 } 609 return NULL; 610} 611 612/* Dump the CFG into a DOT graph */ 613void dvmDumpCFG(CompilationUnit *cUnit, const char *dirPrefix) 614{ 615 const Method *method = cUnit->method; 616 FILE *file; 617 char *signature = dexProtoCopyMethodDescriptor(&method->prototype); 618 char startOffset[80]; 619 sprintf(startOffset, "_%x", 
cUnit->entryBlock->fallThrough->startOffset); 620 char *fileName = (char *) dvmCompilerNew( 621 strlen(dirPrefix) + 622 strlen(method->clazz->descriptor) + 623 strlen(method->name) + 624 strlen(signature) + 625 strlen(startOffset) + 626 strlen(".dot") + 1, true); 627 sprintf(fileName, "%s%s%s%s%s.dot", dirPrefix, 628 method->clazz->descriptor, method->name, signature, startOffset); 629 free(signature); 630 631 /* 632 * Convert the special characters into a filesystem- and shell-friendly 633 * format. 634 */ 635 int i; 636 for (i = strlen(dirPrefix); fileName[i]; i++) { 637 if (fileName[i] == '/') { 638 fileName[i] = '_'; 639 } else if (fileName[i] == ';') { 640 fileName[i] = '#'; 641 } else if (fileName[i] == '$') { 642 fileName[i] = '+'; 643 } else if (fileName[i] == '(' || fileName[i] == ')') { 644 fileName[i] = '@'; 645 } else if (fileName[i] == '<' || fileName[i] == '>') { 646 fileName[i] = '='; 647 } 648 } 649 file = fopen(fileName, "w"); 650 if (file == NULL) { 651 return; 652 } 653 fprintf(file, "digraph G {\n"); 654 655 fprintf(file, " rankdir=TB\n"); 656 657 int numReachableBlocks = cUnit->numReachableBlocks; 658 int idx; 659 const GrowableList *blockList = &cUnit->blockList; 660 661 for (idx = 0; idx < numReachableBlocks; idx++) { 662 int blockIdx = cUnit->dfsOrder.elemList[idx]; 663 BasicBlock *bb = (BasicBlock *) dvmGrowableListGetElement(blockList, 664 blockIdx); 665 if (bb == NULL) break; 666 if (bb->blockType == kEntryBlock) { 667 fprintf(file, " entry [shape=Mdiamond];\n"); 668 } else if (bb->blockType == kExitBlock) { 669 fprintf(file, " exit [shape=Mdiamond];\n"); 670 } else if (bb->blockType == kDalvikByteCode) { 671 fprintf(file, " block%04x [shape=record,label = \"{ \\\n", 672 bb->startOffset); 673 const MIR *mir; 674 fprintf(file, " {block id %d\\l}%s\\\n", bb->id, 675 bb->firstMIRInsn ? " | " : " "); 676 for (mir = bb->firstMIRInsn; mir; mir = mir->next) { 677 fprintf(file, " {%04x %s\\l}%s\\\n", mir->offset, 678 mir->ssaRep ? 
679 dvmCompilerFullDisassembler(cUnit, mir) : 680 dexGetOpcodeName(mir->dalvikInsn.opcode), 681 mir->next ? " | " : " "); 682 } 683 fprintf(file, " }\"];\n\n"); 684 } else if (bb->blockType == kExceptionHandling) { 685 char blockName[BLOCK_NAME_LEN]; 686 687 dvmGetBlockName(bb, blockName); 688 fprintf(file, " %s [shape=invhouse];\n", blockName); 689 } 690 691 char blockName1[BLOCK_NAME_LEN], blockName2[BLOCK_NAME_LEN]; 692 693 if (bb->taken) { 694 dvmGetBlockName(bb, blockName1); 695 dvmGetBlockName(bb->taken, blockName2); 696 fprintf(file, " %s:s -> %s:n [style=dotted]\n", 697 blockName1, blockName2); 698 } 699 if (bb->fallThrough) { 700 dvmGetBlockName(bb, blockName1); 701 dvmGetBlockName(bb->fallThrough, blockName2); 702 fprintf(file, " %s:s -> %s:n\n", blockName1, blockName2); 703 } 704 705 if (bb->successorBlockList.blockListType != kNotUsed) { 706 fprintf(file, " succ%04x [shape=%s,label = \"{ \\\n", 707 bb->startOffset, 708 (bb->successorBlockList.blockListType == kCatch) ? 709 "Mrecord" : "record"); 710 GrowableListIterator iterator; 711 dvmGrowableListIteratorInit(&bb->successorBlockList.blocks, 712 &iterator); 713 SuccessorBlockInfo *successorBlockInfo = 714 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator); 715 716 int succId = 0; 717 while (true) { 718 if (successorBlockInfo == NULL) break; 719 720 BasicBlock *destBlock = successorBlockInfo->block; 721 SuccessorBlockInfo *nextSuccessorBlockInfo = 722 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator); 723 724 fprintf(file, " {<f%d> %04x: %04x\\l}%s\\\n", 725 succId++, 726 successorBlockInfo->key, 727 destBlock->startOffset, 728 (nextSuccessorBlockInfo != NULL) ? 
" | " : " "); 729 730 successorBlockInfo = nextSuccessorBlockInfo; 731 } 732 fprintf(file, " }\"];\n\n"); 733 734 dvmGetBlockName(bb, blockName1); 735 fprintf(file, " %s:s -> succ%04x:n [style=dashed]\n", 736 blockName1, bb->startOffset); 737 738 if (bb->successorBlockList.blockListType == kPackedSwitch || 739 bb->successorBlockList.blockListType == kSparseSwitch) { 740 741 dvmGrowableListIteratorInit(&bb->successorBlockList.blocks, 742 &iterator); 743 744 succId = 0; 745 while (true) { 746 SuccessorBlockInfo *successorBlockInfo = 747 (SuccessorBlockInfo *) 748 dvmGrowableListIteratorNext(&iterator); 749 if (successorBlockInfo == NULL) break; 750 751 BasicBlock *destBlock = successorBlockInfo->block; 752 753 dvmGetBlockName(destBlock, blockName2); 754 fprintf(file, " succ%04x:f%d:e -> %s:n\n", 755 bb->startOffset, succId++, 756 blockName2); 757 } 758 } 759 } 760 fprintf(file, "\n"); 761 762 /* 763 * If we need to debug the dominator tree, uncomment the following code 764 */ 765#if 1 766 dvmGetBlockName(bb, blockName1); 767 fprintf(file, " cfg%s [label=\"%s\", shape=none];\n", 768 blockName1, blockName1); 769 if (bb->iDom) { 770 dvmGetBlockName(bb->iDom, blockName2); 771 fprintf(file, " cfg%s:s -> cfg%s:n\n\n", 772 blockName2, blockName1); 773 } 774#endif 775 } 776 fprintf(file, "}\n"); 777 fclose(file); 778} 779 780/* Verify if all the successor is connected with all the claimed predecessors */ 781static bool verifyPredInfo(CompilationUnit *cUnit, BasicBlock *bb) 782{ 783 BitVectorIterator bvIterator; 784 785 dvmBitVectorIteratorInit(bb->predecessors, &bvIterator); 786 while (true) { 787 int blockIdx = dvmBitVectorIteratorNext(&bvIterator); 788 if (blockIdx == -1) break; 789 BasicBlock *predBB = (BasicBlock *) 790 dvmGrowableListGetElement(&cUnit->blockList, blockIdx); 791 bool found = false; 792 if (predBB->taken == bb) { 793 found = true; 794 } else if (predBB->fallThrough == bb) { 795 found = true; 796 } else if (predBB->successorBlockList.blockListType != 
kNotUsed) { 797 GrowableListIterator iterator; 798 dvmGrowableListIteratorInit(&predBB->successorBlockList.blocks, 799 &iterator); 800 while (true) { 801 SuccessorBlockInfo *successorBlockInfo = 802 (SuccessorBlockInfo *) 803 dvmGrowableListIteratorNext(&iterator); 804 if (successorBlockInfo == NULL) break; 805 BasicBlock *succBB = successorBlockInfo->block; 806 if (succBB == bb) { 807 found = true; 808 break; 809 } 810 } 811 } 812 if (found == false) { 813 char blockName1[BLOCK_NAME_LEN], blockName2[BLOCK_NAME_LEN]; 814 dvmGetBlockName(bb, blockName1); 815 dvmGetBlockName(predBB, blockName2); 816 dvmDumpCFG(cUnit, "/sdcard/cfg/"); 817 LOGE("Successor %s not found from %s", 818 blockName1, blockName2); 819 dvmAbort(); 820 } 821 } 822 return true; 823} 824 825/* Identify code range in try blocks and set up the empty catch blocks */ 826static void processTryCatchBlocks(CompilationUnit *cUnit) 827{ 828 const Method *meth = cUnit->method; 829 const DexCode *pCode = dvmGetMethodCode(meth); 830 int triesSize = pCode->triesSize; 831 int i; 832 int offset; 833 834 if (triesSize == 0) { 835 return; 836 } 837 838 const DexTry *pTries = dexGetTries(pCode); 839 BitVector *tryBlockAddr = cUnit->tryBlockAddr; 840 841 /* Mark all the insn offsets in Try blocks */ 842 for (i = 0; i < triesSize; i++) { 843 const DexTry* pTry = &pTries[i]; 844 /* all in 16-bit units */ 845 int startOffset = pTry->startAddr; 846 int endOffset = startOffset + pTry->insnCount; 847 848 for (offset = startOffset; offset < endOffset; offset++) { 849 dvmCompilerSetBit(tryBlockAddr, offset); 850 } 851 } 852 853 /* Iterate over each of the handlers to enqueue the empty Catch blocks */ 854 offset = dexGetFirstHandlerOffset(pCode); 855 int handlersSize = dexGetHandlersSize(pCode); 856 857 for (i = 0; i < handlersSize; i++) { 858 DexCatchIterator iterator; 859 dexCatchIteratorInit(&iterator, pCode, offset); 860 861 for (;;) { 862 DexCatchHandler* handler = dexCatchIteratorNext(&iterator); 863 864 if (handler == 
NULL) { 865 break; 866 } 867 868 /* 869 * Create dummy catch blocks first. Since these are created before 870 * other blocks are processed, "split" is specified as false. 871 */ 872 findBlock(cUnit, handler->address, 873 /* split */ 874 false, 875 /* create */ 876 true); 877 } 878 879 offset = dexCatchIteratorGetEndOffset(&iterator, pCode); 880 } 881} 882 883/* Process instructions with the kInstrCanBranch flag */ 884static void processCanBranch(CompilationUnit *cUnit, BasicBlock *curBlock, 885 MIR *insn, int curOffset, int width, int flags, 886 const u2* codePtr, const u2* codeEnd) 887{ 888 int target = curOffset; 889 switch (insn->dalvikInsn.opcode) { 890 case OP_GOTO: 891 case OP_GOTO_16: 892 case OP_GOTO_32: 893 target += (int) insn->dalvikInsn.vA; 894 break; 895 case OP_IF_EQ: 896 case OP_IF_NE: 897 case OP_IF_LT: 898 case OP_IF_GE: 899 case OP_IF_GT: 900 case OP_IF_LE: 901 target += (int) insn->dalvikInsn.vC; 902 break; 903 case OP_IF_EQZ: 904 case OP_IF_NEZ: 905 case OP_IF_LTZ: 906 case OP_IF_GEZ: 907 case OP_IF_GTZ: 908 case OP_IF_LEZ: 909 target += (int) insn->dalvikInsn.vB; 910 break; 911 default: 912 LOGE("Unexpected opcode(%d) with kInstrCanBranch set", 913 insn->dalvikInsn.opcode); 914 dvmAbort(); 915 } 916 BasicBlock *takenBlock = findBlock(cUnit, target, 917 /* split */ 918 true, 919 /* create */ 920 true); 921 curBlock->taken = takenBlock; 922 dvmCompilerSetBit(takenBlock->predecessors, curBlock->id); 923 924 /* Always terminate the current block for conditional branches */ 925 if (flags & kInstrCanContinue) { 926 BasicBlock *fallthroughBlock = findBlock(cUnit, 927 curOffset + width, 928 /* 929 * If the method is processed 930 * in sequential order from the 931 * beginning, we don't need to 932 * specify split for continue 933 * blocks. However, this 934 * routine can be called by 935 * compileLoop, which starts 936 * parsing the method from an 937 * arbitrary address in the 938 * method body. 
939 */ 940 true, 941 /* create */ 942 true); 943 curBlock->fallThrough = fallthroughBlock; 944 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id); 945 } else if (codePtr < codeEnd) { 946 /* Create a fallthrough block for real instructions (incl. OP_NOP) */ 947 if (contentIsInsn(codePtr)) { 948 findBlock(cUnit, curOffset + width, 949 /* split */ 950 false, 951 /* create */ 952 true); 953 } 954 } 955} 956 957/* Process instructions with the kInstrCanSwitch flag */ 958static void processCanSwitch(CompilationUnit *cUnit, BasicBlock *curBlock, 959 MIR *insn, int curOffset, int width, int flags) 960{ 961 u2 *switchData= (u2 *) (cUnit->method->insns + curOffset + 962 insn->dalvikInsn.vB); 963 int size; 964 int *keyTable; 965 int *targetTable; 966 int i; 967 int firstKey; 968 969 /* 970 * Packed switch data format: 971 * ushort ident = 0x0100 magic value 972 * ushort size number of entries in the table 973 * int first_key first (and lowest) switch case value 974 * int targets[size] branch targets, relative to switch opcode 975 * 976 * Total size is (4+size*2) 16-bit code units. 977 */ 978 if (insn->dalvikInsn.opcode == OP_PACKED_SWITCH) { 979 assert(switchData[0] == kPackedSwitchSignature); 980 size = switchData[1]; 981 firstKey = switchData[2] | (switchData[3] << 16); 982 targetTable = (int *) &switchData[4]; 983 keyTable = NULL; // Make the compiler happy 984 /* 985 * Sparse switch data format: 986 * ushort ident = 0x0200 magic value 987 * ushort size number of entries in the table; > 0 988 * int keys[size] keys, sorted low-to-high; 32-bit aligned 989 * int targets[size] branch targets, relative to switch opcode 990 * 991 * Total size is (2+size*4) 16-bit code units. 
992 */ 993 } else { 994 assert(switchData[0] == kSparseSwitchSignature); 995 size = switchData[1]; 996 keyTable = (int *) &switchData[2]; 997 targetTable = (int *) &switchData[2 + size*2]; 998 firstKey = 0; // To make the compiler happy 999 } 1000 1001 if (curBlock->successorBlockList.blockListType != kNotUsed) { 1002 LOGE("Successor block list already in use: %d", 1003 curBlock->successorBlockList.blockListType); 1004 dvmAbort(); 1005 } 1006 curBlock->successorBlockList.blockListType = 1007 (insn->dalvikInsn.opcode == OP_PACKED_SWITCH) ? 1008 kPackedSwitch : kSparseSwitch; 1009 dvmInitGrowableList(&curBlock->successorBlockList.blocks, size); 1010 1011 for (i = 0; i < size; i++) { 1012 BasicBlock *caseBlock = findBlock(cUnit, curOffset + targetTable[i], 1013 /* split */ 1014 true, 1015 /* create */ 1016 true); 1017 SuccessorBlockInfo *successorBlockInfo = 1018 (SuccessorBlockInfo *) dvmCompilerNew(sizeof(SuccessorBlockInfo), 1019 false); 1020 successorBlockInfo->block = caseBlock; 1021 successorBlockInfo->key = (insn->dalvikInsn.opcode == OP_PACKED_SWITCH)? 
1022 firstKey + i : keyTable[i]; 1023 dvmInsertGrowableList(&curBlock->successorBlockList.blocks, 1024 (intptr_t) successorBlockInfo); 1025 dvmCompilerSetBit(caseBlock->predecessors, curBlock->id); 1026 } 1027 1028 /* Fall-through case */ 1029 BasicBlock *fallthroughBlock = findBlock(cUnit, 1030 curOffset + width, 1031 /* split */ 1032 false, 1033 /* create */ 1034 true); 1035 curBlock->fallThrough = fallthroughBlock; 1036 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id); 1037} 1038 1039/* Process instructions with the kInstrCanThrow flag */ 1040static void processCanThrow(CompilationUnit *cUnit, BasicBlock *curBlock, 1041 MIR *insn, int curOffset, int width, int flags, 1042 BitVector *tryBlockAddr, const u2 *codePtr, 1043 const u2* codeEnd) 1044{ 1045 const Method *method = cUnit->method; 1046 const DexCode *dexCode = dvmGetMethodCode(method); 1047 1048 /* In try block */ 1049 if (dvmIsBitSet(tryBlockAddr, curOffset)) { 1050 DexCatchIterator iterator; 1051 1052 if (!dexFindCatchHandler(&iterator, dexCode, curOffset)) { 1053 LOGE("Catch block not found in dexfile for insn %x in %s", 1054 curOffset, method->name); 1055 dvmAbort(); 1056 1057 } 1058 if (curBlock->successorBlockList.blockListType != kNotUsed) { 1059 LOGE("Successor block list already in use: %d", 1060 curBlock->successorBlockList.blockListType); 1061 dvmAbort(); 1062 } 1063 curBlock->successorBlockList.blockListType = kCatch; 1064 dvmInitGrowableList(&curBlock->successorBlockList.blocks, 2); 1065 1066 for (;;) { 1067 DexCatchHandler* handler = dexCatchIteratorNext(&iterator); 1068 1069 if (handler == NULL) { 1070 break; 1071 } 1072 1073 BasicBlock *catchBlock = findBlock(cUnit, handler->address, 1074 /* split */ 1075 false, 1076 /* create */ 1077 false); 1078 1079 SuccessorBlockInfo *successorBlockInfo = 1080 (SuccessorBlockInfo *) dvmCompilerNew(sizeof(SuccessorBlockInfo), 1081 false); 1082 successorBlockInfo->block = catchBlock; 1083 successorBlockInfo->key = handler->typeIdx; 1084 
dvmInsertGrowableList(&curBlock->successorBlockList.blocks, 1085 (intptr_t) successorBlockInfo); 1086 dvmCompilerSetBit(catchBlock->predecessors, curBlock->id); 1087 } 1088 } else { 1089 BasicBlock *ehBlock = dvmCompilerNewBB(kExceptionHandling, 1090 cUnit->numBlocks++); 1091 curBlock->taken = ehBlock; 1092 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) ehBlock); 1093 ehBlock->startOffset = curOffset; 1094 dvmCompilerSetBit(ehBlock->predecessors, curBlock->id); 1095 } 1096 1097 /* 1098 * Force the current block to terminate. 1099 * 1100 * Data may be present before codeEnd, so we need to parse it to know 1101 * whether it is code or data. 1102 */ 1103 if (codePtr < codeEnd) { 1104 /* Create a fallthrough block for real instructions (incl. OP_NOP) */ 1105 if (contentIsInsn(codePtr)) { 1106 BasicBlock *fallthroughBlock = findBlock(cUnit, 1107 curOffset + width, 1108 /* split */ 1109 false, 1110 /* create */ 1111 true); 1112 /* 1113 * OP_THROW and OP_THROW_VERIFICATION_ERROR are unconditional 1114 * branches. 1115 */ 1116 if (insn->dalvikInsn.opcode != OP_THROW_VERIFICATION_ERROR && 1117 insn->dalvikInsn.opcode != OP_THROW) { 1118 curBlock->fallThrough = fallthroughBlock; 1119 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id); 1120 } 1121 } 1122 } 1123} 1124 1125/* 1126 * Similar to dvmCompileTrace, but the entity processed here is the whole 1127 * method. 1128 * 1129 * TODO: implementation will be revisited when the trace builder can provide 1130 * whole-method traces. 
 */
bool dvmCompileMethod(const Method *method, JitTranslationInfo *info)
{
    CompilationUnit cUnit;
    const DexCode *dexCode = dvmGetMethodCode(method);
    const u2 *codePtr = dexCode->insns;
    const u2 *codeEnd = dexCode->insns + dexCode->insnsSize;
    int numBlocks = 0;
    unsigned int curOffset = 0;

    /* Method already compiled */
    if (dvmJitGetMethodAddr(codePtr)) {
        info->codeAddress = NULL;
        return false;
    }

    memset(&cUnit, 0, sizeof(cUnit));
    cUnit.method = method;

    cUnit.jitMode = kJitMethod;

    /* Initialize the block list */
    dvmInitGrowableList(&cUnit.blockList, 4);

    /*
     * FIXME - PC reconstruction list won't be needed after the codegen routines
     * are enhanced to true method mode.
     */
    /* Initialize the PC reconstruction list */
    dvmInitGrowableList(&cUnit.pcReconstructionList, 8);

    /* Allocate the bit-vector to track the beginning of basic blocks */
    BitVector *tryBlockAddr = dvmCompilerAllocBitVector(dexCode->insnsSize,
                                                        true /* expandable */);
    cUnit.tryBlockAddr = tryBlockAddr;

    /* Create the default entry and exit blocks and enter them to the list */
    BasicBlock *entryBlock = dvmCompilerNewBB(kEntryBlock, numBlocks++);
    BasicBlock *exitBlock = dvmCompilerNewBB(kExitBlock, numBlocks++);

    cUnit.entryBlock = entryBlock;
    cUnit.exitBlock = exitBlock;

    dvmInsertGrowableList(&cUnit.blockList, (intptr_t) entryBlock);
    dvmInsertGrowableList(&cUnit.blockList, (intptr_t) exitBlock);

    /* Current block to record parsed instructions */
    BasicBlock *curBlock = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
    curBlock->startOffset = 0;
    dvmInsertGrowableList(&cUnit.blockList, (intptr_t) curBlock);
    entryBlock->fallThrough = curBlock;
    dvmCompilerSetBit(curBlock->predecessors, entryBlock->id);

    /*
     * Store back the number of blocks since new blocks may be created when
     * accessing cUnit.
     */
    cUnit.numBlocks = numBlocks;

    /* Identify code range in try blocks and set up the empty catch blocks */
    processTryCatchBlocks(&cUnit);

    /* Parse all instructions and put them into containing basic blocks */
    while (codePtr < codeEnd) {
        MIR *insn = (MIR *) dvmCompilerNew(sizeof(MIR), true);
        insn->offset = curOffset;
        int width = parseInsn(codePtr, &insn->dalvikInsn, false);
        insn->width = width;

        /* Terminate when the data section is seen */
        if (width == 0)
            break;

        dvmCompilerAppendMIR(curBlock, insn);

        codePtr += width;
        int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);

        if (flags & kInstrCanBranch) {
            processCanBranch(&cUnit, curBlock, insn, curOffset, width, flags,
                             codePtr, codeEnd);
        } else if (flags & kInstrCanReturn) {
            curBlock->fallThrough = exitBlock;
            dvmCompilerSetBit(exitBlock->predecessors, curBlock->id);
            /*
             * Terminate the current block if there are instructions
             * afterwards.
             */
            if (codePtr < codeEnd) {
                /*
                 * Create a fallthrough block for real instructions
                 * (incl. OP_NOP).
                 */
                if (contentIsInsn(codePtr)) {
                    findBlock(&cUnit, curOffset + width,
                              /* split */
                              false,
                              /* create */
                              true);
                }
            }
        } else if (flags & kInstrCanThrow) {
            processCanThrow(&cUnit, curBlock, insn, curOffset, width, flags,
                            tryBlockAddr, codePtr, codeEnd);
        } else if (flags & kInstrCanSwitch) {
            processCanSwitch(&cUnit, curBlock, insn, curOffset, width, flags);
        }
        curOffset += width;
        BasicBlock *nextBlock = findBlock(&cUnit, curOffset,
                                          /* split */
                                          false,
                                          /* create */
                                          false);
        if (nextBlock) {
            /*
             * The next instruction could be the target of a previously parsed
             * forward branch so a block is already created. If the current
             * instruction is not an unconditional branch, connect them through
             * the fall-through link.
             */
            assert(curBlock->fallThrough == NULL ||
                   curBlock->fallThrough == nextBlock ||
                   curBlock->fallThrough == exitBlock);

            if ((curBlock->fallThrough == NULL) &&
                (flags & kInstrCanContinue)) {
                curBlock->fallThrough = nextBlock;
                dvmCompilerSetBit(nextBlock->predecessors, curBlock->id);
            }
            curBlock = nextBlock;
        }
    }

    if (cUnit.printMe) {
        dvmCompilerDumpCompilationUnit(&cUnit);
    }

    /* Adjust this value accordingly once inlining is performed */
    cUnit.numDalvikRegisters = cUnit.method->registersSize;

    /* Verify if all blocks are connected as claimed */
    /* FIXME - to be disabled in the future */
    dvmCompilerDataFlowAnalysisDispatcher(&cUnit, verifyPredInfo,
                                          kAllNodes,
                                          false /* isIterative */);


    /* Perform SSA transformation for the whole method */
    dvmCompilerMethodSSATransformation(&cUnit);

    dvmCompilerInitializeRegAlloc(&cUnit);  // Needs to happen after SSA naming

    /* Allocate Registers using simple local allocation scheme */
    dvmCompilerLocalRegAlloc(&cUnit);

    /* Convert MIR to LIR, etc. */
    dvmCompilerMethodMIR2LIR(&cUnit);

    // Debugging only
    //dvmDumpCFG(&cUnit, "/sdcard/cfg/");

    /* Method is not empty */
    if (cUnit.firstLIRInsn) {
        /* Convert LIR into machine code. Loop for recoverable retries */
        do {
            dvmCompilerAssembleLIR(&cUnit, info);
            cUnit.assemblerRetries++;
            if (cUnit.printMe && cUnit.assemblerStatus != kSuccess)
                ALOGD("Assembler abort #%d on %d", cUnit.assemblerRetries,
                      cUnit.assemblerStatus);
        } while (cUnit.assemblerStatus == kRetryAll);

        if (cUnit.printMe) {
            dvmCompilerCodegenDump(&cUnit);
        }

        if (info->codeAddress) {
            dvmJitSetCodeAddr(dexCode->insns, info->codeAddress,
                              info->instructionSet, true, 0);
            /*
             * Clear the codeAddress for the enclosing trace to reuse the info
             */
            info->codeAddress = NULL;
        }
    }

    /*
     * NOTE(review): always returns false, even on successful assembly —
     * presumably so the caller falls back to trace compilation while
     * method mode matures (see FIXME above); confirm against call sites.
     */
    return false;
}

/* Extending the trace by crawling the code from curBlock */
static bool exhaustTrace(CompilationUnit *cUnit, BasicBlock *curBlock)
{
    unsigned int curOffset = curBlock->startOffset;
    const u2 *codePtr = cUnit->method->insns + curOffset;

    /* Each block is crawled at most once per pass */
    if (curBlock->visited == true) return false;

    curBlock->visited = true;

    if (curBlock->blockType == kEntryBlock ||
        curBlock->blockType == kExitBlock) {
        return false;
    }

    /*
     * Block has been parsed - check the taken/fallThrough in case it is a split
     * block.
     */
    if (curBlock->firstMIRInsn != NULL) {
        bool changed = false;
        /* Recurse into both successors; report whether anything was added */
        if (curBlock->taken)
            changed |= exhaustTrace(cUnit, curBlock->taken);
        if (curBlock->fallThrough)
            changed |= exhaustTrace(cUnit, curBlock->fallThrough);
        return changed;
    }
    while (true) {
        MIR *insn = (MIR *) dvmCompilerNew(sizeof(MIR), true);
        insn->offset = curOffset;
        int width = parseInsn(codePtr, &insn->dalvikInsn, false);
        insn->width = width;

        /* Terminate when the data section is seen */
        if (width == 0)
            break;

        dvmCompilerAppendMIR(curBlock, insn);

        codePtr += width;
        int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);

        /* Stop extending the trace after seeing these instructions */
        if (flags & (kInstrCanReturn | kInstrCanSwitch | kInstrInvoke)) {
            curBlock->fallThrough = cUnit->exitBlock;
            dvmCompilerSetBit(cUnit->exitBlock->predecessors, curBlock->id);
            break;
        } else if (flags & kInstrCanBranch) {
            processCanBranch(cUnit, curBlock, insn, curOffset, width, flags,
                             codePtr, NULL);
            if (curBlock->taken) {
                exhaustTrace(cUnit, curBlock->taken);
            }
            if (curBlock->fallThrough) {
                exhaustTrace(cUnit, curBlock->fallThrough);
            }
            break;
        }
        curOffset += width;
        BasicBlock *nextBlock = findBlock(cUnit, curOffset,
                                          /* split */
                                          false,
                                          /* create */
                                          false);
        if (nextBlock) {
            /*
             * The next instruction could be the target of a previously parsed
             * forward branch so a block is already created. If the current
             * instruction is not an unconditional branch, connect them through
             * the fall-through link.
             */
            assert(curBlock->fallThrough == NULL ||
                   curBlock->fallThrough == nextBlock ||
                   curBlock->fallThrough == cUnit->exitBlock);

            if ((curBlock->fallThrough == NULL) &&
                (flags & kInstrCanContinue)) {
                curBlock->needFallThroughBranch = true;
                curBlock->fallThrough = nextBlock;
                dvmCompilerSetBit(nextBlock->predecessors, curBlock->id);
            }
            /* Block has been visited - no more parsing needed */
            if (nextBlock->visited == true) {
                return true;
            }
            curBlock = nextBlock;
        }
    }
    return true;
}

/*
 * Compile a loop.
 *
 * Builds a whole-loop CFG starting from startOffset via exhaustTrace, runs
 * loop optimization and codegen, and on any failure (no loop found, heavy
 * instructions, assembler overflow) bails out to plain trace compilation
 * with loop formation suppressed.
 */
static bool compileLoop(CompilationUnit *cUnit, unsigned int startOffset,
                        JitTraceDescription *desc, int numMaxInsts,
                        JitTranslationInfo *info, jmp_buf *bailPtr,
                        int optHints)
{
    int numBlocks = 0;
    unsigned int curOffset = startOffset;
    bool changed;
    BasicBlock *bb;
#if defined(WITH_JIT_TUNING)
    CompilerMethodStats *methodStats;
#endif

    cUnit->jitMode = kJitLoop;

    /* Initialize the block list */
    dvmInitGrowableList(&cUnit->blockList, 4);

    /* Initialize the PC reconstruction list */
    dvmInitGrowableList(&cUnit->pcReconstructionList, 8);

    /* Create the default entry and exit blocks and enter them to the list */
    BasicBlock *entryBlock = dvmCompilerNewBB(kEntryBlock, numBlocks++);
    entryBlock->startOffset = curOffset;
    BasicBlock *exitBlock = dvmCompilerNewBB(kExitBlock, numBlocks++);

    cUnit->entryBlock = entryBlock;
    cUnit->exitBlock = exitBlock;

    dvmInsertGrowableList(&cUnit->blockList, (intptr_t) entryBlock);
    dvmInsertGrowableList(&cUnit->blockList, (intptr_t) exitBlock);

    /* Current block to record parsed instructions */
    BasicBlock *curBlock = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
    curBlock->startOffset = curOffset;

    dvmInsertGrowableList(&cUnit->blockList, (intptr_t) curBlock);
    entryBlock->fallThrough = curBlock;
    dvmCompilerSetBit(curBlock->predecessors, entryBlock->id);

    /*
     * Store back the number of blocks since new blocks may be created when
     * accessing cUnit.
     */
    cUnit->numBlocks = numBlocks;

    /* Re-crawl until the CFG stops growing (visited flags cleared each pass) */
    do {
        dvmCompilerDataFlowAnalysisDispatcher(cUnit,
                                              dvmCompilerClearVisitedFlag,
                                              kAllNodes,
                                              false /* isIterative */);
        changed = exhaustTrace(cUnit, curBlock);
    } while (changed);

    /* Backward chaining block */
    bb = dvmCompilerNewBB(kChainingCellBackwardBranch, cUnit->numBlocks++);
    dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
    cUnit->backChainBlock = bb;

    /* A special block to host PC reconstruction code */
    bb = dvmCompilerNewBB(kPCReconstruction, cUnit->numBlocks++);
    dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);

    /* And one final block that publishes the PC and raises the exception */
    bb = dvmCompilerNewBB(kExceptionHandling, cUnit->numBlocks++);
    dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
    cUnit->puntBlock = bb;

    cUnit->numDalvikRegisters = cUnit->method->registersSize;

    /* Verify if all blocks are connected as claimed */
    /* FIXME - to be disabled in the future */
    dvmCompilerDataFlowAnalysisDispatcher(cUnit, verifyPredInfo,
                                          kAllNodes,
                                          false /* isIterative */);


    /* Try to identify a loop */
    if (!dvmCompilerBuildLoop(cUnit))
        goto bail;

    dvmCompilerLoopOpt(cUnit);

    /*
     * Change the backward branch to the backward chaining cell after dataflow
     * analysis/optimizations are done.
     */
    dvmCompilerInsertBackwardChaining(cUnit);

    dvmCompilerInitializeRegAlloc(cUnit);

    /* Allocate Registers using simple local allocation scheme */
    dvmCompilerLocalRegAlloc(cUnit);

    /* Convert MIR to LIR, etc. */
    dvmCompilerMIR2LIR(cUnit);

    /* Loop contains never executed blocks / heavy instructions */
    if (cUnit->quitLoopMode) {
        if (cUnit->printMe || gDvmJit.receivedSIGUSR2) {
            ALOGD("Loop trace @ offset %04x aborted due to unresolved code info",
                 cUnit->entryBlock->startOffset);
        }
        goto bail;
    }

    /* Convert LIR into machine code. Loop for recoverable retries */
    do {
        dvmCompilerAssembleLIR(cUnit, info);
        cUnit->assemblerRetries++;
        if (cUnit->printMe && cUnit->assemblerStatus != kSuccess)
            ALOGD("Assembler abort #%d on %d", cUnit->assemblerRetries,
                  cUnit->assemblerStatus);
    } while (cUnit->assemblerStatus == kRetryAll);

    /* Loop is too big - bail out */
    if (cUnit->assemblerStatus == kRetryHalve) {
        goto bail;
    }

    if (cUnit->printMe || gDvmJit.receivedSIGUSR2) {
        ALOGD("Loop trace @ offset %04x", cUnit->entryBlock->startOffset);
        dvmCompilerCodegenDump(cUnit);
    }

    /*
     * If this trace uses class objects as constants,
     * dvmJitInstallClassObjectPointers will switch the thread state
     * to running and look up the class pointers using the descriptor/loader
     * tuple stored in the callsite info structure. We need to make this window
     * as short as possible since it is blocking GC.
     */
    if (cUnit->hasClassLiterals && info->codeAddress) {
        dvmJitInstallClassObjectPointers(cUnit, (char *) info->codeAddress);
    }

    /*
     * Since callsiteinfo is allocated from the arena, delay the reset until
     * class pointers are resolved.
     */
    dvmCompilerArenaReset();

    assert(cUnit->assemblerStatus == kSuccess);
#if defined(WITH_JIT_TUNING)
    /* Locate the entry to store compilation statistics for this method */
    methodStats = dvmCompilerAnalyzeMethodBody(desc->method, false);
    methodStats->nativeSize += cUnit->totalSize;
#endif
    return info->codeAddress != NULL;

bail:
    /* Retry the original trace with loop formation suppressed
     * (JIT_OPT_NO_LOOP set) */
    dvmCompilerArenaReset();
    return dvmCompileTrace(desc, numMaxInsts, info, bailPtr,
                           optHints | JIT_OPT_NO_LOOP);
}

/*
 * Main entry point to start trace compilation. Basic blocks are constructed
 * first and they will be passed to the codegen routines to convert Dalvik
 * bytecode into machine code.
 */
bool dvmCompileTrace(JitTraceDescription *desc, int numMaxInsts,
                     JitTranslationInfo *info, jmp_buf *bailPtr,
                     int optHints)
{
    const DexCode *dexCode = dvmGetMethodCode(desc->method);
    const JitTraceRun* currRun = &desc->trace[0];
    unsigned int curOffset = currRun->info.frag.startOffset;
    unsigned int startOffset = curOffset;
    unsigned int numInsts = currRun->info.frag.numInsts;
    const u2 *codePtr = dexCode->insns + curOffset;
    int traceSize = 0;  // # of half-words
    const u2 *startCodePtr = codePtr;
    BasicBlock *curBB, *entryCodeBB;
    int numBlocks = 0;
    static int compilationId;
    CompilationUnit cUnit;
    GrowableList *blockList;
#if defined(WITH_JIT_TUNING)
    CompilerMethodStats *methodStats;
#endif

    /* If we've already compiled this trace, just return success */
    if (dvmJitGetTraceAddr(startCodePtr) && !info->discardResult) {
        /*
         * Make sure the codeAddress is NULL so that it won't clobber the
         * existing entry.
         */
        info->codeAddress = NULL;
        return true;
    }

    /* If the work order is stale, discard it */
    if (info->cacheVersion != gDvmJit.cacheVersion) {
        return false;
    }

    compilationId++;
    memset(&cUnit, 0, sizeof(CompilationUnit));

#if defined(WITH_JIT_TUNING)
    /* Locate the entry to store compilation statistics for this method */
    methodStats = dvmCompilerAnalyzeMethodBody(desc->method, false);
#endif

    /* Set the recover buffer pointer */
    cUnit.bailPtr = bailPtr;

    /* Initialize the printMe flag */
    cUnit.printMe = gDvmJit.printMe;

    /* Setup the method */
    cUnit.method = desc->method;

    /* Store the trace descriptor and set the initial mode */
    cUnit.traceDesc = desc;
    cUnit.jitMode = kJitTrace;

    /* Initialize the PC reconstruction list */
    dvmInitGrowableList(&cUnit.pcReconstructionList, 8);

    /* Initialize the basic block list */
    blockList = &cUnit.blockList;
    dvmInitGrowableList(blockList, 8);

    /* Identify traces that we don't want to compile */
    if (gDvmJit.methodTable) {
        /* "class;method" concatenation; +1 for the NUL terminator */
        int len = strlen(desc->method->clazz->descriptor) +
                  strlen(desc->method->name) + 1;
        char *fullSignature = (char *)dvmCompilerNew(len, true);
        strcpy(fullSignature, desc->method->clazz->descriptor);
        strcat(fullSignature, desc->method->name);

        int hashValue = dvmComputeUtf8Hash(fullSignature);

        /*
         * Doing three levels of screening to see whether we want to skip
         * compiling this method
         */

        /* First, check the full "class;method" signature */
        bool methodFound =
            dvmHashTableLookup(gDvmJit.methodTable, hashValue,
                               fullSignature, (HashCompareFunc) strcmp,
                               false) !=
            NULL;

        /* Full signature not found - check the enclosing class */
        if (methodFound == false) {
            int hashValue = dvmComputeUtf8Hash(desc->method->clazz->descriptor);
            methodFound =
                dvmHashTableLookup(gDvmJit.methodTable, hashValue,
                               (char *) desc->method->clazz->descriptor,
                               (HashCompareFunc) strcmp, false) !=
                NULL;
            /* Enclosing class not found - check the method name */
            if (methodFound == false) {
                int hashValue = dvmComputeUtf8Hash(desc->method->name);
                methodFound =
                    dvmHashTableLookup(gDvmJit.methodTable, hashValue,
                                   (char *) desc->method->name,
                                   (HashCompareFunc) strcmp, false) !=
                    NULL;

                /*
                 * Debug by call-graph is enabled. Check if the debug list
                 * covers any methods on the VM stack.
                 */
                if (methodFound == false && gDvmJit.checkCallGraph == true) {
                    methodFound =
                        filterMethodByCallGraph(info->requestingThread,
                                                desc->method->name);
                }
            }
        }

        /*
         * Under the following conditions, the trace will be *conservatively*
         * compiled by only containing single-step instructions to and from the
         * interpreter.
         * 1) If includeSelectedMethod == false, the method matches the full or
         *    partial signature stored in the hash table.
         *
         * 2) If includeSelectedMethod == true, the method does not match the
         *    full and partial signature stored in the hash table.
         */
        if (gDvmJit.includeSelectedMethod != methodFound) {
            cUnit.allSingleStep = true;
        } else {
            /* Compile the trace as normal */

            /* Print the method we cherry picked */
            if (gDvmJit.includeSelectedMethod == true) {
                cUnit.printMe = true;
            }
        }
    }

    /* Allocate the entry block */
    curBB = dvmCompilerNewBB(kEntryBlock, numBlocks++);
    dvmInsertGrowableList(blockList, (intptr_t) curBB);
    curBB->startOffset = curOffset;

    entryCodeBB = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
    dvmInsertGrowableList(blockList, (intptr_t) entryCodeBB);
    entryCodeBB->startOffset = curOffset;
    curBB->fallThrough = entryCodeBB;
    curBB = entryCodeBB;

    if (cUnit.printMe) {
        ALOGD("--------\nCompiler: Building trace for %s, offset %#x",
             desc->method->name, curOffset);
    }

    /*
     * Analyze the trace descriptor and include up to the maximal number
     * of Dalvik instructions into the IR.
     */
    while (1) {
        MIR *insn;
        int width;
        insn = (MIR *)dvmCompilerNew(sizeof(MIR), true);
        insn->offset = curOffset;
        width = parseInsn(codePtr, &insn->dalvikInsn, cUnit.printMe);

        /* The trace should never include instruction data */
        assert(width);
        insn->width = width;
        traceSize += width;
        dvmCompilerAppendMIR(curBB, insn);
        cUnit.numInsts++;

        int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);

        if (flags & kInstrInvoke) {
            /* Callsite metadata rides in the trace runs right after this one */
            const Method *calleeMethod = (const Method *)
                currRun[JIT_TRACE_CUR_METHOD].info.meta;
            assert(numInsts == 1);
            CallsiteInfo *callsiteInfo =
                (CallsiteInfo *)dvmCompilerNew(sizeof(CallsiteInfo), true);
            callsiteInfo->classDescriptor = (const char *)
                currRun[JIT_TRACE_CLASS_DESC].info.meta;
            callsiteInfo->classLoader = (Object *)
                currRun[JIT_TRACE_CLASS_LOADER].info.meta;
            callsiteInfo->method = calleeMethod;
            insn->meta.callsiteInfo = callsiteInfo;
        }

        /* Instruction limit reached - terminate the trace here */
        if (cUnit.numInsts >= numMaxInsts) {
            break;
        }
        if (--numInsts == 0) {
            if (currRun->info.frag.runEnd) {
                break;
            } else {
                /* Advance to the next trace description (ie non-meta info) */
                do {
                    currRun++;
                } while (!currRun->isCode);

                /* Dummy end-of-run marker seen */
                if (currRun->info.frag.numInsts == 0) {
                    break;
                }

                curBB = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
                dvmInsertGrowableList(blockList, (intptr_t) curBB);
                curOffset = currRun->info.frag.startOffset;
                numInsts = currRun->info.frag.numInsts;
                curBB->startOffset = curOffset;
                codePtr = dexCode->insns + curOffset;
            }
        } else {
            curOffset += width;
            codePtr += width;
        }
    }

#if defined(WITH_JIT_TUNING)
    /* Convert # of half-word to bytes */
    methodStats->compiledDalvikSize += traceSize * 2;
#endif

    /*
     * Now scan basic blocks containing real code to connect the
     * taken/fallthrough links. Also create chaining cells for code not included
     * in the trace.
     */
    size_t blockId;
    for (blockId = 0; blockId < blockList->numUsed; blockId++) {
        curBB = (BasicBlock *) dvmGrowableListGetElement(blockList, blockId);
        MIR *lastInsn = curBB->lastMIRInsn;
        /* Skip empty blocks */
        if (lastInsn == NULL) {
            continue;
        }
        curOffset = lastInsn->offset;
        unsigned int targetOffset = curOffset;
        unsigned int fallThroughOffset = curOffset + lastInsn->width;
        bool isInvoke = false;
        const Method *callee = NULL;

        findBlockBoundary(desc->method, curBB->lastMIRInsn, curOffset,
                          &targetOffset, &isInvoke, &callee);

        /* Link the taken and fallthrough blocks */
        BasicBlock *searchBB;

        int flags = dexGetFlagsFromOpcode(lastInsn->dalvikInsn.opcode);

        if (flags & kInstrInvoke) {
            cUnit.hasInvoke = true;
        }

        /* Backward branch seen */
        if (isInvoke == false &&
            (flags & kInstrCanBranch) != 0 &&
            targetOffset < curOffset &&
            (optHints & JIT_OPT_NO_LOOP) == 0) {
            dvmCompilerArenaReset();
            return compileLoop(&cUnit, startOffset, desc, numMaxInsts,
                               info, bailPtr, optHints);
        }

        /* No backward branch in the trace - start searching the next BB */
        size_t searchBlockId;
        for (searchBlockId = blockId+1; searchBlockId < blockList->numUsed;
             searchBlockId++) {
            searchBB = (BasicBlock *) dvmGrowableListGetElement(blockList,
                                                                searchBlockId);
            if (targetOffset == searchBB->startOffset) {
                curBB->taken = searchBB;
                dvmCompilerSetBit(searchBB->predecessors, curBB->id);
            }
            if (fallThroughOffset == searchBB->startOffset) {
                curBB->fallThrough = searchBB;
                dvmCompilerSetBit(searchBB->predecessors, curBB->id);

                /*
                 * Fallthrough block of an invoke instruction needs to be
                 * aligned to 4-byte boundary (alignment instruction to be
                 * inserted later).
                 */
                if (flags & kInstrInvoke) {
                    searchBB->isFallThroughFromInvoke = true;
                }
            }
        }

        /*
         * Some blocks are ended by non-control-flow-change instructions,
         * currently only due to trace length constraint. In this case we need
         * to generate an explicit branch at the end of the block to jump to
         * the chaining cell.
         */
        curBB->needFallThroughBranch =
            ((flags & (kInstrCanBranch | kInstrCanSwitch | kInstrCanReturn |
                       kInstrInvoke)) == 0);
        if (lastInsn->dalvikInsn.opcode == OP_PACKED_SWITCH ||
            lastInsn->dalvikInsn.opcode == OP_SPARSE_SWITCH) {
            int i;
            const u2 *switchData = desc->method->insns + lastInsn->offset +
                                   lastInsn->dalvikInsn.vB;
            int size = switchData[1];
            int maxChains = MIN(size, MAX_CHAINED_SWITCH_CASES);

            /*
             * Generate the landing pad for cases whose ranks are higher than
             * MAX_CHAINED_SWITCH_CASES. The code will re-enter the interpreter
             * through the NoChain point.
             */
            if (maxChains != size) {
                cUnit.switchOverflowPad =
                    desc->method->insns + lastInsn->offset;
            }

            /*
             * Target table follows the payload header; packed-switch has a
             * 2-unit first_key field where sparse-switch has a key table of
             * size*2 units.
             */
            s4 *targets = (s4 *) (switchData + 2 +
                    (lastInsn->dalvikInsn.opcode == OP_PACKED_SWITCH ?
                     2 : size * 2));

            /* One chaining cell for the first MAX_CHAINED_SWITCH_CASES cases */
            for (i = 0; i < maxChains; i++) {
                BasicBlock *caseChain = dvmCompilerNewBB(kChainingCellNormal,
                                                         numBlocks++);
                dvmInsertGrowableList(blockList, (intptr_t) caseChain);
                caseChain->startOffset = lastInsn->offset + targets[i];
            }

            /* One more chaining cell for the default case */
            BasicBlock *caseChain = dvmCompilerNewBB(kChainingCellNormal,
                                                     numBlocks++);
            dvmInsertGrowableList(blockList, (intptr_t) caseChain);
            caseChain->startOffset = lastInsn->offset + lastInsn->width;
        /* Fallthrough block not included in the trace */
        } else if (!isUnconditionalBranch(lastInsn) &&
                   curBB->fallThrough == NULL) {
            BasicBlock *fallThroughBB;
            /*
             * If the chaining cell is after an invoke or
             * instruction that cannot change the control flow, request a hot
             * chaining cell.
             */
            if (isInvoke || curBB->needFallThroughBranch) {
                fallThroughBB = dvmCompilerNewBB(kChainingCellHot, numBlocks++);
            } else {
                fallThroughBB = dvmCompilerNewBB(kChainingCellNormal,
                                                 numBlocks++);
            }
            dvmInsertGrowableList(blockList, (intptr_t) fallThroughBB);
            fallThroughBB->startOffset = fallThroughOffset;
            curBB->fallThrough = fallThroughBB;
            dvmCompilerSetBit(fallThroughBB->predecessors, curBB->id);
        }
        /* Target block not included in the trace */
        if (curBB->taken == NULL &&
            (isGoto(lastInsn) || isInvoke ||
             (targetOffset != UNKNOWN_TARGET && targetOffset != curOffset))) {
            BasicBlock *newBB = NULL;
            if (isInvoke) {
                /* Monomorphic callee */
                if (callee) {
                    /* JNI call doesn't need a chaining cell */
                    if (!dvmIsNativeMethod(callee)) {
                        newBB = dvmCompilerNewBB(kChainingCellInvokeSingleton,
                                                 numBlocks++);
                        newBB->startOffset = 0;
                        newBB->containingMethod = callee;
                    }
                /* Will resolve at runtime */
                } else {
                    newBB = dvmCompilerNewBB(kChainingCellInvokePredicted,
                                             numBlocks++);
                    newBB->startOffset = 0;
                }
            /* For unconditional branches, request a hot chaining cell */
            } else {
#if !defined(WITH_SELF_VERIFICATION)
                newBB = dvmCompilerNewBB(dexIsGoto(flags) ?
                                                  kChainingCellHot :
                                                  kChainingCellNormal,
                                         numBlocks++);
                newBB->startOffset = targetOffset;
#else
                /* Handle branches that branch back into the block */
                if (targetOffset >= curBB->firstMIRInsn->offset &&
                    targetOffset <= curBB->lastMIRInsn->offset) {
                    newBB = dvmCompilerNewBB(kChainingCellBackwardBranch,
                                             numBlocks++);
                } else {
                    newBB = dvmCompilerNewBB(dexIsGoto(flags) ?
                                                      kChainingCellHot :
                                                      kChainingCellNormal,
                                             numBlocks++);
                }
                newBB->startOffset = targetOffset;
#endif
            }
            if (newBB) {
                curBB->taken = newBB;
                dvmCompilerSetBit(newBB->predecessors, curBB->id);
                dvmInsertGrowableList(blockList, (intptr_t) newBB);
            }
        }
    }

    /* Now create a special block to host PC reconstruction code */
    curBB = dvmCompilerNewBB(kPCReconstruction, numBlocks++);
    dvmInsertGrowableList(blockList, (intptr_t) curBB);

    /* And one final block that publishes the PC and raises the exception */
    curBB = dvmCompilerNewBB(kExceptionHandling, numBlocks++);
    dvmInsertGrowableList(blockList, (intptr_t) curBB);
    cUnit.puntBlock = curBB;

    if (cUnit.printMe) {
        char* signature =
            dexProtoCopyMethodDescriptor(&desc->method->prototype);
        ALOGD("TRACEINFO (%d): 0x%08x %s%s.%s %#x %d of %d, %d blocks",
            compilationId,
            (intptr_t) desc->method->insns,
            desc->method->clazz->descriptor,
            desc->method->name,
            signature,
            desc->trace[0].info.frag.startOffset,
            traceSize,
            dexCode->insnsSize,
            numBlocks);
        free(signature);
    }

    cUnit.numBlocks = numBlocks;

    /* Set the instruction set to use (NOTE: later components may change it) */
    cUnit.instructionSet = dvmCompilerInstructionSet();

    /* Inline transformation @ the MIR level */
    if (cUnit.hasInvoke && !(gDvmJit.disableOpt & (1 << kMethodInlining))) {
        dvmCompilerInlineMIR(&cUnit, info);
    }

    cUnit.numDalvikRegisters = cUnit.method->registersSize;

    /* Preparation for SSA conversion */
    dvmInitializeSSAConversion(&cUnit);

    dvmCompilerNonLoopAnalysis(&cUnit);

    dvmCompilerInitializeRegAlloc(&cUnit);  // Needs to happen after SSA naming

    if (cUnit.printMe) {
        dvmCompilerDumpCompilationUnit(&cUnit);
    }

    /* Allocate Registers using simple local allocation scheme */
    dvmCompilerLocalRegAlloc(&cUnit);

    /* Convert MIR to LIR, etc. */
    dvmCompilerMIR2LIR(&cUnit);

    /* Convert LIR into machine code. Loop for recoverable retries */
    do {
        dvmCompilerAssembleLIR(&cUnit, info);
        cUnit.assemblerRetries++;
        if (cUnit.printMe && cUnit.assemblerStatus != kSuccess)
            ALOGD("Assembler abort #%d on %d", cUnit.assemblerRetries,
                  cUnit.assemblerStatus);
    } while (cUnit.assemblerStatus == kRetryAll);

    if (cUnit.printMe) {
        ALOGD("Trace Dalvik PC: %p", startCodePtr);
        dvmCompilerCodegenDump(&cUnit);
        ALOGD("End %s%s, %d Dalvik instructions",
             desc->method->clazz->descriptor, desc->method->name,
             cUnit.numInsts);
    }

    if (cUnit.assemblerStatus == kRetryHalve) {
        /* Reset the compiler resource pool before retry */
        dvmCompilerArenaReset();

        /* Halve the instruction count and start from the top */
        return dvmCompileTrace(desc, cUnit.numInsts / 2, info, bailPtr,
                               optHints);
    }

    /*
     * If this trace uses class objects as constants,
     * dvmJitInstallClassObjectPointers will switch the thread state
     * to running and look up the class pointers using the descriptor/loader
     * tuple stored in the callsite info structure. We need to make this window
     * as short as possible since it is blocking GC.
     */
    if (cUnit.hasClassLiterals && info->codeAddress) {
        dvmJitInstallClassObjectPointers(&cUnit, (char *) info->codeAddress);
    }

    /*
     * Since callsiteinfo is allocated from the arena, delay the reset until
     * class pointers are resolved.
     */
    dvmCompilerArenaReset();

    assert(cUnit.assemblerStatus == kSuccess);
#if defined(WITH_JIT_TUNING)
    methodStats->nativeSize += cUnit.totalSize;
#endif

    return info->codeAddress != NULL;
}