quick_compiler.cc revision e299f167c9559401548eab71678d4b779e46c2fb
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_compiler.h"

#include <cstdint>

#include "base/dumpable.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/timing_logger.h"
#include "compiler.h"
#include "dex_file-inl.h"
#include "dex_file_to_method_inliner_map.h"
#include "dex/compiler_ir.h"
#include "dex/dex_flags.h"
#include "dex/mir_graph.h"
#include "dex/pass_driver_me_opts.h"
#include "dex/pass_driver_me_post_opt.h"
#include "dex/pass_manager.h"
#include "dex/quick/mir_to_lir.h"
#include "driver/compiler_driver.h"
#include "driver/compiler_options.h"
#include "elf_writer_quick.h"
#include "jni/quick/jni_compiler.h"
#include "mir_to_lir.h"
#include "mirror/art_method-inl.h"
#include "mirror/object.h"
#include "runtime.h"

// Specific compiler backends.
#include "dex/quick/arm/backend_arm.h"
#include "dex/quick/arm64/backend_arm64.h"
#include "dex/quick/mips/backend_mips.h"
#include "dex/quick/x86/backend_x86.h"

namespace art {

static_assert(0U == static_cast<size_t>(kNone),   "kNone not 0");
static_assert(1U == static_cast<size_t>(kArm),    "kArm not 1");
static_assert(2U == static_cast<size_t>(kArm64),  "kArm64 not 2");
static_assert(3U == static_cast<size_t>(kThumb2), "kThumb2 not 3");
static_assert(4U == static_cast<size_t>(kX86),    "kX86 not 4");
static_assert(5U == static_cast<size_t>(kX86_64), "kX86_64 not 5");
static_assert(6U == static_cast<size_t>(kMips),   "kMips not 6");
static_assert(7U == static_cast<size_t>(kMips64), "kMips64 not 7");

// Additional disabled optimizations (beyond those generally disabled) per instruction set.
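// Entries that list optimizations end in a bare 0 so that each "(1 << ...)" term can uniformly
// end with '|'.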
static constexpr uint32_t kDisabledOptimizationsPerISA[] = {
    // 0 = kNone.
    ~0U,
    // 1 = kArm, unused (will use kThumb2).
    ~0U,
    // 2 = kArm64.
    0,
    // 3 = kThumb2.
    0,
    // 4 = kX86.
    (1 << kLoadStoreElimination) |
    0,
    // 5 = kX86_64.
    (1 << kLoadStoreElimination) |
    0,
    // 6 = kMips.
    (1 << kLoadStoreElimination) |
    (1 << kLoadHoisting) |
    (1 << kSuppressLoads) |
    (1 << kNullCheckElimination) |
    (1 << kPromoteRegs) |
    (1 << kTrackLiveTemps) |
    (1 << kSafeOptimizations) |
    (1 << kBBOpt) |
    (1 << kMatch) |
    (1 << kPromoteCompilerTemps) |
    0,
    // 7 = kMips64.
    (1 << kLoadStoreElimination) |
    (1 << kLoadHoisting) |
    (1 << kSuppressLoads) |
    (1 << kNullCheckElimination) |
    (1 << kPromoteRegs) |
    (1 << kTrackLiveTemps) |
    (1 << kSafeOptimizations) |
    (1 << kBBOpt) |
    (1 << kMatch) |
    (1 << kPromoteCompilerTemps) |
    0
};
static_assert(sizeof(kDisabledOptimizationsPerISA) == 8 * sizeof(uint32_t),
              "kDisabledOpts unexpected");

// Supported shorty types per instruction set. null means that all are available.
// Z : boolean
// B : byte
// S : short
// C : char
// I : int
// J : long
// F : float
// D : double
// L : reference (object, array)
// V : void
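// Example: a method "double f(int, Object)" has the shorty "DIL": the return type comes first,
// followed by one character per argument, and all reference types (including arrays) collapse
// to 'L'.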
static const char* kSupportedTypes[] = {
    // 0 = kNone.
    "",
    // 1 = kArm, unused (will use kThumb2).
    "",
    // 2 = kArm64.
    nullptr,
    // 3 = kThumb2.
    nullptr,
    // 4 = kX86.
    nullptr,
    // 5 = kX86_64.
    nullptr,
    // 6 = kMips.
    nullptr,
    // 7 = kMips64.
    nullptr
};
static_assert(sizeof(kSupportedTypes) == 8 * sizeof(char*), "kSupportedTypes unexpected");

static int kAllOpcodes[] = {
    Instruction::NOP,
    Instruction::MOVE,
    Instruction::MOVE_FROM16,
    Instruction::MOVE_16,
    Instruction::MOVE_WIDE,
    Instruction::MOVE_WIDE_FROM16,
    Instruction::MOVE_WIDE_16,
    Instruction::MOVE_OBJECT,
    Instruction::MOVE_OBJECT_FROM16,
    Instruction::MOVE_OBJECT_16,
    Instruction::MOVE_RESULT,
    Instruction::MOVE_RESULT_WIDE,
    Instruction::MOVE_RESULT_OBJECT,
    Instruction::MOVE_EXCEPTION,
    Instruction::RETURN_VOID,
    Instruction::RETURN,
    Instruction::RETURN_WIDE,
    Instruction::RETURN_OBJECT,
    Instruction::CONST_4,
    Instruction::CONST_16,
    Instruction::CONST,
    Instruction::CONST_HIGH16,
    Instruction::CONST_WIDE_16,
    Instruction::CONST_WIDE_32,
    Instruction::CONST_WIDE,
    Instruction::CONST_WIDE_HIGH16,
    Instruction::CONST_STRING,
    Instruction::CONST_STRING_JUMBO,
    Instruction::CONST_CLASS,
    Instruction::MONITOR_ENTER,
    Instruction::MONITOR_EXIT,
    Instruction::CHECK_CAST,
    Instruction::INSTANCE_OF,
    Instruction::ARRAY_LENGTH,
    Instruction::NEW_INSTANCE,
    Instruction::NEW_ARRAY,
    Instruction::FILLED_NEW_ARRAY,
    Instruction::FILLED_NEW_ARRAY_RANGE,
    Instruction::FILL_ARRAY_DATA,
    Instruction::THROW,
    Instruction::GOTO,
    Instruction::GOTO_16,
    Instruction::GOTO_32,
    Instruction::PACKED_SWITCH,
    Instruction::SPARSE_SWITCH,
    Instruction::CMPL_FLOAT,
    Instruction::CMPG_FLOAT,
    Instruction::CMPL_DOUBLE,
    Instruction::CMPG_DOUBLE,
    Instruction::CMP_LONG,
    Instruction::IF_EQ,
    Instruction::IF_NE,
    Instruction::IF_LT,
    Instruction::IF_GE,
    Instruction::IF_GT,
    Instruction::IF_LE,
    Instruction::IF_EQZ,
    Instruction::IF_NEZ,
    Instruction::IF_LTZ,
    Instruction::IF_GEZ,
    Instruction::IF_GTZ,
    Instruction::IF_LEZ,
    Instruction::UNUSED_3E,
    Instruction::UNUSED_3F,
    Instruction::UNUSED_40,
    Instruction::UNUSED_41,
    Instruction::UNUSED_42,
    Instruction::UNUSED_43,
    Instruction::AGET,
    Instruction::AGET_WIDE,
    Instruction::AGET_OBJECT,
    Instruction::AGET_BOOLEAN,
    Instruction::AGET_BYTE,
    Instruction::AGET_CHAR,
    Instruction::AGET_SHORT,
    Instruction::APUT,
    Instruction::APUT_WIDE,
    Instruction::APUT_OBJECT,
    Instruction::APUT_BOOLEAN,
    Instruction::APUT_BYTE,
    Instruction::APUT_CHAR,
    Instruction::APUT_SHORT,
    Instruction::IGET,
    Instruction::IGET_WIDE,
    Instruction::IGET_OBJECT,
    Instruction::IGET_BOOLEAN,
    Instruction::IGET_BYTE,
    Instruction::IGET_CHAR,
    Instruction::IGET_SHORT,
    Instruction::IPUT,
    Instruction::IPUT_WIDE,
    Instruction::IPUT_OBJECT,
    Instruction::IPUT_BOOLEAN,
    Instruction::IPUT_BYTE,
    Instruction::IPUT_CHAR,
    Instruction::IPUT_SHORT,
    Instruction::SGET,
    Instruction::SGET_WIDE,
    Instruction::SGET_OBJECT,
    Instruction::SGET_BOOLEAN,
    Instruction::SGET_BYTE,
    Instruction::SGET_CHAR,
    Instruction::SGET_SHORT,
    Instruction::SPUT,
    Instruction::SPUT_WIDE,
    Instruction::SPUT_OBJECT,
    Instruction::SPUT_BOOLEAN,
    Instruction::SPUT_BYTE,
    Instruction::SPUT_CHAR,
    Instruction::SPUT_SHORT,
    Instruction::INVOKE_VIRTUAL,
    Instruction::INVOKE_SUPER,
    Instruction::INVOKE_DIRECT,
    Instruction::INVOKE_STATIC,
    Instruction::INVOKE_INTERFACE,
    Instruction::RETURN_VOID_NO_BARRIER,
    Instruction::INVOKE_VIRTUAL_RANGE,
    Instruction::INVOKE_SUPER_RANGE,
    Instruction::INVOKE_DIRECT_RANGE,
    Instruction::INVOKE_STATIC_RANGE,
    Instruction::INVOKE_INTERFACE_RANGE,
    Instruction::UNUSED_79,
    Instruction::UNUSED_7A,
    Instruction::NEG_INT,
    Instruction::NOT_INT,
    Instruction::NEG_LONG,
    Instruction::NOT_LONG,
    Instruction::NEG_FLOAT,
    Instruction::NEG_DOUBLE,
    Instruction::INT_TO_LONG,
    Instruction::INT_TO_FLOAT,
    Instruction::INT_TO_DOUBLE,
    Instruction::LONG_TO_INT,
    Instruction::LONG_TO_FLOAT,
    Instruction::LONG_TO_DOUBLE,
    Instruction::FLOAT_TO_INT,
    Instruction::FLOAT_TO_LONG,
    Instruction::FLOAT_TO_DOUBLE,
    Instruction::DOUBLE_TO_INT,
    Instruction::DOUBLE_TO_LONG,
    Instruction::DOUBLE_TO_FLOAT,
    Instruction::INT_TO_BYTE,
    Instruction::INT_TO_CHAR,
    Instruction::INT_TO_SHORT,
    Instruction::ADD_INT,
    Instruction::SUB_INT,
    Instruction::MUL_INT,
    Instruction::DIV_INT,
    Instruction::REM_INT,
    Instruction::AND_INT,
    Instruction::OR_INT,
    Instruction::XOR_INT,
    Instruction::SHL_INT,
    Instruction::SHR_INT,
    Instruction::USHR_INT,
    Instruction::ADD_LONG,
    Instruction::SUB_LONG,
    Instruction::MUL_LONG,
    Instruction::DIV_LONG,
    Instruction::REM_LONG,
    Instruction::AND_LONG,
    Instruction::OR_LONG,
    Instruction::XOR_LONG,
    Instruction::SHL_LONG,
    Instruction::SHR_LONG,
    Instruction::USHR_LONG,
    Instruction::ADD_FLOAT,
    Instruction::SUB_FLOAT,
    Instruction::MUL_FLOAT,
    Instruction::DIV_FLOAT,
    Instruction::REM_FLOAT,
    Instruction::ADD_DOUBLE,
    Instruction::SUB_DOUBLE,
    Instruction::MUL_DOUBLE,
    Instruction::DIV_DOUBLE,
    Instruction::REM_DOUBLE,
    Instruction::ADD_INT_2ADDR,
    Instruction::SUB_INT_2ADDR,
    Instruction::MUL_INT_2ADDR,
    Instruction::DIV_INT_2ADDR,
    Instruction::REM_INT_2ADDR,
    Instruction::AND_INT_2ADDR,
    Instruction::OR_INT_2ADDR,
    Instruction::XOR_INT_2ADDR,
    Instruction::SHL_INT_2ADDR,
    Instruction::SHR_INT_2ADDR,
    Instruction::USHR_INT_2ADDR,
    Instruction::ADD_LONG_2ADDR,
    Instruction::SUB_LONG_2ADDR,
    Instruction::MUL_LONG_2ADDR,
    Instruction::DIV_LONG_2ADDR,
    Instruction::REM_LONG_2ADDR,
    Instruction::AND_LONG_2ADDR,
    Instruction::OR_LONG_2ADDR,
    Instruction::XOR_LONG_2ADDR,
    Instruction::SHL_LONG_2ADDR,
    Instruction::SHR_LONG_2ADDR,
    Instruction::USHR_LONG_2ADDR,
    Instruction::ADD_FLOAT_2ADDR,
    Instruction::SUB_FLOAT_2ADDR,
    Instruction::MUL_FLOAT_2ADDR,
    Instruction::DIV_FLOAT_2ADDR,
    Instruction::REM_FLOAT_2ADDR,
    Instruction::ADD_DOUBLE_2ADDR,
    Instruction::SUB_DOUBLE_2ADDR,
    Instruction::MUL_DOUBLE_2ADDR,
    Instruction::DIV_DOUBLE_2ADDR,
    Instruction::REM_DOUBLE_2ADDR,
    Instruction::ADD_INT_LIT16,
    Instruction::RSUB_INT,
    Instruction::MUL_INT_LIT16,
    Instruction::DIV_INT_LIT16,
    Instruction::REM_INT_LIT16,
    Instruction::AND_INT_LIT16,
    Instruction::OR_INT_LIT16,
    Instruction::XOR_INT_LIT16,
    Instruction::ADD_INT_LIT8,
    Instruction::RSUB_INT_LIT8,
    Instruction::MUL_INT_LIT8,
    Instruction::DIV_INT_LIT8,
    Instruction::REM_INT_LIT8,
    Instruction::AND_INT_LIT8,
    Instruction::OR_INT_LIT8,
    Instruction::XOR_INT_LIT8,
    Instruction::SHL_INT_LIT8,
    Instruction::SHR_INT_LIT8,
    Instruction::USHR_INT_LIT8,
    Instruction::IGET_QUICK,
    Instruction::IGET_WIDE_QUICK,
    Instruction::IGET_OBJECT_QUICK,
    Instruction::IPUT_QUICK,
    Instruction::IPUT_WIDE_QUICK,
    Instruction::IPUT_OBJECT_QUICK,
    Instruction::INVOKE_VIRTUAL_QUICK,
    Instruction::INVOKE_VIRTUAL_RANGE_QUICK,
    Instruction::IPUT_BOOLEAN_QUICK,
    Instruction::IPUT_BYTE_QUICK,
    Instruction::IPUT_CHAR_QUICK,
    Instruction::IPUT_SHORT_QUICK,
    Instruction::IGET_BOOLEAN_QUICK,
    Instruction::IGET_BYTE_QUICK,
    Instruction::IGET_CHAR_QUICK,
    Instruction::IGET_SHORT_QUICK,
    Instruction::UNUSED_F3,
    Instruction::UNUSED_F4,
    Instruction::UNUSED_F5,
    Instruction::UNUSED_F6,
    Instruction::UNUSED_F7,
    Instruction::UNUSED_F8,
    Instruction::UNUSED_F9,
    Instruction::UNUSED_FA,
    Instruction::UNUSED_FB,
    Instruction::UNUSED_FC,
    Instruction::UNUSED_FD,
    Instruction::UNUSED_FE,
    Instruction::UNUSED_FF,
    // ----- ExtendedMIROpcode -----
    kMirOpPhi,
    kMirOpCopy,
    kMirOpFusedCmplFloat,
    kMirOpFusedCmpgFloat,
    kMirOpFusedCmplDouble,
    kMirOpFusedCmpgDouble,
    kMirOpFusedCmpLong,
    kMirOpNop,
    kMirOpNullCheck,
    kMirOpRangeCheck,
    kMirOpDivZeroCheck,
    kMirOpCheck,
    kMirOpSelect,
};

static int kInvokeOpcodes[] = {
    Instruction::INVOKE_VIRTUAL,
    Instruction::INVOKE_SUPER,
    Instruction::INVOKE_DIRECT,
    Instruction::INVOKE_STATIC,
    Instruction::INVOKE_INTERFACE,
    Instruction::INVOKE_VIRTUAL_RANGE,
    Instruction::INVOKE_SUPER_RANGE,
    Instruction::INVOKE_DIRECT_RANGE,
    Instruction::INVOKE_STATIC_RANGE,
    Instruction::INVOKE_INTERFACE_RANGE,
    Instruction::INVOKE_VIRTUAL_QUICK,
    Instruction::INVOKE_VIRTUAL_RANGE_QUICK,
};

// Unsupported opcodes. null can be used when everything is supported. The sizes of the lists
// are recorded below.
static const int* kUnsupportedOpcodes[] = {
    // 0 = kNone.
    kAllOpcodes,
    // 1 = kArm, unused (will use kThumb2).
    kAllOpcodes,
    // 2 = kArm64.
    nullptr,
    // 3 = kThumb2.
    nullptr,
    // 4 = kX86.
    nullptr,
    // 5 = kX86_64.
    nullptr,
    // 6 = kMips.
    nullptr,
    // 7 = kMips64.
    nullptr
};
static_assert(sizeof(kUnsupportedOpcodes) == 8 * sizeof(int*), "kUnsupportedOpcodes unexpected");

// Sizes of the arrays stored above.
static const size_t kUnsupportedOpcodesSize[] = {
    // 0 = kNone.
    arraysize(kAllOpcodes),
    // 1 = kArm, unused (will use kThumb2).
    arraysize(kAllOpcodes),
    // 2 = kArm64.
    0,
    // 3 = kThumb2.
    0,
    // 4 = kX86.
    0,
    // 5 = kX86_64.
    0,
    // 6 = kMips.
    0,
    // 7 = kMips64.
    0
};
static_assert(sizeof(kUnsupportedOpcodesSize) == 8 * sizeof(size_t),
              "kUnsupportedOpcodesSize unexpected");

// The maximum number of Dalvik registers in a method for which we will start compiling. Tries to
// avoid an abort when we need to manage more SSA registers than we can.
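// SSA renaming and compiler temps can multiply the register count, so half of INT16_MAX leaves
// headroom; see MirGraph::SetNumSSARegs for the actual limit.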
static constexpr size_t kMaxAllowedDalvikRegisters = INT16_MAX / 2;

static bool CanCompileShorty(const char* shorty, InstructionSet instruction_set) {
  const char* supported_types = kSupportedTypes[instruction_set];
  if (supported_types == nullptr) {
    // Everything available.
    return true;
  }

  uint32_t shorty_size = strlen(shorty);
  CHECK_GE(shorty_size, 1u);

  for (uint32_t i = 0; i < shorty_size; i++) {
    if (strchr(supported_types, shorty[i]) == nullptr) {
      return false;
    }
  }
  return true;
}

// Skip methods that we do not currently support.
bool QuickCompiler::CanCompileMethod(uint32_t method_idx, const DexFile& dex_file,
                                     CompilationUnit* cu) const {
  // This is a limitation in mir_graph. See MirGraph::SetNumSSARegs.
  if (cu->mir_graph->GetNumOfCodeAndTempVRs() > kMaxAllowedDalvikRegisters) {
    VLOG(compiler) << "Too many dalvik registers : " << cu->mir_graph->GetNumOfCodeAndTempVRs();
    return false;
  }

  // Check whether we have any limitations at all.
  if (kSupportedTypes[cu->instruction_set] == nullptr &&
      kUnsupportedOpcodesSize[cu->instruction_set] == 0U) {
    return true;
  }

  // Check if we can compile the prototype.
  const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(method_idx));
  if (!CanCompileShorty(shorty, cu->instruction_set)) {
    VLOG(compiler) << "Unsupported shorty : " << shorty;
    return false;
  }

  const int* unsupport_list = kUnsupportedOpcodes[cu->instruction_set];
  int unsupport_list_size = kUnsupportedOpcodesSize[cu->instruction_set];

  for (unsigned int idx = 0; idx < cu->mir_graph->GetNumBlocks(); idx++) {
    BasicBlock* bb = cu->mir_graph->GetBasicBlock(idx);
    if (bb == nullptr) continue;
    if (bb->block_type == kDead) continue;
    for (MIR* mir = bb->first_mir_insn; mir != nullptr; mir = mir->next) {
      int opcode = mir->dalvikInsn.opcode;
      // Check if we support the byte code.
      if (std::find(unsupport_list, unsupport_list + unsupport_list_size, opcode)
          != unsupport_list + unsupport_list_size) {
        if (!MIR::DecodedInstruction::IsPseudoMirOp(opcode)) {
          VLOG(compiler) << "Unsupported dalvik byte code : "
              << mir->dalvikInsn.opcode;
        } else {
          VLOG(compiler) << "Unsupported extended MIR opcode : "
              << MIRGraph::extended_mir_op_names_[opcode - kMirOpFirst];
        }
        return false;
      }
      // Check if it invokes a prototype that we cannot support.
      if (std::find(kInvokeOpcodes, kInvokeOpcodes + arraysize(kInvokeOpcodes), opcode)
          != kInvokeOpcodes + arraysize(kInvokeOpcodes)) {
        uint32_t invoke_method_idx = mir->dalvikInsn.vB;
        const char* invoke_method_shorty = dex_file.GetMethodShorty(
            dex_file.GetMethodId(invoke_method_idx));
        if (!CanCompileShorty(invoke_method_shorty, cu->instruction_set)) {
          VLOG(compiler) << "Unsupported to invoke '"
              << PrettyMethod(invoke_method_idx, dex_file)
              << "' with shorty : " << invoke_method_shorty;
          return false;
        }
      }
    }
  }
  return true;
}

void QuickCompiler::InitCompilationUnit(CompilationUnit& cu) const {
  // Disable optimizations according to instruction set.
  cu.disable_opt |= kDisabledOptimizationsPerISA[cu.instruction_set];
  if (Runtime::Current()->UseJit()) {
    // Disable these optimizations for JIT until support for quickened byte codes is complete.
    // TODO: Find a cleaner way to do this.
    cu.disable_opt |= 1u << kLocalValueNumbering;
  }
}

void QuickCompiler::Init() {
  CHECK(GetCompilerDriver()->GetCompilerContext() == nullptr);
}

void QuickCompiler::UnInit() const {
  CHECK(GetCompilerDriver()->GetCompilerContext() == nullptr);
}

/* Default optimizer/debug setting for the compiler. */
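// Uncomment a line below to disable the corresponding optimization (or, further down, to enable
// the corresponding debug mode) for every compiled method.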
static uint32_t kCompilerOptimizerDisableFlags = 0 |  // Disable specific optimizations
  // (1 << kLoadStoreElimination) |
  // (1 << kLoadHoisting) |
  // (1 << kSuppressLoads) |
  // (1 << kNullCheckElimination) |
  // (1 << kClassInitCheckElimination) |
  // (1 << kGlobalValueNumbering) |
  // (1 << kGvnDeadCodeElimination) |
  // (1 << kLocalValueNumbering) |
  // (1 << kPromoteRegs) |
  // (1 << kTrackLiveTemps) |
  // (1 << kSafeOptimizations) |
  // (1 << kBBOpt) |
  // (1 << kSuspendCheckElimination) |
  // (1 << kMatch) |
  // (1 << kPromoteCompilerTemps) |
  // (1 << kSuppressExceptionEdges) |
  // (1 << kSuppressMethodInlining) |
  0;

static uint32_t kCompilerDebugFlags = 0 |     // Enable debug/testing modes
  // (1 << kDebugDisplayMissingTargets) |
  // (1 << kDebugVerbose) |
  // (1 << kDebugDumpCFG) |
  // (1 << kDebugSlowFieldPath) |
  // (1 << kDebugSlowInvokePath) |
  // (1 << kDebugSlowStringPath) |
  // (1 << kDebugSlowestFieldPath) |
  // (1 << kDebugSlowestStringPath) |
  // (1 << kDebugExerciseResolveMethod) |
  // (1 << kDebugVerifyDataflow) |
  // (1 << kDebugShowMemoryUsage) |
  // (1 << kDebugShowNops) |
  // (1 << kDebugCountOpcodes) |
  // (1 << kDebugDumpCheckStats) |
  // (1 << kDebugShowSummaryMemoryUsage) |
  // (1 << kDebugShowFilterStats) |
  // (1 << kDebugTimings) |
  // (1 << kDebugCodegenDump) |
  0;

CompiledMethod* QuickCompiler::Compile(const DexFile::CodeItem* code_item,
                                       uint32_t access_flags,
                                       InvokeType invoke_type,
                                       uint16_t class_def_idx,
                                       uint32_t method_idx,
                                       jobject class_loader,
                                       const DexFile& dex_file) const {
  // TODO: check method fingerprint here to determine appropriate backend type.  Until then, use
  // the build default.
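  // Overall flow: build the MIR graph, run the early bail-out checks (unsupported opcodes and
  // compilation filters), launch the MIR optimization pass driver, then let the code generator
  // materialize LIR and produce the CompiledMethod.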
  CompilerDriver* driver = GetCompilerDriver();

  VLOG(compiler) << "Compiling " << PrettyMethod(method_idx, dex_file) << "...";
  if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) {
    return nullptr;
  }

  DCHECK(driver->GetCompilerOptions().IsCompilationEnabled());

  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  InstructionSet instruction_set = driver->GetInstructionSet();
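  // The Quick backend has no separate A32 code generator, so plain kArm is compiled as Thumb2
  // (see the switch in GetCodeGenerator below).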
  if (instruction_set == kArm) {
    instruction_set = kThumb2;
  }
  CompilationUnit cu(runtime->GetArenaPool(), instruction_set, driver, class_linker);
  cu.dex_file = &dex_file;
  cu.class_def_idx = class_def_idx;
  cu.method_idx = method_idx;
  cu.access_flags = access_flags;
  cu.invoke_type = invoke_type;
  cu.shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(method_idx));

  CHECK((cu.instruction_set == kThumb2) ||
        (cu.instruction_set == kArm64) ||
        (cu.instruction_set == kX86) ||
        (cu.instruction_set == kX86_64) ||
        (cu.instruction_set == kMips) ||
        (cu.instruction_set == kMips64));

  // TODO: set this from command line
  constexpr bool compiler_flip_match = false;
  const std::string compiler_method_match = "";

  bool use_match = !compiler_method_match.empty();
  bool match = use_match && (compiler_flip_match ^
      (PrettyMethod(method_idx, dex_file).find(compiler_method_match) != std::string::npos));
  if (!use_match || match) {
    cu.disable_opt = kCompilerOptimizerDisableFlags;
    cu.enable_debug = kCompilerDebugFlags;
    cu.verbose = VLOG_IS_ON(compiler) ||
        (cu.enable_debug & (1 << kDebugVerbose));
  }

  if (driver->GetCompilerOptions().HasVerboseMethods()) {
    cu.verbose = driver->GetCompilerOptions().IsVerboseMethod(PrettyMethod(method_idx, dex_file));
  }

  if (cu.verbose) {
    cu.enable_debug |= (1 << kDebugCodegenDump);
  }

  /*
   * TODO: rework handling of optimization and debug flags.  Should we split out
   * MIR and backend flags?  Need command-line setting as well.
   */

  InitCompilationUnit(cu);

  cu.StartTimingSplit("BuildMIRGraph");
  cu.mir_graph.reset(new MIRGraph(&cu, &cu.arena));

  /*
   * After creation of the MIR graph, also create the code generator.
   * The reason we do this is that optimizations on the MIR graph may need to get information
   * that is only available if a CG exists.
   */
  cu.cg.reset(GetCodeGenerator(&cu, nullptr));

  /* Gathering opcode stats? */
  if (kCompilerDebugFlags & (1 << kDebugCountOpcodes)) {
    cu.mir_graph->EnableOpcodeCounting();
  }

  /* Build the raw MIR graph */
  cu.mir_graph->InlineMethod(code_item, access_flags, invoke_type, class_def_idx, method_idx,
                             class_loader, dex_file);

  if (!CanCompileMethod(method_idx, dex_file, &cu)) {
    VLOG(compiler) << cu.instruction_set << ": Cannot compile method : "
        << PrettyMethod(method_idx, dex_file);
    cu.EndTiming();
    return nullptr;
  }

  cu.NewTimingSplit("MIROpt:CheckFilters");
  std::string skip_message;
  if (cu.mir_graph->SkipCompilation(&skip_message)) {
    VLOG(compiler) << cu.instruction_set << ": Skipping method : "
        << PrettyMethod(method_idx, dex_file) << "  Reason = " << skip_message;
    cu.EndTiming();
    return nullptr;
  }

  /* Create the pass driver and launch it */
  PassDriverMEOpts pass_driver(GetPreOptPassManager(), GetPostOptPassManager(), &cu);
  pass_driver.Launch();

  /* For non-leaf methods check if we should skip compilation when the profiler is enabled. */
  if (cu.compiler_driver->ProfilePresent()
      && !cu.mir_graph->MethodIsLeaf()
      && cu.mir_graph->SkipCompilationByName(PrettyMethod(method_idx, dex_file))) {
    cu.EndTiming();
    return nullptr;
  }

  if (cu.enable_debug & (1 << kDebugDumpCheckStats)) {
    cu.mir_graph->DumpCheckStats();
  }

  if (kCompilerDebugFlags & (1 << kDebugCountOpcodes)) {
    cu.mir_graph->ShowOpcodeStats();
  }

  /* Reassociate sreg names with original Dalvik vreg names. */
  cu.mir_graph->RemapRegLocations();

  /* Free Arenas from the cu.arena_stack for reuse by the cu.arena in the codegen. */
  if (cu.enable_debug & (1 << kDebugShowMemoryUsage)) {
    if (cu.arena_stack.PeakBytesAllocated() > 1 * 1024 * 1024) {
      MemStats stack_stats(cu.arena_stack.GetPeakStats());
      LOG(INFO) << PrettyMethod(method_idx, dex_file) << " " << Dumpable<MemStats>(stack_stats);
    }
  }
  cu.arena_stack.Reset();

  CompiledMethod* result = nullptr;

  if (cu.mir_graph->PuntToInterpreter()) {
    VLOG(compiler) << cu.instruction_set << ": Punted method to interpreter: "
        << PrettyMethod(method_idx, dex_file);
    cu.EndTiming();
    return nullptr;
  }

  cu.cg->Materialize();

  cu.NewTimingSplit("Dedupe");  /* deduping takes up the vast majority of time in GetCompiledMethod(). */
  result = cu.cg->GetCompiledMethod();
  cu.NewTimingSplit("Cleanup");

  if (result) {
    VLOG(compiler) << cu.instruction_set << ": Compiled " << PrettyMethod(method_idx, dex_file);
  } else {
    VLOG(compiler) << cu.instruction_set << ": Deferred " << PrettyMethod(method_idx, dex_file);
  }

  if (cu.enable_debug & (1 << kDebugShowMemoryUsage)) {
    if (cu.arena.BytesAllocated() > (1 * 1024 * 1024)) {
      MemStats mem_stats(cu.arena.GetMemStats());
      LOG(INFO) << PrettyMethod(method_idx, dex_file) << " " << Dumpable<MemStats>(mem_stats);
    }
  }

  if (cu.enable_debug & (1 << kDebugShowSummaryMemoryUsage)) {
    LOG(INFO) << "MEMINFO " << cu.arena.BytesAllocated() << " " << cu.mir_graph->GetNumBlocks()
                    << " " << PrettyMethod(method_idx, dex_file);
  }

  cu.EndTiming();
  driver->GetTimingsLogger()->AddLogger(cu.timings);
  return result;
}

CompiledMethod* QuickCompiler::JniCompile(uint32_t access_flags,
                                          uint32_t method_idx,
                                          const DexFile& dex_file) const {
  return ArtQuickJniCompileMethod(GetCompilerDriver(), access_flags, method_idx, dex_file);
}

uintptr_t QuickCompiler::GetEntryPointOf(mirror::ArtMethod* method) const {
  return reinterpret_cast<uintptr_t>(method->GetEntryPointFromQuickCompiledCodePtrSize(
      InstructionSetPointerSize(GetCompilerDriver()->GetInstructionSet())));
}

Mir2Lir* QuickCompiler::GetCodeGenerator(CompilationUnit* cu, void* compilation_unit) {
  UNUSED(compilation_unit);
  Mir2Lir* mir_to_lir = nullptr;
  switch (cu->instruction_set) {
    case kThumb2:
      mir_to_lir = ArmCodeGenerator(cu, cu->mir_graph.get(), &cu->arena);
      break;
    case kArm64:
      mir_to_lir = Arm64CodeGenerator(cu, cu->mir_graph.get(), &cu->arena);
      break;
    case kMips:
      // Fall-through.
    case kMips64:
      mir_to_lir = MipsCodeGenerator(cu, cu->mir_graph.get(), &cu->arena);
      break;
    case kX86:
      // Fall-through.
    case kX86_64:
      mir_to_lir = X86CodeGenerator(cu, cu->mir_graph.get(), &cu->arena);
      break;
    default:
      LOG(FATAL) << "Unexpected instruction set: " << cu->instruction_set;
  }

  /* The number of compiler temporaries depends on the backend, so set it up now if possible. */
  if (mir_to_lir) {
    size_t max_temps = mir_to_lir->GetMaxPossibleCompilerTemps();
    bool set_max = cu->mir_graph->SetMaxAvailableNonSpecialCompilerTemps(max_temps);
    CHECK(set_max);
  }
  return mir_to_lir;
}
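// Note: the second argument to the Compiler base-class constructor (100) appears to be the
// per-method compilation-time threshold, in milliseconds, beyond which the driver logs a warning.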
QuickCompiler::QuickCompiler(CompilerDriver* driver) : Compiler(driver, 100) {
  const auto& compiler_options = driver->GetCompilerOptions();
  auto* pass_manager_options = compiler_options.GetPassManagerOptions();
  pre_opt_pass_manager_.reset(new PassManager(*pass_manager_options));
  CHECK(pre_opt_pass_manager_.get() != nullptr);
  PassDriverMEOpts::SetupPasses(pre_opt_pass_manager_.get());
  pre_opt_pass_manager_->CreateDefaultPassList();
  if (pass_manager_options->GetPrintPassOptions()) {
    PassDriverMEOpts::PrintPassOptions(pre_opt_pass_manager_.get());
  }
  // TODO: Different options for pre vs post opts?
  post_opt_pass_manager_.reset(new PassManager(PassManagerOptions()));
  CHECK(post_opt_pass_manager_.get() != nullptr);
  PassDriverMEPostOpt::SetupPasses(post_opt_pass_manager_.get());
  post_opt_pass_manager_->CreateDefaultPassList();
  if (pass_manager_options->GetPrintPassOptions()) {
    PassDriverMEPostOpt::PrintPassOptions(post_opt_pass_manager_.get());
  }
}

QuickCompiler::~QuickCompiler() {
}

Compiler* CreateQuickCompiler(CompilerDriver* driver) {
  return QuickCompiler::Create(driver);
}

Compiler* QuickCompiler::Create(CompilerDriver* driver) {
  return new QuickCompiler(driver);
}

}  // namespace art