CodeMetrics.cpp revision ebe69fe11e48d322045d5949c83283927a0d790b
//===- CodeMetrics.cpp - Code cost measurements ---------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements code cost measurement utilities.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/Debug.h"

#define DEBUG_TYPE "code-metrics"

using namespace llvm;

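// Given a worklist of "root" ephemeral values (typically the @llvm.assume
// calls themselves), grow EphValues into the full set of ephemeral values: a
// value reached from the roots is marked ephemeral once every one of its
// users is ephemeral, and only operands that are safe to speculatively
// execute are followed.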
static void completeEphemeralValues(SmallVector<const Value *, 16> &WorkSet,
                                    SmallPtrSetImpl<const Value*> &EphValues) {
  SmallPtrSet<const Value *, 32> Visited;

  // Make sure that all of the items in WorkSet are in our EphValues set.
  EphValues.insert(WorkSet.begin(), WorkSet.end());

  // Note: We don't speculate PHIs here, so we'll miss instruction chains kept
  // alive only by ephemeral values.
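  //
  // For example, given
  //   %c = icmp ugt i32 %x, 7
  //   %p = phi i1 [ %c, %a ], [ true, %b ]
  //   call void @llvm.assume(i1 %p)
  // the PHI %p is never pushed onto the worklist (PHIs are not speculatable),
  // so %c is not recognized as ephemeral even if its only use is feeding the
  // assume through %p.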

  while (!WorkSet.empty()) {
    const Value *V = WorkSet.front();
    WorkSet.erase(WorkSet.begin());

    if (!Visited.insert(V).second)
      continue;

    // If all uses of this value are ephemeral, then so is this value.
    bool FoundNEUse = false;
    for (const User *I : V->users())
      if (!EphValues.count(I)) {
        FoundNEUse = true;
        break;
      }

    if (FoundNEUse)
      continue;

    EphValues.insert(V);
    DEBUG(dbgs() << "Ephemeral Value: " << *V << "\n");

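    // V is ephemeral: its operands are now candidates as well. Only operands
    // that are safe to speculatively execute are followed, since anything
    // with side effects must be kept regardless of its users.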
    if (const User *U = dyn_cast<User>(V))
      for (const Value *J : U->operands()) {
        if (isSafeToSpeculativelyExecute(J))
          WorkSet.push_back(J);
      }
  }
}

// Find all ephemeral values rooted at @llvm.assume calls within the loop.
void CodeMetrics::collectEphemeralValues(
    const Loop *L, AssumptionCache *AC,
    SmallPtrSetImpl<const Value *> &EphValues) {
  SmallVector<const Value *, 16> WorkSet;

  for (auto &AssumeVH : AC->assumptions()) {
    if (!AssumeVH)
      continue;
    Instruction *I = cast<Instruction>(AssumeVH);

    // Filter out call sites outside of the loop so we don't do a function's
    // worth of work for each of its loops (and, in the common case, ephemeral
    // values in the loop are likely due to @llvm.assume calls in the loop).
    if (!L->contains(I->getParent()))
      continue;

    WorkSet.push_back(I);
  }

  completeEphemeralValues(WorkSet, EphValues);
}

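// As above, but collect ephemeral values for an entire function: every
// @llvm.assume tracked by the assumption cache is used as a root.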
void CodeMetrics::collectEphemeralValues(
    const Function *F, AssumptionCache *AC,
    SmallPtrSetImpl<const Value *> &EphValues) {
  SmallVector<const Value *, 16> WorkSet;

  for (auto &AssumeVH : AC->assumptions()) {
    if (!AssumeVH)
      continue;
    Instruction *I = cast<Instruction>(AssumeVH);
    assert(I->getParent()->getParent() == F &&
           "Found assumption for the wrong function!");
    WorkSet.push_back(I);
  }

  completeEphemeralValues(WorkSet, EphValues);
}

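// A typical use of CodeMetrics, shown here only as a sketch (the caller is
// responsible for obtaining TTI, the AssumptionCache AC, and the Loop *L from
// its own analysis plumbing):
//
//   CodeMetrics Metrics;
//   SmallPtrSet<const Value *, 32> EphValues;
//   CodeMetrics::collectEphemeralValues(L, &AC, EphValues);
//   for (BasicBlock *BB : L->blocks())
//     Metrics.analyzeBasicBlock(BB, TTI, EphValues);
//   if (Metrics.notDuplicatable)
//     ...; // e.g. refuse to unroll this loop
//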
/// analyzeBasicBlock - Fill in the current structure with information gleaned
/// from the specified block.
void CodeMetrics::analyzeBasicBlock(const BasicBlock *BB,
                                    const TargetTransformInfo &TTI,
                                    SmallPtrSetImpl<const Value*> &EphValues) {
  ++NumBlocks;
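  // Snapshot the running instruction count so this block's contribution can
  // be recorded in NumBBInsts at the end.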
  unsigned NumInstsBeforeThisBB = NumInsts;
  for (BasicBlock::const_iterator II = BB->begin(), E = BB->end();
       II != E; ++II) {
    // Skip ephemeral values.
    if (EphValues.count(II))
      continue;

    // Special handling for calls.
    if (isa<CallInst>(II) || isa<InvokeInst>(II)) {
      ImmutableCallSite CS(cast<Instruction>(II));

      if (const Function *F = CS.getCalledFunction()) {
        // If a function is both internal and has a single use, then it is
        // extremely likely to get inlined in the future (it was probably
        // exposed by an interleaved devirtualization pass).
        if (!CS.isNoInline() && F->hasInternalLinkage() && F->hasOneUse())
          ++NumInlineCandidates;

        // If this call is to the function itself, then the function is
        // recursive. Inlining it into other functions is a bad idea, because
        // this is basically just a form of loop peeling, and our metrics
        // aren't useful for that case.
        if (F == BB->getParent())
          isRecursive = true;

        if (TTI.isLoweredToCall(F))
          ++NumCalls;
      } else {
        // We don't want inline asm to count as a call - that would prevent loop
        // unrolling. The argument setup cost is still real, though.
        if (!isa<InlineAsm>(CS.getCalledValue()))
          ++NumCalls;
      }
    }

    if (const AllocaInst *AI = dyn_cast<AllocaInst>(II)) {
      if (!AI->isStaticAlloca())
        this->usesDynamicAlloca = true;
    }

    if (isa<ExtractElementInst>(II) || II->getType()->isVectorTy())
      ++NumVectorInsts;

    if (const CallInst *CI = dyn_cast<CallInst>(II))
      if (CI->cannotDuplicate())
        notDuplicatable = true;

    if (const InvokeInst *InvI = dyn_cast<InvokeInst>(II))
      if (InvI->cannotDuplicate())
        notDuplicatable = true;

    NumInsts += TTI.getUserCost(&*II);
  }

  if (isa<ReturnInst>(BB->getTerminator()))
    ++NumRets;

  // We never want to inline functions that contain an indirectbr.  This is
  // incorrect because all the blockaddresses (in static global initializers,
  // for example) would be referring to the original function, and this
  // indirect jump would jump from the inlined copy of the function into the
  // original function, which is undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions
  // with indirectbrs as long as no other function or global references the
  // blockaddress of a block within the current function.  And as a QOI issue,
  // if someone is using a blockaddress without an indirectbr, and that
  // reference somehow ends up in another function or global, we probably
  // don't want to inline this function.
  notDuplicatable |= isa<IndirectBrInst>(BB->getTerminator());

  // Remember NumInsts for this BB.
  NumBBInsts[BB] = NumInsts - NumInstsBeforeThisBB;
}