Reassociate.cpp revision 5560c9d49ccae132cabf1155f18aa0480dce3eda
//===- Reassociate.cpp - Reassociate binary expressions -------------------===//
//
// This pass reassociates commutative expressions in an order that is designed
// to promote better constant propagation, GCSE, LICM, PRE...
//
// For example: 4 + (x + 5) -> x + (4 + 5)
//
// Note that this pass works best if left shifts have been promoted to explicit
// multiplies before this pass executes.
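// (For instance, a left shift by 1 should already have been rewritten as a
// multiply by 2, so that it can participate in reassociation with other
// multiplies.)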
//
// In the implementation of this algorithm, constants are assigned rank = 0,
// function arguments are assigned distinct low ranks, and other values are
// assigned ranks corresponding to the reverse post order traversal of the
// current function, which effectively gives values in deep loops higher rank
// than values not in loops.
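//
// For example, in "T = A + 5" where A is a function argument, the constant 5
// has rank 0 and A has a low argument rank, so T is ranked just above A, and
// anything computed from T is ranked above T in turn.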
//
// This code was originally written by Chris Lattner, and was then cleaned up
// and perfected by Casey Carter.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar.h"
#include "llvm/Function.h"
#include "llvm/iOperators.h"
#include "llvm/Type.h"
#include "llvm/Pass.h"
#include "llvm/Constant.h"
#include "llvm/Support/CFG.h"
#include "Support/Debug.h"
#include "Support/PostOrderIterator.h"
#include "Support/Statistic.h"

namespace {
  Statistic<> NumLinear ("reassociate","Number of insts linearized");
  Statistic<> NumChanged("reassociate","Number of insts reassociated");
  Statistic<> NumSwapped("reassociate","Number of insts with operands swapped");

  class Reassociate : public FunctionPass {
    std::map<BasicBlock*, unsigned> RankMap;
    std::map<Value*, unsigned> ValueRankMap;
  public:
    bool runOnFunction(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
    }
  private:
    void BuildRankMap(Function &F);
    unsigned getRank(Value *V);
    bool ReassociateExpr(BinaryOperator *I);
    bool ReassociateBB(BasicBlock *BB);
  };

  RegisterOpt<Reassociate> X("reassociate", "Reassociate expressions");
}

Pass *createReassociatePass() { return new Reassociate(); }

void Reassociate::BuildRankMap(Function &F) {
  unsigned i = 2;

  // Assign distinct ranks to function arguments
  for (Function::aiterator I = F.abegin(), E = F.aend(); I != E; ++I)
    ValueRankMap[I] = ++i;

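  // Assign ranks to basic blocks in reverse post order.  Each block's rank is
  // shifted left by 16 to leave room below it for the ranks of the
  // instructions computed inside the block (see getRank).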
  ReversePostOrderTraversal<Function*> RPOT(&F);
  for (ReversePostOrderTraversal<Function*>::rpo_iterator I = RPOT.begin(),
         E = RPOT.end(); I != E; ++I)
    RankMap[*I] = ++i << 16;
}

unsigned Reassociate::getRank(Value *V) {
  if (isa<Argument>(V)) return ValueRankMap[V];   // Function argument...

  if (Instruction *I = dyn_cast<Instruction>(V)) {
    // If this is an expression, return 1+MAX(rank(LHS), rank(RHS)) so that
    // we can reassociate expressions for code motion!  Since we do not recurse
    // for PHI nodes, we cannot have infinite recursion here, because there
    // cannot be loops in the value graph that do not go through PHI nodes.
    //
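    // For example, for "T = A + B" where A and B are function arguments,
    // rank(T) = 1 + max(rank(A), rank(B)), so T sorts just above its higher
    // ranked operand.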
    if (I->getOpcode() == Instruction::PHINode ||
        I->getOpcode() == Instruction::Alloca ||
        I->getOpcode() == Instruction::Malloc || isa<TerminatorInst>(I) ||
        I->mayWriteToMemory())  // Cannot move inst if it writes to memory!
      return RankMap[I->getParent()];

    unsigned &CachedRank = ValueRankMap[I];
    if (CachedRank) return CachedRank;    // Rank already known?

    // If not, compute it!
    unsigned Rank = 0, MaxRank = RankMap[I->getParent()];
    for (unsigned i = 0, e = I->getNumOperands();
         i != e && Rank != MaxRank; ++i)
      Rank = std::max(Rank, getRank(I->getOperand(i)));

    DEBUG(std::cerr << "Calculated Rank[" << V->getName() << "] = "
                    << Rank+1 << "\n");

    return CachedRank = Rank+1;
  }

  // Otherwise it's a global or constant, rank 0.
  return 0;
}


bool Reassociate::ReassociateExpr(BinaryOperator *I) {
  Value *LHS = I->getOperand(0);
  Value *RHS = I->getOperand(1);
  unsigned LHSRank = getRank(LHS);
  unsigned RHSRank = getRank(RHS);

  bool Changed = false;

  // Make sure the LHS operand always has the greater rank...
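  // (e.g. "5 + X" becomes "X + 5", since constants have rank 0.)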
  if (LHSRank < RHSRank) {
    bool Success = !I->swapOperands();
    assert(Success && "swapOperands failed");

    std::swap(LHS, RHS);
    std::swap(LHSRank, RHSRank);
    Changed = true;
    ++NumSwapped;
    DEBUG(std::cerr << "Transposed: " << I
          /* << " Result BB: " << I->getParent()*/);
  }

  // If the LHS is the same operator as the current one, and if we are the
  // only expression using it...
  //
  if (BinaryOperator *LHSI = dyn_cast<BinaryOperator>(LHS))
    if (LHSI->getOpcode() == I->getOpcode() && LHSI->use_size() == 1) {
      // If the rank of our current RHS is less than the rank of the LHS's LHS,
      // then we reassociate the two instructions...

      unsigned TakeOp = 0;
      if (BinaryOperator *IOp = dyn_cast<BinaryOperator>(LHSI->getOperand(0)))
        if (IOp->getOpcode() == LHSI->getOpcode())
          TakeOp = 1;   // Hoist out non-tree portion

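      // LHSI->getOperand(TakeOp) is the operand of LHSI that is not itself
      // part of the same-opcode tree.  If our RHS ranks below it, sink the
      // RHS down into LHSI and pull that operand up into I instead.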
      if (RHSRank < getRank(LHSI->getOperand(TakeOp))) {
        // Convert ((a + 12) + 10) into (a + (12 + 10))
        I->setOperand(0, LHSI->getOperand(TakeOp));
        LHSI->setOperand(TakeOp, RHS);
        I->setOperand(1, LHSI);

        // Move the LHS expression forward, to ensure that it is dominated by
        // its operands.
        LHSI->getParent()->getInstList().remove(LHSI);
        I->getParent()->getInstList().insert(I, LHSI);

        ++NumChanged;
        DEBUG(std::cerr << "Reassociated: " << I/* << " Result BB: "
                                                   << I->getParent()*/);

        // Since we modified the RHS instruction, make sure that we recheck it.
        ReassociateExpr(LHSI);
        ReassociateExpr(I);
        return true;
      }
    }

  return Changed;
}


// NegateValue - Insert instructions before the instruction pointed to by BI
// that compute the negative version of the value specified.  The negative
// version of the value is returned, and BI is left pointing at the instruction
// that should be processed next by the reassociation pass.
//
static Value *NegateValue(Value *V, BasicBlock::iterator &BI) {
  // We are trying to expose opportunity for reassociation.  One of the things
  // that we want to do to achieve this is to push a negation as deep into an
  // expression chain as possible, to expose the add instructions.  In practice,
  // this means that we turn this:
  //   X = -(A+12+C+D)   into    X = -A + -12 + -C + -D = -12 + -A + -C + -D
  // so that later, an expression such as Y = 12+X can be reassociated with the
  // -12 to eliminate the constants.  We assume that instcombine will clean up
  // the mess later if we introduce tons of unnecessary negation instructions...
  //
  if (Instruction *I = dyn_cast<Instruction>(V))
    if (I->getOpcode() == Instruction::Add && I->use_size() == 1) {
      Value *RHS = NegateValue(I->getOperand(1), BI);
      Value *LHS = NegateValue(I->getOperand(0), BI);

      // We must actually insert a new add instruction here, because the neg
      // instructions do not dominate the old add instruction in general.  By
      // adding it now, we are assured that the neg instructions we just
      // inserted dominate the instruction we are about to insert after them.
      //
      return BinaryOperator::create(Instruction::Add, LHS, RHS,
                                    I->getName()+".neg",
                                    cast<Instruction>(RHS)->getNext());
    }

  // Insert a 'neg' instruction that subtracts the value from zero to get the
  // negation.
  //
  return BI = BinaryOperator::createNeg(V, V->getName() + ".neg", BI);
}


bool Reassociate::ReassociateBB(BasicBlock *BB) {
  bool Changed = false;
  for (BasicBlock::iterator BI = BB->begin(); BI != BB->end(); ++BI) {

    DEBUG(std::cerr << "Processing: " << *BI);
    if (BI->getOpcode() == Instruction::Sub && !BinaryOperator::isNeg(BI)) {
      // Convert a subtract into an add and a neg instruction... so that sub
      // instructions can be commuted with other add instructions...
      //
      // Calculate the negative value of Operand 1 of the sub instruction...
      // and set it as the RHS of the add instruction we just made...
      //
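      // For example, "X = A - B" is turned into "X = A + (-B)", where the
      // negation of B is built by NegateValue (which may push the negation
      // even further down the expression).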
      std::string Name = BI->getName();
      BI->setName("");
      Instruction *New =
        BinaryOperator::create(Instruction::Add, BI->getOperand(0),
                               BI->getOperand(1), Name, BI);

      // Everyone now refers to the add instruction...
      BI->replaceAllUsesWith(New);

      // Put the new add in the place of the subtract... deleting the subtract
      BB->getInstList().erase(BI);

      BI = New;
      New->setOperand(1, NegateValue(New->getOperand(1), BI));

      Changed = true;
      DEBUG(std::cerr << "Negated: " << New /*<< " Result BB: " << BB*/);
    }

    // If this instruction is an associative binary operator, and the ranks of
    // the two operands are sorted incorrectly, fix it now.
    //
    if (BI->isAssociative()) {
      BinaryOperator *I = cast<BinaryOperator>(BI);
      if (!I->use_empty()) {
        // Make sure that we don't have a tree-shaped computation.  If we do,
        // linearize it.  Convert (A+B)+(C+D) into ((A+B)+C)+D
        //
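        // This is only done when the RHS tree has a single use (this one),
        // since its operands get pulled out and it is left with no users.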
        Instruction *LHSI = dyn_cast<Instruction>(I->getOperand(0));
        Instruction *RHSI = dyn_cast<Instruction>(I->getOperand(1));
        if (LHSI && (int)LHSI->getOpcode() == I->getOpcode() &&
            RHSI && (int)RHSI->getOpcode() == I->getOpcode() &&
            RHSI->use_size() == 1) {
          // Insert a new temporary instruction... (A+B)+C
          BinaryOperator *Tmp = BinaryOperator::create(I->getOpcode(), LHSI,
                                                       RHSI->getOperand(0),
                                                       RHSI->getName()+".ra",
                                                       BI);
          BI = Tmp;
          I->setOperand(0, Tmp);
          I->setOperand(1, RHSI->getOperand(1));
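          // I now computes ((A+B)+C)+D, and RHSI is left with no uses.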

          // Process the temporary instruction for reassociation now.
          I = Tmp;
          ++NumLinear;
          Changed = true;
          DEBUG(std::cerr << "Linearized: " << I/* << " Result BB: " << BB*/);
        }

        // Make sure that this expression is correctly reassociated with respect
        // to its used values...
        //
        Changed |= ReassociateExpr(I);
      }
    }
  }

  return Changed;
}


bool Reassociate::runOnFunction(Function &F) {
  // Recalculate the rank map for F
  BuildRankMap(F);

  bool Changed = false;
  for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI)
    Changed |= ReassociateBB(FI);

  // We are done with the rank map...
  RankMap.clear();
  ValueRankMap.clear();
  return Changed;
}