Reassociate.cpp revision 94285e620b845e09b18939e8d6448e01e692f3ce
//===- Reassociate.cpp - Reassociate binary expressions -------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass reassociates commutative expressions in an order that is designed
// to promote better constant propagation, GCSE, LICM, PRE...
//
// For example: 4 + (x + 5) -> x + (4 + 5)
//
// In the implementation of this algorithm, constants are assigned rank = 0,
// function arguments are rank = 1, and other values are assigned ranks
// corresponding to the reverse post order traversal of the current function
// (starting at 2), which effectively gives values in deep loops higher rank
// than values not in loops.
//
//===----------------------------------------------------------------------===//
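
// Illustrative sketch of the example above (assuming x is a function argument,
// so it outranks the constants, which get rank 0): for
//   %t = add i32 %x, 5
//   %r = add i32 4, %t
// the expression tree is linearized to the operand list [x, 5, 4], the list is
// sorted so the highest-ranked value x comes first, the two trailing constants
// are folded together, and the tree is rewritten as %r = add i32 %x, 9.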

#define DEBUG_TYPE "reassociate"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/Assembly/Writer.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ValueHandle.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/DenseMap.h"
#include <algorithm>
#include <map>
using namespace llvm;

STATISTIC(NumLinear , "Number of insts linearized");
STATISTIC(NumChanged, "Number of insts reassociated");
STATISTIC(NumAnnihil, "Number of expr tree annihilated");
STATISTIC(NumFactor , "Number of multiplies factored");

namespace {
  struct ValueEntry {
    unsigned Rank;
    Value *Op;
    ValueEntry(unsigned R, Value *O) : Rank(R), Op(O) {}
  };
  inline bool operator<(const ValueEntry &LHS, const ValueEntry &RHS) {
    return LHS.Rank > RHS.Rank;   // Sort so that highest rank goes to start.
  }
}

#ifndef NDEBUG
/// PrintOps - Print out the expression identified in the Ops list.
///
static void PrintOps(Instruction *I, const std::vector<ValueEntry> &Ops) {
  Module *M = I->getParent()->getParent()->getParent();
  errs() << Instruction::getOpcodeName(I->getOpcode()) << " "
       << *Ops[0].Op->getType() << '\t';
  for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
    errs() << "[ ";
    WriteAsOperand(errs(), Ops[i].Op, false, M);
    errs() << ", #" << Ops[i].Rank << "] ";
  }
}
#endif

namespace {
  class Reassociate : public FunctionPass {
    std::map<BasicBlock*, unsigned> RankMap;
    std::map<AssertingVH<>, unsigned> ValueRankMap;
    bool MadeChange;
  public:
    static char ID; // Pass identification, replacement for typeid
    Reassociate() : FunctionPass(&ID) {}

    bool runOnFunction(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
    }
  private:
    void BuildRankMap(Function &F);
    unsigned getRank(Value *V);
    void ReassociateExpression(BinaryOperator *I);
    void RewriteExprTree(BinaryOperator *I, std::vector<ValueEntry> &Ops,
                         unsigned Idx = 0);
    Value *OptimizeExpression(BinaryOperator *I, std::vector<ValueEntry> &Ops);
    Value *OptimizeAdd(Instruction *I, std::vector<ValueEntry> &Ops);
    void LinearizeExprTree(BinaryOperator *I, std::vector<ValueEntry> &Ops);
    void LinearizeExpr(BinaryOperator *I);
    Value *RemoveFactorFromExpression(Value *V, Value *Factor);
    void ReassociateBB(BasicBlock *BB);

    void RemoveDeadBinaryOp(Value *V);
  };
}

char Reassociate::ID = 0;
static RegisterPass<Reassociate> X("reassociate", "Reassociate expressions");

// Public interface to the Reassociate pass
FunctionPass *llvm::createReassociatePass() { return new Reassociate(); }

void Reassociate::RemoveDeadBinaryOp(Value *V) {
  Instruction *Op = dyn_cast<Instruction>(V);
  // Only dead binary operators and compares are of interest here.
  if (!Op || (!isa<BinaryOperator>(Op) && !isa<CmpInst>(Op)) ||
      !Op->use_empty())
    return;

  Value *LHS = Op->getOperand(0), *RHS = Op->getOperand(1);
  RemoveDeadBinaryOp(LHS);
  RemoveDeadBinaryOp(RHS);
}


static bool isUnmovableInstruction(Instruction *I) {
  if (I->getOpcode() == Instruction::PHI ||
      I->getOpcode() == Instruction::Alloca ||
      I->getOpcode() == Instruction::Load ||
      I->getOpcode() == Instruction::Invoke ||
      (I->getOpcode() == Instruction::Call &&
       !isa<DbgInfoIntrinsic>(I)) ||
      I->getOpcode() == Instruction::UDiv ||
      I->getOpcode() == Instruction::SDiv ||
      I->getOpcode() == Instruction::FDiv ||
      I->getOpcode() == Instruction::URem ||
      I->getOpcode() == Instruction::SRem ||
      I->getOpcode() == Instruction::FRem)
    return true;
  return false;
}

void Reassociate::BuildRankMap(Function &F) {
  unsigned i = 2;

  // Assign distinct ranks to function arguments
  for (Function::arg_iterator I = F.arg_begin(), E = F.arg_end(); I != E; ++I)
    ValueRankMap[&*I] = ++i;

  ReversePostOrderTraversal<Function*> RPOT(&F);
  for (ReversePostOrderTraversal<Function*>::rpo_iterator I = RPOT.begin(),
         E = RPOT.end(); I != E; ++I) {
    BasicBlock *BB = *I;
    unsigned BBRank = RankMap[BB] = ++i << 16;

    // Walk the basic block, adding precomputed ranks for any instructions that
    // we cannot move.  This ensures that the ranks for these instructions are
    // all different in the block.
    for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I)
      if (isUnmovableInstruction(I))
        ValueRankMap[&*I] = ++BBRank;
  }
}
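
// Illustrative sketch of the numbering above, for a hypothetical function with
// arguments %a and %b and a single block %entry: i starts at 2, so %a gets
// rank 3 and %b gets rank 4; %entry then gets base rank 5 << 16, and each
// unmovable instruction in %entry is given 5 << 16 + 1, 5 << 16 + 2, ..., so
// block-local ranks never collide with argument ranks.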

unsigned Reassociate::getRank(Value *V) {
  if (isa<Argument>(V)) return ValueRankMap[V];   // Function argument...

  Instruction *I = dyn_cast<Instruction>(V);
  if (I == 0) return 0;  // Otherwise it's a global or constant, rank 0.

  unsigned &CachedRank = ValueRankMap[I];
  if (CachedRank) return CachedRank;    // Rank already known?

  // If this is an expression, return 1+MAX(rank(LHS), rank(RHS)) so that
  // we can reassociate expressions for code motion!  Since we do not recurse
  // for PHI nodes, we cannot have infinite recursion here, because there
  // cannot be loops in the value graph that do not go through PHI nodes.
  unsigned Rank = 0, MaxRank = RankMap[I->getParent()];
  for (unsigned i = 0, e = I->getNumOperands();
       i != e && Rank != MaxRank; ++i)
    Rank = std::max(Rank, getRank(I->getOperand(i)));

  // If this is a not or neg instruction, do not count it for rank.  This
  // assures us that X and ~X will have the same rank.
  if (!I->getType()->isInteger() ||
      (!BinaryOperator::isNot(I) && !BinaryOperator::isNeg(I)))
    ++Rank;

  //DEBUG(errs() << "Calculated Rank[" << V->getName() << "] = "
  //     << Rank << "\n");

  return CachedRank = Rank;
}
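
// Illustrative sketch: if %a and %b are arguments with ranks 3 and 4, then for
//   %t = add i32 %a, %b
// getRank(%t) returns 1 + max(3, 4) = 5 (the scan stops early once the
// enclosing block's rank is reached).  Because integer 'not' and 'neg'
// instructions are not counted, %x and ~%x end up with the same rank.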

/// isReassociableOp - Return V as a BinaryOperator if it is an instruction of
/// the specified opcode with at most one use; otherwise return null.
static BinaryOperator *isReassociableOp(Value *V, unsigned Opcode) {
  if ((V->hasOneUse() || V->use_empty()) && isa<Instruction>(V) &&
      cast<Instruction>(V)->getOpcode() == Opcode)
    return cast<BinaryOperator>(V);
  return 0;
}

/// LowerNegateToMultiply - Replace 0-X with X*-1.
///
static Instruction *LowerNegateToMultiply(Instruction *Neg,
                              std::map<AssertingVH<>, unsigned> &ValueRankMap) {
  Constant *Cst = Constant::getAllOnesValue(Neg->getType());

  Instruction *Res = BinaryOperator::CreateMul(Neg->getOperand(1), Cst, "",Neg);
  ValueRankMap.erase(Neg);
  Res->takeName(Neg);
  Neg->replaceAllUsesWith(Res);
  Neg->eraseFromParent();
  return Res;
}

// Given an expression of the form '(A+B)+(D+C)', turn it into '(((A+B)+C)+D)'.
// Note that if D is also part of the expression tree, we recurse to linearize
// it as well.  Besides that case, this does not recurse into A, B, or C.
void Reassociate::LinearizeExpr(BinaryOperator *I) {
  BinaryOperator *LHS = cast<BinaryOperator>(I->getOperand(0));
  BinaryOperator *RHS = cast<BinaryOperator>(I->getOperand(1));
  assert(isReassociableOp(LHS, I->getOpcode()) &&
         isReassociableOp(RHS, I->getOpcode()) &&
         "Not an expression that needs linearization?");

  DEBUG(errs() << "Linear" << *LHS << '\n' << *RHS << '\n' << *I << '\n');

  // Move the RHS instruction to live immediately before I, avoiding breaking
  // dominator properties.
  RHS->moveBefore(I);

  // Move operands around to do the linearization.
  I->setOperand(1, RHS->getOperand(0));
  RHS->setOperand(0, LHS);
  I->setOperand(0, RHS);

  ++NumLinear;
  MadeChange = true;
  DEBUG(errs() << "Linearized: " << *I << '\n');

  // If D is part of this expression tree, tail recurse.
  if (isReassociableOp(I->getOperand(1), I->getOpcode()))
    LinearizeExpr(I);
}
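
// Illustrative sketch of the rewiring above for I = (A+B)+(D+C): after the
// three setOperand calls, RHS has become (A+B)+C and I has become
// ((A+B)+C)+D.  If D is itself a reassociable add, the tail call then
// linearizes the new I in the same way.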


/// LinearizeExprTree - Given an associative binary expression tree, traverse
/// all of the uses putting it into canonical form.  This forces a left-linear
/// form of the expression (((a+b)+c)+d), and collects information about the
/// rank of the non-tree operands.
///
/// NOTE: This intentionally destroys the expression tree operands (turning
/// them into undef values) to reduce #uses of the values.  This means that the
/// caller MUST use something like RewriteExprTree to put the values back in.
///
void Reassociate::LinearizeExprTree(BinaryOperator *I,
                                    std::vector<ValueEntry> &Ops) {
  Value *LHS = I->getOperand(0), *RHS = I->getOperand(1);
  unsigned Opcode = I->getOpcode();

  // First step, linearize the expression if it is in ((A+B)+(C+D)) form.
  BinaryOperator *LHSBO = isReassociableOp(LHS, Opcode);
  BinaryOperator *RHSBO = isReassociableOp(RHS, Opcode);

  // If this is a multiply expression tree and it contains internal negations,
  // transform them into multiplies by -1 so they can be reassociated.
  if (I->getOpcode() == Instruction::Mul) {
    if (!LHSBO && LHS->hasOneUse() && BinaryOperator::isNeg(LHS)) {
      LHS = LowerNegateToMultiply(cast<Instruction>(LHS), ValueRankMap);
      LHSBO = isReassociableOp(LHS, Opcode);
    }
    if (!RHSBO && RHS->hasOneUse() && BinaryOperator::isNeg(RHS)) {
      RHS = LowerNegateToMultiply(cast<Instruction>(RHS), ValueRankMap);
      RHSBO = isReassociableOp(RHS, Opcode);
    }
  }

  if (!LHSBO) {
    if (!RHSBO) {
      // Neither the LHS nor the RHS is part of the tree, so this is a leaf.
      // As such, just remember these operands and their ranks.
      Ops.push_back(ValueEntry(getRank(LHS), LHS));
      Ops.push_back(ValueEntry(getRank(RHS), RHS));

      // Clear the leaves out.
      I->setOperand(0, UndefValue::get(I->getType()));
      I->setOperand(1, UndefValue::get(I->getType()));
      return;
    }

    // Turn X+(Y+Z) -> (Y+Z)+X
    std::swap(LHSBO, RHSBO);
    std::swap(LHS, RHS);
    bool Success = !I->swapOperands();
    assert(Success && "swapOperands failed");
    Success = false;
    MadeChange = true;
  } else if (RHSBO) {
    // Turn (A+B)+(C+D) -> (((A+B)+C)+D).  This guarantees that the RHS is not
    // part of the expression tree.
    LinearizeExpr(I);
    LHS = LHSBO = cast<BinaryOperator>(I->getOperand(0));
    RHS = I->getOperand(1);
    RHSBO = 0;
  }

  // Okay, now we know that the LHS is a nested expression and that the RHS is
  // not.  Perform reassociation.
  assert(!isReassociableOp(RHS, Opcode) && "LinearizeExpr failed!");

  // Move LHS right before I to make sure that the tree expression dominates all
  // values.
  LHSBO->moveBefore(I);

  // Linearize the expression tree on the LHS.
  LinearizeExprTree(LHSBO, Ops);

  // Remember the RHS operand and its rank.
  Ops.push_back(ValueEntry(getRank(RHS), RHS));

  // Clear the RHS leaf out.
  I->setOperand(1, UndefValue::get(I->getType()));
}
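
// Illustrative sketch: linearizing %t2 in
//   %t1 = add i32 %a, %b
//   %t2 = add i32 %t1, %c
// fills Ops with [(rank(a), a), (rank(b), b), (rank(c), c)] and replaces the
// leaf operands with undef (both operands of %t1 and the RHS of %t2), so
// RewriteExprTree below must be used afterwards to rebuild a valid tree.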

// RewriteExprTree - Now that the operands for this expression tree are
// linearized and optimized, emit them in-order.  This function is written to be
// tail recursive.
void Reassociate::RewriteExprTree(BinaryOperator *I,
                                  std::vector<ValueEntry> &Ops,
                                  unsigned i) {
  if (i+2 == Ops.size()) {
    if (I->getOperand(0) != Ops[i].Op ||
        I->getOperand(1) != Ops[i+1].Op) {
      Value *OldLHS = I->getOperand(0);
      DEBUG(errs() << "RA: " << *I << '\n');
      I->setOperand(0, Ops[i].Op);
      I->setOperand(1, Ops[i+1].Op);
      DEBUG(errs() << "TO: " << *I << '\n');
      MadeChange = true;
      ++NumChanged;

      // If we reassociated a tree to fewer operands (e.g. (1+a+2) -> (a+3)),
      // delete the extra, now dead, nodes.
      RemoveDeadBinaryOp(OldLHS);
    }
    return;
  }
  assert(i+2 < Ops.size() && "Ops index out of range!");

  if (I->getOperand(1) != Ops[i].Op) {
    DEBUG(errs() << "RA: " << *I << '\n');
    I->setOperand(1, Ops[i].Op);
    DEBUG(errs() << "TO: " << *I << '\n');
    MadeChange = true;
    ++NumChanged;
  }

  BinaryOperator *LHS = cast<BinaryOperator>(I->getOperand(0));
  assert(LHS->getOpcode() == I->getOpcode() &&
         "Improper expression tree!");

  // Compactify the tree instructions together with each other to guarantee
  // that the expression tree is dominated by all of Ops.
  LHS->moveBefore(I);
  RewriteExprTree(LHS, Ops, i+1);
}
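
// Illustrative sketch: with Ops already sorted highest-rank-first, say
// [a(#5), b(#4), c(#3)], the rewrite places Ops[0] as the RHS of the root and
// hands the remaining entries to the inner node, producing (b op c) op a, so
// the lowest-ranked values (typically constants) end up deepest in the tree,
// where they can be folded together.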



// NegateValue - Insert instructions immediately before BI that compute the
// negated version of the value specified, and return the negated value.  BI
// itself is left in place so the reassociation pass can continue processing it.
//
static Value *NegateValue(Value *V, Instruction *BI) {
  // We are trying to expose opportunity for reassociation.  One of the things
  // that we want to do to achieve this is to push a negation as deep into an
  // expression chain as possible, to expose the add instructions.  In practice,
  // this means that we turn this:
  //   X = -(A+12+C+D)   into    X = -A + -12 + -C + -D = -12 + -A + -C + -D
  // so that later an expression such as Y = 12+X can be reassociated with the
  // -12 to eliminate the constants.  We assume that instcombine will clean up
  // the mess later if we introduce tons of unnecessary negation instructions...
  //
  if (Instruction *I = dyn_cast<Instruction>(V))
    if (I->getOpcode() == Instruction::Add && I->hasOneUse()) {
      // Push the negates through the add.
      I->setOperand(0, NegateValue(I->getOperand(0), BI));
      I->setOperand(1, NegateValue(I->getOperand(1), BI));

      // We must move the add instruction here, because the neg instructions do
      // not dominate the old add instruction in general.  By moving it, we are
      // assured that the neg instructions we just inserted dominate the
      // instruction we are about to insert after them.
      //
      I->moveBefore(BI);
      I->setName(I->getName()+".neg");
      return I;
    }

  // Insert a 'neg' instruction that subtracts the value from zero to get the
  // negation.
  //
  return BinaryOperator::CreateNeg(V, V->getName() + ".neg", BI);
}

/// ShouldBreakUpSubtract - Return true if we should break up this subtract of
/// X-Y into (X + -Y).
static bool ShouldBreakUpSubtract(Instruction *Sub) {
  // If this is a negation, we can't split it up!
  if (BinaryOperator::isNeg(Sub))
    return false;

  // Don't bother to break this up unless either operand is a reassociable add
  // or subtract, or the result is only used by one.
  if (isReassociableOp(Sub->getOperand(0), Instruction::Add) ||
      isReassociableOp(Sub->getOperand(0), Instruction::Sub))
    return true;
  if (isReassociableOp(Sub->getOperand(1), Instruction::Add) ||
      isReassociableOp(Sub->getOperand(1), Instruction::Sub))
    return true;
  if (Sub->hasOneUse() &&
      (isReassociableOp(Sub->use_back(), Instruction::Add) ||
       isReassociableOp(Sub->use_back(), Instruction::Sub)))
    return true;

  return false;
}

/// BreakUpSubtract - If we have (X-Y), and if either X is an add, or if this is
/// only used by an add, transform this into (X+(0-Y)) to promote better
/// reassociation.
static Instruction *BreakUpSubtract(Instruction *Sub,
                              std::map<AssertingVH<>, unsigned> &ValueRankMap) {
  // Convert a subtract into an add and a neg instruction... so that sub
  // instructions can be commuted with other add instructions...
  //
  // Calculate the negative value of Operand 1 of the sub instruction...
  // and set it as the RHS of the add instruction we just made...
  //
  Value *NegVal = NegateValue(Sub->getOperand(1), Sub);
  Instruction *New =
    BinaryOperator::CreateAdd(Sub->getOperand(0), NegVal, "", Sub);
  New->takeName(Sub);

  // Everyone now refers to the add instruction.
  ValueRankMap.erase(Sub);
  Sub->replaceAllUsesWith(New);
  Sub->eraseFromParent();

  DEBUG(errs() << "Negated: " << *New << '\n');
  return New;
}

/// ConvertShiftToMul - If this is a shift of a reassociable multiply or is used
/// by one, change this into a multiply by a constant to assist with further
/// reassociation.
static Instruction *ConvertShiftToMul(Instruction *Shl,
                              std::map<AssertingVH<>, unsigned> &ValueRankMap) {
  // If an operand of this shift is a reassociable multiply, or if the shift
  // is used by a reassociable multiply or add, turn into a multiply.
  if (isReassociableOp(Shl->getOperand(0), Instruction::Mul) ||
      (Shl->hasOneUse() &&
       (isReassociableOp(Shl->use_back(), Instruction::Mul) ||
        isReassociableOp(Shl->use_back(), Instruction::Add)))) {
    Constant *MulCst = ConstantInt::get(Shl->getType(), 1);
    MulCst = ConstantExpr::getShl(MulCst, cast<Constant>(Shl->getOperand(1)));

    Instruction *Mul =
      BinaryOperator::CreateMul(Shl->getOperand(0), MulCst, "", Shl);
    ValueRankMap.erase(Shl);
    Mul->takeName(Shl);
    Shl->replaceAllUsesWith(Mul);
    Shl->eraseFromParent();
    return Mul;
  }
  return 0;
}
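
// Illustrative sketch: if %s = shl i32 %x, 4 feeds a reassociable multiply (or
// add), MulCst becomes 1 << 4 = 16 and the shift is replaced with
// %s = mul i32 %x, 16, which the rest of the pass can then reassociate freely.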

// Scan backwards and forwards among values with the same rank as element i to
// see if X exists.  If X does not exist, return i.
static unsigned FindInOperandList(std::vector<ValueEntry> &Ops, unsigned i,
                                  Value *X) {
  unsigned XRank = Ops[i].Rank;
  unsigned e = Ops.size();
  for (unsigned j = i+1; j != e && Ops[j].Rank == XRank; ++j)
    if (Ops[j].Op == X)
      return j;
  // Scan backwards
  for (unsigned j = i-1; j != ~0U && Ops[j].Rank == XRank; --j)
    if (Ops[j].Op == X)
      return j;
  return i;
}

/// EmitAddTreeOfValues - Emit a tree of add instructions, summing Ops together
/// and returning the result.  Insert the tree before I.
static Value *EmitAddTreeOfValues(Instruction *I, SmallVectorImpl<Value*> &Ops){
  if (Ops.size() == 1) return Ops.back();

  Value *V1 = Ops.back();
  Ops.pop_back();
  Value *V2 = EmitAddTreeOfValues(I, Ops);
  return BinaryOperator::CreateAdd(V2, V1, "tmp", I);
}

/// RemoveFactorFromExpression - If V is an expression tree that is a
/// multiplication sequence, and if this sequence contains a multiply by Factor,
/// remove Factor from the tree and return the new tree.
Value *Reassociate::RemoveFactorFromExpression(Value *V, Value *Factor) {
  BinaryOperator *BO = isReassociableOp(V, Instruction::Mul);
  if (!BO) return 0;

  std::vector<ValueEntry> Factors;
  LinearizeExprTree(BO, Factors);

  bool FoundFactor = false;
  for (unsigned i = 0, e = Factors.size(); i != e; ++i)
    if (Factors[i].Op == Factor) {
      FoundFactor = true;
      Factors.erase(Factors.begin()+i);
      break;
    }
  if (!FoundFactor) {
    // Make sure to restore the operands to the expression tree.
    RewriteExprTree(BO, Factors);
    return 0;
  }

  if (Factors.size() == 1) return Factors[0].Op;

  RewriteExprTree(BO, Factors);
  return BO;
}
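
// Illustrative sketch: with V = (A*B)*C and Factor = B, the factor list
// linearizes to [A, B, C]; B is erased and the tree is rewritten as A*C, which
// is returned.  If Factor never appears, the operands (which LinearizeExprTree
// replaced with undef) are restored and null is returned.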

/// FindSingleUseMultiplyFactors - If V is a single-use multiply, recursively
/// add its operands as factors, otherwise add V to the list of factors.
static void FindSingleUseMultiplyFactors(Value *V,
                                         SmallVectorImpl<Value*> &Factors) {
  BinaryOperator *BO;
  if ((!V->hasOneUse() && !V->use_empty()) ||
      !(BO = dyn_cast<BinaryOperator>(V)) ||
      BO->getOpcode() != Instruction::Mul) {
    Factors.push_back(V);
    return;
  }

  // Otherwise, add the LHS and RHS to the list of factors.
  FindSingleUseMultiplyFactors(BO->getOperand(1), Factors);
  FindSingleUseMultiplyFactors(BO->getOperand(0), Factors);
}
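
// Illustrative sketch: for a single-use V = mul(A, mul(B, C)) whose inner
// multiply is also single-use, the recursion pushes C, then B, then A, so
// Factors ends up as [C, B, A]; any operand that is not itself a single-use
// multiply is simply appended as a factor.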

/// OptimizeAndOrXor - Optimize a series of operands to an 'and', 'or', or 'xor'
/// instruction.  This optimizes based on identities.  If it can be reduced to
/// a single Value, it is returned, otherwise the Ops list is mutated as
/// necessary.
static Value *OptimizeAndOrXor(unsigned Opcode, std::vector<ValueEntry> &Ops) {
  // Scan the operand lists looking for X and ~X pairs, along with X,X pairs.
  // If we find any, we can simplify the expression. X&~X == 0, X|~X == -1.
  for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
    // First, check for X and ~X in the operand list.
    assert(i < Ops.size());
    if (BinaryOperator::isNot(Ops[i].Op)) {    // Cannot occur for ^.
      Value *X = BinaryOperator::getNotArgument(Ops[i].Op);
      unsigned FoundX = FindInOperandList(Ops, i, X);
      if (FoundX != i) {
        if (Opcode == Instruction::And)   // ...&X&~X = 0
          return Constant::getNullValue(X->getType());

        if (Opcode == Instruction::Or)    // ...|X|~X = -1
          return Constant::getAllOnesValue(X->getType());
      }
    }

    // Next, check for duplicate pairs of values, which we assume are next to
    // each other, due to our sorting criteria.
    assert(i < Ops.size());
    if (i+1 != Ops.size() && Ops[i+1].Op == Ops[i].Op) {
      if (Opcode == Instruction::And || Opcode == Instruction::Or) {
        // Drop duplicate values.
        Ops.erase(Ops.begin()+i);
        --i; --e;
        ++NumAnnihil;
      } else {
        assert(Opcode == Instruction::Xor);
        if (e == 2)
          return Constant::getNullValue(Ops[0].Op->getType());

        // ... X^X -> ...
        Ops.erase(Ops.begin()+i, Ops.begin()+i+2);
        i -= 1; e -= 2;
        ++NumAnnihil;
      }
    }
  }
  return 0;
}
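
// Illustrative sketch: given the sorted operand list of an 'and' that contains
// both %x and ~%x, the whole expression folds to 0 (for an 'or' it folds to
// -1); adjacent duplicates collapse, so x & x & y keeps a single x, while
// x ^ x ^ y drops both copies of x and leaves just y.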

/// OptimizeAdd - Optimize a series of operands to an 'add' instruction.  This
/// optimizes based on identities.  If it can be reduced to a single Value, it
/// is returned, otherwise the Ops list is mutated as necessary.
Value *Reassociate::OptimizeAdd(Instruction *I, std::vector<ValueEntry> &Ops) {
  // Scan the operand lists looking for X and -X pairs.  If we find any, we
  // can simplify the expression. X+-X == 0.
  for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
    assert(i < Ops.size());
    // Check for X and -X in the operand list.
    if (!BinaryOperator::isNeg(Ops[i].Op))
      continue;

    Value *X = BinaryOperator::getNegArgument(Ops[i].Op);
    unsigned FoundX = FindInOperandList(Ops, i, X);
    if (FoundX == i)
      continue;

    // Remove X and -X from the operand list.
    if (Ops.size() == 2)
      return Constant::getNullValue(X->getType());

    Ops.erase(Ops.begin()+i);
    if (i < FoundX)
      --FoundX;
    else
      --i;   // Need to back up an extra one.
    Ops.erase(Ops.begin()+FoundX);
    ++NumAnnihil;
    --i;     // Revisit element.
    e -= 2;  // Removed two elements.
  }

  // Scan the operand list, checking to see if there are any common factors
  // between operands.  Consider something like A*A+A*B*C+D.  We would like to
  // reassociate this to A*(A+B*C)+D, which reduces the number of multiplies.
  // To efficiently find this, we count the number of times a factor occurs
  // for any ADD operands that are MULs.
  DenseMap<Value*, unsigned> FactorOccurrences;

  // Keep track of each multiply we see, to avoid triggering on (X*4)+(X*4)
  // where they are actually the same multiply.
  SmallPtrSet<BinaryOperator*, 4> Multiplies;
  unsigned MaxOcc = 0;
  Value *MaxOccVal = 0;
  for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
    BinaryOperator *BOp = dyn_cast<BinaryOperator>(Ops[i].Op);
    if (BOp == 0 || BOp->getOpcode() != Instruction::Mul || !BOp->use_empty())
      continue;

    // If we've already seen this multiply, don't revisit it.
    if (!Multiplies.insert(BOp)) continue;

    // Compute all of the factors of this added value.
    SmallVector<Value*, 8> Factors;
    FindSingleUseMultiplyFactors(BOp, Factors);
    assert(Factors.size() > 1 && "Bad linearize!");

    // Add one to FactorOccurrences for each unique factor in this op.
    if (Factors.size() == 2) {
      unsigned Occ = ++FactorOccurrences[Factors[0]];
      if (Occ > MaxOcc) { MaxOcc = Occ; MaxOccVal = Factors[0]; }
      if (Factors[0] != Factors[1]) {   // Don't double count A*A.
        Occ = ++FactorOccurrences[Factors[1]];
        if (Occ > MaxOcc) { MaxOcc = Occ; MaxOccVal = Factors[1]; }
      }
    } else {
      SmallPtrSet<Value*, 4> Duplicates;
      for (unsigned i = 0, e = Factors.size(); i != e; ++i) {
        if (!Duplicates.insert(Factors[i])) continue;

        unsigned Occ = ++FactorOccurrences[Factors[i]];
        if (Occ > MaxOcc) { MaxOcc = Occ; MaxOccVal = Factors[i]; }
      }
    }
  }

  // If any factor occurred more than one time, we can pull it out.
  if (MaxOcc > 1) {
    DEBUG(errs() << "\nFACTORING [" << MaxOcc << "]: " << *MaxOccVal << "\n");
    ++NumFactor;

    // Create a new instruction that uses the MaxOccVal twice.  If we don't do
    // this, we could otherwise run into situations where removing a factor
    // from an expression will drop a use of maxocc, and this can cause
    // RemoveFactorFromExpression on successive values to behave differently.
    Instruction *DummyInst = BinaryOperator::CreateAdd(MaxOccVal, MaxOccVal);
    SmallVector<Value*, 4> NewMulOps;
    for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
      if (Value *V = RemoveFactorFromExpression(Ops[i].Op, MaxOccVal)) {
        NewMulOps.push_back(V);
        Ops.erase(Ops.begin()+i);
        --i; --e;
      }
    }

    // No need for extra uses anymore.
    delete DummyInst;

    unsigned NumAddedValues = NewMulOps.size();
    Value *V = EmitAddTreeOfValues(I, NewMulOps);
    Value *V2 = BinaryOperator::CreateMul(V, MaxOccVal, "tmp", I);

    // Now that we have inserted V and its sole use, optimize it. This allows
    // us to handle cases that require multiple factoring steps, such as this:
    // A*A*B + A*A*C   -->   A*(A*B+A*C)   -->   A*(A*(B+C))
    if (NumAddedValues > 1)
      ReassociateExpression(cast<BinaryOperator>(V));

    // If every add operand included the factor (e.g. "A*B + A*C"), then the
    // entire result expression is just the multiply "A*(B+C)".
    if (Ops.empty())
      return V2;

    // Otherwise, we had some input that didn't have the factor, such as
    // "A*B + A*C + D" -> "A*(B+C) + D".  Add the new multiply to the list of
    // things being added.
    Ops.insert(Ops.begin(), ValueEntry(getRank(V2), V2));
  }

  return 0;
}

Value *Reassociate::OptimizeExpression(BinaryOperator *I,
                                       std::vector<ValueEntry> &Ops) {
  // Now that we have the linearized expression tree, try to optimize it.
  // Start by folding any constants that we found.
  bool IterateOptimization = false;
  if (Ops.size() == 1) return Ops[0].Op;

  unsigned Opcode = I->getOpcode();

  if (Constant *V1 = dyn_cast<Constant>(Ops[Ops.size()-2].Op))
    if (Constant *V2 = dyn_cast<Constant>(Ops.back().Op)) {
      Ops.pop_back();
      Ops.back().Op = ConstantExpr::get(Opcode, V1, V2);
      return OptimizeExpression(I, Ops);
    }

  // Check for destructive annihilation due to a constant being used.
  if (ConstantInt *CstVal = dyn_cast<ConstantInt>(Ops.back().Op))
    switch (Opcode) {
    default: break;
    case Instruction::And:
      if (CstVal->isZero())                  // ... & 0 -> 0
        return CstVal;
      if (CstVal->isAllOnesValue())          // ... & -1 -> ...
        Ops.pop_back();
      break;
    case Instruction::Mul:
      if (CstVal->isZero()) {                // ... * 0 -> 0
        ++NumAnnihil;
        return CstVal;
      }

      if (cast<ConstantInt>(CstVal)->isOne())
        Ops.pop_back();                      // ... * 1 -> ...
      break;
    case Instruction::Or:
      if (CstVal->isAllOnesValue())          // ... | -1 -> -1
        return CstVal;
      // FALLTHROUGH!
    case Instruction::Add:
    case Instruction::Xor:
      if (CstVal->isZero())                  // ... [|^+] 0 -> ...
        Ops.pop_back();
      break;
    }
  if (Ops.size() == 1) return Ops[0].Op;

  // Handle destructive annihilation due to identities between elements in the
  // argument list here.
  switch (Opcode) {
  default: break;
  case Instruction::And:
  case Instruction::Or:
  case Instruction::Xor: {
    unsigned NumOps = Ops.size();
    if (Value *Result = OptimizeAndOrXor(Opcode, Ops))
      return Result;
    IterateOptimization |= Ops.size() != NumOps;
    break;
  }

  case Instruction::Add: {
    unsigned NumOps = Ops.size();
    if (Value *Result = OptimizeAdd(I, Ops))
      return Result;
    IterateOptimization |= Ops.size() != NumOps;
  }

    break;
  //case Instruction::Mul:
  }

  if (IterateOptimization)
    return OptimizeExpression(I, Ops);
  return 0;
}


/// ReassociateBB - Inspect all of the instructions in this basic block,
/// reassociating them as we go.
void Reassociate::ReassociateBB(BasicBlock *BB) {
  for (BasicBlock::iterator BBI = BB->begin(); BBI != BB->end(); ) {
    Instruction *BI = BBI++;
    if (BI->getOpcode() == Instruction::Shl &&
        isa<ConstantInt>(BI->getOperand(1)))
      if (Instruction *NI = ConvertShiftToMul(BI, ValueRankMap)) {
        MadeChange = true;
        BI = NI;
      }

    // Reject cases where it is pointless to do this.
    if (!isa<BinaryOperator>(BI) || BI->getType()->isFloatingPoint() ||
        isa<VectorType>(BI->getType()))
      continue;  // Floating point ops are not associative.

    // If this is a subtract instruction which is not already in negate form,
    // see if we can convert it to X+-Y.
    if (BI->getOpcode() == Instruction::Sub) {
      if (ShouldBreakUpSubtract(BI)) {
        BI = BreakUpSubtract(BI, ValueRankMap);
        MadeChange = true;
      } else if (BinaryOperator::isNeg(BI)) {
        // Otherwise, this is a negation.  See if the operand is a multiply tree
        // and if this is not an inner node of a multiply tree.
        if (isReassociableOp(BI->getOperand(1), Instruction::Mul) &&
            (!BI->hasOneUse() ||
             !isReassociableOp(BI->use_back(), Instruction::Mul))) {
          BI = LowerNegateToMultiply(BI, ValueRankMap);
          MadeChange = true;
        }
      }
    }

    // If this instruction is an associative binary operator, process it.
    if (!BI->isAssociative()) continue;
    BinaryOperator *I = cast<BinaryOperator>(BI);

    // If this is an interior node of a reassociable tree, ignore it until we
    // get to the root of the tree, to avoid N^2 analysis.
    if (I->hasOneUse() && isReassociableOp(I->use_back(), I->getOpcode()))
      continue;

    // If this is an add tree that is used by a sub instruction, ignore it
    // until we process the subtract.
    if (I->hasOneUse() && I->getOpcode() == Instruction::Add &&
        cast<Instruction>(I->use_back())->getOpcode() == Instruction::Sub)
      continue;

    ReassociateExpression(I);
  }
}

void Reassociate::ReassociateExpression(BinaryOperator *I) {

  // First, walk the expression tree, linearizing the tree and collecting the
  // operands along with their ranks.
  std::vector<ValueEntry> Ops;
  LinearizeExprTree(I, Ops);

  DEBUG(errs() << "RAIn:\t"; PrintOps(I, Ops); errs() << '\n');

  // Now that we have linearized the tree to a list and have gathered all of
  // the operands and their ranks, sort the operands by their rank.  Use a
  // stable_sort so that values with equal ranks will have their relative
  // positions maintained (and so the compiler is deterministic).  Note that
  // this sorts so that the highest ranking values end up at the beginning of
  // the vector.
  std::stable_sort(Ops.begin(), Ops.end());

  // OptimizeExpression - Now that we have the expression tree in a convenient
  // sorted form, optimize it globally if possible.
  if (Value *V = OptimizeExpression(I, Ops)) {
    // This expression tree simplified to something that isn't a tree,
    // eliminate it.
    DEBUG(errs() << "Reassoc to scalar: " << *V << '\n');
    I->replaceAllUsesWith(V);
    RemoveDeadBinaryOp(I);
    ++NumAnnihil;
    return;
  }

  // We want to sink immediates as deeply as possible except in the case where
  // this is a multiply tree used only by an add, and the immediate is a -1.
  // In this case we reassociate to put the negation on the outside so that we
  // can fold the negation into the add: (-X)*Y + Z -> Z-X*Y
  if (I->getOpcode() == Instruction::Mul && I->hasOneUse() &&
      cast<Instruction>(I->use_back())->getOpcode() == Instruction::Add &&
      isa<ConstantInt>(Ops.back().Op) &&
      cast<ConstantInt>(Ops.back().Op)->isAllOnesValue()) {
    Ops.insert(Ops.begin(), Ops.back());
    Ops.pop_back();
  }

  DEBUG(errs() << "RAOut:\t"; PrintOps(I, Ops); errs() << '\n');

  if (Ops.size() == 1) {
    // This expression tree simplified to something that isn't a tree,
    // eliminate it.
    I->replaceAllUsesWith(Ops[0].Op);
    RemoveDeadBinaryOp(I);
  } else {
    // Now that we have ordered and optimized the expression, splat the operands
    // back into the expression tree, removing any unneeded nodes.
    RewriteExprTree(I, Ops);
  }
}


bool Reassociate::runOnFunction(Function &F) {
  // Recalculate the rank map for F
  BuildRankMap(F);

  MadeChange = false;
  for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI)
    ReassociateBB(FI);

  // We are done with the rank map...
  RankMap.clear();
  ValueRankMap.clear();
  return MadeChange;
}
