DwarfEHPrepare.cpp revision 52f155ee5d607b3773c2bc19fa688758f8887e35
//===-- DwarfEHPrepare - Prepare exception handling for code generation ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass mulches exception handling code into a form adapted to code
// generation. Required if using dwarf exception handling.
//
//===----------------------------------------------------------------------===//

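// In outline, runOnFunction below performs these transformations:
//   1. NormalizeLandingPads     - ensure that only invoke unwind edges end at
//                                 landing pads, splitting blocks if necessary.
//   2. LowerUnwinds             - turn unwind instructions into calls to
//                                 _Unwind_Resume (or the target equivalent).
//   3. MoveExceptionValueCalls  - keep eh.exception calls inside landing pads,
//                                 reading the value from a stack temporary
//                                 everywhere else.
//   4. FinishStackTemporaries and PromoteStackTemporaries - initialize that
//                                 temporary in each landing pad and, when not
//                                 compiling fast, promote it back to registers.
//   5. HandleURoRInvokes        - convert clean-up selectors that can reach an
//                                 invoke of _Unwind_Resume_or_Rethrow into
//                                 catch-all selectors.
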
#define DEBUG_TYPE "dwarfehprepare"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Module.h"
#include "llvm/Pass.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
using namespace llvm;

STATISTIC(NumLandingPadsSplit,     "Number of landing pads split");
STATISTIC(NumUnwindsLowered,       "Number of unwind instructions lowered");
STATISTIC(NumExceptionValuesMoved, "Number of eh.exception calls moved");
STATISTIC(NumStackTempsIntroduced, "Number of stack temporaries introduced");

namespace {
  class DwarfEHPrepare : public FunctionPass {
    const TargetLowering *TLI;
    bool CompileFast;

    // The eh.exception intrinsic.
    Function *ExceptionValueIntrinsic;

    // The eh.selector intrinsic.
    Function *SelectorIntrinsic;

    // _Unwind_Resume_or_Rethrow call.
    Constant *URoR;

    // The EH language-specific catch-all type.
    GlobalVariable *EHCatchAllValue;

    // _Unwind_Resume or the target equivalent.
    Constant *RewindFunction;

    // Dominator info is used when turning stack temporaries into registers.
    DominatorTree *DT;
    DominanceFrontier *DF;

    // The function we are running on.
    Function *F;

    // The landing pads for this function.
    typedef SmallPtrSet<BasicBlock*, 8> BBSet;
    BBSet LandingPads;

    // Stack temporary used to hold eh.exception values.
    AllocaInst *ExceptionValueVar;

    bool NormalizeLandingPads();
    bool LowerUnwinds();
    bool MoveExceptionValueCalls();
    bool FinishStackTemporaries();
    bool PromoteStackTemporaries();

    Instruction *CreateExceptionValueCall(BasicBlock *BB);
    Instruction *CreateValueLoad(BasicBlock *BB);

    /// CreateReadOfExceptionValue - Return the result of the eh.exception
    /// intrinsic by calling the intrinsic if in a landing pad, or loading it
    /// from the exception value variable otherwise.
    Instruction *CreateReadOfExceptionValue(BasicBlock *BB) {
      return LandingPads.count(BB) ?
        CreateExceptionValueCall(BB) : CreateValueLoad(BB);
    }

    /// CleanupSelectors - Any remaining eh.selector intrinsic calls which still
    /// use the ".llvm.eh.catch.all.value" global need to be converted to use
    /// its initializer instead.
    bool CleanupSelectors();

    /// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
    void FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels);

    /// FindAllURoRInvokes - Find all URoR invokes in the function.
    void FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes);

    /// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow"
    /// calls. The "unwind" part of these invokes jumps to a landing pad within
    /// the current function. Such an invoke is a candidate for merging the
    /// selector associated with the URoR invoke with the one from the URoR's
    /// landing pad.
    bool HandleURoRInvokes();

    /// FindSelectorAndURoR - Find the eh.selector call and URoR call associated
    /// with the eh.exception call. This recursively looks past instructions
    /// which don't change the EH pointer value, like casts or PHI nodes.
    bool FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                             SmallPtrSet<IntrinsicInst*, 8> &SelCalls);

    /// DoMem2RegPromotion - Take an alloca and promote it from memory to a
    /// register.
    bool DoMem2RegPromotion(Value *V) {
      AllocaInst *AI = dyn_cast<AllocaInst>(V);
      if (!AI || !isAllocaPromotable(AI)) return false;

      // Turn the alloca into a register.
      std::vector<AllocaInst*> Allocas(1, AI);
      PromoteMemToReg(Allocas, *DT, *DF);
      return true;
    }

    /// PromoteStoreInst - Perform Mem2Reg on a StoreInst.
    bool PromoteStoreInst(StoreInst *SI) {
      if (!SI || !DT || !DF) return false;
      if (DoMem2RegPromotion(SI->getOperand(1)))
        return true;
      return false;
    }

    /// PromoteEHPtrStore - Promote the storing of an EH pointer into a
    /// register. This should get rid of the store and subsequent loads.
    bool PromoteEHPtrStore(IntrinsicInst *II) {
      if (!DT || !DF) return false;

      bool Changed = false;
      StoreInst *SI;

      while (1) {
        SI = 0;
        for (Value::use_iterator
               I = II->use_begin(), E = II->use_end(); I != E; ++I) {
          SI = dyn_cast<StoreInst>(I);
          if (SI) break;
        }

        if (!PromoteStoreInst(SI))
          break;

        Changed = true;
      }

      return Changed;
    }

  public:
    static char ID; // Pass identification, replacement for typeid.
    DwarfEHPrepare(const TargetLowering *tli, bool fast) :
      FunctionPass(&ID), TLI(tli), CompileFast(fast),
      ExceptionValueIntrinsic(0), SelectorIntrinsic(0),
      URoR(0), EHCatchAllValue(0), RewindFunction(0) {}

    virtual bool runOnFunction(Function &Fn);

    // getAnalysisUsage - We need dominance frontiers for memory promotion.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      if (!CompileFast)
        AU.addRequired<DominatorTree>();
      AU.addPreserved<DominatorTree>();
      if (!CompileFast)
        AU.addRequired<DominanceFrontier>();
      AU.addPreserved<DominanceFrontier>();
    }

    const char *getPassName() const {
      return "Exception handling preparation";
    }

  };
} // end anonymous namespace

char DwarfEHPrepare::ID = 0;

FunctionPass *llvm::createDwarfEHPass(const TargetLowering *tli, bool fast) {
  return new DwarfEHPrepare(tli, fast);
}

/// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
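/// A selector is treated as a clean-up here if it has no type-info operands at
/// all, or if its single extra operand is the constant 0.  For illustration
/// only (the value and personality names below are invented), such a call
/// looks roughly like:
///   %sel = call i32 (i8*, i8*, ...)* @llvm.eh.selector(i8* %exn,
///            i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*), i32 0)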
void DwarfEHPrepare::
FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels) {
  for (Value::use_iterator
         I = SelectorIntrinsic->use_begin(),
         E = SelectorIntrinsic->use_end(); I != E; ++I) {
    IntrinsicInst *SI = dyn_cast<IntrinsicInst>(I);
    if (!SI || SI->getParent()->getParent() != F) continue;

    unsigned NumOps = SI->getNumOperands();
    if (NumOps > 4) continue;
    bool IsCleanUp = (NumOps == 3);

    if (!IsCleanUp)
      if (ConstantInt *CI = dyn_cast<ConstantInt>(SI->getOperand(3)))
        IsCleanUp = (CI->getZExtValue() == 0);

    if (IsCleanUp)
      Sels.insert(SI);
  }
}

/// FindAllURoRInvokes - Find all URoR invokes in the function.
void DwarfEHPrepare::
FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes) {
  for (Value::use_iterator
         I = URoR->use_begin(),
         E = URoR->use_end(); I != E; ++I) {
    if (InvokeInst *II = dyn_cast<InvokeInst>(I))
      URoRInvokes.insert(II);
  }
}

/// CleanupSelectors - Any remaining eh.selector intrinsic calls which still use
/// the ".llvm.eh.catch.all.value" global need to be converted to use its
/// initializer instead.
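/// For illustration only: when a selector's final operand is still the global
/// ".llvm.eh.catch.all.value", that operand is replaced with the global's
/// initializer (for example, a null pointer for a C++ catch-all), so later
/// stages see the actual catch-all value rather than the holder variable.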
bool DwarfEHPrepare::CleanupSelectors() {
  if (!EHCatchAllValue) return false;

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  bool Changed = false;
  for (Value::use_iterator
         I = SelectorIntrinsic->use_begin(),
         E = SelectorIntrinsic->use_end(); I != E; ++I) {
    IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(I);
    if (!Sel || Sel->getParent()->getParent() != F) continue;

    // Index of the ".llvm.eh.catch.all.value" variable.
    unsigned OpIdx = Sel->getNumOperands() - 1;
    GlobalVariable *GV = dyn_cast<GlobalVariable>(Sel->getOperand(OpIdx));
    if (GV != EHCatchAllValue) continue;
    Sel->setOperand(OpIdx, EHCatchAllValue->getInitializer());
    Changed = true;
  }

  return Changed;
}

/// FindSelectorAndURoR - Find the eh.selector call associated with the
/// eh.exception call, and indicate whether there is a URoR "invoke" associated
/// with the eh.exception call. This recursively looks past instructions which
/// don't change the EH pointer value, like casts or PHI nodes.
bool
DwarfEHPrepare::FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                                    SmallPtrSet<IntrinsicInst*, 8> &SelCalls) {
  SmallPtrSet<PHINode*, 32> SeenPHIs;
  bool Changed = false;

 restart:
  for (Value::use_iterator
         I = Inst->use_begin(), E = Inst->use_end(); I != E; ++I) {
    Instruction *II = dyn_cast<Instruction>(I);
    if (!II || II->getParent()->getParent() != F) continue;

    if (IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(II)) {
      if (Sel->getIntrinsicID() == Intrinsic::eh_selector)
        SelCalls.insert(Sel);
    } else if (InvokeInst *Invoke = dyn_cast<InvokeInst>(II)) {
      if (Invoke->getCalledFunction() == URoR)
        URoRInvoke = true;
    } else if (CastInst *CI = dyn_cast<CastInst>(II)) {
      Changed |= FindSelectorAndURoR(CI, URoRInvoke, SelCalls);
    } else if (StoreInst *SI = dyn_cast<StoreInst>(II)) {
      if (!PromoteStoreInst(SI)) continue;
      Changed = true;
      SeenPHIs.clear();
      goto restart;             // Uses may have changed, restart loop.
    } else if (PHINode *PN = dyn_cast<PHINode>(II)) {
      if (SeenPHIs.insert(PN))
        // Don't process a PHI node more than once.
        Changed |= FindSelectorAndURoR(PN, URoRInvoke, SelCalls);
    }
  }

  return Changed;
}

/// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow" calls. The
/// "unwind" part of these invokes jumps to a landing pad within the current
/// function. Such an invoke is a candidate for merging the selector associated
/// with the URoR invoke with the one from the URoR's landing pad.
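/// In outline: collect the clean-up selectors and the URoR invokes, mark every
/// selector whose block dominates (or is) a URoR invoke's block, fall back to
/// walking the uses of eh.exception for any that were missed, and finally
/// rewrite each marked selector as a catch-all selector built from the
/// ".llvm.eh.catch.all.value" initializer.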
bool DwarfEHPrepare::HandleURoRInvokes() {
  if (!DT) return CleanupSelectors(); // We require DominatorTree information.

  if (!EHCatchAllValue) {
    EHCatchAllValue =
      F->getParent()->getNamedGlobal(".llvm.eh.catch.all.value");
    if (!EHCatchAllValue) return false;
  }

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  if (!URoR) {
    URoR = F->getParent()->getFunction("_Unwind_Resume_or_Rethrow");
    if (!URoR) return CleanupSelectors();
  }

  SmallPtrSet<IntrinsicInst*, 32> Sels;
  SmallPtrSet<InvokeInst*, 32> URoRInvokes;
  FindAllCleanupSelectors(Sels);
  FindAllURoRInvokes(URoRInvokes);

  SmallPtrSet<IntrinsicInst*, 32> SelsToConvert;

  for (SmallPtrSet<IntrinsicInst*, 32>::iterator
         SI = Sels.begin(), SE = Sels.end(); SI != SE; ++SI) {
    const BasicBlock *SelBB = (*SI)->getParent();
    for (SmallPtrSet<InvokeInst*, 32>::iterator
           UI = URoRInvokes.begin(), UE = URoRInvokes.end(); UI != UE; ++UI) {
      const BasicBlock *URoRBB = (*UI)->getParent();
      if (SelBB == URoRBB || DT->dominates(SelBB, URoRBB)) {
        SelsToConvert.insert(*SI);
        break;
      }
    }
  }

  bool Changed = false;

  if (Sels.size() != SelsToConvert.size()) {
    // If we haven't been able to convert all of the clean-up selectors, then
    // check the remaining ones the slow way, by walking the uses of
    // eh.exception, to see if they still need to be converted.
    if (!ExceptionValueIntrinsic) {
      ExceptionValueIntrinsic =
        Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_exception);
      if (!ExceptionValueIntrinsic) return CleanupSelectors();
    }

    for (Value::use_iterator
           I = ExceptionValueIntrinsic->use_begin(),
           E = ExceptionValueIntrinsic->use_end(); I != E; ++I) {
      IntrinsicInst *EHPtr = dyn_cast<IntrinsicInst>(I);
      if (!EHPtr || EHPtr->getParent()->getParent() != F) continue;

      Changed |= PromoteEHPtrStore(EHPtr);

      bool URoRInvoke = false;
      SmallPtrSet<IntrinsicInst*, 8> SelCalls;
      Changed |= FindSelectorAndURoR(EHPtr, URoRInvoke, SelCalls);

      if (URoRInvoke) {
        // This EH pointer is being used by an invoke of the URoR function and
        // by an eh.selector intrinsic call. If the eh.selector is a 'clean-up',
        // we need to convert it to a 'catch-all'.
        for (SmallPtrSet<IntrinsicInst*, 8>::iterator
               SI = SelCalls.begin(), SE = SelCalls.end(); SI != SE; ++SI) {
          IntrinsicInst *II = *SI;
          unsigned NumOps = II->getNumOperands();

          if (NumOps <= 4) {
            bool IsCleanUp = (NumOps == 3);

            if (!IsCleanUp)
              if (ConstantInt *CI = dyn_cast<ConstantInt>(II->getOperand(3)))
                IsCleanUp = (CI->getZExtValue() == 0);

            if (IsCleanUp)
              SelsToConvert.insert(II);
          }
        }
      }
    }
  }

  if (!SelsToConvert.empty()) {
    // Convert all clean-up eh.selectors, which are associated with "invokes" of
    // URoR calls, into catch-all eh.selectors.
    Changed = true;

    for (SmallPtrSet<IntrinsicInst*, 8>::iterator
           SI = SelsToConvert.begin(), SE = SelsToConvert.end();
         SI != SE; ++SI) {
      IntrinsicInst *II = *SI;
      SmallVector<Value*, 8> Args;

      // Use the exception object pointer and the personality function
      // from the original selector.
      Args.push_back(II->getOperand(1)); // Exception object pointer.
      Args.push_back(II->getOperand(2)); // Personality function.
      Args.push_back(EHCatchAllValue->getInitializer()); // Catch-all indicator.

      CallInst *NewSelector =
        CallInst::Create(SelectorIntrinsic, Args.begin(), Args.end(),
                         "eh.sel.catch.all", II);

      NewSelector->setTailCall(II->isTailCall());
      NewSelector->setAttributes(II->getAttributes());
      NewSelector->setCallingConv(II->getCallingConv());

      II->replaceAllUsesWith(NewSelector);
      II->eraseFromParent();
    }
  }

  Changed |= CleanupSelectors();
  return Changed;
}

/// NormalizeLandingPads - Normalize and discover landing pads, noting them
/// in the LandingPads set.  A landing pad is normal if the only CFG edges
/// that end at it are unwind edges from invoke instructions. If we inlined
/// through an invoke we could have a normal branch from the previous
/// unwind block through to the landing pad for the original invoke.
/// Abnormal landing pads are fixed up by redirecting all unwind edges to
/// a new basic block which falls through to the original.
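/// Schematically (block names illustrative; the code appends "_unwind_edge" to
/// the original name): if %lpad is reached both by "invoke ... unwind label
/// %lpad" and by a normal "br label %lpad", the invokes are retargeted to a
/// new block %lpad_unwind_edge that just branches to %lpad, while the normal
/// branch still targets %lpad directly.  The new block is the landing pad
/// recorded in LandingPads.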
bool DwarfEHPrepare::NormalizeLandingPads() {
  bool Changed = false;

  const MCAsmInfo *MAI = TLI->getTargetMachine().getMCAsmInfo();
  bool usingSjLjEH = MAI->getExceptionHandlingType() == ExceptionHandling::SjLj;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (!isa<InvokeInst>(TI))
      continue;
    BasicBlock *LPad = TI->getSuccessor(1);
    // Skip landing pads that have already been normalized.
    if (LandingPads.count(LPad))
      continue;

    // Check that only invoke unwind edges end at the landing pad.
    bool OnlyUnwoundTo = true;
    bool SwitchOK = usingSjLjEH;
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad);
         PI != PE; ++PI) {
      TerminatorInst *PT = (*PI)->getTerminator();
      // The SjLj dispatch block uses a switch instruction. This is effectively
      // an unwind edge, so we can disregard it here. There will only ever
      // be one dispatch, however, so if there are multiple switches, one
      // of them truly is a normal edge, not an unwind edge.
      if (SwitchOK && isa<SwitchInst>(PT)) {
        SwitchOK = false;
        continue;
      }
      if (!isa<InvokeInst>(PT) || LPad == PT->getSuccessor(0)) {
        OnlyUnwoundTo = false;
        break;
      }
    }

    if (OnlyUnwoundTo) {
      // Only unwind edges lead to the landing pad.  Remember the landing pad.
      LandingPads.insert(LPad);
      continue;
    }

    // At least one normal edge ends at the landing pad.  Redirect the unwind
    // edges to a new basic block which falls through into this one.

    // Create the new basic block.
    BasicBlock *NewBB = BasicBlock::Create(F->getContext(),
                                           LPad->getName() + "_unwind_edge");

    // Insert it into the function right before the original landing pad.
    LPad->getParent()->getBasicBlockList().insert(LPad, NewBB);

    // Redirect unwind edges from the original landing pad to NewBB.
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad); PI != PE; ) {
      TerminatorInst *PT = (*PI++)->getTerminator();
      if (isa<InvokeInst>(PT) && PT->getSuccessor(1) == LPad)
        // Unwind to the new block.
        PT->setSuccessor(1, NewBB);
    }

    // If there are any PHI nodes in LPad, we need to update them so that they
    // merge incoming values from NewBB instead.
    for (BasicBlock::iterator II = LPad->begin(); isa<PHINode>(II); ++II) {
      PHINode *PN = cast<PHINode>(II);
      pred_iterator PB = pred_begin(NewBB), PE = pred_end(NewBB);

      // Check to see if all of the values coming in via unwind edges are the
      // same.  If so, we don't need to create a new PHI node.
      Value *InVal = PN->getIncomingValueForBlock(*PB);
      for (pred_iterator PI = PB; PI != PE; ++PI) {
        if (PI != PB && InVal != PN->getIncomingValueForBlock(*PI)) {
          InVal = 0;
          break;
        }
      }

      if (InVal == 0) {
        // Different unwind edges have different values.  Create a new PHI node
        // in NewBB.
        PHINode *NewPN = PHINode::Create(PN->getType(), PN->getName()+".unwind",
                                         NewBB);
        // Add an entry for each unwind edge, using the value from the old PHI.
        for (pred_iterator PI = PB; PI != PE; ++PI)
          NewPN->addIncoming(PN->getIncomingValueForBlock(*PI), *PI);

        // Now use this new PHI as the common incoming value for NewBB in PN.
        InVal = NewPN;
      }

      // Revector exactly one entry in the PHI node to come from NewBB
      // and delete all other entries that come from unwind edges.  If
      // there are both normal and unwind edges from the same predecessor,
      // this leaves an entry for the normal edge.
      for (pred_iterator PI = PB; PI != PE; ++PI)
        PN->removeIncomingValue(*PI);
      PN->addIncoming(InVal, NewBB);
    }

    // Add a fallthrough from NewBB to the original landing pad.
    BranchInst::Create(LPad, NewBB);

    // Now update DominatorTree and DominanceFrontier analysis information.
    if (DT)
      DT->splitBlock(NewBB);
    if (DF)
      DF->splitBlock(NewBB);

    // Remember the newly constructed landing pad.  The original landing pad
    // LPad is no longer a landing pad now that all unwind edges have been
    // revectored to NewBB.
    LandingPads.insert(NewBB);
    ++NumLandingPadsSplit;
    Changed = true;
  }

  return Changed;
}

/// LowerUnwinds - Turn unwind instructions into calls to _Unwind_Resume,
/// rethrowing any previously caught exception.  This will crash horribly
/// at runtime if there is no such exception: using unwind to throw a new
/// exception is currently not supported.
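/// For illustration only, each
///   unwind
/// is rewritten to roughly
///   call void @_Unwind_Resume(i8* %exn)   ; or the target's rewind function
///   unreachable
/// where %exn is the exception value obtained via CreateReadOfExceptionValue.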
bool DwarfEHPrepare::LowerUnwinds() {
  SmallVector<TerminatorInst*, 16> UnwindInsts;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (isa<UnwindInst>(TI))
      UnwindInsts.push_back(TI);
  }

  if (UnwindInsts.empty()) return false;

  // Find the rewind function if we didn't already.
  if (!RewindFunction) {
    LLVMContext &Ctx = UnwindInsts[0]->getContext();
    std::vector<const Type*>
      Params(1, Type::getInt8PtrTy(Ctx));
    FunctionType *FTy = FunctionType::get(Type::getVoidTy(Ctx),
                                          Params, false);
    const char *RewindName = TLI->getLibcallName(RTLIB::UNWIND_RESUME);
    RewindFunction = F->getParent()->getOrInsertFunction(RewindName, FTy);
  }

  bool Changed = false;

  for (SmallVectorImpl<TerminatorInst*>::iterator
         I = UnwindInsts.begin(), E = UnwindInsts.end(); I != E; ++I) {
    TerminatorInst *TI = *I;

    // Replace the unwind instruction with a call to _Unwind_Resume (or the
    // appropriate target equivalent) followed by an UnreachableInst.

    // Create the call...
    CallInst *CI = CallInst::Create(RewindFunction,
                                    CreateReadOfExceptionValue(TI->getParent()),
                                    "", TI);
    CI->setCallingConv(TLI->getLibcallCallingConv(RTLIB::UNWIND_RESUME));
    // ...followed by an UnreachableInst.
    new UnreachableInst(TI->getContext(), TI);

    // Nuke the unwind instruction.
    TI->eraseFromParent();
    ++NumUnwindsLowered;
    Changed = true;
  }

  return Changed;
}

/// MoveExceptionValueCalls - Ensure that eh.exception is only ever called from
/// landing pads by replacing calls outside of landing pads with loads from a
/// stack temporary.  Move eh.exception calls inside landing pads to the start
/// of the landing pad (optional, but may make things simpler for later passes).
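/// For illustration only (value names invented): a call outside a landing pad,
///   %exn = call i8* @llvm.eh.exception()
/// becomes a load of the shared stack temporary created by CreateValueLoad,
///   %exn = load i8** %eh.value
/// while FinishStackTemporaries later stores the real exception value into
/// %eh.value at the start of each landing pad.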
bool DwarfEHPrepare::MoveExceptionValueCalls() {
  // If the eh.exception intrinsic is not declared in the module then there is
  // nothing to do.  Speed up compilation by checking for this common case.
  if (!ExceptionValueIntrinsic &&
      !F->getParent()->getFunction(Intrinsic::getName(Intrinsic::eh_exception)))
    return false;

  bool Changed = false;

  for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;)
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++))
        if (CI->getIntrinsicID() == Intrinsic::eh_exception) {
          if (!CI->use_empty()) {
            Value *ExceptionValue = CreateReadOfExceptionValue(BB);
            if (CI == ExceptionValue) {
              // The call was at the start of a landing pad - leave it alone.
              assert(LandingPads.count(BB) &&
                     "Created eh.exception call outside landing pad!");
              continue;
            }
            CI->replaceAllUsesWith(ExceptionValue);
          }
          CI->eraseFromParent();
          ++NumExceptionValuesMoved;
          Changed = true;
        }
  }

  return Changed;
}

/// FinishStackTemporaries - If we introduced a stack variable to hold the
/// exception value then initialize it in each landing pad.
bool DwarfEHPrepare::FinishStackTemporaries() {
  if (!ExceptionValueVar)
    // Nothing to do.
    return false;

  bool Changed = false;

  // Make sure that there is a store of the exception value at the start of
  // each landing pad.
  for (BBSet::iterator LI = LandingPads.begin(), LE = LandingPads.end();
       LI != LE; ++LI) {
    Instruction *ExceptionValue = CreateReadOfExceptionValue(*LI);
    Instruction *Store = new StoreInst(ExceptionValue, ExceptionValueVar);
    Store->insertAfter(ExceptionValue);
    Changed = true;
  }

  return Changed;
}

/// PromoteStackTemporaries - Turn any stack temporaries we introduced into
/// registers if possible.
bool DwarfEHPrepare::PromoteStackTemporaries() {
  if (ExceptionValueVar && DT && DF && isAllocaPromotable(ExceptionValueVar)) {
    // Turn the exception temporary into registers and phi nodes if possible.
    std::vector<AllocaInst*> Allocas(1, ExceptionValueVar);
    PromoteMemToReg(Allocas, *DT, *DF);
    return true;
  }
  return false;
}

/// CreateExceptionValueCall - Insert a call to the eh.exception intrinsic at
/// the start of the basic block (unless there already is one, in which case
/// the existing call is returned).
Instruction *DwarfEHPrepare::CreateExceptionValueCall(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHIOrDbg();
  // Is this a call to eh.exception?
  if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(Start))
    if (CI->getIntrinsicID() == Intrinsic::eh_exception)
      // Reuse the existing call.
      return Start;

  // Find the eh.exception intrinsic if we didn't already.
  if (!ExceptionValueIntrinsic)
    ExceptionValueIntrinsic = Intrinsic::getDeclaration(F->getParent(),
                                                       Intrinsic::eh_exception);

  // Create the call.
  return CallInst::Create(ExceptionValueIntrinsic, "eh.value.call", Start);
}

/// CreateValueLoad - Insert a load of the exception value stack variable
/// (creating it if necessary) at the start of the basic block (unless
/// there already is a load, in which case the existing load is returned).
Instruction *DwarfEHPrepare::CreateValueLoad(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHIOrDbg();
  // Is this a load of the exception temporary?
  if (ExceptionValueVar)
    if (LoadInst* LI = dyn_cast<LoadInst>(Start))
      if (LI->getPointerOperand() == ExceptionValueVar)
        // Reuse the existing load.
        return Start;

  // Create the temporary if we didn't already.
  if (!ExceptionValueVar) {
    ExceptionValueVar = new AllocaInst(PointerType::getUnqual(
           Type::getInt8Ty(BB->getContext())), "eh.value", F->begin()->begin());
    ++NumStackTempsIntroduced;
  }

  // Load the value.
  return new LoadInst(ExceptionValueVar, "eh.value.load", Start);
}

bool DwarfEHPrepare::runOnFunction(Function &Fn) {
  bool Changed = false;

  // Initialize internal state.
  DT = getAnalysisIfAvailable<DominatorTree>();
  DF = getAnalysisIfAvailable<DominanceFrontier>();
  ExceptionValueVar = 0;
  F = &Fn;

  // Ensure that only unwind edges end at landing pads (a landing pad is a
  // basic block where an invoke unwind edge ends).
  Changed |= NormalizeLandingPads();

  // Turn unwind instructions into libcalls.
  Changed |= LowerUnwinds();

  // TODO: Move eh.selector calls to landing pads and combine them.

  // Move eh.exception calls to landing pads.
  Changed |= MoveExceptionValueCalls();

  // Initialize any stack temporaries we introduced.
  Changed |= FinishStackTemporaries();

  // Turn any stack temporaries into registers if possible.
  if (!CompileFast)
    Changed |= PromoteStackTemporaries();

  Changed |= HandleURoRInvokes();

  LandingPads.clear();

  return Changed;
}