DwarfEHPrepare.cpp revision 619acdc63ab0a47d125dca0591285c8ac4c9ed20
1//===-- DwarfEHPrepare - Prepare exception handling for code generation ---===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This pass mulches exception handling code into a form adapted to code
11// generation. Required if using dwarf exception handling.
12//
13//===----------------------------------------------------------------------===//
14
15#define DEBUG_TYPE "dwarfehprepare"
16#include "llvm/Function.h"
17#include "llvm/Instructions.h"
18#include "llvm/IntrinsicInst.h"
19#include "llvm/Module.h"
20#include "llvm/Pass.h"
21#include "llvm/ADT/Statistic.h"
22#include "llvm/Analysis/Dominators.h"
23#include "llvm/CodeGen/Passes.h"
24#include "llvm/MC/MCAsmInfo.h"
25#include "llvm/Support/CallSite.h"
26#include "llvm/Target/TargetLowering.h"
27#include "llvm/Transforms/Utils/BasicBlockUtils.h"
28#include "llvm/Transforms/Utils/PromoteMemToReg.h"
29using namespace llvm;
30
31STATISTIC(NumLandingPadsSplit,     "Number of landing pads split");
32STATISTIC(NumUnwindsLowered,       "Number of unwind instructions lowered");
33STATISTIC(NumExceptionValuesMoved, "Number of eh.exception calls moved");
34STATISTIC(NumStackTempsIntroduced, "Number of stack temporaries introduced");
35
namespace {
  /// DwarfEHPrepare - This pass rewrites IR-level exception handling
  /// constructs (unwind instructions, landing pads, and the eh.exception /
  /// eh.selector intrinsics) into a form suitable for DWARF-based code
  /// generation.
  class DwarfEHPrepare : public FunctionPass {
    const TargetMachine *TM;
    const TargetLowering *TLI;
    // True when compiling fast (-O0); dominator-based promotion is skipped.
    bool CompileFast;

    // The eh.exception intrinsic.
    Function *ExceptionValueIntrinsic;

    // The eh.selector intrinsic.
    Function *SelectorIntrinsic;

    // _Unwind_Resume_or_Rethrow call.
    Constant *URoR;

    // The EH language-specific catch-all type.
    GlobalVariable *EHCatchAllValue;

    // _Unwind_Resume or the target equivalent.
    Constant *RewindFunction;

    // Dominator info is used when turning stack temporaries into registers.
    // Either may be null when the analyses were not run (e.g. at -O0).
    DominatorTree *DT;
    DominanceFrontier *DF;

    // The function we are running on.
    Function *F;

    // The landing pads for this function.
    typedef SmallPtrSet<BasicBlock*, 8> BBSet;
    BBSet LandingPads;

    // Stack temporary used to hold eh.exception values.
    AllocaInst *ExceptionValueVar;

    bool NormalizeLandingPads();
    bool LowerUnwinds();
    bool MoveExceptionValueCalls();
    bool FinishStackTemporaries();
    bool PromoteStackTemporaries();

    Instruction *CreateExceptionValueCall(BasicBlock *BB);
    Instruction *CreateValueLoad(BasicBlock *BB);

    /// CreateReadOfExceptionValue - Return the result of the eh.exception
    /// intrinsic by calling the intrinsic if in a landing pad, or loading it
    /// from the exception value variable otherwise.
    Instruction *CreateReadOfExceptionValue(BasicBlock *BB) {
      return LandingPads.count(BB) ?
        CreateExceptionValueCall(BB) : CreateValueLoad(BB);
    }

    /// CleanupSelectors - Any remaining eh.selector intrinsic calls which still
    /// use the "llvm.eh.catch.all.value" call need to convert to using its
    /// initializer instead.
    bool CleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels);

    bool HasCatchAllInSelector(IntrinsicInst *);

    /// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
    void FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels,
                                 SmallPtrSet<IntrinsicInst*, 32> &CatchAllSels);

    /// FindAllURoRInvokes - Find all URoR invokes in the function.
    void FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes);

    /// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow"
    /// calls. The "unwind" part of these invokes jump to a landing pad within
    /// the current function. This is a candidate to merge the selector
    /// associated with the URoR invoke with the one from the URoR's landing
    /// pad.
    bool HandleURoRInvokes();

    /// FindSelectorAndURoR - Find the eh.selector call and URoR call associated
    /// with the eh.exception call. This recursively looks past instructions
    /// which don't change the EH pointer value, like casts or PHI nodes.
    bool FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                             SmallPtrSet<IntrinsicInst*, 8> &SelCalls);

    /// DoMem2RegPromotion - Take an alloca call and promote it from memory to a
    /// register.
    bool DoMem2RegPromotion(Value *V) {
      AllocaInst *AI = dyn_cast<AllocaInst>(V);
      // Only promotable allocas can be rewritten into SSA registers.
      if (!AI || !isAllocaPromotable(AI)) return false;

      // Turn the alloca into a register.
      std::vector<AllocaInst*> Allocas(1, AI);
      PromoteMemToReg(Allocas, *DT, *DF);
      return true;
    }

    /// PromoteStoreInst - Perform Mem2Reg on a StoreInst.
    bool PromoteStoreInst(StoreInst *SI) {
      // Promotion requires dominator information.
      if (!SI || !DT || !DF) return false;
      // Operand 1 of a store is the pointer being stored through.
      if (DoMem2RegPromotion(SI->getOperand(1)))
        return true;
      return false;
    }

    /// PromoteEHPtrStore - Promote the storing of an EH pointer into a
    /// register. This should get rid of the store and subsequent loads.
    bool PromoteEHPtrStore(IntrinsicInst *II) {
      if (!DT || !DF) return false;

      bool Changed = false;
      StoreInst *SI;

      // Repeatedly promote the first store found among II's users.  Each
      // successful promotion rewrites II's use list, so rescan from scratch
      // until no store remains (or promotion fails).
      while (1) {
        SI = 0;
        for (Value::use_iterator
               I = II->use_begin(), E = II->use_end(); I != E; ++I) {
          SI = dyn_cast<StoreInst>(*I);
          if (SI) break;
        }

        if (!PromoteStoreInst(SI))
          break;

        Changed = true;
      }

      return Changed;
    }

  public:
    static char ID; // Pass identification, replacement for typeid.
    DwarfEHPrepare(const TargetMachine *tm, bool fast) :
      FunctionPass(&ID), TM(tm), TLI(TM->getTargetLowering()),
      CompileFast(fast),
      ExceptionValueIntrinsic(0), SelectorIntrinsic(0),
      URoR(0), EHCatchAllValue(0), RewindFunction(0) {}

    virtual bool runOnFunction(Function &Fn);

    // getAnalysisUsage - We need dominance frontiers for memory promotion.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      // The analyses are only required when optimizing; at -O0 they are
      // merely preserved if already present.
      if (!CompileFast)
        AU.addRequired<DominatorTree>();
      AU.addPreserved<DominatorTree>();
      if (!CompileFast)
        AU.addRequired<DominanceFrontier>();
      AU.addPreserved<DominanceFrontier>();
    }

    const char *getPassName() const {
      return "Exception handling preparation";
    }

  };
} // end anonymous namespace
186
char DwarfEHPrepare::ID = 0;

/// createDwarfEHPass - Factory used by the target-independent code generator
/// to create this pass; 'fast' selects the -O0 path (no register promotion).
FunctionPass *llvm::createDwarfEHPass(const TargetMachine *tm, bool fast) {
  return new DwarfEHPrepare(tm, fast);
}
192
193/// HasCatchAllInSelector - Return true if the intrinsic instruction has a
194/// catch-all.
195bool DwarfEHPrepare::HasCatchAllInSelector(IntrinsicInst *II) {
196  if (!EHCatchAllValue) return false;
197
198  unsigned ArgIdx = II->getNumArgOperands() - 1;
199  GlobalVariable *GV = dyn_cast<GlobalVariable>(II->getArgOperand(ArgIdx));
200  return GV == EHCatchAllValue;
201}
202
203/// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
204void DwarfEHPrepare::
205FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels,
206                        SmallPtrSet<IntrinsicInst*, 32> &CatchAllSels) {
207  for (Value::use_iterator
208         I = SelectorIntrinsic->use_begin(),
209         E = SelectorIntrinsic->use_end(); I != E; ++I) {
210    IntrinsicInst *II = cast<IntrinsicInst>(*I);
211
212    if (II->getParent()->getParent() != F)
213      continue;
214
215    if (!HasCatchAllInSelector(II))
216      Sels.insert(II);
217    else
218      CatchAllSels.insert(II);
219  }
220}
221
222/// FindAllURoRInvokes - Find all URoR invokes in the function.
223void DwarfEHPrepare::
224FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes) {
225  for (Value::use_iterator
226         I = URoR->use_begin(),
227         E = URoR->use_end(); I != E; ++I) {
228    if (InvokeInst *II = dyn_cast<InvokeInst>(*I))
229      URoRInvokes.insert(II);
230  }
231}
232
233/// CleanupSelectors - Any remaining eh.selector intrinsic calls which still use
234/// the "llvm.eh.catch.all.value" call need to convert to using its
235/// initializer instead.
236bool DwarfEHPrepare::CleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels) {
237  if (!EHCatchAllValue) return false;
238
239  if (!SelectorIntrinsic) {
240    SelectorIntrinsic =
241      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
242    if (!SelectorIntrinsic) return false;
243  }
244
245  bool Changed = false;
246  for (SmallPtrSet<IntrinsicInst*, 32>::iterator
247         I = Sels.begin(), E = Sels.end(); I != E; ++I) {
248    IntrinsicInst *Sel = *I;
249
250    // Index of the "llvm.eh.catch.all.value" variable.
251    unsigned OpIdx = Sel->getNumArgOperands() - 1;
252    GlobalVariable *GV = dyn_cast<GlobalVariable>(Sel->getArgOperand(OpIdx));
253    if (GV != EHCatchAllValue) continue;
254    Sel->setArgOperand(OpIdx, EHCatchAllValue->getInitializer());
255    Changed = true;
256  }
257
258  return Changed;
259}
260
/// FindSelectorAndURoR - Find the eh.selector call associated with the
/// eh.exception call. And indicate if there is a URoR "invoke" associated with
/// the eh.exception call. This recursively looks past instructions which don't
/// change the EH pointer value, like casts or PHI nodes.
bool
DwarfEHPrepare::FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                                    SmallPtrSet<IntrinsicInst*, 8> &SelCalls) {
  // PHI nodes already visited in this walk; guards against cycles.  Note this
  // set is local to each recursive call.
  SmallPtrSet<PHINode*, 32> SeenPHIs;
  bool Changed = false;

 restart:
  for (Value::use_iterator
         I = Inst->use_begin(), E = Inst->use_end(); I != E; ++I) {
    Instruction *II = dyn_cast<Instruction>(*I);
    // Skip non-instruction users and users in other functions.
    if (!II || II->getParent()->getParent() != F) continue;

    if (IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(II)) {
      // Record every eh.selector fed (directly or transitively) by Inst.
      if (Sel->getIntrinsicID() == Intrinsic::eh_selector)
        SelCalls.insert(Sel);
    } else if (InvokeInst *Invoke = dyn_cast<InvokeInst>(II)) {
      // Note whether the EH pointer reaches an invoke of URoR.
      if (Invoke->getCalledFunction() == URoR)
        URoRInvoke = true;
    } else if (CastInst *CI = dyn_cast<CastInst>(II)) {
      // Casts preserve the EH pointer value; look through them.
      Changed |= FindSelectorAndURoR(CI, URoRInvoke, SelCalls);
    } else if (StoreInst *SI = dyn_cast<StoreInst>(II)) {
      // Try to promote the store's alloca; on success the promotion rewrote
      // loads/stores, so Inst's use list is stale and must be rescanned.
      if (!PromoteStoreInst(SI)) continue;
      Changed = true;
      SeenPHIs.clear();
      goto restart;             // Uses may have changed, restart loop.
    } else if (PHINode *PN = dyn_cast<PHINode>(II)) {
      if (SeenPHIs.insert(PN))
        // Don't process a PHI node more than once.
        Changed |= FindSelectorAndURoR(PN, URoRInvoke, SelCalls);
    }
  }

  return Changed;
}
299
300/// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow" calls. The
301/// "unwind" part of these invokes jump to a landing pad within the current
302/// function. This is a candidate to merge the selector associated with the URoR
303/// invoke with the one from the URoR's landing pad.
304bool DwarfEHPrepare::HandleURoRInvokes() {
305  if (!EHCatchAllValue) {
306    EHCatchAllValue =
307      F->getParent()->getNamedGlobal("llvm.eh.catch.all.value");
308    if (!EHCatchAllValue) return false;
309  }
310
311  if (!SelectorIntrinsic) {
312    SelectorIntrinsic =
313      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
314    if (!SelectorIntrinsic) return false;
315  }
316
317  SmallPtrSet<IntrinsicInst*, 32> Sels;
318  SmallPtrSet<IntrinsicInst*, 32> CatchAllSels;
319  FindAllCleanupSelectors(Sels, CatchAllSels);
320
321  if (!DT)
322    // We require DominatorTree information.
323    return CleanupSelectors(CatchAllSels);
324
325  if (!URoR) {
326    URoR = F->getParent()->getFunction("_Unwind_Resume_or_Rethrow");
327    if (!URoR) return CleanupSelectors(CatchAllSels);
328  }
329
330  SmallPtrSet<InvokeInst*, 32> URoRInvokes;
331  FindAllURoRInvokes(URoRInvokes);
332
333  SmallPtrSet<IntrinsicInst*, 32> SelsToConvert;
334
335  for (SmallPtrSet<IntrinsicInst*, 32>::iterator
336         SI = Sels.begin(), SE = Sels.end(); SI != SE; ++SI) {
337    const BasicBlock *SelBB = (*SI)->getParent();
338    for (SmallPtrSet<InvokeInst*, 32>::iterator
339           UI = URoRInvokes.begin(), UE = URoRInvokes.end(); UI != UE; ++UI) {
340      const BasicBlock *URoRBB = (*UI)->getParent();
341      if (DT->dominates(SelBB, URoRBB)) {
342        SelsToConvert.insert(*SI);
343        break;
344      }
345    }
346  }
347
348  bool Changed = false;
349
350  if (Sels.size() != SelsToConvert.size()) {
351    // If we haven't been able to convert all of the clean-up selectors, then
352    // loop through the slow way to see if they still need to be converted.
353    if (!ExceptionValueIntrinsic) {
354      ExceptionValueIntrinsic =
355        Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_exception);
356      if (!ExceptionValueIntrinsic)
357        return CleanupSelectors(CatchAllSels);
358    }
359
360    for (Value::use_iterator
361           I = ExceptionValueIntrinsic->use_begin(),
362           E = ExceptionValueIntrinsic->use_end(); I != E; ++I) {
363      IntrinsicInst *EHPtr = dyn_cast<IntrinsicInst>(*I);
364      if (!EHPtr || EHPtr->getParent()->getParent() != F) continue;
365
366      Changed |= PromoteEHPtrStore(EHPtr);
367
368      bool URoRInvoke = false;
369      SmallPtrSet<IntrinsicInst*, 8> SelCalls;
370      Changed |= FindSelectorAndURoR(EHPtr, URoRInvoke, SelCalls);
371
372      if (URoRInvoke) {
373        // This EH pointer is being used by an invoke of an URoR instruction and
374        // an eh.selector intrinsic call. If the eh.selector is a 'clean-up', we
375        // need to convert it to a 'catch-all'.
376        for (SmallPtrSet<IntrinsicInst*, 8>::iterator
377               SI = SelCalls.begin(), SE = SelCalls.end(); SI != SE; ++SI)
378          if (!HasCatchAllInSelector(*SI))
379              SelsToConvert.insert(*SI);
380      }
381    }
382  }
383
384  if (!SelsToConvert.empty()) {
385    // Convert all clean-up eh.selectors, which are associated with "invokes" of
386    // URoR calls, into catch-all eh.selectors.
387    Changed = true;
388
389    for (SmallPtrSet<IntrinsicInst*, 8>::iterator
390           SI = SelsToConvert.begin(), SE = SelsToConvert.end();
391         SI != SE; ++SI) {
392      IntrinsicInst *II = *SI;
393
394      // Use the exception object pointer and the personality function
395      // from the original selector.
396      CallSite CS(II);
397      IntrinsicInst::op_iterator I = CS.arg_begin();
398      IntrinsicInst::op_iterator E = CS.arg_end();
399      IntrinsicInst::op_iterator B = prior(E);
400
401      // Exclude last argument if it is an integer.
402      if (isa<ConstantInt>(B)) E = B;
403
404      // Add exception object pointer (front).
405      // Add personality function (next).
406      // Add in any filter IDs (rest).
407      SmallVector<Value*, 8> Args(I, E);
408
409      Args.push_back(EHCatchAllValue->getInitializer()); // Catch-all indicator.
410
411      CallInst *NewSelector =
412        CallInst::Create(SelectorIntrinsic, Args.begin(), Args.end(),
413                         "eh.sel.catch.all", II);
414
415      NewSelector->setTailCall(II->isTailCall());
416      NewSelector->setAttributes(II->getAttributes());
417      NewSelector->setCallingConv(II->getCallingConv());
418
419      II->replaceAllUsesWith(NewSelector);
420      II->eraseFromParent();
421    }
422  }
423
424  Changed |= CleanupSelectors(CatchAllSels);
425  return Changed;
426}
427
/// NormalizeLandingPads - Normalize and discover landing pads, noting them
/// in the LandingPads set.  A landing pad is normal if the only CFG edges
/// that end at it are unwind edges from invoke instructions. If we inlined
/// through an invoke we could have a normal branch from the previous
/// unwind block through to the landing pad for the original invoke.
/// Abnormal landing pads are fixed up by redirecting all unwind edges to
/// a new basic block which falls through to the original.
bool DwarfEHPrepare::NormalizeLandingPads() {
  bool Changed = false;

  const MCAsmInfo *MAI = TM->getMCAsmInfo();
  bool usingSjLjEH = MAI->getExceptionHandlingType() == ExceptionHandling::SjLj;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (!isa<InvokeInst>(TI))
      continue;
    // Successor 1 of an invoke is its unwind destination, i.e. a landing pad.
    BasicBlock *LPad = TI->getSuccessor(1);
    // Skip landing pads that have already been normalized.
    if (LandingPads.count(LPad))
      continue;

    // Check that only invoke unwind edges end at the landing pad.
    bool OnlyUnwoundTo = true;
    bool SwitchOK = usingSjLjEH;
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad);
         PI != PE; ++PI) {
      TerminatorInst *PT = (*PI)->getTerminator();
      // The SjLj dispatch block uses a switch instruction. This is effectively
      // an unwind edge, so we can disregard it here. There will only ever
      // be one dispatch, however, so if there are multiple switches, one
      // of them truly is a normal edge, not an unwind edge.
      if (SwitchOK && isa<SwitchInst>(PT)) {
        SwitchOK = false;
        continue;
      }
      // An edge is "normal" if the predecessor isn't an invoke, or if it is
      // an invoke whose *normal* destination (successor 0) is the pad.
      if (!isa<InvokeInst>(PT) || LPad == PT->getSuccessor(0)) {
        OnlyUnwoundTo = false;
        break;
      }
    }

    if (OnlyUnwoundTo) {
      // Only unwind edges lead to the landing pad.  Remember the landing pad.
      LandingPads.insert(LPad);
      continue;
    }

    // At least one normal edge ends at the landing pad.  Redirect the unwind
    // edges to a new basic block which falls through into this one.

    // Create the new basic block.
    BasicBlock *NewBB = BasicBlock::Create(F->getContext(),
                                           LPad->getName() + "_unwind_edge");

    // Insert it into the function right before the original landing pad.
    LPad->getParent()->getBasicBlockList().insert(LPad, NewBB);

    // Redirect unwind edges from the original landing pad to NewBB.
    // (PI is advanced before setSuccessor since retargeting mutates the
    // predecessor list being walked.)
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad); PI != PE; ) {
      TerminatorInst *PT = (*PI++)->getTerminator();
      if (isa<InvokeInst>(PT) && PT->getSuccessor(1) == LPad)
        // Unwind to the new block.
        PT->setSuccessor(1, NewBB);
    }

    // If there are any PHI nodes in LPad, we need to update them so that they
    // merge incoming values from NewBB instead.
    for (BasicBlock::iterator II = LPad->begin(); isa<PHINode>(II); ++II) {
      PHINode *PN = cast<PHINode>(II);
      pred_iterator PB = pred_begin(NewBB), PE = pred_end(NewBB);

      // Check to see if all of the values coming in via unwind edges are the
      // same.  If so, we don't need to create a new PHI node.
      Value *InVal = PN->getIncomingValueForBlock(*PB);
      for (pred_iterator PI = PB; PI != PE; ++PI) {
        if (PI != PB && InVal != PN->getIncomingValueForBlock(*PI)) {
          InVal = 0;
          break;
        }
      }

      if (InVal == 0) {
        // Different unwind edges have different values.  Create a new PHI node
        // in NewBB.
        PHINode *NewPN = PHINode::Create(PN->getType(), PN->getName()+".unwind",
                                         NewBB);
        // Add an entry for each unwind edge, using the value from the old PHI.
        for (pred_iterator PI = PB; PI != PE; ++PI)
          NewPN->addIncoming(PN->getIncomingValueForBlock(*PI), *PI);

        // Now use this new PHI as the common incoming value for NewBB in PN.
        InVal = NewPN;
      }

      // Revector exactly one entry in the PHI node to come from NewBB
      // and delete all other entries that come from unwind edges.  If
      // there are both normal and unwind edges from the same predecessor,
      // this leaves an entry for the normal edge.
      for (pred_iterator PI = PB; PI != PE; ++PI)
        PN->removeIncomingValue(*PI);
      PN->addIncoming(InVal, NewBB);
    }

    // Add a fallthrough from NewBB to the original landing pad.
    BranchInst::Create(LPad, NewBB);

    // Now update DominatorTree and DominanceFrontier analysis information.
    if (DT)
      DT->splitBlock(NewBB);
    if (DF)
      DF->splitBlock(NewBB);

    // Remember the newly constructed landing pad.  The original landing pad
    // LPad is no longer a landing pad now that all unwind edges have been
    // revectored to NewBB.
    LandingPads.insert(NewBB);
    ++NumLandingPadsSplit;
    Changed = true;
  }

  return Changed;
}
551
552/// LowerUnwinds - Turn unwind instructions into calls to _Unwind_Resume,
553/// rethrowing any previously caught exception.  This will crash horribly
554/// at runtime if there is no such exception: using unwind to throw a new
555/// exception is currently not supported.
556bool DwarfEHPrepare::LowerUnwinds() {
557  SmallVector<TerminatorInst*, 16> UnwindInsts;
558
559  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
560    TerminatorInst *TI = I->getTerminator();
561    if (isa<UnwindInst>(TI))
562      UnwindInsts.push_back(TI);
563  }
564
565  if (UnwindInsts.empty()) return false;
566
567  // Find the rewind function if we didn't already.
568  if (!RewindFunction) {
569    LLVMContext &Ctx = UnwindInsts[0]->getContext();
570    std::vector<const Type*>
571      Params(1, Type::getInt8PtrTy(Ctx));
572    FunctionType *FTy = FunctionType::get(Type::getVoidTy(Ctx),
573                                          Params, false);
574    const char *RewindName = TLI->getLibcallName(RTLIB::UNWIND_RESUME);
575    RewindFunction = F->getParent()->getOrInsertFunction(RewindName, FTy);
576  }
577
578  bool Changed = false;
579
580  for (SmallVectorImpl<TerminatorInst*>::iterator
581         I = UnwindInsts.begin(), E = UnwindInsts.end(); I != E; ++I) {
582    TerminatorInst *TI = *I;
583
584    // Replace the unwind instruction with a call to _Unwind_Resume (or the
585    // appropriate target equivalent) followed by an UnreachableInst.
586
587    // Create the call...
588    CallInst *CI = CallInst::Create(RewindFunction,
589                                    CreateReadOfExceptionValue(TI->getParent()),
590                                    "", TI);
591    CI->setCallingConv(TLI->getLibcallCallingConv(RTLIB::UNWIND_RESUME));
592    // ...followed by an UnreachableInst.
593    new UnreachableInst(TI->getContext(), TI);
594
595    // Nuke the unwind instruction.
596    TI->eraseFromParent();
597    ++NumUnwindsLowered;
598    Changed = true;
599  }
600
601  return Changed;
602}
603
/// MoveExceptionValueCalls - Ensure that eh.exception is only ever called from
/// landing pads by replacing calls outside of landing pads with loads from a
/// stack temporary.  Move eh.exception calls inside landing pads to the start
/// of the landing pad (optional, but may make things simpler for later passes).
bool DwarfEHPrepare::MoveExceptionValueCalls() {
  // If the eh.exception intrinsic is not declared in the module then there is
  // nothing to do.  Speed up compilation by checking for this common case.
  if (!ExceptionValueIntrinsic &&
      !F->getParent()->getFunction(Intrinsic::getName(Intrinsic::eh_exception)))
    return false;

  bool Changed = false;

  for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB) {
    // II is advanced before the body runs, so erasing CI below is safe.
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;)
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++))
        if (CI->getIntrinsicID() == Intrinsic::eh_exception) {
          if (!CI->use_empty()) {
            // The canonical value at the start of this block: an intrinsic
            // call if BB is a landing pad, a load of the temporary otherwise.
            Value *ExceptionValue = CreateReadOfExceptionValue(BB);
            if (CI == ExceptionValue) {
              // The call was at the start of a landing pad - leave it alone.
              assert(LandingPads.count(BB) &&
                     "Created eh.exception call outside landing pad!");
              continue;
            }
            CI->replaceAllUsesWith(ExceptionValue);
          }
          CI->eraseFromParent();
          ++NumExceptionValuesMoved;
          Changed = true;
        }
  }

  return Changed;
}
639
640/// FinishStackTemporaries - If we introduced a stack variable to hold the
641/// exception value then initialize it in each landing pad.
642bool DwarfEHPrepare::FinishStackTemporaries() {
643  if (!ExceptionValueVar)
644    // Nothing to do.
645    return false;
646
647  bool Changed = false;
648
649  // Make sure that there is a store of the exception value at the start of
650  // each landing pad.
651  for (BBSet::iterator LI = LandingPads.begin(), LE = LandingPads.end();
652       LI != LE; ++LI) {
653    Instruction *ExceptionValue = CreateReadOfExceptionValue(*LI);
654    Instruction *Store = new StoreInst(ExceptionValue, ExceptionValueVar);
655    Store->insertAfter(ExceptionValue);
656    Changed = true;
657  }
658
659  return Changed;
660}
661
662/// PromoteStackTemporaries - Turn any stack temporaries we introduced into
663/// registers if possible.
664bool DwarfEHPrepare::PromoteStackTemporaries() {
665  if (ExceptionValueVar && DT && DF && isAllocaPromotable(ExceptionValueVar)) {
666    // Turn the exception temporary into registers and phi nodes if possible.
667    std::vector<AllocaInst*> Allocas(1, ExceptionValueVar);
668    PromoteMemToReg(Allocas, *DT, *DF);
669    return true;
670  }
671  return false;
672}
673
674/// CreateExceptionValueCall - Insert a call to the eh.exception intrinsic at
675/// the start of the basic block (unless there already is one, in which case
676/// the existing call is returned).
677Instruction *DwarfEHPrepare::CreateExceptionValueCall(BasicBlock *BB) {
678  Instruction *Start = BB->getFirstNonPHIOrDbg();
679  // Is this a call to eh.exception?
680  if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(Start))
681    if (CI->getIntrinsicID() == Intrinsic::eh_exception)
682      // Reuse the existing call.
683      return Start;
684
685  // Find the eh.exception intrinsic if we didn't already.
686  if (!ExceptionValueIntrinsic)
687    ExceptionValueIntrinsic = Intrinsic::getDeclaration(F->getParent(),
688                                                       Intrinsic::eh_exception);
689
690  // Create the call.
691  return CallInst::Create(ExceptionValueIntrinsic, "eh.value.call", Start);
692}
693
694/// CreateValueLoad - Insert a load of the exception value stack variable
695/// (creating it if necessary) at the start of the basic block (unless
696/// there already is a load, in which case the existing load is returned).
697Instruction *DwarfEHPrepare::CreateValueLoad(BasicBlock *BB) {
698  Instruction *Start = BB->getFirstNonPHIOrDbg();
699  // Is this a load of the exception temporary?
700  if (ExceptionValueVar)
701    if (LoadInst* LI = dyn_cast<LoadInst>(Start))
702      if (LI->getPointerOperand() == ExceptionValueVar)
703        // Reuse the existing load.
704        return Start;
705
706  // Create the temporary if we didn't already.
707  if (!ExceptionValueVar) {
708    ExceptionValueVar = new AllocaInst(PointerType::getUnqual(
709           Type::getInt8Ty(BB->getContext())), "eh.value", F->begin()->begin());
710    ++NumStackTempsIntroduced;
711  }
712
713  // Load the value.
714  return new LoadInst(ExceptionValueVar, "eh.value.load", Start);
715}
716
/// runOnFunction - Run all of the EH preparation transformations on Fn in a
/// fixed order.  Returns true if the function was modified.
bool DwarfEHPrepare::runOnFunction(Function &Fn) {
  bool Changed = false;

  // Initialize internal state.  The analyses may be absent (e.g. at -O0),
  // in which case promotion steps are skipped.
  DT = getAnalysisIfAvailable<DominatorTree>();
  DF = getAnalysisIfAvailable<DominanceFrontier>();
  ExceptionValueVar = 0;
  F = &Fn;

  // Ensure that only unwind edges end at landing pads (a landing pad is a
  // basic block where an invoke unwind edge ends).
  Changed |= NormalizeLandingPads();

  // Turn unwind instructions into libcalls.
  Changed |= LowerUnwinds();

  // TODO: Move eh.selector calls to landing pads and combine them.

  // Move eh.exception calls to landing pads.
  Changed |= MoveExceptionValueCalls();

  // Initialize any stack temporaries we introduced.
  Changed |= FinishStackTemporaries();

  // Turn any stack temporaries into registers if possible.
  if (!CompileFast)
    Changed |= PromoteStackTemporaries();

  Changed |= HandleURoRInvokes();

  // LandingPads is per-function state; clear it for the next run.
  LandingPads.clear();

  return Changed;
}
751