DwarfEHPrepare.cpp revision c34c2200a8077f98cf5cac9fe6f8a6d69b89b54c
//===-- DwarfEHPrepare - Prepare exception handling for code generation ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass mulches exception handling code into a form adapted to code
// generation. Required if using dwarf exception handling.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "dwarfehprepare"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Module.h"
#include "llvm/Pass.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
using namespace llvm;

STATISTIC(NumLandingPadsSplit,     "Number of landing pads split");
STATISTIC(NumUnwindsLowered,       "Number of unwind instructions lowered");
STATISTIC(NumExceptionValuesMoved, "Number of eh.exception calls moved");
STATISTIC(NumStackTempsIntroduced, "Number of stack temporaries introduced");

namespace {
  class DwarfEHPrepare : public FunctionPass {
    const TargetMachine *TM;
    const TargetLowering *TLI;
    bool CompileFast;

    // The eh.exception intrinsic.
    Function *ExceptionValueIntrinsic;

    // The eh.selector intrinsic.
    Function *SelectorIntrinsic;

    // _Unwind_Resume_or_Rethrow call.
    Constant *URoR;

    // The EH language-specific catch-all type.
    GlobalVariable *EHCatchAllValue;

    // _Unwind_Resume or the target equivalent.
    Constant *RewindFunction;

    // Dominator info is used when turning stack temporaries into registers.
    DominatorTree *DT;
    DominanceFrontier *DF;

    // The function we are running on.
    Function *F;

    // The landing pads for this function.
    typedef SmallPtrSet<BasicBlock*, 8> BBSet;
    BBSet LandingPads;

    // Stack temporary used to hold eh.exception values.
    AllocaInst *ExceptionValueVar;

    bool NormalizeLandingPads();
    bool LowerUnwinds();
    bool MoveExceptionValueCalls();
    bool FinishStackTemporaries();
    bool PromoteStackTemporaries();

    Instruction *CreateExceptionValueCall(BasicBlock *BB);
    Instruction *CreateValueLoad(BasicBlock *BB);

    /// CreateReadOfExceptionValue - Return the result of the eh.exception
    /// intrinsic by calling the intrinsic if in a landing pad, or loading it
    /// from the exception value variable otherwise.
    Instruction *CreateReadOfExceptionValue(BasicBlock *BB) {
      return LandingPads.count(BB) ?
        CreateExceptionValueCall(BB) : CreateValueLoad(BB);
    }

    /// CleanupSelectors - Any remaining eh.selector intrinsic calls which still
    /// use the "llvm.eh.catch.all.value" call need to be converted to use its
    /// initializer instead.
    bool CleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels);

    bool HasCatchAllInSelector(IntrinsicInst *);

    /// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
    void FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels,
                                 SmallPtrSet<IntrinsicInst*, 32> &CatchAllSels);

    /// FindAllURoRInvokes - Find all URoR invokes in the function.
    void FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes);

    /// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow"
    /// calls. The "unwind" part of these invokes jumps to a landing pad within
    /// the current function, making each one a candidate for merging the
    /// selector associated with the URoR invoke with the one from the URoR's
    /// landing pad.
    bool HandleURoRInvokes();

    /// FindSelectorAndURoR - Find the eh.selector call and URoR call associated
    /// with the eh.exception call. This recursively looks past instructions
    /// which don't change the EH pointer value, like casts or PHI nodes.
    bool FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                             SmallPtrSet<IntrinsicInst*, 8> &SelCalls);

    /// PromoteStoreInst - Perform Mem2Reg on a StoreInst.
    bool PromoteStoreInst(StoreInst *SI) {
      if (!SI || !DT || !DF) return false;

      AllocaInst *AI = dyn_cast<AllocaInst>(SI->getOperand(1));
      if (!AI || !isAllocaPromotable(AI)) return false;

      // Turn the alloca into a register.
      std::vector<AllocaInst*> Allocas(1, AI);
      PromoteMemToReg(Allocas, *DT, *DF);
      return true;
    }

    /// PromoteEHPtrStore - Promote the storing of an EH pointer into a
    /// register. This should get rid of the store and subsequent loads.
    bool PromoteEHPtrStore(IntrinsicInst *II) {
      if (!DT || !DF) return false;

      bool Changed = false;
      StoreInst *SI;

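      // Repeatedly pick a store whose stored value is the EH pointer and
      // promote the alloca it stores into; each promotion rewrites uses, so
      // loop until no promotable store of the pointer remains.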
      while (1) {
        SI = 0;
        for (Value::use_iterator
               I = II->use_begin(), E = II->use_end(); I != E; ++I) {
          SI = dyn_cast<StoreInst>(*I);
          if (SI) break;
        }

        if (!PromoteStoreInst(SI))
          break;

        Changed = true;
      }

      return Changed;
    }

  public:
    static char ID; // Pass identification, replacement for typeid.
    DwarfEHPrepare(const TargetMachine *tm, bool fast) :
      FunctionPass(ID), TM(tm), TLI(TM->getTargetLowering()),
      CompileFast(fast),
      ExceptionValueIntrinsic(0), SelectorIntrinsic(0),
      URoR(0), EHCatchAllValue(0), RewindFunction(0) {}

    virtual bool runOnFunction(Function &Fn);

    // getAnalysisUsage - We need dominance frontiers for memory promotion.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      if (!CompileFast)
        AU.addRequired<DominatorTree>();
      AU.addPreserved<DominatorTree>();
      if (!CompileFast)
        AU.addRequired<DominanceFrontier>();
      AU.addPreserved<DominanceFrontier>();
    }

    const char *getPassName() const {
      return "Exception handling preparation";
    }

  };
} // end anonymous namespace

char DwarfEHPrepare::ID = 0;

FunctionPass *llvm::createDwarfEHPass(const TargetMachine *tm, bool fast) {
  return new DwarfEHPrepare(tm, fast);
}

/// HasCatchAllInSelector - Return true if the intrinsic instruction has a
/// catch-all.
bool DwarfEHPrepare::HasCatchAllInSelector(IntrinsicInst *II) {
  if (!EHCatchAllValue) return false;

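  // A catch-all value, if present, is always the last operand of the
  // eh.selector call.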
  unsigned ArgIdx = II->getNumArgOperands() - 1;
  GlobalVariable *GV = dyn_cast<GlobalVariable>(II->getArgOperand(ArgIdx));
  return GV == EHCatchAllValue;
}

/// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
void DwarfEHPrepare::
FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels,
                        SmallPtrSet<IntrinsicInst*, 32> &CatchAllSels) {
  for (Value::use_iterator
         I = SelectorIntrinsic->use_begin(),
         E = SelectorIntrinsic->use_end(); I != E; ++I) {
    IntrinsicInst *II = cast<IntrinsicInst>(*I);

    if (II->getParent()->getParent() != F)
      continue;

    if (!HasCatchAllInSelector(II))
      Sels.insert(II);
    else
      CatchAllSels.insert(II);
  }
}

/// FindAllURoRInvokes - Find all URoR invokes in the function.
void DwarfEHPrepare::
FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes) {
  for (Value::use_iterator
         I = URoR->use_begin(),
         E = URoR->use_end(); I != E; ++I) {
    if (InvokeInst *II = dyn_cast<InvokeInst>(*I))
      URoRInvokes.insert(II);
  }
}

/// CleanupSelectors - Any remaining eh.selector intrinsic calls which still
/// use the "llvm.eh.catch.all.value" call need to be converted to use its
/// initializer instead.
bool DwarfEHPrepare::CleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels) {
  if (!EHCatchAllValue) return false;

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  bool Changed = false;
  for (SmallPtrSet<IntrinsicInst*, 32>::iterator
         I = Sels.begin(), E = Sels.end(); I != E; ++I) {
    IntrinsicInst *Sel = *I;

    // Index of the "llvm.eh.catch.all.value" variable.
    unsigned OpIdx = Sel->getNumArgOperands() - 1;
    GlobalVariable *GV = dyn_cast<GlobalVariable>(Sel->getArgOperand(OpIdx));
    if (GV != EHCatchAllValue) continue;
    Sel->setArgOperand(OpIdx, EHCatchAllValue->getInitializer());
    Changed = true;
  }

  return Changed;
}

/// FindSelectorAndURoR - Find the eh.selector call associated with the
/// eh.exception call, and indicate whether there is a URoR "invoke" associated
/// with that eh.exception call. This recursively looks past instructions which
/// don't change the EH pointer value, like casts or PHI nodes.
bool
DwarfEHPrepare::FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                                    SmallPtrSet<IntrinsicInst*, 8> &SelCalls) {
  SmallPtrSet<PHINode*, 32> SeenPHIs;
  bool Changed = false;

 restart:
  for (Value::use_iterator
         I = Inst->use_begin(), E = Inst->use_end(); I != E; ++I) {
    Instruction *II = dyn_cast<Instruction>(*I);
    if (!II || II->getParent()->getParent() != F) continue;

    if (IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(II)) {
      if (Sel->getIntrinsicID() == Intrinsic::eh_selector)
        SelCalls.insert(Sel);
    } else if (InvokeInst *Invoke = dyn_cast<InvokeInst>(II)) {
      if (Invoke->getCalledFunction() == URoR)
        URoRInvoke = true;
    } else if (CastInst *CI = dyn_cast<CastInst>(II)) {
      Changed |= FindSelectorAndURoR(CI, URoRInvoke, SelCalls);
    } else if (StoreInst *SI = dyn_cast<StoreInst>(II)) {
      if (!PromoteStoreInst(SI)) continue;
      Changed = true;
      SeenPHIs.clear();
      goto restart;             // Uses may have changed, restart loop.
    } else if (PHINode *PN = dyn_cast<PHINode>(II)) {
      if (SeenPHIs.insert(PN))
        // Don't process a PHI node more than once.
        Changed |= FindSelectorAndURoR(PN, URoRInvoke, SelCalls);
    }
  }

  return Changed;
}

/// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow" calls.
/// The "unwind" part of these invokes jumps to a landing pad within the
/// current function, making each one a candidate for merging the selector
/// associated with the URoR invoke with the one from the URoR's landing pad.
bool DwarfEHPrepare::HandleURoRInvokes() {
  if (!EHCatchAllValue) {
    EHCatchAllValue =
      F->getParent()->getNamedGlobal("llvm.eh.catch.all.value");
    if (!EHCatchAllValue) return false;
  }

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  SmallPtrSet<IntrinsicInst*, 32> Sels;
  SmallPtrSet<IntrinsicInst*, 32> CatchAllSels;
  FindAllCleanupSelectors(Sels, CatchAllSels);

  if (!DT)
    // We require DominatorTree information.
    return CleanupSelectors(CatchAllSels);

  if (!URoR) {
    URoR = F->getParent()->getFunction("_Unwind_Resume_or_Rethrow");
    if (!URoR) return CleanupSelectors(CatchAllSels);
  }

  SmallPtrSet<InvokeInst*, 32> URoRInvokes;
  FindAllURoRInvokes(URoRInvokes);

  SmallPtrSet<IntrinsicInst*, 32> SelsToConvert;

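  // Collect the clean-up selectors whose blocks dominate the block of a URoR
  // invoke; these are rewritten as catch-alls below.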
  for (SmallPtrSet<IntrinsicInst*, 32>::iterator
         SI = Sels.begin(), SE = Sels.end(); SI != SE; ++SI) {
    const BasicBlock *SelBB = (*SI)->getParent();
    for (SmallPtrSet<InvokeInst*, 32>::iterator
           UI = URoRInvokes.begin(), UE = URoRInvokes.end(); UI != UE; ++UI) {
      const BasicBlock *URoRBB = (*UI)->getParent();
      if (DT->dominates(SelBB, URoRBB)) {
        SelsToConvert.insert(*SI);
        break;
      }
    }
  }

  bool Changed = false;

  if (Sels.size() != SelsToConvert.size()) {
    // If we haven't been able to convert all of the clean-up selectors, then
    // take the slow path to see whether they still need to be converted.
    if (!ExceptionValueIntrinsic) {
      ExceptionValueIntrinsic =
        Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_exception);
      if (!ExceptionValueIntrinsic)
        return CleanupSelectors(CatchAllSels);
    }

    for (Value::use_iterator
           I = ExceptionValueIntrinsic->use_begin(),
           E = ExceptionValueIntrinsic->use_end(); I != E; ++I) {
      IntrinsicInst *EHPtr = dyn_cast<IntrinsicInst>(*I);
      if (!EHPtr || EHPtr->getParent()->getParent() != F) continue;

      Changed |= PromoteEHPtrStore(EHPtr);

      bool URoRInvoke = false;
      SmallPtrSet<IntrinsicInst*, 8> SelCalls;
      Changed |= FindSelectorAndURoR(EHPtr, URoRInvoke, SelCalls);

      if (URoRInvoke) {
        // This EH pointer is being used by an invoke of URoR and by an
        // eh.selector intrinsic call. If the eh.selector is a 'clean-up', we
        // need to convert it to a 'catch-all'.
        for (SmallPtrSet<IntrinsicInst*, 8>::iterator
               SI = SelCalls.begin(), SE = SelCalls.end(); SI != SE; ++SI)
          if (!HasCatchAllInSelector(*SI))
            SelsToConvert.insert(*SI);
      }
    }
  }

  if (!SelsToConvert.empty()) {
    // Convert all clean-up eh.selectors, which are associated with "invokes" of
    // URoR calls, into catch-all eh.selectors.
    Changed = true;

    for (SmallPtrSet<IntrinsicInst*, 8>::iterator
           SI = SelsToConvert.begin(), SE = SelsToConvert.end();
         SI != SE; ++SI) {
      IntrinsicInst *II = *SI;

      // Use the exception object pointer and the personality function
      // from the original selector.
      CallSite CS(II);
      IntrinsicInst::op_iterator I = CS.arg_begin();
      IntrinsicInst::op_iterator E = CS.arg_end();
      IntrinsicInst::op_iterator B = prior(E);

      // Exclude last argument if it is an integer.
      if (isa<ConstantInt>(B)) E = B;

      // Add exception object pointer (front).
      // Add personality function (next).
      // Add in any filter IDs (rest).
      SmallVector<Value*, 8> Args(I, E);

      Args.push_back(EHCatchAllValue->getInitializer()); // Catch-all indicator.

      CallInst *NewSelector =
        CallInst::Create(SelectorIntrinsic, Args.begin(), Args.end(),
                         "eh.sel.catch.all", II);

      NewSelector->setTailCall(II->isTailCall());
      NewSelector->setAttributes(II->getAttributes());
      NewSelector->setCallingConv(II->getCallingConv());

      II->replaceAllUsesWith(NewSelector);
      II->eraseFromParent();
    }
  }

  Changed |= CleanupSelectors(CatchAllSels);
  return Changed;
}

/// NormalizeLandingPads - Normalize and discover landing pads, noting them
/// in the LandingPads set.  A landing pad is normal if the only CFG edges
/// that end at it are unwind edges from invoke instructions. If we inlined
/// through an invoke we could have a normal branch from the previous
/// unwind block through to the landing pad for the original invoke.
/// Abnormal landing pads are fixed up by redirecting all unwind edges to
/// a new basic block which falls through to the original.
bool DwarfEHPrepare::NormalizeLandingPads() {
  bool Changed = false;

  const MCAsmInfo *MAI = TM->getMCAsmInfo();
  bool usingSjLjEH = MAI->getExceptionHandlingType() == ExceptionHandling::SjLj;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (!isa<InvokeInst>(TI))
      continue;
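    // The second successor of an invoke is its unwind destination, i.e. the
    // landing pad.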
    BasicBlock *LPad = TI->getSuccessor(1);
    // Skip landing pads that have already been normalized.
    if (LandingPads.count(LPad))
      continue;

    // Check that only invoke unwind edges end at the landing pad.
    bool OnlyUnwoundTo = true;
    bool SwitchOK = usingSjLjEH;
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad);
         PI != PE; ++PI) {
      TerminatorInst *PT = (*PI)->getTerminator();
      // The SjLj dispatch block uses a switch instruction. This is effectively
      // an unwind edge, so we can disregard it here. There will only ever
      // be one dispatch, however, so if there are multiple switches, one
      // of them truly is a normal edge, not an unwind edge.
      if (SwitchOK && isa<SwitchInst>(PT)) {
        SwitchOK = false;
        continue;
      }
      if (!isa<InvokeInst>(PT) || LPad == PT->getSuccessor(0)) {
        OnlyUnwoundTo = false;
        break;
      }
    }

    if (OnlyUnwoundTo) {
      // Only unwind edges lead to the landing pad.  Remember the landing pad.
      LandingPads.insert(LPad);
      continue;
    }

    // At least one normal edge ends at the landing pad.  Redirect the unwind
    // edges to a new basic block which falls through into this one.

    // Create the new basic block.
    BasicBlock *NewBB = BasicBlock::Create(F->getContext(),
                                           LPad->getName() + "_unwind_edge");

    // Insert it into the function right before the original landing pad.
    LPad->getParent()->getBasicBlockList().insert(LPad, NewBB);

    // Redirect unwind edges from the original landing pad to NewBB.
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad); PI != PE; ) {
      TerminatorInst *PT = (*PI++)->getTerminator();
      if (isa<InvokeInst>(PT) && PT->getSuccessor(1) == LPad)
        // Unwind to the new block.
        PT->setSuccessor(1, NewBB);
    }

    // If there are any PHI nodes in LPad, we need to update them so that they
    // merge incoming values from NewBB instead.
    for (BasicBlock::iterator II = LPad->begin(); isa<PHINode>(II); ++II) {
      PHINode *PN = cast<PHINode>(II);
      pred_iterator PB = pred_begin(NewBB), PE = pred_end(NewBB);

      // Check to see if all of the values coming in via unwind edges are the
      // same.  If so, we don't need to create a new PHI node.
      Value *InVal = PN->getIncomingValueForBlock(*PB);
      for (pred_iterator PI = PB; PI != PE; ++PI) {
        if (PI != PB && InVal != PN->getIncomingValueForBlock(*PI)) {
          InVal = 0;
          break;
        }
      }

      if (InVal == 0) {
        // Different unwind edges have different values.  Create a new PHI node
        // in NewBB.
        PHINode *NewPN = PHINode::Create(PN->getType(), PN->getName()+".unwind",
                                         NewBB);
        // Add an entry for each unwind edge, using the value from the old PHI.
        for (pred_iterator PI = PB; PI != PE; ++PI)
          NewPN->addIncoming(PN->getIncomingValueForBlock(*PI), *PI);

        // Now use this new PHI as the common incoming value for NewBB in PN.
        InVal = NewPN;
      }

      // Revector exactly one entry in the PHI node to come from NewBB
      // and delete all other entries that come from unwind edges.  If
      // there are both normal and unwind edges from the same predecessor,
      // this leaves an entry for the normal edge.
      for (pred_iterator PI = PB; PI != PE; ++PI)
        PN->removeIncomingValue(*PI);
      PN->addIncoming(InVal, NewBB);
    }

    // Add a fallthrough from NewBB to the original landing pad.
    BranchInst::Create(LPad, NewBB);

    // Now update DominatorTree and DominanceFrontier analysis information.
    if (DT)
      DT->splitBlock(NewBB);
    if (DF)
      DF->splitBlock(NewBB);

    // Remember the newly constructed landing pad.  The original landing pad
    // LPad is no longer a landing pad now that all unwind edges have been
    // revectored to NewBB.
    LandingPads.insert(NewBB);
    ++NumLandingPadsSplit;
    Changed = true;
  }

  return Changed;
}

/// LowerUnwinds - Turn unwind instructions into calls to _Unwind_Resume,
/// rethrowing any previously caught exception.  This will crash horribly
/// at runtime if there is no such exception: using unwind to throw a new
/// exception is currently not supported.
bool DwarfEHPrepare::LowerUnwinds() {
  SmallVector<TerminatorInst*, 16> UnwindInsts;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (isa<UnwindInst>(TI))
      UnwindInsts.push_back(TI);
  }

  if (UnwindInsts.empty()) return false;

  // Find the rewind function if we didn't already.
  if (!RewindFunction) {
    LLVMContext &Ctx = UnwindInsts[0]->getContext();
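    // The rewind function takes a single i8* argument (the exception object)
    // and does not return normally.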
    std::vector<const Type*>
      Params(1, Type::getInt8PtrTy(Ctx));
    FunctionType *FTy = FunctionType::get(Type::getVoidTy(Ctx),
                                          Params, false);
    const char *RewindName = TLI->getLibcallName(RTLIB::UNWIND_RESUME);
    RewindFunction = F->getParent()->getOrInsertFunction(RewindName, FTy);
  }

  bool Changed = false;

  for (SmallVectorImpl<TerminatorInst*>::iterator
         I = UnwindInsts.begin(), E = UnwindInsts.end(); I != E; ++I) {
    TerminatorInst *TI = *I;

    // Replace the unwind instruction with a call to _Unwind_Resume (or the
    // appropriate target equivalent) followed by an UnreachableInst.

    // Create the call...
    CallInst *CI = CallInst::Create(RewindFunction,
                                    CreateReadOfExceptionValue(TI->getParent()),
                                    "", TI);
    CI->setCallingConv(TLI->getLibcallCallingConv(RTLIB::UNWIND_RESUME));
    // ...followed by an UnreachableInst.
    new UnreachableInst(TI->getContext(), TI);

    // Nuke the unwind instruction.
    TI->eraseFromParent();
    ++NumUnwindsLowered;
    Changed = true;
  }

  return Changed;
}

/// MoveExceptionValueCalls - Ensure that eh.exception is only ever called from
/// landing pads by replacing calls outside of landing pads with loads from a
/// stack temporary.  Move eh.exception calls inside landing pads to the start
/// of the landing pad (optional, but may make things simpler for later passes).
bool DwarfEHPrepare::MoveExceptionValueCalls() {
  // If the eh.exception intrinsic is not declared in the module then there is
  // nothing to do.  Speed up compilation by checking for this common case.
  if (!ExceptionValueIntrinsic &&
      !F->getParent()->getFunction(Intrinsic::getName(Intrinsic::eh_exception)))
    return false;

  bool Changed = false;

  for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;)
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++))
        if (CI->getIntrinsicID() == Intrinsic::eh_exception) {
          if (!CI->use_empty()) {
            Value *ExceptionValue = CreateReadOfExceptionValue(BB);
            if (CI == ExceptionValue) {
              // The call was at the start of a landing pad - leave it alone.
              assert(LandingPads.count(BB) &&
                     "Created eh.exception call outside landing pad!");
              continue;
            }
            CI->replaceAllUsesWith(ExceptionValue);
          }
          CI->eraseFromParent();
          ++NumExceptionValuesMoved;
          Changed = true;
        }
  }

  return Changed;
}

/// FinishStackTemporaries - If we introduced a stack variable to hold the
/// exception value then initialize it in each landing pad.
bool DwarfEHPrepare::FinishStackTemporaries() {
  if (!ExceptionValueVar)
    // Nothing to do.
    return false;

  bool Changed = false;

  // Make sure that there is a store of the exception value at the start of
  // each landing pad.
  for (BBSet::iterator LI = LandingPads.begin(), LE = LandingPads.end();
       LI != LE; ++LI) {
    Instruction *ExceptionValue = CreateReadOfExceptionValue(*LI);
    Instruction *Store = new StoreInst(ExceptionValue, ExceptionValueVar);
    Store->insertAfter(ExceptionValue);
    Changed = true;
  }

  return Changed;
}

/// PromoteStackTemporaries - Turn any stack temporaries we introduced into
/// registers if possible.
bool DwarfEHPrepare::PromoteStackTemporaries() {
  if (ExceptionValueVar && DT && DF && isAllocaPromotable(ExceptionValueVar)) {
    // Turn the exception temporary into registers and phi nodes if possible.
    std::vector<AllocaInst*> Allocas(1, ExceptionValueVar);
    PromoteMemToReg(Allocas, *DT, *DF);
    return true;
  }
  return false;
}

/// CreateExceptionValueCall - Insert a call to the eh.exception intrinsic at
/// the start of the basic block (unless there already is one, in which case
/// the existing call is returned).
Instruction *DwarfEHPrepare::CreateExceptionValueCall(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHIOrDbg();
  // Is this a call to eh.exception?
  if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(Start))
    if (CI->getIntrinsicID() == Intrinsic::eh_exception)
      // Reuse the existing call.
      return Start;

  // Find the eh.exception intrinsic if we didn't already.
  if (!ExceptionValueIntrinsic)
    ExceptionValueIntrinsic = Intrinsic::getDeclaration(F->getParent(),
                                                       Intrinsic::eh_exception);

  // Create the call.
  return CallInst::Create(ExceptionValueIntrinsic, "eh.value.call", Start);
}

/// CreateValueLoad - Insert a load of the exception value stack variable
/// (creating it if necessary) at the start of the basic block (unless
/// there already is a load, in which case the existing load is returned).
Instruction *DwarfEHPrepare::CreateValueLoad(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHIOrDbg();
  // Is this a load of the exception temporary?
  if (ExceptionValueVar)
    if (LoadInst* LI = dyn_cast<LoadInst>(Start))
      if (LI->getPointerOperand() == ExceptionValueVar)
        // Reuse the existing load.
        return Start;

  // Create the temporary if we didn't already.
  if (!ExceptionValueVar) {
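    // Place the alloca at the start of the entry block so it is a static
    // alloca that PromoteStackTemporaries can later turn into a register.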
    ExceptionValueVar = new AllocaInst(PointerType::getUnqual(
           Type::getInt8Ty(BB->getContext())), "eh.value", F->begin()->begin());
    ++NumStackTempsIntroduced;
  }

  // Load the value.
  return new LoadInst(ExceptionValueVar, "eh.value.load", Start);
}

bool DwarfEHPrepare::runOnFunction(Function &Fn) {
  bool Changed = false;

  // Initialize internal state.
  DT = getAnalysisIfAvailable<DominatorTree>();
  DF = getAnalysisIfAvailable<DominanceFrontier>();
  ExceptionValueVar = 0;
  F = &Fn;

  // Ensure that only unwind edges end at landing pads (a landing pad is a
  // basic block where an invoke unwind edge ends).
  Changed |= NormalizeLandingPads();

  // Turn unwind instructions into libcalls.
  Changed |= LowerUnwinds();

  // TODO: Move eh.selector calls to landing pads and combine them.

  // Move eh.exception calls to landing pads.
  Changed |= MoveExceptionValueCalls();

  // Initialize any stack temporaries we introduced.
  Changed |= FinishStackTemporaries();

  // Turn any stack temporaries into registers if possible.
  if (!CompileFast)
    Changed |= PromoteStackTemporaries();

  Changed |= HandleURoRInvokes();

  LandingPads.clear();

  return Changed;
}