DwarfEHPrepare.cpp revision efbf30610739d73d1f2dba9a8c29aa30c8c3daa4
//===-- DwarfEHPrepare - Prepare exception handling for code generation ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass mulches exception handling code into a form adapted to code
// generation. Required if using DWARF exception handling.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "dwarfehprepare"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Module.h"
#include "llvm/Pass.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
using namespace llvm;

STATISTIC(NumLandingPadsSplit,     "Number of landing pads split");
STATISTIC(NumUnwindsLowered,       "Number of unwind instructions lowered");
STATISTIC(NumExceptionValuesMoved, "Number of eh.exception calls moved");
STATISTIC(NumStackTempsIntroduced, "Number of stack temporaries introduced");

namespace {
  class DwarfEHPrepare : public FunctionPass {
    const TargetMachine *TM;
    const TargetLowering *TLI;
    bool CompileFast;

    // The eh.exception intrinsic.
    Function *ExceptionValueIntrinsic;

    // The eh.selector intrinsic.
    Function *SelectorIntrinsic;

    // _Unwind_Resume_or_Rethrow call.
    Constant *URoR;

    // The EH language-specific catch-all type.
    GlobalVariable *EHCatchAllValue;

    // _Unwind_Resume or the target equivalent.
    Constant *RewindFunction;

    // Dominator info is used when turning stack temporaries into registers.
    DominatorTree *DT;
    DominanceFrontier *DF;

    // The function we are running on.
    Function *F;

    // The landing pads for this function.
    typedef SmallPtrSet<BasicBlock*, 8> BBSet;
    BBSet LandingPads;

    // Stack temporary used to hold eh.exception values.
    AllocaInst *ExceptionValueVar;

    bool NormalizeLandingPads();
    bool LowerUnwinds();
    bool MoveExceptionValueCalls();
    bool FinishStackTemporaries();
    bool PromoteStackTemporaries();

    Instruction *CreateExceptionValueCall(BasicBlock *BB);
    Instruction *CreateValueLoad(BasicBlock *BB);

    /// CreateReadOfExceptionValue - Return the result of the eh.exception
    /// intrinsic by calling the intrinsic if in a landing pad, or loading it
    /// from the exception value variable otherwise.
    Instruction *CreateReadOfExceptionValue(BasicBlock *BB) {
      return LandingPads.count(BB) ?
        CreateExceptionValueCall(BB) : CreateValueLoad(BB);
    }

    /// CleanupSelectors - Any remaining eh.selector intrinsic calls which still
    /// use the ".llvm.eh.catch.all.value" global need to be converted to use
    /// its initializer instead.
    bool CleanupSelectors();

    bool HasCatchAllInSelector(IntrinsicInst *);

    /// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
    void FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels);

    /// FindAllURoRInvokes - Find all URoR invokes in the function.
    void FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes);

    /// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow"
    /// calls. The "unwind" part of these invokes jumps to a landing pad
    /// within the current function. Such a landing pad is a candidate for
    /// merging the selector associated with the URoR invoke with the one from
    /// the URoR's landing pad.
    bool HandleURoRInvokes();

    /// FindSelectorAndURoR - Find the eh.selector call and URoR call associated
    /// with the eh.exception call. This recursively looks past instructions
    /// which don't change the EH pointer value, like casts or PHI nodes.
    bool FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                             SmallPtrSet<IntrinsicInst*, 8> &SelCalls);

    /// DoMem2RegPromotion - Take an alloca instruction and promote it from
    /// memory to a register.
    bool DoMem2RegPromotion(Value *V) {
      AllocaInst *AI = dyn_cast<AllocaInst>(V);
      if (!AI || !isAllocaPromotable(AI)) return false;

      // Turn the alloca into a register.
      std::vector<AllocaInst*> Allocas(1, AI);
      PromoteMemToReg(Allocas, *DT, *DF);
      return true;
    }

    /// PromoteStoreInst - Perform Mem2Reg on a StoreInst.
    bool PromoteStoreInst(StoreInst *SI) {
      if (!SI || !DT || !DF) return false;
      if (DoMem2RegPromotion(SI->getOperand(1)))
        return true;
      return false;
    }

    /// PromoteEHPtrStore - Promote the storing of an EH pointer into a
    /// register. This should get rid of the store and subsequent loads.
    bool PromoteEHPtrStore(IntrinsicInst *II) {
      if (!DT || !DF) return false;

      bool Changed = false;
      StoreInst *SI;

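      // Look repeatedly for a store of the EH pointer and promote the alloca
      // it stores to.  A successful promotion can change II's use list, so
      // rescan the uses from the beginning after each one.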
      while (1) {
        SI = 0;
        for (Value::use_iterator
               I = II->use_begin(), E = II->use_end(); I != E; ++I) {
          SI = dyn_cast<StoreInst>(I);
          if (SI) break;
        }

        if (!PromoteStoreInst(SI))
          break;

        Changed = true;
      }

      return Changed;
    }

  public:
    static char ID; // Pass identification, replacement for typeid.
    DwarfEHPrepare(const TargetMachine *tm, bool fast) :
      FunctionPass(&ID), TM(tm), TLI(TM->getTargetLowering()),
      CompileFast(fast),
      ExceptionValueIntrinsic(0), SelectorIntrinsic(0),
      URoR(0), EHCatchAllValue(0), RewindFunction(0) {}

    virtual bool runOnFunction(Function &Fn);

    // getAnalysisUsage - We need dominator trees and dominance frontiers for
    // memory promotion.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      if (!CompileFast)
        AU.addRequired<DominatorTree>();
      AU.addPreserved<DominatorTree>();
      if (!CompileFast)
        AU.addRequired<DominanceFrontier>();
      AU.addPreserved<DominanceFrontier>();
    }

    const char *getPassName() const {
      return "Exception handling preparation";
    }

  };
} // end anonymous namespace

char DwarfEHPrepare::ID = 0;

FunctionPass *llvm::createDwarfEHPass(const TargetMachine *tm, bool fast) {
  return new DwarfEHPrepare(tm, fast);
}

/// HasCatchAllInSelector - Return true if the intrinsic instruction has a
/// catch-all.
bool DwarfEHPrepare::HasCatchAllInSelector(IntrinsicInst *II) {
  if (!EHCatchAllValue) return false;

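  // The catch-all value, if present, is always the last operand of the
  // selector call.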
  unsigned OpIdx = II->getNumOperands() - 1;
  GlobalVariable *GV = dyn_cast<GlobalVariable>(II->getOperand(OpIdx));
  return GV == EHCatchAllValue;
}

/// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
void DwarfEHPrepare::
FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels) {
  for (Value::use_iterator
         I = SelectorIntrinsic->use_begin(),
         E = SelectorIntrinsic->use_end(); I != E; ++I) {
    IntrinsicInst *II = cast<IntrinsicInst>(I);

    if (II->getParent()->getParent() != F)
      continue;

    if (!HasCatchAllInSelector(II))
      Sels.insert(II);
  }
}

/// FindAllURoRInvokes - Find all URoR invokes in the function.
void DwarfEHPrepare::
FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes) {
  for (Value::use_iterator
         I = URoR->use_begin(),
         E = URoR->use_end(); I != E; ++I) {
    if (InvokeInst *II = dyn_cast<InvokeInst>(I))
      URoRInvokes.insert(II);
  }
}

/// CleanupSelectors - Any remaining eh.selector intrinsic calls which still use
/// the ".llvm.eh.catch.all.value" global need to be converted to use its
/// initializer instead.
bool DwarfEHPrepare::CleanupSelectors() {
  if (!EHCatchAllValue) return false;

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  bool Changed = false;
  for (Value::use_iterator
         I = SelectorIntrinsic->use_begin(),
         E = SelectorIntrinsic->use_end(); I != E; ++I) {
    IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(I);
    if (!Sel || Sel->getParent()->getParent() != F) continue;

    // Index of the ".llvm.eh.catch.all.value" variable.
    unsigned OpIdx = Sel->getNumOperands() - 1;
    GlobalVariable *GV = dyn_cast<GlobalVariable>(Sel->getOperand(OpIdx));
    if (GV != EHCatchAllValue) continue;
    Sel->setOperand(OpIdx, EHCatchAllValue->getInitializer());
    Changed = true;
  }

  return Changed;
}

/// FindSelectorAndURoR - Find the eh.selector call associated with the
/// eh.exception call, and indicate whether there is a URoR "invoke" associated
/// with that call. This recursively looks past instructions which don't change
/// the EH pointer value, like casts or PHI nodes.
bool
DwarfEHPrepare::FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                                    SmallPtrSet<IntrinsicInst*, 8> &SelCalls) {
  SmallPtrSet<PHINode*, 32> SeenPHIs;
  bool Changed = false;

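  // Walk everything that uses Inst.  Promoting a store below may change the
  // use list, in which case we jump back here and rescan it.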
 restart:
  for (Value::use_iterator
         I = Inst->use_begin(), E = Inst->use_end(); I != E; ++I) {
    Instruction *II = dyn_cast<Instruction>(I);
    if (!II || II->getParent()->getParent() != F) continue;

    if (IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(II)) {
      if (Sel->getIntrinsicID() == Intrinsic::eh_selector)
        SelCalls.insert(Sel);
    } else if (InvokeInst *Invoke = dyn_cast<InvokeInst>(II)) {
      if (Invoke->getCalledFunction() == URoR)
        URoRInvoke = true;
    } else if (CastInst *CI = dyn_cast<CastInst>(II)) {
      Changed |= FindSelectorAndURoR(CI, URoRInvoke, SelCalls);
    } else if (StoreInst *SI = dyn_cast<StoreInst>(II)) {
      if (!PromoteStoreInst(SI)) continue;
      Changed = true;
      SeenPHIs.clear();
      goto restart;             // Uses may have changed, restart loop.
    } else if (PHINode *PN = dyn_cast<PHINode>(II)) {
      if (SeenPHIs.insert(PN))
        // Don't process a PHI node more than once.
        Changed |= FindSelectorAndURoR(PN, URoRInvoke, SelCalls);
    }
  }

  return Changed;
}

/// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow" calls. The
/// "unwind" part of these invokes jumps to a landing pad within the current
/// function. Such a landing pad is a candidate for merging the selector
/// associated with the URoR invoke with the one from the URoR's landing pad.
bool DwarfEHPrepare::HandleURoRInvokes() {
  if (!DT) return CleanupSelectors(); // We require DominatorTree information.

  if (!EHCatchAllValue) {
    EHCatchAllValue =
      F->getParent()->getNamedGlobal(".llvm.eh.catch.all.value");
    if (!EHCatchAllValue) return false;
  }

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  if (!URoR) {
    URoR = F->getParent()->getFunction("_Unwind_Resume_or_Rethrow");
    if (!URoR) return CleanupSelectors();
  }

  SmallPtrSet<IntrinsicInst*, 32> Sels;
  SmallPtrSet<InvokeInst*, 32> URoRInvokes;
  FindAllCleanupSelectors(Sels);
  FindAllURoRInvokes(URoRInvokes);

  SmallPtrSet<IntrinsicInst*, 32> SelsToConvert;

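  // Collect the clean-up selectors whose blocks dominate (or are the same as)
  // the block containing a URoR invoke; these are the selectors rewritten as
  // catch-alls below.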
  for (SmallPtrSet<IntrinsicInst*, 32>::iterator
         SI = Sels.begin(), SE = Sels.end(); SI != SE; ++SI) {
    const BasicBlock *SelBB = (*SI)->getParent();
    for (SmallPtrSet<InvokeInst*, 32>::iterator
           UI = URoRInvokes.begin(), UE = URoRInvokes.end(); UI != UE; ++UI) {
      const BasicBlock *URoRBB = (*UI)->getParent();
      if (SelBB == URoRBB || DT->dominates(SelBB, URoRBB)) {
        SelsToConvert.insert(*SI);
        break;
      }
    }
  }

  bool Changed = false;

  if (Sels.size() != SelsToConvert.size()) {
    // If we haven't been able to convert all of the clean-up selectors, then
    // loop through the slow way to see if they still need to be converted.
    if (!ExceptionValueIntrinsic) {
      ExceptionValueIntrinsic =
        Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_exception);
      if (!ExceptionValueIntrinsic) return CleanupSelectors();
    }

    for (Value::use_iterator
           I = ExceptionValueIntrinsic->use_begin(),
           E = ExceptionValueIntrinsic->use_end(); I != E; ++I) {
      IntrinsicInst *EHPtr = dyn_cast<IntrinsicInst>(I);
      if (!EHPtr || EHPtr->getParent()->getParent() != F) continue;

      Changed |= PromoteEHPtrStore(EHPtr);

      bool URoRInvoke = false;
      SmallPtrSet<IntrinsicInst*, 8> SelCalls;
      Changed |= FindSelectorAndURoR(EHPtr, URoRInvoke, SelCalls);

      if (URoRInvoke) {
        // This EH pointer is being used both by an invoke of the URoR call
        // and by an eh.selector intrinsic call. If the eh.selector is a
        // 'clean-up', we need to convert it to a 'catch-all'.
        for (SmallPtrSet<IntrinsicInst*, 8>::iterator
               SI = SelCalls.begin(), SE = SelCalls.end(); SI != SE; ++SI)
          if (!HasCatchAllInSelector(*SI))
            SelsToConvert.insert(*SI);
      }
    }
  }

  if (!SelsToConvert.empty()) {
    // Convert all clean-up eh.selectors, which are associated with "invokes" of
    // URoR calls, into catch-all eh.selectors.
    Changed = true;

    for (SmallPtrSet<IntrinsicInst*, 32>::iterator
           SI = SelsToConvert.begin(), SE = SelsToConvert.end();
         SI != SE; ++SI) {
      IntrinsicInst *II = *SI;
      SmallVector<Value*, 8> Args;

      // Use the exception object pointer and the personality function
      // from the original selector.
      Args.push_back(II->getOperand(1)); // Exception object pointer.
      Args.push_back(II->getOperand(2)); // Personality function.

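      // Copy the remaining operands, dropping a trailing integer clean-up
      // flag if the original selector has one; the catch-all value appended
      // below takes its place.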
      unsigned I = 3;
      unsigned E = II->getNumOperands() -
        (isa<ConstantInt>(II->getOperand(II->getNumOperands() - 1)) ? 1 : 0);

      // Add in any filter IDs.
      for (; I < E; ++I)
        Args.push_back(II->getOperand(I));

      Args.push_back(EHCatchAllValue->getInitializer()); // Catch-all indicator.

      CallInst *NewSelector =
        CallInst::Create(SelectorIntrinsic, Args.begin(), Args.end(),
                         "eh.sel.catch.all", II);

      NewSelector->setTailCall(II->isTailCall());
      NewSelector->setAttributes(II->getAttributes());
      NewSelector->setCallingConv(II->getCallingConv());

      II->replaceAllUsesWith(NewSelector);
      II->eraseFromParent();
    }
  }

  Changed |= CleanupSelectors();
  return Changed;
}

/// NormalizeLandingPads - Normalize and discover landing pads, noting them
/// in the LandingPads set.  A landing pad is normal if the only CFG edges
/// that end at it are unwind edges from invoke instructions. If we inlined
/// through an invoke we could have a normal branch from the previous
/// unwind block through to the landing pad for the original invoke.
/// Abnormal landing pads are fixed up by redirecting all unwind edges to
/// a new basic block which falls through to the original.
bool DwarfEHPrepare::NormalizeLandingPads() {
  bool Changed = false;

  const MCAsmInfo *MAI = TM->getMCAsmInfo();
  bool usingSjLjEH = MAI->getExceptionHandlingType() == ExceptionHandling::SjLj;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (!isa<InvokeInst>(TI))
      continue;
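    // The unwind destination of an invoke is its second successor.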
    BasicBlock *LPad = TI->getSuccessor(1);
    // Skip landing pads that have already been normalized.
    if (LandingPads.count(LPad))
      continue;

    // Check that only invoke unwind edges end at the landing pad.
    bool OnlyUnwoundTo = true;
    bool SwitchOK = usingSjLjEH;
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad);
         PI != PE; ++PI) {
      TerminatorInst *PT = (*PI)->getTerminator();
      // The SjLj dispatch block uses a switch instruction. This is effectively
      // an unwind edge, so we can disregard it here. There will only ever
      // be one dispatch, however, so if there are multiple switches, one
      // of them truly is a normal edge, not an unwind edge.
      if (SwitchOK && isa<SwitchInst>(PT)) {
        SwitchOK = false;
        continue;
      }
      if (!isa<InvokeInst>(PT) || LPad == PT->getSuccessor(0)) {
        OnlyUnwoundTo = false;
        break;
      }
    }

    if (OnlyUnwoundTo) {
      // Only unwind edges lead to the landing pad.  Remember the landing pad.
      LandingPads.insert(LPad);
      continue;
    }

    // At least one normal edge ends at the landing pad.  Redirect the unwind
    // edges to a new basic block which falls through into this one.

    // Create the new basic block.
    BasicBlock *NewBB = BasicBlock::Create(F->getContext(),
                                           LPad->getName() + "_unwind_edge");

    // Insert it into the function right before the original landing pad.
    LPad->getParent()->getBasicBlockList().insert(LPad, NewBB);

    // Redirect unwind edges from the original landing pad to NewBB.
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad); PI != PE; ) {
      TerminatorInst *PT = (*PI++)->getTerminator();
      if (isa<InvokeInst>(PT) && PT->getSuccessor(1) == LPad)
        // Unwind to the new block.
        PT->setSuccessor(1, NewBB);
    }

    // If there are any PHI nodes in LPad, we need to update them so that they
    // merge incoming values from NewBB instead.
    for (BasicBlock::iterator II = LPad->begin(); isa<PHINode>(II); ++II) {
      PHINode *PN = cast<PHINode>(II);
      pred_iterator PB = pred_begin(NewBB), PE = pred_end(NewBB);

      // Check to see if all of the values coming in via unwind edges are the
      // same.  If so, we don't need to create a new PHI node.
      Value *InVal = PN->getIncomingValueForBlock(*PB);
      for (pred_iterator PI = PB; PI != PE; ++PI) {
        if (PI != PB && InVal != PN->getIncomingValueForBlock(*PI)) {
          InVal = 0;
          break;
        }
      }

      if (InVal == 0) {
        // Different unwind edges have different values.  Create a new PHI node
        // in NewBB.
        PHINode *NewPN = PHINode::Create(PN->getType(), PN->getName()+".unwind",
                                         NewBB);
        // Add an entry for each unwind edge, using the value from the old PHI.
        for (pred_iterator PI = PB; PI != PE; ++PI)
          NewPN->addIncoming(PN->getIncomingValueForBlock(*PI), *PI);

        // Now use this new PHI as the common incoming value for NewBB in PN.
        InVal = NewPN;
      }

      // Revector exactly one entry in the PHI node to come from NewBB
      // and delete all other entries that come from unwind edges.  If
      // there are both normal and unwind edges from the same predecessor,
      // this leaves an entry for the normal edge.
      for (pred_iterator PI = PB; PI != PE; ++PI)
        PN->removeIncomingValue(*PI);
      PN->addIncoming(InVal, NewBB);
    }

    // Add a fallthrough from NewBB to the original landing pad.
    BranchInst::Create(LPad, NewBB);

    // Now update DominatorTree and DominanceFrontier analysis information.
    if (DT)
      DT->splitBlock(NewBB);
    if (DF)
      DF->splitBlock(NewBB);

    // Remember the newly constructed landing pad.  The original landing pad
    // LPad is no longer a landing pad now that all unwind edges have been
    // revectored to NewBB.
    LandingPads.insert(NewBB);
    ++NumLandingPadsSplit;
    Changed = true;
  }

  return Changed;
}

/// LowerUnwinds - Turn unwind instructions into calls to _Unwind_Resume,
/// rethrowing any previously caught exception.  This will crash horribly
/// at runtime if there is no such exception: using unwind to throw a new
/// exception is currently not supported.
bool DwarfEHPrepare::LowerUnwinds() {
  SmallVector<TerminatorInst*, 16> UnwindInsts;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (isa<UnwindInst>(TI))
      UnwindInsts.push_back(TI);
  }

  if (UnwindInsts.empty()) return false;

  // Find the rewind function if we didn't already.
  if (!RewindFunction) {
    LLVMContext &Ctx = UnwindInsts[0]->getContext();
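    // The rewind function takes the exception object (an i8*) and returns
    // void.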
    std::vector<const Type*>
      Params(1, Type::getInt8PtrTy(Ctx));
    FunctionType *FTy = FunctionType::get(Type::getVoidTy(Ctx),
                                          Params, false);
    const char *RewindName = TLI->getLibcallName(RTLIB::UNWIND_RESUME);
    RewindFunction = F->getParent()->getOrInsertFunction(RewindName, FTy);
  }

  bool Changed = false;

  for (SmallVectorImpl<TerminatorInst*>::iterator
         I = UnwindInsts.begin(), E = UnwindInsts.end(); I != E; ++I) {
    TerminatorInst *TI = *I;

    // Replace the unwind instruction with a call to _Unwind_Resume (or the
    // appropriate target equivalent) followed by an UnreachableInst.

    // Create the call...
    CallInst *CI = CallInst::Create(RewindFunction,
                                    CreateReadOfExceptionValue(TI->getParent()),
                                    "", TI);
    CI->setCallingConv(TLI->getLibcallCallingConv(RTLIB::UNWIND_RESUME));
    // ...followed by an UnreachableInst.
    new UnreachableInst(TI->getContext(), TI);

    // Nuke the unwind instruction.
    TI->eraseFromParent();
    ++NumUnwindsLowered;
    Changed = true;
  }

  return Changed;
}

/// MoveExceptionValueCalls - Ensure that eh.exception is only ever called from
/// landing pads by replacing calls outside of landing pads with loads from a
/// stack temporary.  Move eh.exception calls inside landing pads to the start
/// of the landing pad (optional, but may make things simpler for later passes).
bool DwarfEHPrepare::MoveExceptionValueCalls() {
  // If the eh.exception intrinsic is not declared in the module then there is
  // nothing to do.  Speed up compilation by checking for this common case.
  if (!ExceptionValueIntrinsic &&
      !F->getParent()->getFunction(Intrinsic::getName(Intrinsic::eh_exception)))
    return false;

  bool Changed = false;

  for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB) {
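    // The iterator is advanced inside the dyn_cast below, before CI is
    // potentially erased.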
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;)
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++))
        if (CI->getIntrinsicID() == Intrinsic::eh_exception) {
          if (!CI->use_empty()) {
            Value *ExceptionValue = CreateReadOfExceptionValue(BB);
            if (CI == ExceptionValue) {
              // The call was at the start of a landing pad - leave it alone.
              assert(LandingPads.count(BB) &&
                     "Created eh.exception call outside landing pad!");
              continue;
            }
            CI->replaceAllUsesWith(ExceptionValue);
          }
          CI->eraseFromParent();
          ++NumExceptionValuesMoved;
          Changed = true;
        }
  }

  return Changed;
}

/// FinishStackTemporaries - If we introduced a stack variable to hold the
/// exception value then initialize it in each landing pad.
bool DwarfEHPrepare::FinishStackTemporaries() {
  if (!ExceptionValueVar)
    // Nothing to do.
    return false;

  bool Changed = false;

  // Make sure that there is a store of the exception value at the start of
  // each landing pad.
  for (BBSet::iterator LI = LandingPads.begin(), LE = LandingPads.end();
       LI != LE; ++LI) {
    Instruction *ExceptionValue = CreateReadOfExceptionValue(*LI);
    Instruction *Store = new StoreInst(ExceptionValue, ExceptionValueVar);
    Store->insertAfter(ExceptionValue);
    Changed = true;
  }

  return Changed;
}

/// PromoteStackTemporaries - Turn any stack temporaries we introduced into
/// registers if possible.
bool DwarfEHPrepare::PromoteStackTemporaries() {
  if (ExceptionValueVar && DT && DF && isAllocaPromotable(ExceptionValueVar)) {
    // Turn the exception temporary into registers and phi nodes if possible.
    std::vector<AllocaInst*> Allocas(1, ExceptionValueVar);
    PromoteMemToReg(Allocas, *DT, *DF);
    return true;
  }
  return false;
}

/// CreateExceptionValueCall - Insert a call to the eh.exception intrinsic at
/// the start of the basic block (unless there already is one, in which case
/// the existing call is returned).
Instruction *DwarfEHPrepare::CreateExceptionValueCall(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHIOrDbg();
  // Is this a call to eh.exception?
  if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(Start))
    if (CI->getIntrinsicID() == Intrinsic::eh_exception)
      // Reuse the existing call.
      return Start;

  // Find the eh.exception intrinsic if we didn't already.
  if (!ExceptionValueIntrinsic)
    ExceptionValueIntrinsic = Intrinsic::getDeclaration(F->getParent(),
                                                       Intrinsic::eh_exception);

  // Create the call.
  return CallInst::Create(ExceptionValueIntrinsic, "eh.value.call", Start);
}

/// CreateValueLoad - Insert a load of the exception value stack variable
/// (creating it if necessary) at the start of the basic block (unless
/// there already is a load, in which case the existing load is returned).
Instruction *DwarfEHPrepare::CreateValueLoad(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHIOrDbg();
  // Is this a load of the exception temporary?
  if (ExceptionValueVar)
    if (LoadInst* LI = dyn_cast<LoadInst>(Start))
      if (LI->getPointerOperand() == ExceptionValueVar)
        // Reuse the existing load.
        return Start;

  // Create the temporary if we didn't already.
  if (!ExceptionValueVar) {
    ExceptionValueVar = new AllocaInst(PointerType::getUnqual(
           Type::getInt8Ty(BB->getContext())), "eh.value", F->begin()->begin());
    ++NumStackTempsIntroduced;
  }

  // Load the value.
  return new LoadInst(ExceptionValueVar, "eh.value.load", Start);
}

bool DwarfEHPrepare::runOnFunction(Function &Fn) {
  bool Changed = false;

  // Initialize internal state.
  DT = getAnalysisIfAvailable<DominatorTree>();
  DF = getAnalysisIfAvailable<DominanceFrontier>();
  ExceptionValueVar = 0;
  F = &Fn;

  // Ensure that only unwind edges end at landing pads (a landing pad is a
  // basic block where an invoke unwind edge ends).
  Changed |= NormalizeLandingPads();

  // Turn unwind instructions into libcalls.
  Changed |= LowerUnwinds();

  // TODO: Move eh.selector calls to landing pads and combine them.

  // Move eh.exception calls to landing pads.
  Changed |= MoveExceptionValueCalls();

  // Initialize any stack temporaries we introduced.
  Changed |= FinishStackTemporaries();

  // Turn any stack temporaries into registers if possible.
  if (!CompileFast)
    Changed |= PromoteStackTemporaries();

  Changed |= HandleURoRInvokes();

  LandingPads.clear();

  return Changed;
}