DwarfEHPrepare.cpp revision adc581f5cb6bdb929b1c6a155c330151ebd3bf72
//===-- DwarfEHPrepare - Prepare exception handling for code generation ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass mulches exception handling code into a form adapted to code
// generation. Required if using dwarf exception handling.
//
//===----------------------------------------------------------------------===//

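// At a high level (see runOnFunction at the end of this file), the pass:
//   - normalizes landing pads so that only invoke unwind edges reach them,
//   - lowers 'unwind' instructions to a call to _Unwind_Resume (or the
//     target equivalent) followed by 'unreachable',
//   - confines llvm.eh.exception calls to landing pads, with other uses
//     reading the value from a stack temporary instead, and
//   - widens clean-up llvm.eh.selector calls that feed invokes of
//     _Unwind_Resume_or_Rethrow into catch-all selectors.
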
#define DEBUG_TYPE "dwarfehprepare"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Module.h"
#include "llvm/Pass.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
using namespace llvm;

STATISTIC(NumLandingPadsSplit,     "Number of landing pads split");
STATISTIC(NumUnwindsLowered,       "Number of unwind instructions lowered");
STATISTIC(NumExceptionValuesMoved, "Number of eh.exception calls moved");
STATISTIC(NumStackTempsIntroduced, "Number of stack temporaries introduced");

namespace {
  class DwarfEHPrepare : public FunctionPass {
    const TargetMachine *TM;
    const TargetLowering *TLI;

    // The eh.exception intrinsic.
    Function *ExceptionValueIntrinsic;

    // The eh.selector intrinsic.
    Function *SelectorIntrinsic;

    // _Unwind_Resume_or_Rethrow call.
    Constant *URoR;

    // The EH language-specific catch-all type.
    GlobalVariable *EHCatchAllValue;

    // _Unwind_Resume or the target equivalent.
    Constant *RewindFunction;

    // We both use and preserve dominator info.
    DominatorTree *DT;

    // The function we are running on.
    Function *F;

    // The landing pads for this function.
    typedef SmallPtrSet<BasicBlock*, 8> BBSet;
    BBSet LandingPads;

    // Stack temporary used to hold eh.exception values.
    AllocaInst *ExceptionValueVar;

    bool NormalizeLandingPads();
    bool LowerUnwinds();
    bool MoveExceptionValueCalls();
    bool FinishStackTemporaries();

    Instruction *CreateExceptionValueCall(BasicBlock *BB);
    Instruction *CreateValueLoad(BasicBlock *BB);

    /// CreateReadOfExceptionValue - Return the result of the eh.exception
    /// intrinsic by calling the intrinsic if in a landing pad, or loading it
    /// from the exception value variable otherwise.
    Instruction *CreateReadOfExceptionValue(BasicBlock *BB) {
      return LandingPads.count(BB) ?
        CreateExceptionValueCall(BB) : CreateValueLoad(BB);
    }

    /// CleanupSelectors - Any remaining eh.selector intrinsic calls which still
    /// use the "llvm.eh.catch.all.value" global need to be converted to use its
    /// initializer instead.
    bool CleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels);

    bool HasCatchAllInSelector(IntrinsicInst *);

    /// FindAllCleanupSelectors - Find all eh.selector calls in the function,
    /// partitioning them into clean-ups (Sels) and catch-alls (CatchAllSels).
    void FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels,
                                 SmallPtrSet<IntrinsicInst*, 32> &CatchAllSels);

    /// FindAllURoRInvokes - Find all URoR invokes in the function.
    void FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes);

    /// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow"
    /// calls. The "unwind" part of these invokes jumps to a landing pad within
    /// the current function. Such an invoke is a candidate for merging the
    /// selector associated with the URoR invoke with the one from the URoR's
    /// landing pad.
    bool HandleURoRInvokes();

    /// FindSelectorAndURoR - Find the eh.selector call and URoR call associated
    /// with the eh.exception call. This recursively looks past instructions
    /// which don't change the EH pointer value, like casts or PHI nodes.
    bool FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                             SmallPtrSet<IntrinsicInst*, 8> &SelCalls);

  public:
    static char ID; // Pass identification, replacement for typeid.
    DwarfEHPrepare(const TargetMachine *tm) :
      FunctionPass(ID), TM(tm), TLI(TM->getTargetLowering()),
      ExceptionValueIntrinsic(0), SelectorIntrinsic(0),
      URoR(0), EHCatchAllValue(0), RewindFunction(0) {}

    virtual bool runOnFunction(Function &Fn);

    // getAnalysisUsage - We require and preserve dominator information.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<DominatorTree>();
      AU.addPreserved<DominatorTree>();
    }

    const char *getPassName() const {
      return "Exception handling preparation";
    }

  };
} // end anonymous namespace

char DwarfEHPrepare::ID = 0;

FunctionPass *llvm::createDwarfEHPass(const TargetMachine *tm) {
  return new DwarfEHPrepare(tm);
}

/// HasCatchAllInSelector - Return true if the intrinsic instruction has a
/// catch-all.
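/// (Illustrative note: the catch-all value, when present, is by convention the
/// last argument of the llvm.eh.selector call, e.g. schematically
///   call i32 @llvm.eh.selector(i8* %exn, i8* %personality, ...,
///                              @llvm.eh.catch.all.value)
/// which is why only the final operand is inspected below.)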
bool DwarfEHPrepare::HasCatchAllInSelector(IntrinsicInst *II) {
  if (!EHCatchAllValue) return false;

  unsigned ArgIdx = II->getNumArgOperands() - 1;
  GlobalVariable *GV = dyn_cast<GlobalVariable>(II->getArgOperand(ArgIdx));
  return GV == EHCatchAllValue;
}

/// FindAllCleanupSelectors - Find all eh.selector calls in the function,
/// partitioning them into clean-ups (Sels) and catch-alls (CatchAllSels).
void DwarfEHPrepare::
FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels,
                        SmallPtrSet<IntrinsicInst*, 32> &CatchAllSels) {
  for (Value::use_iterator
         I = SelectorIntrinsic->use_begin(),
         E = SelectorIntrinsic->use_end(); I != E; ++I) {
    IntrinsicInst *II = cast<IntrinsicInst>(*I);

    if (II->getParent()->getParent() != F)
      continue;

    if (!HasCatchAllInSelector(II))
      Sels.insert(II);
    else
      CatchAllSels.insert(II);
  }
}

/// FindAllURoRInvokes - Find all URoR invokes in the function.
void DwarfEHPrepare::
FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes) {
  for (Value::use_iterator
         I = URoR->use_begin(),
         E = URoR->use_end(); I != E; ++I) {
    if (InvokeInst *II = dyn_cast<InvokeInst>(*I))
      URoRInvokes.insert(II);
  }
}

/// CleanupSelectors - Any remaining eh.selector intrinsic calls which still use
/// the "llvm.eh.catch.all.value" global need to be converted to use its
/// initializer instead.
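/// For example (illustrative only), a selector whose last argument is the
/// @llvm.eh.catch.all.value global itself is rewritten so that the argument
/// becomes the global's initializer (for C++ front ends this is typically a
/// null typeinfo pointer), keeping the helper global out of the final code.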
bool DwarfEHPrepare::CleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels) {
  if (!EHCatchAllValue) return false;

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  bool Changed = false;
  for (SmallPtrSet<IntrinsicInst*, 32>::iterator
         I = Sels.begin(), E = Sels.end(); I != E; ++I) {
    IntrinsicInst *Sel = *I;

    // Index of the "llvm.eh.catch.all.value" variable.
    unsigned OpIdx = Sel->getNumArgOperands() - 1;
    GlobalVariable *GV = dyn_cast<GlobalVariable>(Sel->getArgOperand(OpIdx));
    if (GV != EHCatchAllValue) continue;
    Sel->setArgOperand(OpIdx, EHCatchAllValue->getInitializer());
    Changed = true;
  }

  return Changed;
}

/// FindSelectorAndURoR - Find the eh.selector calls associated with the
/// eh.exception call, and indicate whether there is a URoR "invoke" associated
/// with it. This recursively looks past instructions which don't change the EH
/// pointer value, like casts or PHI nodes.
bool
DwarfEHPrepare::FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                                    SmallPtrSet<IntrinsicInst*, 8> &SelCalls) {
  SmallPtrSet<PHINode*, 32> SeenPHIs;
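  // Note that SeenPHIs is local to this invocation: it only guards against
  // revisiting a PHI node reached directly from Inst; each recursive call
  // below starts with its own empty set.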
  bool Changed = false;

  for (Value::use_iterator
         I = Inst->use_begin(), E = Inst->use_end(); I != E; ++I) {
    Instruction *II = dyn_cast<Instruction>(*I);
    if (!II || II->getParent()->getParent() != F) continue;

    if (IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(II)) {
      if (Sel->getIntrinsicID() == Intrinsic::eh_selector)
        SelCalls.insert(Sel);
    } else if (InvokeInst *Invoke = dyn_cast<InvokeInst>(II)) {
      if (Invoke->getCalledFunction() == URoR)
        URoRInvoke = true;
    } else if (CastInst *CI = dyn_cast<CastInst>(II)) {
      Changed |= FindSelectorAndURoR(CI, URoRInvoke, SelCalls);
    } else if (PHINode *PN = dyn_cast<PHINode>(II)) {
      // Don't process a PHI node more than once.
      if (SeenPHIs.insert(PN))
        Changed |= FindSelectorAndURoR(PN, URoRInvoke, SelCalls);
    }
  }

  return Changed;
}

/// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow" calls. The
/// "unwind" part of these invokes jumps to a landing pad within the current
/// function. Such an invoke is a candidate for merging the selector associated
/// with the URoR invoke with the one from the URoR's landing pad.
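/// For example (illustrative only, assuming the usual C++ conventions), a
/// clean-up selector such as
///   %sel = call i32 @llvm.eh.selector(i8* %exn, i8* %personality, i32 0)
/// whose block dominates such an invoke is replaced by a catch-all selector,
/// roughly
///   %eh.sel.catch.all = call i32 @llvm.eh.selector(i8* %exn,
///                                                  i8* %personality, i8* null)
/// where the trailing integer clean-up marker is dropped and the catch-all
/// value (whatever llvm.eh.catch.all.value is initialized to; null is shown
/// here) is appended.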
bool DwarfEHPrepare::HandleURoRInvokes() {
  if (!EHCatchAllValue) {
    EHCatchAllValue =
      F->getParent()->getNamedGlobal("llvm.eh.catch.all.value");
    if (!EHCatchAllValue) return false;
  }

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  SmallPtrSet<IntrinsicInst*, 32> Sels;
  SmallPtrSet<IntrinsicInst*, 32> CatchAllSels;
  FindAllCleanupSelectors(Sels, CatchAllSels);

  if (!URoR) {
    URoR = F->getParent()->getFunction("_Unwind_Resume_or_Rethrow");
    if (!URoR) return CleanupSelectors(CatchAllSels);
  }

  SmallPtrSet<InvokeInst*, 32> URoRInvokes;
  FindAllURoRInvokes(URoRInvokes);

  SmallPtrSet<IntrinsicInst*, 32> SelsToConvert;

  for (SmallPtrSet<IntrinsicInst*, 32>::iterator
         SI = Sels.begin(), SE = Sels.end(); SI != SE; ++SI) {
    const BasicBlock *SelBB = (*SI)->getParent();
    for (SmallPtrSet<InvokeInst*, 32>::iterator
           UI = URoRInvokes.begin(), UE = URoRInvokes.end(); UI != UE; ++UI) {
      const BasicBlock *URoRBB = (*UI)->getParent();
      if (DT->dominates(SelBB, URoRBB)) {
        SelsToConvert.insert(*SI);
        break;
      }
    }
  }

  bool Changed = false;

  if (Sels.size() != SelsToConvert.size()) {
    // If we haven't been able to convert all of the clean-up selectors, then
    // loop through the slow way to see if they still need to be converted.
    if (!ExceptionValueIntrinsic) {
      ExceptionValueIntrinsic =
        Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_exception);
      if (!ExceptionValueIntrinsic)
        return CleanupSelectors(CatchAllSels);
    }

    for (Value::use_iterator
           I = ExceptionValueIntrinsic->use_begin(),
           E = ExceptionValueIntrinsic->use_end(); I != E; ++I) {
      IntrinsicInst *EHPtr = dyn_cast<IntrinsicInst>(*I);
      if (!EHPtr || EHPtr->getParent()->getParent() != F) continue;

      bool URoRInvoke = false;
      SmallPtrSet<IntrinsicInst*, 8> SelCalls;
      Changed |= FindSelectorAndURoR(EHPtr, URoRInvoke, SelCalls);

      if (URoRInvoke) {
        // This EH pointer is being used by an invoke of a URoR call and by an
        // eh.selector intrinsic call. If the eh.selector is a 'clean-up', we
        // need to convert it to a 'catch-all'.
        for (SmallPtrSet<IntrinsicInst*, 8>::iterator
               SI = SelCalls.begin(), SE = SelCalls.end(); SI != SE; ++SI)
          if (!HasCatchAllInSelector(*SI))
            SelsToConvert.insert(*SI);
      }
    }
  }

  if (!SelsToConvert.empty()) {
    // Convert all clean-up eh.selectors, which are associated with "invokes" of
    // URoR calls, into catch-all eh.selectors.
    Changed = true;

    for (SmallPtrSet<IntrinsicInst*, 8>::iterator
           SI = SelsToConvert.begin(), SE = SelsToConvert.end();
         SI != SE; ++SI) {
      IntrinsicInst *II = *SI;

      // Use the exception object pointer and the personality function
      // from the original selector.
      CallSite CS(II);
      IntrinsicInst::op_iterator I = CS.arg_begin();
      IntrinsicInst::op_iterator E = CS.arg_end();
      IntrinsicInst::op_iterator B = prior(E);

      // Exclude last argument if it is an integer.
      if (isa<ConstantInt>(B)) E = B;

      // Add exception object pointer (front).
      // Add personality function (next).
      // Add in any filter IDs (rest).
      SmallVector<Value*, 8> Args(I, E);

      Args.push_back(EHCatchAllValue->getInitializer()); // Catch-all indicator.

      CallInst *NewSelector =
        CallInst::Create(SelectorIntrinsic, Args.begin(), Args.end(),
                         "eh.sel.catch.all", II);

      NewSelector->setTailCall(II->isTailCall());
      NewSelector->setAttributes(II->getAttributes());
      NewSelector->setCallingConv(II->getCallingConv());

      II->replaceAllUsesWith(NewSelector);
      II->eraseFromParent();
    }
  }

  Changed |= CleanupSelectors(CatchAllSels);
  return Changed;
}

/// NormalizeLandingPads - Normalize and discover landing pads, noting them
/// in the LandingPads set.  A landing pad is normal if the only CFG edges
/// that end at it are unwind edges from invoke instructions. If we inlined
/// through an invoke we could have a normal branch from the previous
/// unwind block through to the landing pad for the original invoke.
/// Abnormal landing pads are fixed up by redirecting all unwind edges to
/// a new basic block which falls through to the original.
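/// For example (illustrative only), if %lpad is reached both by an invoke's
/// unwind edge and by an ordinary branch, the unwind edges are redirected to
/// a new block, named by appending "_unwind_edge" to the pad's name, which
/// simply falls through:
///   lpad_unwind_edge:
///     br label %lpad
/// and that new block, rather than %lpad, is then recorded as the landing pad.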
bool DwarfEHPrepare::NormalizeLandingPads() {
  bool Changed = false;

  const MCAsmInfo *MAI = TM->getMCAsmInfo();
  bool usingSjLjEH = MAI->getExceptionHandlingType() == ExceptionHandling::SjLj;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (!isa<InvokeInst>(TI))
      continue;
    BasicBlock *LPad = TI->getSuccessor(1);
    // Skip landing pads that have already been normalized.
    if (LandingPads.count(LPad))
      continue;

    // Check that only invoke unwind edges end at the landing pad.
    bool OnlyUnwoundTo = true;
    bool SwitchOK = usingSjLjEH;
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad);
         PI != PE; ++PI) {
      TerminatorInst *PT = (*PI)->getTerminator();
      // The SjLj dispatch block uses a switch instruction. This is effectively
      // an unwind edge, so we can disregard it here. There will only ever
      // be one dispatch, however, so if there are multiple switches, one
      // of them truly is a normal edge, not an unwind edge.
      if (SwitchOK && isa<SwitchInst>(PT)) {
        SwitchOK = false;
        continue;
      }
      if (!isa<InvokeInst>(PT) || LPad == PT->getSuccessor(0)) {
        OnlyUnwoundTo = false;
        break;
      }
    }

    if (OnlyUnwoundTo) {
      // Only unwind edges lead to the landing pad.  Remember the landing pad.
      LandingPads.insert(LPad);
      continue;
    }

    // At least one normal edge ends at the landing pad.  Redirect the unwind
    // edges to a new basic block which falls through into this one.

    // Create the new basic block.
    BasicBlock *NewBB = BasicBlock::Create(F->getContext(),
                                           LPad->getName() + "_unwind_edge");

    // Insert it into the function right before the original landing pad.
    LPad->getParent()->getBasicBlockList().insert(LPad, NewBB);

    // Redirect unwind edges from the original landing pad to NewBB.
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad); PI != PE; ) {
      TerminatorInst *PT = (*PI++)->getTerminator();
      if (isa<InvokeInst>(PT) && PT->getSuccessor(1) == LPad)
        // Unwind to the new block.
        PT->setSuccessor(1, NewBB);
    }

    // If there are any PHI nodes in LPad, we need to update them so that they
    // merge incoming values from NewBB instead.
    for (BasicBlock::iterator II = LPad->begin(); isa<PHINode>(II); ++II) {
      PHINode *PN = cast<PHINode>(II);
      pred_iterator PB = pred_begin(NewBB), PE = pred_end(NewBB);

      // Check to see if all of the values coming in via unwind edges are the
      // same.  If so, we don't need to create a new PHI node.
      Value *InVal = PN->getIncomingValueForBlock(*PB);
      for (pred_iterator PI = PB; PI != PE; ++PI) {
        if (PI != PB && InVal != PN->getIncomingValueForBlock(*PI)) {
          InVal = 0;
          break;
        }
      }

      if (InVal == 0) {
        // Different unwind edges have different values.  Create a new PHI node
        // in NewBB.
        PHINode *NewPN = PHINode::Create(PN->getType(), PN->getName()+".unwind",
                                         NewBB);
        // Add an entry for each unwind edge, using the value from the old PHI.
        for (pred_iterator PI = PB; PI != PE; ++PI)
          NewPN->addIncoming(PN->getIncomingValueForBlock(*PI), *PI);

        // Now use this new PHI as the common incoming value for NewBB in PN.
        InVal = NewPN;
      }

      // Revector exactly one entry in the PHI node to come from NewBB
      // and delete all other entries that come from unwind edges.  If
      // there are both normal and unwind edges from the same predecessor,
      // this leaves an entry for the normal edge.
      for (pred_iterator PI = PB; PI != PE; ++PI)
        PN->removeIncomingValue(*PI);
      PN->addIncoming(InVal, NewBB);
    }

    // Add a fallthrough from NewBB to the original landing pad.
    BranchInst::Create(LPad, NewBB);

    // Now update DominatorTree analysis information.
    DT->splitBlock(NewBB);

    // Remember the newly constructed landing pad.  The original landing pad
    // LPad is no longer a landing pad now that all unwind edges have been
    // revectored to NewBB.
    LandingPads.insert(NewBB);
    ++NumLandingPadsSplit;
    Changed = true;
  }

  return Changed;
}

/// LowerUnwinds - Turn unwind instructions into calls to _Unwind_Resume,
/// rethrowing any previously caught exception.  This will crash horribly
/// at runtime if there is no such exception: using unwind to throw a new
/// exception is currently not supported.
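/// For example (illustrative only), an 'unwind' terminator is rewritten to
/// roughly
///   %exn = call i8* @llvm.eh.exception()   ; or a load of the stack temporary
///   call void @_Unwind_Resume(i8* %exn)
///   unreachable
/// where the callee is whatever name the target reports for
/// RTLIB::UNWIND_RESUME.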
bool DwarfEHPrepare::LowerUnwinds() {
  SmallVector<TerminatorInst*, 16> UnwindInsts;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (isa<UnwindInst>(TI))
      UnwindInsts.push_back(TI);
  }

  if (UnwindInsts.empty()) return false;

  // Find the rewind function if we didn't already.
  if (!RewindFunction) {
    LLVMContext &Ctx = UnwindInsts[0]->getContext();
    std::vector<const Type*>
      Params(1, Type::getInt8PtrTy(Ctx));
    FunctionType *FTy = FunctionType::get(Type::getVoidTy(Ctx),
                                          Params, false);
    const char *RewindName = TLI->getLibcallName(RTLIB::UNWIND_RESUME);
    RewindFunction = F->getParent()->getOrInsertFunction(RewindName, FTy);
  }

  bool Changed = false;

  for (SmallVectorImpl<TerminatorInst*>::iterator
         I = UnwindInsts.begin(), E = UnwindInsts.end(); I != E; ++I) {
    TerminatorInst *TI = *I;

    // Replace the unwind instruction with a call to _Unwind_Resume (or the
    // appropriate target equivalent) followed by an UnreachableInst.

    // Create the call...
    CallInst *CI = CallInst::Create(RewindFunction,
                                    CreateReadOfExceptionValue(TI->getParent()),
                                    "", TI);
    CI->setCallingConv(TLI->getLibcallCallingConv(RTLIB::UNWIND_RESUME));
    // ...followed by an UnreachableInst.
    new UnreachableInst(TI->getContext(), TI);

    // Nuke the unwind instruction.
    TI->eraseFromParent();
    ++NumUnwindsLowered;
    Changed = true;
  }

  return Changed;
}

/// MoveExceptionValueCalls - Ensure that eh.exception is only ever called from
/// landing pads by replacing calls outside of landing pads with loads from a
/// stack temporary.  Move eh.exception calls inside landing pads to the start
/// of the landing pad (optional, but may make things simpler for later passes).
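/// For example (illustrative only), a call
///   %exn = call i8* @llvm.eh.exception()
/// sitting outside any landing pad is deleted and its uses are redirected to
/// a load of the stack temporary; a call inside a landing pad is instead
/// recreated at the start of that block.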
bool DwarfEHPrepare::MoveExceptionValueCalls() {
  // If the eh.exception intrinsic is not declared in the module then there is
  // nothing to do.  Speed up compilation by checking for this common case.
  if (!ExceptionValueIntrinsic &&
      !F->getParent()->getFunction(Intrinsic::getName(Intrinsic::eh_exception)))
    return false;

  bool Changed = false;

  for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;)
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++))
        if (CI->getIntrinsicID() == Intrinsic::eh_exception) {
          if (!CI->use_empty()) {
            Value *ExceptionValue = CreateReadOfExceptionValue(BB);
            if (CI == ExceptionValue) {
              // The call was at the start of a landing pad - leave it alone.
              assert(LandingPads.count(BB) &&
                     "Created eh.exception call outside landing pad!");
              continue;
            }
            CI->replaceAllUsesWith(ExceptionValue);
          }
          CI->eraseFromParent();
          ++NumExceptionValuesMoved;
          Changed = true;
        }
  }

  return Changed;
}

/// FinishStackTemporaries - If we introduced a stack variable to hold the
/// exception value then initialize it in each landing pad.
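/// For example (illustrative only), each landing pad gains a prologue of
/// roughly the form
///   %exn = call i8* @llvm.eh.exception()
///   store i8* %exn, i8** %eh.value
/// so that loads of the temporary elsewhere in the function observe the
/// current exception pointer.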
bool DwarfEHPrepare::FinishStackTemporaries() {
  if (!ExceptionValueVar)
    // Nothing to do.
    return false;

  bool Changed = false;

  // Make sure that there is a store of the exception value at the start of
  // each landing pad.
  for (BBSet::iterator LI = LandingPads.begin(), LE = LandingPads.end();
       LI != LE; ++LI) {
    Instruction *ExceptionValue = CreateReadOfExceptionValue(*LI);
    Instruction *Store = new StoreInst(ExceptionValue, ExceptionValueVar);
    Store->insertAfter(ExceptionValue);
    Changed = true;
  }

  return Changed;
}

/// CreateExceptionValueCall - Insert a call to the eh.exception intrinsic at
/// the start of the basic block (unless there already is one, in which case
/// the existing call is returned).
Instruction *DwarfEHPrepare::CreateExceptionValueCall(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHIOrDbg();
  // Is this a call to eh.exception?
  if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(Start))
    if (CI->getIntrinsicID() == Intrinsic::eh_exception)
      // Reuse the existing call.
      return Start;

  // Find the eh.exception intrinsic if we didn't already.
  if (!ExceptionValueIntrinsic)
    ExceptionValueIntrinsic = Intrinsic::getDeclaration(F->getParent(),
                                                       Intrinsic::eh_exception);

  // Create the call.
  return CallInst::Create(ExceptionValueIntrinsic, "eh.value.call", Start);
}

/// CreateValueLoad - Insert a load of the exception value stack variable
/// (creating it if necessary) at the start of the basic block (unless
/// there already is a load, in which case the existing load is returned).
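/// (Note: the temporary is a plain i8* alloca named "eh.value" in the entry
/// block; FinishStackTemporaries later stores the exception pointer into it
/// at each landing pad, and promoting it back to a register is left to later
/// passes, as the TODO in runOnFunction notes.)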
Instruction *DwarfEHPrepare::CreateValueLoad(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHIOrDbg();
  // Is this a load of the exception temporary?
  if (ExceptionValueVar)
    if (LoadInst* LI = dyn_cast<LoadInst>(Start))
      if (LI->getPointerOperand() == ExceptionValueVar)
        // Reuse the existing load.
        return Start;

  // Create the temporary if we didn't already.
  if (!ExceptionValueVar) {
    ExceptionValueVar = new AllocaInst(PointerType::getUnqual(
           Type::getInt8Ty(BB->getContext())), "eh.value", F->begin()->begin());
    ++NumStackTempsIntroduced;
  }

  // Load the value.
  return new LoadInst(ExceptionValueVar, "eh.value.load", Start);
}

bool DwarfEHPrepare::runOnFunction(Function &Fn) {
  bool Changed = false;

  // Initialize internal state.
  DT = &getAnalysis<DominatorTree>();
  ExceptionValueVar = 0;
  F = &Fn;

  // Ensure that only unwind edges end at landing pads (a landing pad is a
  // basic block where an invoke unwind edge ends).
  Changed |= NormalizeLandingPads();

  // Turn unwind instructions into libcalls.
  Changed |= LowerUnwinds();

  // TODO: Move eh.selector calls to landing pads and combine them.

  // Move eh.exception calls to landing pads.
  Changed |= MoveExceptionValueCalls();

  // Initialize any stack temporaries we introduced.
  Changed |= FinishStackTemporaries();

  // TODO: Turn any stack temporaries into registers if possible.

  Changed |= HandleURoRInvokes();

  LandingPads.clear();

  return Changed;
}