DwarfEHPrepare.cpp revision 19b59f45bddaaabef125d3deec372552a8f0df39
//===-- DwarfEHPrepare - Prepare exception handling for code generation ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass mulches exception handling code into a form adapted to code
// generation. Required if using DWARF exception handling.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "dwarfehprepare"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Module.h"
#include "llvm/Pass.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
using namespace llvm;

STATISTIC(NumLandingPadsSplit,     "Number of landing pads split");
STATISTIC(NumUnwindsLowered,       "Number of unwind instructions lowered");
STATISTIC(NumExceptionValuesMoved, "Number of eh.exception calls moved");
STATISTIC(NumStackTempsIntroduced, "Number of stack temporaries introduced");

namespace {
  class DwarfEHPrepare : public FunctionPass {
    const TargetLowering *TLI;
    bool CompileFast;

    // The eh.exception intrinsic.
    Function *ExceptionValueIntrinsic;

    // The eh.selector intrinsic.
    Function *SelectorIntrinsic;

    // _Unwind_Resume_or_Rethrow call.
    Constant *URoR;

    // The EH language-specific catch-all type.
    GlobalVariable *EHCatchAllValue;

    // _Unwind_Resume or the target equivalent.
    Constant *RewindFunction;

    // Dominator info is used when turning stack temporaries into registers.
    DominatorTree *DT;
    DominanceFrontier *DF;

    // The function we are running on.
    Function *F;

    // The landing pads for this function.
    typedef SmallPtrSet<BasicBlock*, 8> BBSet;
    BBSet LandingPads;

    // Stack temporary used to hold eh.exception values.
    AllocaInst *ExceptionValueVar;

    bool NormalizeLandingPads();
    bool LowerUnwinds();
    bool MoveExceptionValueCalls();
    bool FinishStackTemporaries();
    bool PromoteStackTemporaries();

    Instruction *CreateExceptionValueCall(BasicBlock *BB);
    Instruction *CreateValueLoad(BasicBlock *BB);

    /// CreateReadOfExceptionValue - Return the result of the eh.exception
    /// intrinsic by calling the intrinsic if in a landing pad, or loading it
    /// from the exception value variable otherwise.
    Instruction *CreateReadOfExceptionValue(BasicBlock *BB) {
      return LandingPads.count(BB) ?
        CreateExceptionValueCall(BB) : CreateValueLoad(BB);
    }

    /// CleanupSelectors - Any remaining eh.selector intrinsic calls which still
    /// refer to the ".llvm.eh.catch.all.value" global need to be converted to
    /// use its initializer instead.
    bool CleanupSelectors();

    /// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
    void FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels);

    /// FindAllURoRInvokes - Find all URoR invokes in the function.
    void FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes);

    /// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow"
    /// calls. The "unwind" edge of such an invoke jumps to a landing pad
    /// within the current function, making it a candidate for merging the
    /// selector associated with the URoR invoke with the one from the URoR's
    /// landing pad.
    bool HandleURoRInvokes();

  public:
    static char ID; // Pass identification, replacement for typeid.
    DwarfEHPrepare(const TargetLowering *tli, bool fast) :
      FunctionPass(&ID), TLI(tli), CompileFast(fast),
      ExceptionValueIntrinsic(0), SelectorIntrinsic(0),
      URoR(0), EHCatchAllValue(0), RewindFunction(0) {}

    virtual bool runOnFunction(Function &Fn);

    // getAnalysisUsage - We need dominator info and dominance frontiers for
    // memory promotion, which is only done when not compiling fast.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      if (!CompileFast)
        AU.addRequired<DominatorTree>();
      AU.addPreserved<DominatorTree>();
      if (!CompileFast)
        AU.addRequired<DominanceFrontier>();
      AU.addPreserved<DominanceFrontier>();
    }

    const char *getPassName() const {
      return "Exception handling preparation";
    }

  };
} // end anonymous namespace

char DwarfEHPrepare::ID = 0;

FunctionPass *llvm::createDwarfEHPass(const TargetLowering *tli, bool fast) {
  return new DwarfEHPrepare(tli, fast);
}

/// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
void DwarfEHPrepare::
FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels) {
  for (Value::use_iterator
         I = SelectorIntrinsic->use_begin(),
         E = SelectorIntrinsic->use_end(); I != E; ++I) {
    // Use dyn_cast rather than cast so that non-intrinsic users, if any, are
    // skipped by the null check below instead of tripping the cast assertion.
    IntrinsicInst *SI = dyn_cast<IntrinsicInst>(I);
    if (!SI || SI->getParent()->getParent() != F) continue;

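    // Operand layout assumed by this pass (and used again when building the
    // catch-all selector below): operand 1 is the exception pointer, operand 2
    // the personality function, and operands 3 onwards the clause list.  A
    // selector with no clause operands, or whose only clause operand is the
    // constant 0, is a clean-up-only selector.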
    unsigned NumOps = SI->getNumOperands();
    if (NumOps > 4) continue;
    bool IsCleanUp = (NumOps == 3);

    if (!IsCleanUp)
      if (ConstantInt *CI = dyn_cast<ConstantInt>(SI->getOperand(3)))
        IsCleanUp = (CI->getZExtValue() == 0);

    if (IsCleanUp)
      Sels.insert(SI);
  }
}

/// FindAllURoRInvokes - Find all URoR invokes in the function.
void DwarfEHPrepare::
FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes) {
  for (Value::use_iterator
         I = URoR->use_begin(),
         E = URoR->use_end(); I != E; ++I) {
    if (InvokeInst *II = dyn_cast<InvokeInst>(I))
      URoRInvokes.insert(II);
  }
}

/// CleanupSelectors - Any remaining eh.selector intrinsic calls which still
/// refer to the ".llvm.eh.catch.all.value" global need to be converted to use
/// its initializer instead.
bool DwarfEHPrepare::CleanupSelectors() {
  if (!EHCatchAllValue) return false;

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  bool Changed = false;
  for (Value::use_iterator
         I = SelectorIntrinsic->use_begin(),
         E = SelectorIntrinsic->use_end(); I != E; ++I) {
    IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(I);
    if (!Sel || Sel->getParent()->getParent() != F) continue;

    // Index of the ".llvm.eh.catch.all.value" variable.
    unsigned OpIdx = Sel->getNumOperands() - 1;
    GlobalVariable *GV = dyn_cast<GlobalVariable>(Sel->getOperand(OpIdx));
    if (GV != EHCatchAllValue) continue;
    Sel->setOperand(OpIdx, EHCatchAllValue->getInitializer());
    Changed = true;
  }

  return Changed;
}

/// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow" calls. The
/// "unwind" edge of such an invoke jumps to a landing pad within the current
/// function, making it a candidate for merging the selector associated with the
/// URoR invoke with the one from the URoR's landing pad.
bool DwarfEHPrepare::HandleURoRInvokes() {
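  // Overview: collect the clean-up-only selectors and every invoke of URoR in
  // this function; any clean-up selector whose block is, or dominates, the
  // block of a URoR invoke is rewritten below into a catch-all selector using
  // the initializer of ".llvm.eh.catch.all.value".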
  if (!DT) return CleanupSelectors(); // We require DominatorTree information.

  if (!EHCatchAllValue) {
    EHCatchAllValue =
      F->getParent()->getNamedGlobal(".llvm.eh.catch.all.value");
    if (!EHCatchAllValue) return false;
  }

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  if (!URoR) {
    URoR = F->getParent()->getFunction("_Unwind_Resume_or_Rethrow");
    if (!URoR) return CleanupSelectors();
  }

  SmallPtrSet<IntrinsicInst*, 32> Sels;
  SmallPtrSet<InvokeInst*, 32> URoRInvokes;
  FindAllCleanupSelectors(Sels);
  FindAllURoRInvokes(URoRInvokes);

  SmallPtrSet<IntrinsicInst*, 32> SelsToConvert;

  for (SmallPtrSet<IntrinsicInst*, 32>::iterator
         SI = Sels.begin(), SE = Sels.end(); SI != SE; ++SI) {
    const BasicBlock *SelBB = (*SI)->getParent();
    for (SmallPtrSet<InvokeInst*, 32>::iterator
           UI = URoRInvokes.begin(), UE = URoRInvokes.end(); UI != UE; ++UI) {
      const BasicBlock *URoRBB = (*UI)->getParent();
      if (SelBB == URoRBB || DT->dominates(SelBB, URoRBB)) {
        SelsToConvert.insert(*SI);
        break;
      }
    }
  }

  bool Changed = false;

  if (!SelsToConvert.empty()) {
    // Convert all clean-up eh.selectors that are associated with invokes of
    // URoR calls into catch-all eh.selectors.
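    //
    // Schematically (IR shown approximately, not verbatim for this revision):
    //   before:  %sel = call i32 @llvm.eh.selector(i8* %exn, i8* %pers, i32 0)
    //   after:   %sel = call i32 @llvm.eh.selector(i8* %exn, i8* %pers,
    //                                              i8* <catch-all typeinfo>)
    // where <catch-all typeinfo> is the ".llvm.eh.catch.all.value" initializer.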
    Changed = true;

    for (SmallPtrSet<IntrinsicInst*, 32>::iterator
           SI = SelsToConvert.begin(), SE = SelsToConvert.end();
         SI != SE; ++SI) {
      IntrinsicInst *II = *SI;
      SmallVector<Value*, 8> Args;

      // Use the exception object pointer and the personality function
      // from the original selector.
      Args.push_back(II->getOperand(1)); // Exception object pointer.
      Args.push_back(II->getOperand(2)); // Personality function.
      Args.push_back(EHCatchAllValue->getInitializer()); // Catch-all indicator.

      CallInst *NewSelector =
        CallInst::Create(SelectorIntrinsic, Args.begin(), Args.end(),
                         "eh.sel.catch.all", II);

      NewSelector->setTailCall(II->isTailCall());
      NewSelector->setAttributes(II->getAttributes());
      NewSelector->setCallingConv(II->getCallingConv());

      II->replaceAllUsesWith(NewSelector);
      II->eraseFromParent();
    }
  }

  Changed |= CleanupSelectors();
  return Changed;
}

/// NormalizeLandingPads - Normalize and discover landing pads, noting them
/// in the LandingPads set.  A landing pad is normal if the only CFG edges
/// that end at it are unwind edges from invoke instructions. If we inlined
/// through an invoke we could have a normal branch from the previous
/// unwind block through to the landing pad for the original invoke.
/// Abnormal landing pads are fixed up by redirecting all unwind edges to
/// a new basic block which falls through to the original.
bool DwarfEHPrepare::NormalizeLandingPads() {
  bool Changed = false;

  const MCAsmInfo *MAI = TLI->getTargetMachine().getMCAsmInfo();
  bool usingSjLjEH = MAI->getExceptionHandlingType() == ExceptionHandling::SjLj;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (!isa<InvokeInst>(TI))
      continue;
    BasicBlock *LPad = TI->getSuccessor(1);
    // Skip landing pads that have already been normalized.
    if (LandingPads.count(LPad))
      continue;

    // Check that only invoke unwind edges end at the landing pad.
    bool OnlyUnwoundTo = true;
    bool SwitchOK = usingSjLjEH;
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad);
         PI != PE; ++PI) {
      TerminatorInst *PT = (*PI)->getTerminator();
      // The SjLj dispatch block uses a switch instruction. This is effectively
      // an unwind edge, so we can disregard it here. There will only ever
      // be one dispatch, however, so if there are multiple switches, one
      // of them truly is a normal edge, not an unwind edge.
      if (SwitchOK && isa<SwitchInst>(PT)) {
        SwitchOK = false;
        continue;
      }
      if (!isa<InvokeInst>(PT) || LPad == PT->getSuccessor(0)) {
        OnlyUnwoundTo = false;
        break;
      }
    }

    if (OnlyUnwoundTo) {
      // Only unwind edges lead to the landing pad.  Remember the landing pad.
      LandingPads.insert(LPad);
      continue;
    }

    // At least one normal edge ends at the landing pad.  Redirect the unwind
    // edges to a new basic block which falls through into this one.

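    // Concretely: every invoke that used to unwind to LPad is redirected to a
    // new block named "<LPad>_unwind_edge", which becomes the landing pad and
    // simply branches to LPad.  Normal (non-unwind) edges into LPad are left
    // alone.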
    // Create the new basic block.
    BasicBlock *NewBB = BasicBlock::Create(F->getContext(),
                                           LPad->getName() + "_unwind_edge");

    // Insert it into the function right before the original landing pad.
    LPad->getParent()->getBasicBlockList().insert(LPad, NewBB);

    // Redirect unwind edges from the original landing pad to NewBB.
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad); PI != PE; ) {
      TerminatorInst *PT = (*PI++)->getTerminator();
      if (isa<InvokeInst>(PT) && PT->getSuccessor(1) == LPad)
        // Unwind to the new block.
        PT->setSuccessor(1, NewBB);
    }

    // If there are any PHI nodes in LPad, we need to update them so that they
    // merge incoming values from NewBB instead.
    for (BasicBlock::iterator II = LPad->begin(); isa<PHINode>(II); ++II) {
      PHINode *PN = cast<PHINode>(II);
      pred_iterator PB = pred_begin(NewBB), PE = pred_end(NewBB);

      // Check to see if all of the values coming in via unwind edges are the
      // same.  If so, we don't need to create a new PHI node.
      Value *InVal = PN->getIncomingValueForBlock(*PB);
      for (pred_iterator PI = PB; PI != PE; ++PI) {
        if (PI != PB && InVal != PN->getIncomingValueForBlock(*PI)) {
          InVal = 0;
          break;
        }
      }

      if (InVal == 0) {
        // Different unwind edges have different values.  Create a new PHI node
        // in NewBB.
        PHINode *NewPN = PHINode::Create(PN->getType(), PN->getName()+".unwind",
                                         NewBB);
        // Add an entry for each unwind edge, using the value from the old PHI.
        for (pred_iterator PI = PB; PI != PE; ++PI)
          NewPN->addIncoming(PN->getIncomingValueForBlock(*PI), *PI);

        // Now use this new PHI as the common incoming value for NewBB in PN.
        InVal = NewPN;
      }

      // Revector exactly one entry in the PHI node to come from NewBB
      // and delete all other entries that come from unwind edges.  If
      // there are both normal and unwind edges from the same predecessor,
      // this leaves an entry for the normal edge.
      for (pred_iterator PI = PB; PI != PE; ++PI)
        PN->removeIncomingValue(*PI);
      PN->addIncoming(InVal, NewBB);
    }

    // Add a fallthrough from NewBB to the original landing pad.
    BranchInst::Create(LPad, NewBB);

    // Now update DominatorTree and DominanceFrontier analysis information.
    if (DT)
      DT->splitBlock(NewBB);
    if (DF)
      DF->splitBlock(NewBB);

    // Remember the newly constructed landing pad.  The original landing pad
    // LPad is no longer a landing pad now that all unwind edges have been
    // revectored to NewBB.
    LandingPads.insert(NewBB);
    ++NumLandingPadsSplit;
    Changed = true;
  }

  return Changed;
}

/// LowerUnwinds - Turn unwind instructions into calls to _Unwind_Resume,
/// rethrowing any previously caught exception.  This will crash horribly
/// at runtime if there is no such exception: using unwind to throw a new
/// exception is currently not supported.
bool DwarfEHPrepare::LowerUnwinds() {
  SmallVector<TerminatorInst*, 16> UnwindInsts;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (isa<UnwindInst>(TI))
      UnwindInsts.push_back(TI);
  }

  if (UnwindInsts.empty()) return false;

  // Find the rewind function if we didn't already.
  if (!RewindFunction) {
    LLVMContext &Ctx = UnwindInsts[0]->getContext();
    std::vector<const Type*>
      Params(1, Type::getInt8PtrTy(Ctx));
    FunctionType *FTy = FunctionType::get(Type::getVoidTy(Ctx),
                                          Params, false);
    const char *RewindName = TLI->getLibcallName(RTLIB::UNWIND_RESUME);
    RewindFunction = F->getParent()->getOrInsertFunction(RewindName, FTy);
  }

  bool Changed = false;

  for (SmallVectorImpl<TerminatorInst*>::iterator
         I = UnwindInsts.begin(), E = UnwindInsts.end(); I != E; ++I) {
    TerminatorInst *TI = *I;

    // Replace the unwind instruction with a call to _Unwind_Resume (or the
    // appropriate target equivalent) followed by an UnreachableInst.

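    // Schematically (exact IR syntax depends on the LLVM revision; the callee
    // may be a target-specific rewind function rather than _Unwind_Resume):
    //   before:  unwind
    //   after:   call void @_Unwind_Resume(i8* %exn)
    //            unreachable
    // where %exn is produced by CreateReadOfExceptionValue for this block.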
    // Create the call...
    CallInst *CI = CallInst::Create(RewindFunction,
                                    CreateReadOfExceptionValue(TI->getParent()),
                                    "", TI);
    CI->setCallingConv(TLI->getLibcallCallingConv(RTLIB::UNWIND_RESUME));
    // ...followed by an UnreachableInst.
    new UnreachableInst(TI->getContext(), TI);

    // Nuke the unwind instruction.
    TI->eraseFromParent();
    ++NumUnwindsLowered;
    Changed = true;
  }

  return Changed;
}

/// MoveExceptionValueCalls - Ensure that eh.exception is only ever called from
/// landing pads by replacing calls outside of landing pads with loads from a
/// stack temporary.  Move eh.exception calls inside landing pads to the start
/// of the landing pad (optional, but may make things simpler for later passes).
bool DwarfEHPrepare::MoveExceptionValueCalls() {
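  // Overall scheme (together with FinishStackTemporaries and
  // PromoteStackTemporaries): eh.exception calls outside landing pads become
  // loads of ExceptionValueVar, each landing pad stores its eh.exception
  // result into that variable, and mem2reg later folds the variable back into
  // SSA form when not compiling fast.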
  // If the eh.exception intrinsic is not declared in the module then there is
  // nothing to do.  Speed up compilation by checking for this common case.
  if (!ExceptionValueIntrinsic &&
      !F->getParent()->getFunction(Intrinsic::getName(Intrinsic::eh_exception)))
    return false;

  bool Changed = false;

  for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), EE = BB->end(); II != EE;)
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++))
        if (CI->getIntrinsicID() == Intrinsic::eh_exception) {
          if (!CI->use_empty()) {
            Value *ExceptionValue = CreateReadOfExceptionValue(BB);
            if (CI == ExceptionValue) {
              // The call was at the start of a landing pad - leave it alone.
              assert(LandingPads.count(BB) &&
                     "Created eh.exception call outside landing pad!");
              continue;
            }
            CI->replaceAllUsesWith(ExceptionValue);
          }
          CI->eraseFromParent();
          ++NumExceptionValuesMoved;
          Changed = true;
        }
  }

  return Changed;
}

/// FinishStackTemporaries - If we introduced a stack variable to hold the
/// exception value then initialize it in each landing pad.
bool DwarfEHPrepare::FinishStackTemporaries() {
  if (!ExceptionValueVar)
    // Nothing to do.
    return false;

  bool Changed = false;

  // Make sure that there is a store of the exception value at the start of
  // each landing pad.
  for (BBSet::iterator LI = LandingPads.begin(), LE = LandingPads.end();
       LI != LE; ++LI) {
    Instruction *ExceptionValue = CreateReadOfExceptionValue(*LI);
    Instruction *Store = new StoreInst(ExceptionValue, ExceptionValueVar);
    Store->insertAfter(ExceptionValue);
    Changed = true;
  }

  return Changed;
}

/// PromoteStackTemporaries - Turn any stack temporaries we introduced into
/// registers if possible.
bool DwarfEHPrepare::PromoteStackTemporaries() {
  if (ExceptionValueVar && DT && DF && isAllocaPromotable(ExceptionValueVar)) {
    // Turn the exception temporary into registers and phi nodes if possible.
    std::vector<AllocaInst*> Allocas(1, ExceptionValueVar);
    PromoteMemToReg(Allocas, *DT, *DF);
    return true;
  }
  return false;
}

/// CreateExceptionValueCall - Insert a call to the eh.exception intrinsic at
/// the start of the basic block (unless there already is one, in which case
/// the existing call is returned).
Instruction *DwarfEHPrepare::CreateExceptionValueCall(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHI();
  // Is this a call to eh.exception?
  if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(Start))
    if (CI->getIntrinsicID() == Intrinsic::eh_exception)
      // Reuse the existing call.
      return Start;

  // Find the eh.exception intrinsic if we didn't already.
  if (!ExceptionValueIntrinsic)
    ExceptionValueIntrinsic = Intrinsic::getDeclaration(F->getParent(),
                                                       Intrinsic::eh_exception);

  // Create the call.
  return CallInst::Create(ExceptionValueIntrinsic, "eh.value.call", Start);
}

/// CreateValueLoad - Insert a load of the exception value stack variable
/// (creating it if necessary) at the start of the basic block (unless
/// there already is a load, in which case the existing load is returned).
Instruction *DwarfEHPrepare::CreateValueLoad(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHI();
  // Is this a load of the exception temporary?
  if (ExceptionValueVar)
    if (LoadInst *LI = dyn_cast<LoadInst>(Start))
      if (LI->getPointerOperand() == ExceptionValueVar)
        // Reuse the existing load.
        return Start;

  // Create the temporary if we didn't already.
  if (!ExceptionValueVar) {
    ExceptionValueVar = new AllocaInst(PointerType::getUnqual(
           Type::getInt8Ty(BB->getContext())), "eh.value", F->begin()->begin());
    ++NumStackTempsIntroduced;
  }

  // Load the value.
  return new LoadInst(ExceptionValueVar, "eh.value.load", Start);
}

bool DwarfEHPrepare::runOnFunction(Function &Fn) {
  bool Changed = false;

  // Initialize internal state.
  DT = getAnalysisIfAvailable<DominatorTree>();
  DF = getAnalysisIfAvailable<DominanceFrontier>();
  ExceptionValueVar = 0;
  F = &Fn;

  // Ensure that only unwind edges end at landing pads (a landing pad is a
  // basic block where an invoke unwind edge ends).
  Changed |= NormalizeLandingPads();

  // Turn unwind instructions into libcalls.
  Changed |= LowerUnwinds();

  // TODO: Move eh.selector calls to landing pads and combine them.

  // Move eh.exception calls to landing pads.
  Changed |= MoveExceptionValueCalls();

  // Initialize any stack temporaries we introduced.
  Changed |= FinishStackTemporaries();

  // Turn any stack temporaries into registers if possible.
  if (!CompileFast)
    Changed |= PromoteStackTemporaries();

  Changed |= HandleURoRInvokes();

  LandingPads.clear();

  return Changed;
}