CGException.cpp revision 8262b6a44c98cf14e1d5f347a01e6bf44858198f
1//===--- CGException.cpp - Emit LLVM Code for C++ exceptions --------------===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This contains code dealing with C++ exception related code generation.
11//
12//===----------------------------------------------------------------------===//
13
14#include "clang/AST/StmtCXX.h"
15
16#include "llvm/Intrinsics.h"
17#include "llvm/Support/CallSite.h"
18
19#include "CodeGenFunction.h"
20#include "CGException.h"
21
22using namespace clang;
23using namespace CodeGen;
24
25/// Push an entry of the given size onto this protected-scope stack.
26char *EHScopeStack::allocate(size_t Size) {
27  if (!StartOfBuffer) {
28    unsigned Capacity = 1024;
29    while (Capacity < Size) Capacity *= 2;
30    StartOfBuffer = new char[Capacity];
31    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
32  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
33    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
34    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);
35
36    unsigned NewCapacity = CurrentCapacity;
37    do {
38      NewCapacity *= 2;
39    } while (NewCapacity < UsedCapacity + Size);
40
41    char *NewStartOfBuffer = new char[NewCapacity];
42    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
43    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
44    memcpy(NewStartOfData, StartOfData, UsedCapacity);
45    delete [] StartOfBuffer;
46    StartOfBuffer = NewStartOfBuffer;
47    EndOfBuffer = NewEndOfBuffer;
48    StartOfData = NewStartOfData;
49  }
50
51  assert(StartOfBuffer + Size <= StartOfData);
52  StartOfData -= Size;
53  return StartOfData;
54}
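
// Editor's note: a minimal, self-contained sketch of the allocation scheme
// used above, added for illustration only (nothing in this file refers to
// it).  Records are packed at the *end* of a doubling buffer and the buffer
// grows downward, so the start of data is always the innermost scope.
// memcpy and size_t are usable here because this file already uses both; a
// standalone build of the sketch would add <cstddef> and <cstring>.
namespace {
  struct GrowsDownBufferSketch {
    char *Begin, *End, *Data;   // [Data, End) holds the live records.

    GrowsDownBufferSketch() : Begin(0), End(0), Data(0) {}
    ~GrowsDownBufferSketch() { delete [] Begin; }

    char *allocate(size_t Size) {
      if (!Begin) {
        size_t Capacity = 1024;
        while (Capacity < Size) Capacity *= 2;
        Begin = new char[Capacity];
        Data = End = Begin + Capacity;
      } else if (static_cast<size_t>(Data - Begin) < Size) {
        size_t Used = End - Data;
        size_t NewCapacity = End - Begin;
        do NewCapacity *= 2; while (NewCapacity < Used + Size);

        char *NewBegin = new char[NewCapacity];
        char *NewEnd = NewBegin + NewCapacity;
        char *NewData = NewEnd - Used;
        memcpy(NewData, Data, Used);   // keep the live records at the end
        delete [] Begin;
        Begin = NewBegin; End = NewEnd; Data = NewData;
      }
      Data -= Size;                    // the new record becomes the innermost
      return Data;
    }

  private:
    // Not copyable; the sketch owns its buffer.
    GrowsDownBufferSketch(const GrowsDownBufferSketch&);
    void operator=(const GrowsDownBufferSketch&);
  };
}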
55
56EHScopeStack::stable_iterator
57EHScopeStack::getEnclosingEHCleanup(iterator it) const {
58  assert(it != end());
59  do {
60    if (isa<EHCleanupScope>(*it)) {
61      if (cast<EHCleanupScope>(*it).isEHCleanup())
62        return stabilize(it);
63      return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
64    }
65    if (isa<EHLazyCleanupScope>(*it)) {
66      if (cast<EHLazyCleanupScope>(*it).isEHCleanup())
67        return stabilize(it);
68      return cast<EHLazyCleanupScope>(*it).getEnclosingEHCleanup();
69    }
70    ++it;
71  } while (it != end());
72  return stable_end();
73}
74
75
76void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) {
77  assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
78  char *Buffer = allocate(EHLazyCleanupScope::getSizeForCleanupSize(Size));
79  bool IsNormalCleanup = Kind != EHCleanup;
80  bool IsEHCleanup = Kind != NormalCleanup;
81  EHLazyCleanupScope *Scope =
82    new (Buffer) EHLazyCleanupScope(IsNormalCleanup,
83                                    IsEHCleanup,
84                                    Size,
85                                    BranchFixups.size(),
86                                    InnermostNormalCleanup,
87                                    InnermostEHCleanup);
88  if (IsNormalCleanup)
89    InnermostNormalCleanup = stable_begin();
90  if (IsEHCleanup)
91    InnermostEHCleanup = stable_begin();
92
93  return Scope->getCleanupBuffer();
94}
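
// Editor's note: clients do not call this raw-memory overload directly; they
// go through the templated member EHScopeStack::pushLazyCleanup<T>(Kind, ...)
// used elsewhere in this file, which presumably allocates space for T via
// this method and placement-constructs it in the returned buffer.  The usage
// pattern, with a hypothetical cleanup type shown only for orientation, is:
//
//   \code
//     struct MyCleanup : EHScopeStack::LazyCleanup {   // hypothetical
//       llvm::Value *V;
//       MyCleanup(llvm::Value *V) : V(V) {}
//       void Emit(CodeGenFunction &CGF, bool IsForEH) { /* emit IR here */ }
//     };
//     CGF.EHStack.pushLazyCleanup<MyCleanup>(NormalAndEHCleanup, SomeValue);
//   \endcode
//
// FreeExceptionCleanup and CallEndCatch later in this file are real instances
// of the same pattern.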
95
96void EHScopeStack::pushCleanup(llvm::BasicBlock *NormalEntry,
97                               llvm::BasicBlock *NormalExit,
98                               llvm::BasicBlock *EHEntry,
99                               llvm::BasicBlock *EHExit) {
100  char *Buffer = allocate(EHCleanupScope::getSize());
101  new (Buffer) EHCleanupScope(BranchFixups.size(),
102                              InnermostNormalCleanup,
103                              InnermostEHCleanup,
104                              NormalEntry, NormalExit, EHEntry, EHExit);
105  if (NormalEntry)
106    InnermostNormalCleanup = stable_begin();
107  if (EHEntry)
108    InnermostEHCleanup = stable_begin();
109}
110
111void EHScopeStack::popCleanup() {
112  assert(!empty() && "popping exception stack when not empty");
113
114  if (isa<EHLazyCleanupScope>(*begin())) {
115    EHLazyCleanupScope &Cleanup = cast<EHLazyCleanupScope>(*begin());
116    InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
117    InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
118    StartOfData += Cleanup.getAllocatedSize();
119  } else {
120    assert(isa<EHCleanupScope>(*begin()));
121    EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
122    InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
123    InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
124    StartOfData += EHCleanupScope::getSize();
125  }
126
127  // Check whether we can shrink the branch-fixups stack.
128  if (!BranchFixups.empty()) {
129    // If we no longer have any normal cleanups, all the fixups are
130    // complete.
131    if (!hasNormalCleanups())
132      BranchFixups.clear();
133
134    // Otherwise we can still trim out unnecessary nulls.
135    else
136      popNullFixups();
137  }
138}
139
140EHFilterScope *EHScopeStack::pushFilter(unsigned NumFilters) {
141  char *Buffer = allocate(EHFilterScope::getSizeForNumFilters(NumFilters));
142  CatchDepth++;
143  return new (Buffer) EHFilterScope(NumFilters);
144}
145
146void EHScopeStack::popFilter() {
147  assert(!empty() && "popping exception stack when not empty");
148
149  EHFilterScope &Filter = cast<EHFilterScope>(*begin());
150  StartOfData += EHFilterScope::getSizeForNumFilters(Filter.getNumFilters());
151
152  assert(CatchDepth > 0 && "mismatched filter push/pop");
153  CatchDepth--;
154}
155
156EHCatchScope *EHScopeStack::pushCatch(unsigned NumHandlers) {
157  char *Buffer = allocate(EHCatchScope::getSizeForNumHandlers(NumHandlers));
158  CatchDepth++;
159  return new (Buffer) EHCatchScope(NumHandlers);
160}
161
162void EHScopeStack::pushTerminate() {
163  char *Buffer = allocate(EHTerminateScope::getSize());
164  CatchDepth++;
165  new (Buffer) EHTerminateScope();
166}
167
168/// Remove any 'null' fixups on the stack.  However, we can't pop more
169/// fixups than the fixup depth on the innermost normal cleanup, or
170/// else fixups that we try to add to that cleanup will end up in the
171/// wrong place.  We *could* try to shrink fixup depths, but that's
172/// actually a lot of work for little benefit.
173void EHScopeStack::popNullFixups() {
174  // We expect this to only be called when there's still an innermost
175  // normal cleanup;  otherwise there really shouldn't be any fixups.
176  assert(hasNormalCleanups());
177
178  EHScopeStack::iterator it = find(InnermostNormalCleanup);
179  unsigned MinSize;
180  if (isa<EHCleanupScope>(*it))
181    MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
182  else
183    MinSize = cast<EHLazyCleanupScope>(*it).getFixupDepth();
184  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
185
186  while (BranchFixups.size() > MinSize &&
187         BranchFixups.back().Destination == 0)
188    BranchFixups.pop_back();
189}
190
191void EHScopeStack::resolveBranchFixups(llvm::BasicBlock *Dest) {
192  assert(Dest && "null block passed to resolveBranchFixups");
193
194  if (BranchFixups.empty()) return;
195  assert(hasNormalCleanups() &&
196         "branch fixups exist with no normal cleanups on stack");
197
198  for (unsigned I = 0, E = BranchFixups.size(); I != E; ++I)
199    if (BranchFixups[I].Destination == Dest)
200      BranchFixups[I].Destination = 0;
201
202  popNullFixups();
203}
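
// Editor's note: for orientation, a branch fixup is recorded when a branch
// has to run normal cleanups but its destination has not been resolved yet;
// roughly the kind of source that produces one is (illustrative only):
//
//   \code
//     {
//       LockGuard guard(m);   // pushes a normal cleanup (the destructor)
//       if (bad) goto out;    // 'out' is not emitted yet, so the branch is
//     }                       // recorded as a fixup and resolved later
//   out:
//     return;
//   \endcode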
204
205static llvm::Constant *getAllocateExceptionFn(CodeGenFunction &CGF) {
206  // void *__cxa_allocate_exception(size_t thrown_size);
207  const llvm::Type *SizeTy = CGF.ConvertType(CGF.getContext().getSizeType());
208  std::vector<const llvm::Type*> Args(1, SizeTy);
209
210  const llvm::FunctionType *FTy =
211  llvm::FunctionType::get(llvm::Type::getInt8PtrTy(CGF.getLLVMContext()),
212                          Args, false);
213
214  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_allocate_exception");
215}
216
217static llvm::Constant *getFreeExceptionFn(CodeGenFunction &CGF) {
218  // void __cxa_free_exception(void *thrown_exception);
219  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
220  std::vector<const llvm::Type*> Args(1, Int8PtrTy);
221
222  const llvm::FunctionType *FTy =
223  llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()),
224                          Args, false);
225
226  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_free_exception");
227}
228
229static llvm::Constant *getThrowFn(CodeGenFunction &CGF) {
230  // void __cxa_throw(void *thrown_exception, std::type_info *tinfo,
231  //                  void (*dest) (void *));
232
233  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
234  std::vector<const llvm::Type*> Args(3, Int8PtrTy);
235
236  const llvm::FunctionType *FTy =
237    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()),
238                            Args, false);
239
240  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_throw");
241}
242
243static llvm::Constant *getReThrowFn(CodeGenFunction &CGF) {
244  // void __cxa_rethrow();
245
246  const llvm::FunctionType *FTy =
247    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()), false);
248
249  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_rethrow");
250}
251
252static llvm::Constant *getGetExceptionPtrFn(CodeGenFunction &CGF) {
253  // void *__cxa_get_exception_ptr(void*);
254  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
255  std::vector<const llvm::Type*> Args(1, Int8PtrTy);
256
257  const llvm::FunctionType *FTy =
258    llvm::FunctionType::get(Int8PtrTy, Args, false);
259
260  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_get_exception_ptr");
261}
262
263static llvm::Constant *getBeginCatchFn(CodeGenFunction &CGF) {
264  // void *__cxa_begin_catch(void*);
265
266  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
267  std::vector<const llvm::Type*> Args(1, Int8PtrTy);
268
269  const llvm::FunctionType *FTy =
270    llvm::FunctionType::get(Int8PtrTy, Args, false);
271
272  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_begin_catch");
273}
274
275static llvm::Constant *getEndCatchFn(CodeGenFunction &CGF) {
276  // void __cxa_end_catch();
277
278  const llvm::FunctionType *FTy =
279    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()), false);
280
281  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_end_catch");
282}
283
284static llvm::Constant *getUnexpectedFn(CodeGenFunction &CGF) {
285  // void __cxa_call_unexpected(void *thrown_exception);
286
287  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
288  std::vector<const llvm::Type*> Args(1, Int8PtrTy);
289
290  const llvm::FunctionType *FTy =
291    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()),
292                            Args, false);
293
294  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_call_unexpected");
295}
296
297llvm::Constant *CodeGenFunction::getUnwindResumeOrRethrowFn() {
298  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
299  std::vector<const llvm::Type*> Args(1, Int8PtrTy);
300
301  const llvm::FunctionType *FTy =
302    llvm::FunctionType::get(llvm::Type::getVoidTy(getLLVMContext()), Args,
303                            false);
304
305  if (CGM.getLangOptions().SjLjExceptions)
306    return CGM.CreateRuntimeFunction(FTy, "_Unwind_SjLj_Resume");
307  return CGM.CreateRuntimeFunction(FTy, "_Unwind_Resume_or_Rethrow");
308}
309
310static llvm::Constant *getTerminateFn(CodeGenFunction &CGF) {
311  // void std::terminate();  (falls back to abort() outside C++)
312
313  const llvm::FunctionType *FTy =
314    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()), false);
315
316  return CGF.CGM.CreateRuntimeFunction(FTy,
317      CGF.CGM.getLangOptions().CPlusPlus ? "_ZSt9terminatev" : "abort");
318}
319
320static llvm::Constant *getCatchallRethrowFn(CodeGenFunction &CGF,
321                                            const char *Name) {
322  const llvm::Type *Int8PtrTy =
323    llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
324  std::vector<const llvm::Type*> Args(1, Int8PtrTy);
325
326  const llvm::Type *VoidTy = llvm::Type::getVoidTy(CGF.getLLVMContext());
327  const llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, Args, false);
328
329  return CGF.CGM.CreateRuntimeFunction(FTy, Name);
330}
331
332const EHPersonality EHPersonality::GNU_C("__gcc_personality_v0");
333const EHPersonality EHPersonality::NeXT_ObjC("__objc_personality_v0");
334const EHPersonality EHPersonality::GNU_CPlusPlus("__gxx_personality_v0");
335const EHPersonality EHPersonality::GNU_CPlusPlus_SJLJ("__gxx_personality_sj0");
336const EHPersonality EHPersonality::GNU_ObjC("__gnu_objc_personality_v0",
337                                            "objc_exception_throw");
338
339static const EHPersonality &getCPersonality(const LangOptions &L) {
340  return EHPersonality::GNU_C;
341}
342
343static const EHPersonality &getObjCPersonality(const LangOptions &L) {
344  if (L.NeXTRuntime) {
345    if (L.ObjCNonFragileABI) return EHPersonality::NeXT_ObjC;
346    else return getCPersonality(L);
347  } else {
348    return EHPersonality::GNU_ObjC;
349  }
350}
351
352static const EHPersonality &getCXXPersonality(const LangOptions &L) {
353  if (L.SjLjExceptions)
354    return EHPersonality::GNU_CPlusPlus_SJLJ;
355  else
356    return EHPersonality::GNU_CPlusPlus;
357}
358
359/// Determines the personality function to use when both C++
360/// and Objective-C exceptions are being caught.
361static const EHPersonality &getObjCXXPersonality(const LangOptions &L) {
362  // The ObjC personality defers to the C++ personality for non-ObjC
363  // handlers.  Unlike the C++ case, we use the same personality
364  // function on targets using (backend-driven) SJLJ EH.
365  if (L.NeXTRuntime) {
366    if (L.ObjCNonFragileABI)
367      return EHPersonality::NeXT_ObjC;
368
369    // In the fragile ABI, just use C++ exception handling and hope
370    // they're not doing crazy exception mixing.
371    else
372      return getCXXPersonality(L);
373  }
374
375  // The GNU runtime's personality function inherently doesn't support
376  // mixed EH.  Use the C++ personality just to avoid returning null.
377  return getCXXPersonality(L);
378}
379
380const EHPersonality &EHPersonality::get(const LangOptions &L) {
381  if (L.CPlusPlus && L.ObjC1)
382    return getObjCXXPersonality(L);
383  else if (L.CPlusPlus)
384    return getCXXPersonality(L);
385  else if (L.ObjC1)
386    return getObjCPersonality(L);
387  else
388    return getCPersonality(L);
389}
390
391static llvm::Constant *getPersonalityFn(CodeGenFunction &CGF,
392                                        const EHPersonality &Personality) {
393  const char *Name = Personality.getPersonalityFnName();
394
395  llvm::Constant *Fn =
396    CGF.CGM.CreateRuntimeFunction(llvm::FunctionType::get(
397                                    llvm::Type::getInt32Ty(
398                                      CGF.CGM.getLLVMContext()),
399                                    true),
400                            Name);
401  return llvm::ConstantExpr::getBitCast(Fn, CGF.CGM.PtrToInt8Ty);
402}
403
404/// Returns the value to inject into a selector to indicate the
405/// presence of a catch-all.
406static llvm::Constant *getCatchAllValue(CodeGenFunction &CGF) {
407  // Possibly we should use @llvm.eh.catch.all.value here.
408  return llvm::ConstantPointerNull::get(CGF.CGM.PtrToInt8Ty);
409}
410
411/// Returns the value to inject into a selector to indicate the
412/// presence of a cleanup.
413static llvm::Constant *getCleanupValue(CodeGenFunction &CGF) {
414  return llvm::ConstantInt::get(CGF.Builder.getInt32Ty(), 0);
415}
416
417namespace {
418  /// A cleanup to free the exception object if its initialization
419  /// throws.
420  struct FreeExceptionCleanup : EHScopeStack::LazyCleanup {
421    FreeExceptionCleanup(llvm::Value *ShouldFreeVar,
422                         llvm::Value *ExnLocVar)
423      : ShouldFreeVar(ShouldFreeVar), ExnLocVar(ExnLocVar) {}
424
425    llvm::Value *ShouldFreeVar;
426    llvm::Value *ExnLocVar;
427
428    void Emit(CodeGenFunction &CGF, bool IsForEH) {
429      llvm::BasicBlock *FreeBB = CGF.createBasicBlock("free-exnobj");
430      llvm::BasicBlock *DoneBB = CGF.createBasicBlock("free-exnobj.done");
431
432      llvm::Value *ShouldFree = CGF.Builder.CreateLoad(ShouldFreeVar,
433                                                       "should-free-exnobj");
434      CGF.Builder.CreateCondBr(ShouldFree, FreeBB, DoneBB);
435      CGF.EmitBlock(FreeBB);
436      llvm::Value *ExnLocLocal = CGF.Builder.CreateLoad(ExnLocVar, "exnobj");
437      CGF.Builder.CreateCall(getFreeExceptionFn(CGF), ExnLocLocal)
438        ->setDoesNotThrow();
439      CGF.EmitBlock(DoneBB);
440    }
441  };
442}
443
444// Emits an exception expression into the given location.  This
445// differs from EmitAnyExprToMem only in that, if a final copy-ctor
446// call is required, an exception within that copy ctor causes
447// std::terminate to be invoked.
448static void EmitAnyExprToExn(CodeGenFunction &CGF, const Expr *E,
449                             llvm::Value *ExnLoc) {
450  // We want to release the allocated exception object if this
451  // expression throws.  We do this by pushing an EH-only cleanup
452  // block which, furthermore, deactivates itself after the expression
453  // is complete.
454  llvm::AllocaInst *ShouldFreeVar =
455    CGF.CreateTempAlloca(llvm::Type::getInt1Ty(CGF.getLLVMContext()),
456                         "should-free-exnobj.var");
457  CGF.InitTempAlloca(ShouldFreeVar,
458                     llvm::ConstantInt::getFalse(CGF.getLLVMContext()));
459
460  // A variable holding the exception pointer.  This is necessary
461  // because the throw expression does not necessarily dominate the
462  // cleanup, for example if it appears in a conditional expression.
463  llvm::AllocaInst *ExnLocVar =
464    CGF.CreateTempAlloca(ExnLoc->getType(), "exnobj.var");
465
466  // Make sure the exception object is cleaned up if there's an
467  // exception during initialization.
468  // FIXME: stmt expressions might require this to be a normal
469  // cleanup, too.
470  CGF.EHStack.pushLazyCleanup<FreeExceptionCleanup>(EHCleanup,
471                                                    ShouldFreeVar,
472                                                    ExnLocVar);
473  EHScopeStack::stable_iterator Cleanup = CGF.EHStack.stable_begin();
474
475  CGF.Builder.CreateStore(ExnLoc, ExnLocVar);
476  CGF.Builder.CreateStore(llvm::ConstantInt::getTrue(CGF.getLLVMContext()),
477                          ShouldFreeVar);
478
479  // __cxa_allocate_exception returns a void*;  we need to cast this
480  // to the appropriate type for the object.
481  const llvm::Type *Ty = CGF.ConvertType(E->getType())->getPointerTo();
482  llvm::Value *TypedExnLoc = CGF.Builder.CreateBitCast(ExnLoc, Ty);
483
484  // FIXME: this isn't quite right!  If there's a final unelided call
485  // to a copy constructor, then according to [except.terminate]p1 we
486  // must call std::terminate() if that constructor throws, because
487  // technically that copy occurs after the exception expression is
488  // evaluated but before the exception is caught.  But the best way
489  // to handle that is to teach EmitAggExpr to do the final copy
490  // differently if it can't be elided.
491  CGF.EmitAnyExprToMem(E, TypedExnLoc, /*Volatile*/ false);
492
493  CGF.Builder.CreateStore(llvm::ConstantInt::getFalse(CGF.getLLVMContext()),
494                          ShouldFreeVar);
495
496  // Technically, the exception object is like a temporary; it has to
497  // be cleaned up when its full-expression is complete.
498  // Unfortunately, the AST represents full-expressions by creating a
499  // CXXExprWithTemporaries, which it only does when there are actually
500  // temporaries.
501  //
502  // If any cleanups have been added since we pushed ours, they must
503  // be from temporaries;  this will get popped at the same time.
504  // Otherwise we need to pop ours off.  FIXME: this is very brittle.
505  if (Cleanup == CGF.EHStack.stable_begin())
506    CGF.PopCleanupBlock();
507}
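
// Editor's note: the ExnLocVar indirection above matters because, as noted,
// the throw need not dominate the cleanup; a conditional expression is the
// typical case (illustrative only):
//
//   \code
//     int f(bool b, Widget w) { return b ? throw BadWidget(w) : 0; }
//   \endcode
//
// Because ExnLoc is computed on the throwing arm only, the cleanup reloads
// the pointer from ExnLocVar rather than using the SSA value directly.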
508
509llvm::Value *CodeGenFunction::getExceptionSlot() {
510  if (!ExceptionSlot) {
511    const llvm::Type *i8p = llvm::Type::getInt8PtrTy(getLLVMContext());
512    ExceptionSlot = CreateTempAlloca(i8p, "exn.slot");
513  }
514  return ExceptionSlot;
515}
516
517void CodeGenFunction::EmitCXXThrowExpr(const CXXThrowExpr *E) {
518  if (!E->getSubExpr()) {
519    if (getInvokeDest()) {
520      Builder.CreateInvoke(getReThrowFn(*this),
521                           getUnreachableBlock(),
522                           getInvokeDest())
523        ->setDoesNotReturn();
524    } else {
525      Builder.CreateCall(getReThrowFn(*this))->setDoesNotReturn();
526      Builder.CreateUnreachable();
527    }
528
529    // Clear the insertion point to indicate we are in unreachable code.
530    Builder.ClearInsertionPoint();
531    return;
532  }
533
534  QualType ThrowType = E->getSubExpr()->getType();
535
536  // Now allocate the exception object.
537  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
538  uint64_t TypeSize = getContext().getTypeSizeInChars(ThrowType).getQuantity();
539
540  llvm::Constant *AllocExceptionFn = getAllocateExceptionFn(*this);
541  llvm::CallInst *ExceptionPtr =
542    Builder.CreateCall(AllocExceptionFn,
543                       llvm::ConstantInt::get(SizeTy, TypeSize),
544                       "exception");
545  ExceptionPtr->setDoesNotThrow();
546
547  EmitAnyExprToExn(*this, E->getSubExpr(), ExceptionPtr);
548
549  // Now throw the exception.
550  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
551  llvm::Constant *TypeInfo = CGM.GetAddrOfRTTIDescriptor(ThrowType, true);
552
553  // The address of the destructor.  If the exception type has a
554  // trivial destructor (or isn't a record), we just pass null.
555  llvm::Constant *Dtor = 0;
556  if (const RecordType *RecordTy = ThrowType->getAs<RecordType>()) {
557    CXXRecordDecl *Record = cast<CXXRecordDecl>(RecordTy->getDecl());
558    if (!Record->hasTrivialDestructor()) {
559      CXXDestructorDecl *DtorD = Record->getDestructor();
560      Dtor = CGM.GetAddrOfCXXDestructor(DtorD, Dtor_Complete);
561      Dtor = llvm::ConstantExpr::getBitCast(Dtor, Int8PtrTy);
562    }
563  }
564  if (!Dtor) Dtor = llvm::Constant::getNullValue(Int8PtrTy);
565
566  if (getInvokeDest()) {
567    llvm::InvokeInst *ThrowCall =
568      Builder.CreateInvoke3(getThrowFn(*this),
569                            getUnreachableBlock(), getInvokeDest(),
570                            ExceptionPtr, TypeInfo, Dtor);
571    ThrowCall->setDoesNotReturn();
572  } else {
573    llvm::CallInst *ThrowCall =
574      Builder.CreateCall3(getThrowFn(*this), ExceptionPtr, TypeInfo, Dtor);
575    ThrowCall->setDoesNotReturn();
576    Builder.CreateUnreachable();
577  }
578
579  // Clear the insertion point to indicate we are in unreachable code.
580  Builder.ClearInsertionPoint();
581
582  // FIXME: For now, emit a dummy basic block because expr emitters in general
583  // are not ready to handle emitting expressions at unreachable points.
584  EnsureInsertPoint();
585}
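
// Editor's note: putting the pieces above together, a throw expression such
// as (illustrative only)
//
//   \code
//     throw SomeError(arg);
//   \endcode
//
// conceptually lowers to: __cxa_allocate_exception(sizeof(SomeError)); then
// construction of SomeError in the returned buffer, guarded by the
// FreeExceptionCleanup pushed in EmitAnyExprToExn; then __cxa_throw(exn,
// rtti-for-SomeError, dtor-or-null), which never returns (hence the cleared
// insertion point and the dummy block).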
586
587void CodeGenFunction::EmitStartEHSpec(const Decl *D) {
588  if (!Exceptions)
589    return;
590
591  const FunctionDecl* FD = dyn_cast_or_null<FunctionDecl>(D);
592  if (FD == 0)
593    return;
594  const FunctionProtoType *Proto = FD->getType()->getAs<FunctionProtoType>();
595  if (Proto == 0)
596    return;
597
598  assert(!Proto->hasAnyExceptionSpec() && "function with parameter pack");
599
600  if (!Proto->hasExceptionSpec())
601    return;
602
603  unsigned NumExceptions = Proto->getNumExceptions();
604  EHFilterScope *Filter = EHStack.pushFilter(NumExceptions);
605
606  for (unsigned I = 0; I != NumExceptions; ++I) {
607    QualType Ty = Proto->getExceptionType(I);
608    QualType ExceptType = Ty.getNonReferenceType().getUnqualifiedType();
609    llvm::Value *EHType = CGM.GetAddrOfRTTIDescriptor(ExceptType, true);
610    Filter->setFilter(I, EHType);
611  }
612}
613
614void CodeGenFunction::EmitEndEHSpec(const Decl *D) {
615  if (!Exceptions)
616    return;
617
618  const FunctionDecl* FD = dyn_cast_or_null<FunctionDecl>(D);
619  if (FD == 0)
620    return;
621  const FunctionProtoType *Proto = FD->getType()->getAs<FunctionProtoType>();
622  if (Proto == 0)
623    return;
624
625  if (!Proto->hasExceptionSpec())
626    return;
627
628  EHStack.popFilter();
629}
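
// Editor's note: these two functions bracket the body of a function whose
// declaration carries a dynamic exception specification, e.g.
// (illustrative only):
//
//   \code
//     void f() throw(ErrorA, ErrorB);   // filter of two RTTI descriptors
//   \endcode
//
// An exception matching neither type reaches the filter arm of the landing
// pad built in EmitLandingPad below and ends up in __cxa_call_unexpected.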
630
631void CodeGenFunction::EmitCXXTryStmt(const CXXTryStmt &S) {
632  EnterCXXTryStmt(S);
633  EmitStmt(S.getTryBlock());
634  ExitCXXTryStmt(S);
635}
636
637void CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
638  unsigned NumHandlers = S.getNumHandlers();
639  EHCatchScope *CatchScope = EHStack.pushCatch(NumHandlers);
640
641  for (unsigned I = 0; I != NumHandlers; ++I) {
642    const CXXCatchStmt *C = S.getHandler(I);
643
644    llvm::BasicBlock *Handler = createBasicBlock("catch");
645    if (C->getExceptionDecl()) {
646      // FIXME: Dropping the reference type on the type info makes it
647      // impossible to correctly implement catch-by-reference
648      // semantics for pointers.  Unfortunately, this is what all
649      // existing compilers do, and it's not clear that the standard
650      // personality routine is capable of doing this right.  See C++ DR 388:
651      //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#388
652      QualType CaughtType = C->getCaughtType();
653      CaughtType = CaughtType.getNonReferenceType().getUnqualifiedType();
654      llvm::Value *TypeInfo = CGM.GetAddrOfRTTIDescriptor(CaughtType, true);
655      CatchScope->setHandler(I, TypeInfo, Handler);
656    } else {
657      // No exception decl indicates '...', a catch-all.
658      CatchScope->setCatchAllHandler(I, Handler);
659    }
660  }
661}
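
// Editor's note: for a statement such as (illustrative only)
//
//   \code
//     try {
//       mayThrow();
//     } catch (DerivedError &e) {   // typed handler: RTTI for DerivedError,
//                                   // with the reference dropped (see FIXME)
//     } catch (...) {               // catch-all handler: null type info
//     }
//   \endcode
//
// the catch scope pushed here records one Handler per clause in source order.
// The dispatch code is built by EmitLandingPad and the handler bodies are
// emitted by ExitCXXTryStmt.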
662
663/// Check whether this is a non-EH scope, i.e. a scope which doesn't
664/// affect exception handling.  Currently, the only non-EH scopes are
665/// normal-only cleanup scopes.
666static bool isNonEHScope(const EHScope &S) {
667  switch (S.getKind()) {
668  case EHScope::Cleanup:
669    return !cast<EHCleanupScope>(S).isEHCleanup();
670  case EHScope::LazyCleanup:
671    return !cast<EHLazyCleanupScope>(S).isEHCleanup();
672  case EHScope::Filter:
673  case EHScope::Catch:
674  case EHScope::Terminate:
675    return false;
676  }
677
678  // Suppress warning.
679  return false;
680}
681
682llvm::BasicBlock *CodeGenFunction::getInvokeDestImpl() {
683  assert(EHStack.requiresLandingPad());
684  assert(!EHStack.empty());
685
686  if (!Exceptions)
687    return 0;
688
689  // Check the innermost scope for a cached landing pad.  If this is
690  // a non-EH cleanup, we'll check enclosing scopes in EmitLandingPad.
691  llvm::BasicBlock *LP = EHStack.begin()->getCachedLandingPad();
692  if (LP) return LP;
693
694  // Build the landing pad for this scope.
695  LP = EmitLandingPad();
696  assert(LP);
697
698  // Cache the landing pad on the innermost scope.  If this is a
699  // non-EH scope, cache the landing pad on the enclosing scope, too.
700  for (EHScopeStack::iterator ir = EHStack.begin(); true; ++ir) {
701    ir->setCachedLandingPad(LP);
702    if (!isNonEHScope(*ir)) break;
703  }
704
705  return LP;
706}
707
708llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
709  assert(EHStack.requiresLandingPad());
710
711  // This function contains a hack to work around a design flaw in
712  // LLVM's EH IR which breaks semantics after inlining.  This same
713  // hack is implemented in llvm-gcc.
714  //
715  // The LLVM EH abstraction is basically a thin veneer over the
716  // traditional GCC zero-cost design: for each range of instructions
717  // in the function, there is (at most) one "landing pad" with an
718  // associated chain of EH actions.  A language-specific personality
719  // function interprets this chain of actions and (1) decides whether
720  // or not to resume execution at the landing pad and (2) if so,
721  // provides an integer indicating why it's stopping.  In LLVM IR,
722  // the association of a landing pad with a range of instructions is
723  // achieved via an invoke instruction, the chain of actions becomes
724  // the arguments to the @llvm.eh.selector call, and the selector
725  // call returns the integer indicator.  Other than the required
726  // presence of two intrinsic function calls in the landing pad,
727  // the IR exactly describes the layout of the output code.
728  //
729  // A principal advantage of this design is that it is completely
730  // language-agnostic; in theory, the LLVM optimizers can treat
731  // landing pads neutrally, and targets need only know how to lower
732  // the intrinsics to have a functioning exceptions system (assuming
733  // that platform exceptions follow something approximately like the
734  // GCC design).  Unfortunately, landing pads cannot be combined in a
735  // language-agnostic way: given selectors A and B, there is no way
736  // to make a single landing pad which faithfully represents the
737  // semantics of propagating an exception first through A, then
738  // through B, without knowing how the personality will interpret the
739  // (lowered form of the) selectors.  This means that inlining has no
740  // choice but to crudely chain invokes (i.e., to ignore invokes in
741  // the inlined function, but to turn all unwindable calls into
742  // invokes), which is only semantically valid if every unwind stops
743  // at every landing pad.
744  //
745  // Therefore, the invoke-inline hack is to guarantee that every
746  // landing pad has a catch-all.
747  const bool UseInvokeInlineHack = true;
748
749  for (EHScopeStack::iterator ir = EHStack.begin(); ; ) {
750    assert(ir != EHStack.end() &&
751           "stack requiring landing pad is nothing but non-EH scopes?");
752
753    // If this is a terminate scope, just use the singleton terminate
754    // landing pad.
755    if (isa<EHTerminateScope>(*ir))
756      return getTerminateLandingPad();
757
758    // If this isn't an EH scope, iterate; otherwise break out.
759    if (!isNonEHScope(*ir)) break;
760    ++ir;
761
762    // We haven't checked this scope for a cached landing pad yet.
763    if (llvm::BasicBlock *LP = ir->getCachedLandingPad())
764      return LP;
765  }
766
767  // Save the current IR generation state.
768  CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
769
770  const EHPersonality &Personality =
771    EHPersonality::get(CGM.getLangOptions());
772
773  // Create and configure the landing pad.
774  llvm::BasicBlock *LP = createBasicBlock("lpad");
775  EmitBlock(LP);
776
777  // Save the exception pointer.  It's safe to use a single exception
778  // pointer per function because EH cleanups can never have nested
779  // try/catches.
780  llvm::CallInst *Exn =
781    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_exception), "exn");
782  Exn->setDoesNotThrow();
783  Builder.CreateStore(Exn, getExceptionSlot());
784
785  // Build the selector arguments.
786  llvm::SmallVector<llvm::Value*, 8> EHSelector;
787  EHSelector.push_back(Exn);
788  EHSelector.push_back(getPersonalityFn(*this, Personality));
789
790  // Accumulate all the handlers in scope.
791  llvm::DenseMap<llvm::Value*, JumpDest> EHHandlers;
792  JumpDest CatchAll;
793  bool HasEHCleanup = false;
794  bool HasEHFilter = false;
795  llvm::SmallVector<llvm::Value*, 8> EHFilters;
796  for (EHScopeStack::iterator I = EHStack.begin(), E = EHStack.end();
797         I != E; ++I) {
798
799    switch (I->getKind()) {
800    case EHScope::LazyCleanup:
801      if (!HasEHCleanup)
802        HasEHCleanup = cast<EHLazyCleanupScope>(*I).isEHCleanup();
803      // We otherwise don't care about cleanups.
804      continue;
805
806    case EHScope::Cleanup:
807      if (!HasEHCleanup)
808        HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup();
809      // We otherwise don't care about cleanups.
810      continue;
811
812    case EHScope::Filter: {
813      assert(I.next() == EHStack.end() && "EH filter is not end of EH stack");
814      assert(!CatchAll.Block && "EH filter reached after catch-all");
815
816      // Filter scopes get added to the selector in weird ways.
817      EHFilterScope &Filter = cast<EHFilterScope>(*I);
818      HasEHFilter = true;
819
820      // Add all the filter values which we aren't already explicitly
821      // catching.
822      for (unsigned I = 0, E = Filter.getNumFilters(); I != E; ++I) {
823        llvm::Value *FV = Filter.getFilter(I);
824        if (!EHHandlers.count(FV))
825          EHFilters.push_back(FV);
826      }
827      goto done;
828    }
829
830    case EHScope::Terminate:
831      // Terminate scopes are basically catch-alls.
832      assert(!CatchAll.Block);
833      CatchAll.Block = getTerminateHandler();
834      CatchAll.ScopeDepth = EHStack.getEnclosingEHCleanup(I);
835      goto done;
836
837    case EHScope::Catch:
838      break;
839    }
840
841    EHCatchScope &Catch = cast<EHCatchScope>(*I);
842    for (unsigned HI = 0, HE = Catch.getNumHandlers(); HI != HE; ++HI) {
843      EHCatchScope::Handler Handler = Catch.getHandler(HI);
844
845      // Catch-all.  We should only have one of these per catch.
846      if (!Handler.Type) {
847        assert(!CatchAll.Block);
848        CatchAll.Block = Handler.Block;
849        CatchAll.ScopeDepth = EHStack.getEnclosingEHCleanup(I);
850        continue;
851      }
852
853      // Check whether we already have a handler for this type.
854      JumpDest &Dest = EHHandlers[Handler.Type];
855      if (Dest.Block) continue;
856
857      EHSelector.push_back(Handler.Type);
858      Dest.Block = Handler.Block;
859      Dest.ScopeDepth = EHStack.getEnclosingEHCleanup(I);
860    }
861
862    // Stop if we found a catch-all.
863    if (CatchAll.Block) break;
864  }
865
866 done:
867  unsigned LastToEmitInLoop = EHSelector.size();
868
869  // If we have a catch-all, add null to the selector.
870  if (CatchAll.Block) {
871    EHSelector.push_back(getCatchAllValue(*this));
872
873  // If we have an EH filter, we need to add those handlers in the
874  // right place in the selector, which is to say, at the end.
875  } else if (HasEHFilter) {
876    // Create a filter expression: an integer constant saying how many
877    // filters there are (+1 to avoid ambiguity with 0 for cleanup),
878    // followed by the filter types.  The personality routine only
879    // lands here if the filter doesn't match.
880    EHSelector.push_back(llvm::ConstantInt::get(Builder.getInt32Ty(),
881                                                EHFilters.size() + 1));
882    EHSelector.append(EHFilters.begin(), EHFilters.end());
883
884    // Also check whether we need a cleanup.
885    if (UseInvokeInlineHack || HasEHCleanup)
886      EHSelector.push_back(UseInvokeInlineHack
887                           ? getCatchAllValue(*this)
888                           : getCleanupValue(*this));
889
890  // Otherwise, signal that we at least have cleanups.
891  } else if (UseInvokeInlineHack || HasEHCleanup) {
892    EHSelector.push_back(UseInvokeInlineHack
893                         ? getCatchAllValue(*this)
894                         : getCleanupValue(*this));
895  } else {
896    assert(LastToEmitInLoop > 2);
897    LastToEmitInLoop--;
898  }
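
  // Editor's note: summarizing the branches above, EHSelector now has one of
  // three shapes:
  //   with a catch-all:   { exn, personality, T1..Tn, null }
  //   with an EH filter:  { exn, personality, T1..Tn, N+1, F1..FN }
  //                       plus a trailing catch-all/cleanup marker when the
  //                       invoke-inline hack or an EH cleanup requires one
  //   otherwise:          { exn, personality, T1..Tn } with an optional
  //                       trailing catch-all/cleanup marker
  // where the Ti/Fi are RTTI descriptors, null means catch-all, and the i32 0
  // from getCleanupValue means cleanup-only.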
899
900  assert(EHSelector.size() >= 3 && "selector call has only two arguments!");
901
902  // Tell the backend how to generate the landing pad.
903  llvm::CallInst *Selection =
904    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_selector),
905                       EHSelector.begin(), EHSelector.end(), "eh.selector");
906  Selection->setDoesNotThrow();
907
908  // Select the right handler.
909  llvm::Value *llvm_eh_typeid_for =
910    CGM.getIntrinsic(llvm::Intrinsic::eh_typeid_for);
911
912  // The results of llvm_eh_typeid_for aren't reliable --- at least
913  // not locally --- so we basically have to do this as an 'if' chain.
914  // We walk through the first N-1 catch clauses, testing and chaining,
915  // and then fall into the final clause (which is either a cleanup, a
916  // filter (possibly with a cleanup), a catch-all, or another catch).
917  for (unsigned I = 2; I != LastToEmitInLoop; ++I) {
918    llvm::Value *Type = EHSelector[I];
919    JumpDest Dest = EHHandlers[Type];
920    assert(Dest.Block && "no handler entry for value in selector?");
921
922    // Figure out where to branch on a match.  As a code-size
923    // optimization, if the scope depth matches the innermost cleanup,
924    // we branch directly to the catch handler.
925    llvm::BasicBlock *Match = Dest.Block;
926    bool MatchNeedsCleanup = Dest.ScopeDepth != EHStack.getInnermostEHCleanup();
927    if (MatchNeedsCleanup)
928      Match = createBasicBlock("eh.match");
929
930    llvm::BasicBlock *Next = createBasicBlock("eh.next");
931
932    // Check whether the exception matches.
933    llvm::CallInst *Id
934      = Builder.CreateCall(llvm_eh_typeid_for,
935                           Builder.CreateBitCast(Type, CGM.PtrToInt8Ty));
936    Id->setDoesNotThrow();
937    Builder.CreateCondBr(Builder.CreateICmpEQ(Selection, Id),
938                         Match, Next);
939
940    // Emit match code if necessary.
941    if (MatchNeedsCleanup) {
942      EmitBlock(Match);
943      EmitBranchThroughEHCleanup(Dest);
944    }
945
946    // Continue to the next match.
947    EmitBlock(Next);
948  }
949
950  // Emit the final case in the selector.
951  // This might be a catch-all....
952  if (CatchAll.Block) {
953    assert(isa<llvm::ConstantPointerNull>(EHSelector.back()));
954    EmitBranchThroughEHCleanup(CatchAll);
955
956  // ...or an EH filter...
957  } else if (HasEHFilter) {
958    llvm::Value *SavedSelection = Selection;
959
960    // First, unwind out to the outermost scope if necessary.
961    if (EHStack.hasEHCleanups()) {
962      // The end here might not dominate the beginning, so we might need
963      // to save the selector now in case it is needed later.
964      llvm::AllocaInst *SelectorVar = 0;
965      if (HasEHCleanup) {
966        SelectorVar = CreateTempAlloca(Builder.getInt32Ty(), "selector.var");
967        Builder.CreateStore(Selection, SelectorVar);
968      }
969
970      llvm::BasicBlock *CleanupContBB = createBasicBlock("ehspec.cleanup.cont");
971      EmitBranchThroughEHCleanup(JumpDest(CleanupContBB, EHStack.stable_end()));
972      EmitBlock(CleanupContBB);
973
974      if (HasEHCleanup)
975        SavedSelection = Builder.CreateLoad(SelectorVar, "ehspec.saved-selector");
976    }
977
978    // If there was a cleanup, we'll need to actually check whether we
979    // landed here because the filter triggered.
980    if (UseInvokeInlineHack || HasEHCleanup) {
981      llvm::BasicBlock *RethrowBB = createBasicBlock("cleanup");
982      llvm::BasicBlock *UnexpectedBB = createBasicBlock("ehspec.unexpected");
983
984      llvm::Constant *Zero = llvm::ConstantInt::get(Builder.getInt32Ty(), 0);
985      llvm::Value *FailsFilter =
986        Builder.CreateICmpSLT(SavedSelection, Zero, "ehspec.fails");
987      Builder.CreateCondBr(FailsFilter, UnexpectedBB, RethrowBB);
988
989      // The rethrow block is where we land if this was a cleanup.
990      // TODO: can this be _Unwind_Resume if the InvokeInlineHack is off?
991      EmitBlock(RethrowBB);
992      Builder.CreateCall(getUnwindResumeOrRethrowFn(),
993                         Builder.CreateLoad(getExceptionSlot()))
994        ->setDoesNotReturn();
995      Builder.CreateUnreachable();
996
997      EmitBlock(UnexpectedBB);
998    }
999
1000    // Call __cxa_call_unexpected.  This doesn't need to be an invoke
1001    // because __cxa_call_unexpected magically filters exceptions
1002    // according to the last landing pad the exception was thrown
1003    // into.  Seriously.
1004    Builder.CreateCall(getUnexpectedFn(*this),
1005                       Builder.CreateLoad(getExceptionSlot()))
1006      ->setDoesNotReturn();
1007    Builder.CreateUnreachable();
1008
1009  // ...or a normal catch handler...
1010  } else if (!UseInvokeInlineHack && !HasEHCleanup) {
1011    llvm::Value *Type = EHSelector.back();
1012    EmitBranchThroughEHCleanup(EHHandlers[Type]);
1013
1014  // ...or a cleanup.
1015  } else {
1016    // We emit a jump to a notional label at the outermost unwind state.
1017    llvm::BasicBlock *Unwind = createBasicBlock("eh.resume");
1018    JumpDest Dest(Unwind, EHStack.stable_end());
1019    EmitBranchThroughEHCleanup(Dest);
1020
1021    // The unwind block.  We have to reload the exception here because
1022    // we might have unwound through arbitrary blocks, so the landing
1023    // pad might not dominate.
1024    EmitBlock(Unwind);
1025
1026    // This can always be a call because we necessarily didn't find
1027    // anything on the EH stack which needs our help.
1028    llvm::Constant *RethrowFn;
1029    if (const char *RethrowName = Personality.getCatchallRethrowFnName())
1030      RethrowFn = getCatchallRethrowFn(*this, RethrowName);
1031    else
1032      RethrowFn = getUnwindResumeOrRethrowFn();
1033    Builder.CreateCall(RethrowFn, Builder.CreateLoad(getExceptionSlot()))
1034      ->setDoesNotReturn();
1035    Builder.CreateUnreachable();
1036  }
1037
1038  // Restore the old IR generation state.
1039  Builder.restoreIP(SavedIP);
1040
1041  return LP;
1042}
1043
1044namespace {
1045  /// A cleanup to call __cxa_end_catch.  In many cases, the caught
1046  /// exception type lets us state definitively that the thrown exception
1047  /// type does not have a destructor.  In particular:
1048  ///   - Catch-alls tell us nothing, so we have to conservatively
1049  ///     assume that the thrown exception might have a destructor.
1050  ///   - Catches by reference behave according to their base types.
1051  ///   - Catches of non-record types will only trigger for exceptions
1052  ///     of non-record types, which never have destructors.
1053  ///   - Catches of record types can trigger for arbitrary subclasses
1054  ///     of the caught type, so we have to assume the actual thrown
1055  ///     exception type might have a throwing destructor, even if the
1056  ///     caught type's destructor is trivial or nothrow.
1057  struct CallEndCatch : EHScopeStack::LazyCleanup {
1058    CallEndCatch(bool MightThrow) : MightThrow(MightThrow) {}
1059    bool MightThrow;
1060
1061    void Emit(CodeGenFunction &CGF, bool IsForEH) {
1062      if (!MightThrow) {
1063        CGF.Builder.CreateCall(getEndCatchFn(CGF))->setDoesNotThrow();
1064        return;
1065      }
1066
1067      CGF.EmitCallOrInvoke(getEndCatchFn(CGF), 0, 0);
1068    }
1069  };
1070}
1071
1072/// Emits a call to __cxa_begin_catch and enters a cleanup to call
1073/// __cxa_end_catch.
1074///
1075/// \param EndMightThrow - true if __cxa_end_catch might throw
1076static llvm::Value *CallBeginCatch(CodeGenFunction &CGF,
1077                                   llvm::Value *Exn,
1078                                   bool EndMightThrow) {
1079  llvm::CallInst *Call = CGF.Builder.CreateCall(getBeginCatchFn(CGF), Exn);
1080  Call->setDoesNotThrow();
1081
1082  CGF.EHStack.pushLazyCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow);
1083
1084  return Call;
1085}
1086
1087/// A "special initializer" callback for initializing a catch
1088/// parameter during catch initialization.
1089static void InitCatchParam(CodeGenFunction &CGF,
1090                           const VarDecl &CatchParam,
1091                           llvm::Value *ParamAddr) {
1092  // Load the exception from where the landing pad saved it.
1093  llvm::Value *Exn = CGF.Builder.CreateLoad(CGF.getExceptionSlot(), "exn");
1094
1095  CanQualType CatchType =
1096    CGF.CGM.getContext().getCanonicalType(CatchParam.getType());
1097  const llvm::Type *LLVMCatchTy = CGF.ConvertTypeForMem(CatchType);
1098
1099  // If we're catching by reference, we can just cast the object
1100  // pointer to the appropriate pointer.
1101  if (isa<ReferenceType>(CatchType)) {
1102    bool EndCatchMightThrow = cast<ReferenceType>(CatchType)->getPointeeType()
1103      ->isRecordType();
1104
1105    // __cxa_begin_catch returns the adjusted object pointer.
1106    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, EndCatchMightThrow);
1107    llvm::Value *ExnCast =
1108      CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.byref");
1109    CGF.Builder.CreateStore(ExnCast, ParamAddr);
1110    return;
1111  }
1112
1113  // Non-aggregates (plus complexes).
1114  bool IsComplex = false;
1115  if (!CGF.hasAggregateLLVMType(CatchType) ||
1116      (IsComplex = CatchType->isAnyComplexType())) {
1117    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, false);
1118
1119    // If the catch type is a pointer type, __cxa_begin_catch returns
1120    // the pointer by value.
1121    if (CatchType->hasPointerRepresentation()) {
1122      llvm::Value *CastExn =
1123        CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.casted");
1124      CGF.Builder.CreateStore(CastExn, ParamAddr);
1125      return;
1126    }
1127
1128    // Otherwise, it returns a pointer into the exception object.
1129
1130    const llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok
1131    llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);
1132
1133    if (IsComplex) {
1134      CGF.StoreComplexToAddr(CGF.LoadComplexFromAddr(Cast, /*volatile*/ false),
1135                             ParamAddr, /*volatile*/ false);
1136    } else {
1137      llvm::Value *ExnLoad = CGF.Builder.CreateLoad(Cast, "exn.scalar");
1138      CGF.EmitStoreOfScalar(ExnLoad, ParamAddr, /*volatile*/ false, CatchType);
1139    }
1140    return;
1141  }
1142
1143  // FIXME: this *really* needs to be done via a proper, Sema-emitted
1144  // initializer expression.
1145
1146  CXXRecordDecl *RD = CatchType.getTypePtr()->getAsCXXRecordDecl();
1147  assert(RD && "aggregate catch type was not a record!");
1148
1149  const llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok
1150
1151  if (RD->hasTrivialCopyConstructor()) {
1152    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, true);
1153    llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);
1154    CGF.EmitAggregateCopy(ParamAddr, Cast, CatchType);
1155    return;
1156  }
1157
1158  // We have to call __cxa_get_exception_ptr to get the adjusted
1159  // pointer before copying.
1160  llvm::CallInst *AdjustedExn =
1161    CGF.Builder.CreateCall(getGetExceptionPtrFn(CGF), Exn);
1162  AdjustedExn->setDoesNotThrow();
1163  llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);
1164
1165  CXXConstructorDecl *CD = RD->getCopyConstructor(CGF.getContext(), 0);
1166  assert(CD && "record has no copy constructor!");
1167  llvm::Value *CopyCtor = CGF.CGM.GetAddrOfCXXConstructor(CD, Ctor_Complete);
1168
1169  CallArgList CallArgs;
1170  CallArgs.push_back(std::make_pair(RValue::get(ParamAddr),
1171                                    CD->getThisType(CGF.getContext())));
1172  CallArgs.push_back(std::make_pair(RValue::get(Cast),
1173                                    CD->getParamDecl(0)->getType()));
1174
1175  const FunctionProtoType *FPT
1176    = CD->getType()->getAs<FunctionProtoType>();
1177
1178  // Call the copy ctor in a terminate scope.
1179  CGF.EHStack.pushTerminate();
1180  CGF.EmitCall(CGF.CGM.getTypes().getFunctionInfo(CallArgs, FPT),
1181               CopyCtor, ReturnValueSlot(), CallArgs, CD);
1182  CGF.EHStack.popTerminate();
1183
1184  // Finally we can call __cxa_begin_catch.
1185  CallBeginCatch(CGF, Exn, true);
1186}
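
// Editor's note: the branches above correspond to the usual flavours of catch
// parameter (illustrative only):
//
//   \code
//     catch (Base &b)    // by reference: store the adjusted pointer
//     catch (int n)      // scalar: load through the adjusted pointer
//     catch (Thing *p)   // pointer: __cxa_begin_catch returns it by value
//     catch (Thing t)    // class type: copy-construct into the local; a
//                        // non-trivial copy runs inside a terminate scope
//   \endcode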
1187
1188/// Begins a catch statement by initializing the catch variable and
1189/// calling __cxa_begin_catch.
1190static void BeginCatch(CodeGenFunction &CGF,
1191                       const CXXCatchStmt *S) {
1192  // We have to be very careful with the ordering of cleanups here:
1193  //   C++ [except.throw]p4:
1194  //     The destruction [of the exception temporary] occurs
1195  //     immediately after the destruction of the object declared in
1196  //     the exception-declaration in the handler.
1197  //
1198  // So the precise ordering is:
1199  //   1.  Construct catch variable.
1200  //   2.  __cxa_begin_catch
1201  //   3.  Enter __cxa_end_catch cleanup
1202  //   4.  Enter dtor cleanup
1203  //
1204  // We do this by initializing the exception variable with a
1205  // "special initializer", InitCatchParam.  Delegation sequence:
1206  //   - ExitCXXTryStmt opens a RunCleanupsScope
1207  //     - EmitLocalBlockVarDecl creates the variable and debug info
1208  //       - InitCatchParam initializes the variable from the exception
1209  //         - CallBeginCatch calls __cxa_begin_catch
1210  //         - CallBeginCatch enters the __cxa_end_catch cleanup
1211  //     - EmitLocalBlockVarDecl enters the variable destructor cleanup
1212  //   - EmitCXXTryStmt emits the code for the catch body
1213  //   - EmitCXXTryStmt closes the RunCleanupsScope
1214
1215  VarDecl *CatchParam = S->getExceptionDecl();
1216  if (!CatchParam) {
1217    llvm::Value *Exn = CGF.Builder.CreateLoad(CGF.getExceptionSlot(), "exn");
1218    CallBeginCatch(CGF, Exn, true);
1219    return;
1220  }
1221
1222  // Emit the local.
1223  CGF.EmitLocalBlockVarDecl(*CatchParam, &InitCatchParam);
1224}
1225
1226namespace {
1227  struct CallRethrow : EHScopeStack::LazyCleanup {
1228    void Emit(CodeGenFunction &CGF, bool IsForEH) {
1229      CGF.EmitCallOrInvoke(getReThrowFn(CGF), 0, 0);
1230    }
1231  };
1232}
1233
1234void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
1235  unsigned NumHandlers = S.getNumHandlers();
1236  EHCatchScope &CatchScope = cast<EHCatchScope>(*EHStack.begin());
1237  assert(CatchScope.getNumHandlers() == NumHandlers);
1238
1239  // Copy the handler blocks off before we pop the EH stack.  Emitting
1240  // the handlers might scribble on this memory.
1241  llvm::SmallVector<EHCatchScope::Handler, 8> Handlers(NumHandlers);
1242  memcpy(Handlers.data(), CatchScope.begin(),
1243         NumHandlers * sizeof(EHCatchScope::Handler));
1244  EHStack.popCatch();
1245
1246  // The fall-through block.
1247  llvm::BasicBlock *ContBB = createBasicBlock("try.cont");
1248
1249  // We just emitted the body of the try; jump to the continue block.
1250  if (HaveInsertPoint())
1251    Builder.CreateBr(ContBB);
1252
1253  // Determine if we need an implicit rethrow for all these catch handlers.
1254  bool ImplicitRethrow = false;
1255  if (IsFnTryBlock)
1256    ImplicitRethrow = isa<CXXDestructorDecl>(CurCodeDecl) ||
1257                      isa<CXXConstructorDecl>(CurCodeDecl);
1258
1259  for (unsigned I = 0; I != NumHandlers; ++I) {
1260    llvm::BasicBlock *CatchBlock = Handlers[I].Block;
1261    EmitBlock(CatchBlock);
1262
1263    // Catch the exception if this isn't a catch-all.
1264    const CXXCatchStmt *C = S.getHandler(I);
1265
1266    // Enter a cleanup scope, including the catch variable and the
1267    // end-catch.
1268    RunCleanupsScope CatchScope(*this);
1269
1270    // Initialize the catch variable and set up the cleanups.
1271    BeginCatch(*this, C);
1272
1273    // If there's an implicit rethrow, push a normal "cleanup" to call
1274    // _cxa_rethrow.  This needs to happen before __cxa_end_catch is
1275    // called, and so it is pushed after BeginCatch.
1276    if (ImplicitRethrow)
1277      EHStack.pushLazyCleanup<CallRethrow>(NormalCleanup);
1278
1279    // Perform the body of the catch.
1280    EmitStmt(C->getHandlerBlock());
1281
1282    // Fall out through the catch cleanups.
1283    CatchScope.ForceCleanup();
1284
1285    // Branch out of the try.
1286    if (HaveInsertPoint())
1287      Builder.CreateBr(ContBB);
1288  }
1289
1290  EmitBlock(ContBB);
1291}
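
// Editor's note: the ImplicitRethrow case above corresponds to a function
// try block on a constructor or destructor, e.g. (illustrative only):
//
//   \code
//     Widget::Widget(int n) try : Member(n) {
//     } catch (...) {
//       log();   // falling off the end of this handler must rethrow;
//     }          // that is what the CallRethrow cleanup implements
//   \endcode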
1292
1293/// Enters a finally block for an implementation using zero-cost
1294/// exceptions.  This is mostly general, but hard-codes some
1295/// language/ABI-specific behavior in the catch-all sections.
1296CodeGenFunction::FinallyInfo
1297CodeGenFunction::EnterFinallyBlock(const Stmt *Body,
1298                                   llvm::Constant *BeginCatchFn,
1299                                   llvm::Constant *EndCatchFn,
1300                                   llvm::Constant *RethrowFn) {
1301  assert((BeginCatchFn != 0) == (EndCatchFn != 0) &&
1302         "begin/end catch functions not paired");
1303  assert(RethrowFn && "rethrow function is required");
1304
1305  // The rethrow function has one of the following two types:
1306  //   void (*)()
1307  //   void (*)(void*)
1308  // In the latter case we need to pass it the exception object.
1309  // But we can't use the exception slot because the @finally might
1310  // have a landing pad (which would overwrite the exception slot).
1311  const llvm::FunctionType *RethrowFnTy =
1312    cast<llvm::FunctionType>(
1313      cast<llvm::PointerType>(RethrowFn->getType())
1314      ->getElementType());
1315  llvm::Value *SavedExnVar = 0;
1316  if (RethrowFnTy->getNumParams())
1317    SavedExnVar = CreateTempAlloca(Builder.getInt8PtrTy(), "finally.exn");
1318
1319  // A finally block is a statement which must be executed on any edge
1320  // out of a given scope.  Unlike a cleanup, the finally block may
1321  // contain arbitrary control flow leading out of itself.  In
1322  // addition, finally blocks should always be executed, even if there
1323  // are no catch handlers higher on the stack.  Therefore, we
1324  // surround the protected scope with a combination of a normal
1325  // cleanup (to catch attempts to break out of the block via normal
1326  // control flow) and an EH catch-all (semantically "outside" any try
1327  // statement to which the finally block might have been attached).
1328  // The finally block itself is generated in the context of a cleanup
1329  // which conditionally leaves the catch-all.
1330
1331  FinallyInfo Info;
1332
1333  // Jump destination for performing the finally block on an exception
1334  // edge.  We'll never actually reach this block, so unreachable is
1335  // fine.
1336  JumpDest RethrowDest = getJumpDestInCurrentScope(getUnreachableBlock());
1337
1338  // Whether the finally block is being executed for EH purposes.
1339  llvm::AllocaInst *ForEHVar = CreateTempAlloca(Builder.getInt1Ty(),
1340                                                "finally.for-eh");
1341  InitTempAlloca(ForEHVar, llvm::ConstantInt::getFalse(getLLVMContext()));
1342
1343  // Enter a normal cleanup which will perform the @finally block.
1344  {
1345    CodeGenFunction::CleanupBlock Cleanup(*this, NormalCleanup);
1346
1347    // Enter a cleanup to call the end-catch function if one was provided.
1348    if (EndCatchFn) {
1349      CodeGenFunction::CleanupBlock FinallyExitCleanup(*this, NormalAndEHCleanup);
1350
1351      llvm::BasicBlock *EndCatchBB = createBasicBlock("finally.endcatch");
1352      llvm::BasicBlock *CleanupContBB = createBasicBlock("finally.cleanup.cont");
1353
1354      llvm::Value *ShouldEndCatch =
1355        Builder.CreateLoad(ForEHVar, "finally.endcatch");
1356      Builder.CreateCondBr(ShouldEndCatch, EndCatchBB, CleanupContBB);
1357      EmitBlock(EndCatchBB);
1358      EmitCallOrInvoke(EndCatchFn, 0, 0); // catch-all, so might throw
1359      EmitBlock(CleanupContBB);
1360    }
1361
1362    // Emit the finally block.
1363    EmitStmt(Body);
1364
1365    // If the end of the finally is reachable, check whether this was
1366    // for EH.  If so, rethrow.
1367    if (HaveInsertPoint()) {
1368      llvm::BasicBlock *RethrowBB = createBasicBlock("finally.rethrow");
1369      llvm::BasicBlock *ContBB = createBasicBlock("finally.cont");
1370
1371      llvm::Value *ShouldRethrow =
1372        Builder.CreateLoad(ForEHVar, "finally.shouldthrow");
1373      Builder.CreateCondBr(ShouldRethrow, RethrowBB, ContBB);
1374
1375      EmitBlock(RethrowBB);
1376      if (SavedExnVar) {
1377        llvm::Value *Args[] = { Builder.CreateLoad(SavedExnVar) };
1378        EmitCallOrInvoke(RethrowFn, Args, Args+1);
1379      } else {
1380        EmitCallOrInvoke(RethrowFn, 0, 0);
1381      }
1382      Builder.CreateUnreachable();
1383
1384      EmitBlock(ContBB);
1385    }
1386
1387    // Leave the end-catch cleanup.  As an optimization, pretend that
1388    // the fallthrough path was inaccessible; we've dynamically proven
1389    // that we're not in the EH case along that path.
1390    if (EndCatchFn) {
1391      CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
1392      PopCleanupBlock();
1393      Builder.restoreIP(SavedIP);
1394    }
1395
1396    // Now make sure we actually have an insertion point or the
1397    // cleanup gods will hate us.
1398    EnsureInsertPoint();
1399  }
1400
1401  // Enter a catch-all scope.
1402  llvm::BasicBlock *CatchAllBB = createBasicBlock("finally.catchall");
1403  CGBuilderTy::InsertPoint SavedIP = Builder.saveIP();
1404  Builder.SetInsertPoint(CatchAllBB);
1405
1406  // If there's a begin-catch function, call it.
1407  if (BeginCatchFn) {
1408    Builder.CreateCall(BeginCatchFn, Builder.CreateLoad(getExceptionSlot()))
1409      ->setDoesNotThrow();
1410  }
1411
1412  // If we need to remember the exception pointer to rethrow later, do so.
1413  if (SavedExnVar) {
1414    llvm::Value *SavedExn = Builder.CreateLoad(getExceptionSlot());
1415    Builder.CreateStore(SavedExn, SavedExnVar);
1416  }
1417
1418  // Tell the finally block that we're in EH.
1419  Builder.CreateStore(llvm::ConstantInt::getTrue(getLLVMContext()), ForEHVar);
1420
1421  // Thread a jump through the finally cleanup.
1422  EmitBranchThroughCleanup(RethrowDest);
1423
1424  Builder.restoreIP(SavedIP);
1425
1426  EHCatchScope *CatchScope = EHStack.pushCatch(1);
1427  CatchScope->setCatchAllHandler(0, CatchAllBB);
1428
1429  return Info;
1430}
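
// Editor's note: schematically, the cleanup entered above emits, on every
// exit from the protected scope (names as in the code above):
//
//   \code
//     if (EndCatchFn was provided && ForEHVar)  EndCatchFn();
//     <finally body>
//     if (ForEHVar)  RethrowFn(SavedExn);       // or RethrowFn() with no arg
//   \endcode
//
// while the catch-all handler just calls BeginCatchFn (if given), saves the
// exception, sets ForEHVar, and branches through that same cleanup toward the
// unreachable rethrow destination.  ExitFinallyBlock pops both scopes again.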
1431
1432void CodeGenFunction::ExitFinallyBlock(FinallyInfo &Info) {
1433  // Leave the finally catch-all.
1434  EHCatchScope &Catch = cast<EHCatchScope>(*EHStack.begin());
1435  llvm::BasicBlock *CatchAllBB = Catch.getHandler(0).Block;
1436  EHStack.popCatch();
1437
1438  // And leave the normal cleanup.
1439  PopCleanupBlock();
1440
1441  CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
1442  EmitBlock(CatchAllBB, true);
1443
1444  Builder.restoreIP(SavedIP);
1445}
1446
1447llvm::BasicBlock *CodeGenFunction::getTerminateLandingPad() {
1448  if (TerminateLandingPad)
1449    return TerminateLandingPad;
1450
1451  CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
1452
1453  // This will get inserted at the end of the function.
1454  TerminateLandingPad = createBasicBlock("terminate.lpad");
1455  Builder.SetInsertPoint(TerminateLandingPad);
1456
1457  // Tell the backend that this is a landing pad.
1458  llvm::CallInst *Exn =
1459    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_exception), "exn");
1460  Exn->setDoesNotThrow();
1461
1462  const EHPersonality &Personality = EHPersonality::get(CGM.getLangOptions());
1463
1464  // Tell the backend what the exception table should be:
1465  // nothing but a catch-all.
1466  llvm::Value *Args[3] = { Exn, getPersonalityFn(*this, Personality),
1467                           getCatchAllValue(*this) };
1468  Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_selector),
1469                     Args, Args+3, "eh.selector")
1470    ->setDoesNotThrow();
1471
1472  llvm::CallInst *TerminateCall = Builder.CreateCall(getTerminateFn(*this));
1473  TerminateCall->setDoesNotReturn();
1474  TerminateCall->setDoesNotThrow();
1475  Builder.CreateUnreachable();
1476
1477  // Restore the saved insertion state.
1478  Builder.restoreIP(SavedIP);
1479
1480  return TerminateLandingPad;
1481}
1482
1483llvm::BasicBlock *CodeGenFunction::getTerminateHandler() {
1484  if (TerminateHandler)
1485    return TerminateHandler;
1486
1487  CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
1488
1489  // Set up the terminate handler.  This block is inserted at the very
1490  // end of the function by FinishFunction.
1491  TerminateHandler = createBasicBlock("terminate.handler");
1492  Builder.SetInsertPoint(TerminateHandler);
1493  llvm::CallInst *TerminateCall = Builder.CreateCall(getTerminateFn(*this));
1494  TerminateCall->setDoesNotReturn();
1495  TerminateCall->setDoesNotThrow();
1496  Builder.CreateUnreachable();
1497
1498  // Restore the saved insertion state.
1499  Builder.restoreIP(SavedIP);
1500
1501  return TerminateHandler;
1502}
1503
1504CodeGenFunction::CleanupBlock::CleanupBlock(CodeGenFunction &CGF,
1505                                            CleanupKind Kind)
1506  : CGF(CGF), SavedIP(CGF.Builder.saveIP()), NormalCleanupExitBB(0) {
1507  llvm::BasicBlock *EntryBB = CGF.createBasicBlock("cleanup");
1508  CGF.Builder.SetInsertPoint(EntryBB);
1509
1510  switch (Kind) {
1511  case NormalAndEHCleanup:
1512    NormalCleanupEntryBB = EHCleanupEntryBB = EntryBB;
1513    break;
1514
1515  case NormalCleanup:
1516    NormalCleanupEntryBB = EntryBB;
1517    EHCleanupEntryBB = 0;
1518    break;
1519
1520  case EHCleanup:
1521    NormalCleanupEntryBB = 0;
1522    EHCleanupEntryBB = EntryBB;
1523    CGF.EHStack.pushTerminate();
1524    break;
1525  }
1526}
1527
1528void CodeGenFunction::CleanupBlock::beginEHCleanup() {
1529  assert(EHCleanupEntryBB == 0 && "already started an EH cleanup");
1530  NormalCleanupExitBB = CGF.Builder.GetInsertBlock();
1531  assert(NormalCleanupExitBB && "end of normal cleanup is unreachable");
1532
1533  EHCleanupEntryBB = CGF.createBasicBlock("eh.cleanup");
1534  CGF.Builder.SetInsertPoint(EHCleanupEntryBB);
1535  CGF.EHStack.pushTerminate();
1536}
1537
1538CodeGenFunction::CleanupBlock::~CleanupBlock() {
1539  llvm::BasicBlock *EHCleanupExitBB = 0;
1540
1541  // If we're currently writing the EH cleanup...
1542  if (EHCleanupEntryBB) {
1543    // Set the EH cleanup exit block.
1544    EHCleanupExitBB = CGF.Builder.GetInsertBlock();
1545    assert(EHCleanupExitBB && "end of EH cleanup is unreachable");
1546
1547    // If we're actually writing both at once, set the normal exit, too.
1548    if (EHCleanupEntryBB == NormalCleanupEntryBB)
1549      NormalCleanupExitBB = EHCleanupExitBB;
1550
1551    // Otherwise, we must have pushed a terminate handler.
1552    else
1553      CGF.EHStack.popTerminate();
1554
1555  // Otherwise, just set the normal cleanup exit block.
1556  } else {
1557    NormalCleanupExitBB = CGF.Builder.GetInsertBlock();
1558    assert(NormalCleanupExitBB && "end of normal cleanup is unreachable");
1559  }
1560
1561  CGF.EHStack.pushCleanup(NormalCleanupEntryBB, NormalCleanupExitBB,
1562                          EHCleanupEntryBB, EHCleanupExitBB);
1563
1564  CGF.Builder.restoreIP(SavedIP);
1565}
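
// Editor's note: the intended use of CleanupBlock, as in EnterFinallyBlock
// above, is scope-based from inside a CodeGenFunction member
// (illustrative only):
//
//   \code
//     {
//       CodeGenFunction::CleanupBlock Cleanup(*this, NormalCleanup);
//       ...emit the normal cleanup IR at the current insert point...
//       // optionally, for a distinct EH path:
//       //   Cleanup.beginEHCleanup(); ...emit the EH-only IR...
//     }   // ~CleanupBlock pushes the finished entry/exit blocks onto EHStack
//         //  and restores the saved insertion point
//   \endcode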
1566
1567EHScopeStack::LazyCleanup::~LazyCleanup() {
1568  llvm_unreachable("LazyCleanup is indestructable");
1569}
1570