CGException.cpp revision 82a113adf8063baa70251dfa269d039ca22e2537
//===--- CGException.cpp - Emit LLVM Code for C++ exceptions --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ exception related code generation.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/StmtCXX.h"

#include "llvm/Intrinsics.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Support/CallSite.h"

#include "CGObjCRuntime.h"
#include "CodeGenFunction.h"
#include "CGException.h"
#include "TargetInfo.h"

using namespace clang;
using namespace CodeGen;

/// Push an entry of the given size onto this protected-scope stack.
///
/// The stack grows downward inside the buffer: StartOfData moves toward
/// StartOfBuffer as entries are pushed.  Returns a pointer to the newly
/// reserved region (the new StartOfData).
char *EHScopeStack::allocate(size_t Size) {
  if (!StartOfBuffer) {
    // First allocation: pick a power-of-two capacity >= Size.
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    // Not enough headroom: double the capacity until the used data plus
    // the new entry fit, then copy the live data to the *end* of the new
    // buffer so the downward-growth layout is preserved.
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

/// Starting at the given scope, find the innermost enclosing scope that
/// acts as an EH cleanup, returned as a stable iterator (or stable_end()
/// if there is none).
EHScopeStack::stable_iterator
EHScopeStack::getEnclosingEHCleanup(iterator it) const {
  assert(it != end());
  do {
    if (isa<EHCleanupScope>(*it)) {
      if (cast<EHCleanupScope>(*it).isEHCleanup())
        return stabilize(it);
      // A normal-only cleanup records its enclosing EH cleanup; use it
      // rather than continuing the linear walk.
      return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
    }
    ++it;
  } while (it != end());
  return stable_end();
}

/// Push a cleanup scope large enough to hold a cleanup object of Size
/// bytes, and return the buffer the caller should construct it into.
/// Updates the innermost normal/EH cleanup links according to Kind.
void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsActive = !(Kind & InactiveCleanup);
  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                IsActive,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHCleanup);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHCleanup = stable_begin();

  return Scope->getCleanupBuffer();
}

/// Pop the innermost scope, which must be a cleanup scope.  Restores the
/// innermost normal/EH cleanup links recorded when the scope was pushed.
void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
  StartOfData += Cleanup.getAllocatedSize();

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  // Destroy the cleanup.
  Cleanup.~EHCleanupScope();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

/// Push an exception-specification filter scope with room for NumFilters
/// type entries; the caller fills them in with setFilter().
EHFilterScope *EHScopeStack::pushFilter(unsigned NumFilters) {
  char *Buffer = allocate(EHFilterScope::getSizeForNumFilters(NumFilters));
  CatchDepth++;
  return new (Buffer) EHFilterScope(NumFilters);
}

/// Pop the innermost scope, which must be a filter scope.
void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when not empty");

  EHFilterScope &Filter = cast<EHFilterScope>(*begin());
  StartOfData += EHFilterScope::getSizeForNumFilters(Filter.getNumFilters());

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  assert(CatchDepth > 0 && "mismatched filter push/pop");
  CatchDepth--;
}

/// Push a catch scope with room for NumHandlers handlers.  Each handler
/// is pre-assigned the next EH destination index; the caller fills in the
/// handler type/block with setHandler()/setCatchAllHandler().
EHCatchScope *EHScopeStack::pushCatch(unsigned NumHandlers) {
  char *Buffer = allocate(EHCatchScope::getSizeForNumHandlers(NumHandlers));
  CatchDepth++;
  EHCatchScope *Scope = new (Buffer) EHCatchScope(NumHandlers);
  for (unsigned I = 0; I != NumHandlers; ++I)
    Scope->getHandlers()[I].Index = getNextEHDestIndex();
  return Scope;
}

/// Push a terminate scope: any exception reaching it calls terminate.
void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  CatchDepth++;
  new (Buffer) EHTerminateScope(getNextEHDestIndex());
}

/// Remove any 'null' fixups on the stack.  However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place.  We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup;  otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == 0)
    BranchFixups.pop_back();
}

/// Get (declaring if needed) the __cxa_allocate_exception runtime entry.
static llvm::Constant *getAllocateExceptionFn(CodeGenFunction &CGF) {
  // void *__cxa_allocate_exception(size_t thrown_size);
  const llvm::Type *SizeTy = CGF.ConvertType(CGF.getContext().getSizeType());
  std::vector<const llvm::Type*> Args(1, SizeTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getInt8PtrTy(CGF.getLLVMContext()),
                            Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_allocate_exception");
}

/// Get (declaring if needed) the __cxa_free_exception runtime entry.
static llvm::Constant *getFreeExceptionFn(CodeGenFunction &CGF) {
  // void __cxa_free_exception(void *thrown_exception);
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()),
                            Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_free_exception");
}

/// Get (declaring if needed) the __cxa_throw runtime entry.
static llvm::Constant *getThrowFn(CodeGenFunction &CGF) {
  // void __cxa_throw(void *thrown_exception, std::type_info *tinfo,
  //                  void (*dest) (void *));

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(3, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()),
                            Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_throw");
}

/// Get (declaring if needed) the __cxa_rethrow runtime entry.
static llvm::Constant *getReThrowFn(CodeGenFunction &CGF) {
  // void __cxa_rethrow();

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()), false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_rethrow");
}

/// Get (declaring if needed) the __cxa_get_exception_ptr runtime entry.
static llvm::Constant *getGetExceptionPtrFn(CodeGenFunction &CGF) {
  // void *__cxa_get_exception_ptr(void*);
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(Int8PtrTy, Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_get_exception_ptr");
}

/// Get (declaring if needed) the __cxa_begin_catch runtime entry.
static llvm::Constant *getBeginCatchFn(CodeGenFunction &CGF) {
  // void *__cxa_begin_catch(void*);

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(Int8PtrTy, Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_begin_catch");
}

/// Get (declaring if needed) the __cxa_end_catch runtime entry.
static llvm::Constant *getEndCatchFn(CodeGenFunction &CGF) {
  // void __cxa_end_catch();

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()), false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_end_catch");
}

/// Get (declaring if needed) the __cxa_call_unexpected runtime entry.
static llvm::Constant *getUnexpectedFn(CodeGenFunction &CGF) {
  // void __cxa_call_unexpected(void *thrown_exception);

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()),
                            Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_call_unexpected");
}

/// Get the resume-or-rethrow unwinder entry point, selecting the SjLj
/// variant when the target uses setjmp/longjmp exceptions.
llvm::Constant *CodeGenFunction::getUnwindResumeOrRethrowFn() {
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(getLLVMContext()), Args,
                            false);

  if (CGM.getLangOptions().SjLjExceptions)
    return CGM.CreateRuntimeFunction(FTy, "_Unwind_SjLj_Resume_or_Rethrow");
  return CGM.CreateRuntimeFunction(FTy, "_Unwind_Resume_or_Rethrow");
}

/// Get the terminate entry point: std::terminate (mangled) in C++,
/// otherwise plain abort.
static llvm::Constant *getTerminateFn(CodeGenFunction &CGF) {
  // void __terminate();

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()), false);

  return CGF.CGM.CreateRuntimeFunction(FTy,
      CGF.CGM.getLangOptions().CPlusPlus ? "_ZSt9terminatev" : "abort");
}

/// Get a void(i8*) runtime entry with the given name, used as a
/// catch-all rethrow helper.
static llvm::Constant *getCatchallRethrowFn(CodeGenFunction &CGF,
                                            llvm::StringRef Name) {
  const llvm::Type *Int8PtrTy =
    llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::Type *VoidTy = llvm::Type::getVoidTy(CGF.getLLVMContext());
  const llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, Name);
}

// The known personality routines, keyed by the symbol each resolves to.
const EHPersonality EHPersonality::GNU_C("__gcc_personality_v0");
const EHPersonality EHPersonality::GNU_C_SJLJ("__gcc_personality_sj0");
const EHPersonality EHPersonality::NeXT_ObjC("__objc_personality_v0");
const EHPersonality EHPersonality::GNU_CPlusPlus("__gxx_personality_v0");
const EHPersonality EHPersonality::GNU_CPlusPlus_SJLJ("__gxx_personality_sj0");
const EHPersonality EHPersonality::GNU_ObjC("__gnu_objc_personality_v0",
                                            "objc_exception_throw");

/// Personality for plain C: the GCC personality, SjLj variant if needed.
static const EHPersonality &getCPersonality(const LangOptions &L) {
  if (L.SjLjExceptions)
    return EHPersonality::GNU_C_SJLJ;
  return EHPersonality::GNU_C;
}

/// Personality for Objective-C, depending on runtime and ABI.
static const EHPersonality &getObjCPersonality(const LangOptions &L) {
  if (L.NeXTRuntime) {
    if (L.ObjCNonFragileABI) return EHPersonality::NeXT_ObjC;
    else return getCPersonality(L);
  } else {
    return EHPersonality::GNU_ObjC;
  }
}

/// Personality for C++: the G++ personality, SjLj variant if needed.
static const EHPersonality &getCXXPersonality(const LangOptions &L) {
  if (L.SjLjExceptions)
    return EHPersonality::GNU_CPlusPlus_SJLJ;
  else
    return EHPersonality::GNU_CPlusPlus;
}

/// Determines the personality function to use when both C++
/// and Objective-C exceptions are being caught.
static const EHPersonality &getObjCXXPersonality(const LangOptions &L) {
  // The ObjC personality defers to the C++ personality for non-ObjC
  // handlers.  Unlike the C++ case, we use the same personality
  // function on targets using (backend-driven) SJLJ EH.
  if (L.NeXTRuntime) {
    if (L.ObjCNonFragileABI)
      return EHPersonality::NeXT_ObjC;

    // In the fragile ABI, just use C++ exception handling and hope
    // they're not doing crazy exception mixing.
    else
      return getCXXPersonality(L);
  }

  // The GNU runtime's personality function inherently doesn't support
  // mixed EH.  Use the C++ personality just to avoid returning null.
  return getCXXPersonality(L);
}

/// Pick the personality routine for the current language combination.
const EHPersonality &EHPersonality::get(const LangOptions &L) {
  if (L.CPlusPlus && L.ObjC1)
    return getObjCXXPersonality(L);
  else if (L.CPlusPlus)
    return getCXXPersonality(L);
  else if (L.ObjC1)
    return getObjCPersonality(L);
  else
    return getCPersonality(L);
}

/// Declare (or reuse) the given personality routine as an i32(...)
/// varargs function.
static llvm::Constant *getPersonalityFn(CodeGenModule &CGM,
                                        const EHPersonality &Personality) {
  llvm::Constant *Fn =
    CGM.CreateRuntimeFunction(llvm::FunctionType::get(
                                llvm::Type::getInt32Ty(CGM.getLLVMContext()),
                                true),
                              Personality.getPersonalityFnName());
  return Fn;
}

/// Same as getPersonalityFn, but bitcast to i8* for use as a selector
/// argument.
static llvm::Constant *getOpaquePersonalityFn(CodeGenModule &CGM,
                                        const EHPersonality &Personality) {
  llvm::Constant *Fn = getPersonalityFn(CGM, Personality);
  return llvm::ConstantExpr::getBitCast(Fn, CGM.PtrToInt8Ty);
}

/// Check whether a personality function could reasonably be swapped
/// for a C++ personality function.
static bool PersonalityHasOnlyCXXUses(llvm::Constant *Fn) {
  for (llvm::Constant::use_iterator
         I = Fn->use_begin(), E = Fn->use_end(); I != E; ++I) {
    llvm::User *User = *I;

    // Conditionally white-list bitcasts.
    if (llvm::ConstantExpr *CE = dyn_cast<llvm::ConstantExpr>(User)) {
      if (CE->getOpcode() != llvm::Instruction::BitCast) return false;
      if (!PersonalityHasOnlyCXXUses(CE))
        return false;
      continue;
    }

    // Otherwise, it has to be a selector call.
    if (!isa<llvm::EHSelectorInst>(User)) return false;

    llvm::EHSelectorInst *Selector = cast<llvm::EHSelectorInst>(User);
    for (unsigned I = 2, E = Selector->getNumArgOperands(); I != E; ++I) {
      // Look for something that would've been returned by the ObjC
      // runtime's GetEHType() method.
      llvm::GlobalVariable *GV
        = dyn_cast<llvm::GlobalVariable>(Selector->getArgOperand(I));
      if (!GV) continue;

      // ObjC EH selector entries are always global variables with
      // names starting like this.
      if (GV->getName().startswith("OBJC_EHTYPE"))
        return false;
    }
  }

  return true;
}

/// Try to use the C++ personality function in ObjC++.  Not doing this
/// can cause some incompatibilities with gcc, which is more
/// aggressive about only using the ObjC++ personality in a function
/// when it really needs it.
void CodeGenModule::SimplifyPersonality() {
  // For now, this is really a Darwin-specific operation.
  if (Context.Target.getTriple().getOS() != llvm::Triple::Darwin)
    return;

  // If we're not in ObjC++ -fexceptions, there's nothing to do.
  if (!Features.CPlusPlus || !Features.ObjC1 || !Features.Exceptions)
    return;

  const EHPersonality &ObjCXX = EHPersonality::get(Features);
  const EHPersonality &CXX = getCXXPersonality(Features);
  if (&ObjCXX == &CXX ||
      ObjCXX.getPersonalityFnName() == CXX.getPersonalityFnName())
    return;

  llvm::Function *Fn =
    getModule().getFunction(ObjCXX.getPersonalityFnName());

  // Nothing to do if it's unused.
  if (!Fn || Fn->use_empty()) return;

  // Can't do the optimization if it has non-C++ uses.
  if (!PersonalityHasOnlyCXXUses(Fn)) return;

  // Create the C++ personality function and kill off the old
  // function.
  llvm::Constant *CXXFn = getPersonalityFn(*this, CXX);

  // This can happen if the user is screwing with us.
  if (Fn->getType() != CXXFn->getType()) return;

  Fn->replaceAllUsesWith(CXXFn);
  Fn->eraseFromParent();
}

/// Returns the value to inject into a selector to indicate the
/// presence of a catch-all.
static llvm::Constant *getCatchAllValue(CodeGenFunction &CGF) {
  // Possibly we should use @llvm.eh.catch.all.value here.
  return llvm::ConstantPointerNull::get(CGF.CGM.PtrToInt8Ty);
}

/// Returns the value to inject into a selector to indicate the
/// presence of a cleanup.
static llvm::Constant *getCleanupValue(CodeGenFunction &CGF) {
  return llvm::ConstantInt::get(CGF.Builder.getInt32Ty(), 0);
}

namespace {
  /// A cleanup to free the exception object if its initialization
  /// throws.
  struct FreeExceptionCleanup : EHScopeStack::Cleanup {
    FreeExceptionCleanup(llvm::Value *ShouldFreeVar,
                         llvm::Value *ExnLocVar)
      : ShouldFreeVar(ShouldFreeVar), ExnLocVar(ExnLocVar) {}

    // Alloca holding an i1 flag: free the exception object on unwind?
    llvm::Value *ShouldFreeVar;
    // Alloca holding the exception object pointer.
    llvm::Value *ExnLocVar;

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      llvm::BasicBlock *FreeBB = CGF.createBasicBlock("free-exnobj");
      llvm::BasicBlock *DoneBB = CGF.createBasicBlock("free-exnobj.done");

      // Only call __cxa_free_exception if the flag is still set.
      llvm::Value *ShouldFree = CGF.Builder.CreateLoad(ShouldFreeVar,
                                                       "should-free-exnobj");
      CGF.Builder.CreateCondBr(ShouldFree, FreeBB, DoneBB);
      CGF.EmitBlock(FreeBB);
      llvm::Value *ExnLocLocal = CGF.Builder.CreateLoad(ExnLocVar, "exnobj");
      CGF.Builder.CreateCall(getFreeExceptionFn(CGF), ExnLocLocal)
        ->setDoesNotThrow();
      CGF.EmitBlock(DoneBB);
    }
  };
}

// Emits an exception expression into the given location.  This
// differs from EmitAnyExprToMem only in that, if a final copy-ctor
// call is required, an exception within that copy ctor causes
// std::terminate to be invoked.
static void EmitAnyExprToExn(CodeGenFunction &CGF, const Expr *E,
                             llvm::Value *ExnLoc) {
  // We want to release the allocated exception object if this
  // expression throws.  We do this by pushing an EH-only cleanup
  // block which, furthermore, deactivates itself after the expression
  // is complete.
  llvm::AllocaInst *ShouldFreeVar =
    CGF.CreateTempAlloca(llvm::Type::getInt1Ty(CGF.getLLVMContext()),
                         "should-free-exnobj.var");
  CGF.InitTempAlloca(ShouldFreeVar,
                     llvm::ConstantInt::getFalse(CGF.getLLVMContext()));

  // A variable holding the exception pointer.  This is necessary
  // because the throw expression does not necessarily dominate the
  // cleanup, for example if it appears in a conditional expression.
  llvm::AllocaInst *ExnLocVar =
    CGF.CreateTempAlloca(ExnLoc->getType(), "exnobj.var");

  // Make sure the exception object is cleaned up if there's an
  // exception during initialization.
  // FIXME: stmt expressions might require this to be a normal
  // cleanup, too.
  CGF.EHStack.pushCleanup<FreeExceptionCleanup>(EHCleanup,
                                                ShouldFreeVar,
                                                ExnLocVar);
  EHScopeStack::stable_iterator Cleanup = CGF.EHStack.stable_begin();

  CGF.Builder.CreateStore(ExnLoc, ExnLocVar);
  CGF.Builder.CreateStore(llvm::ConstantInt::getTrue(CGF.getLLVMContext()),
                          ShouldFreeVar);

  // __cxa_allocate_exception returns a void*;  we need to cast this
  // to the appropriate type for the object.
  const llvm::Type *Ty = CGF.ConvertTypeForMem(E->getType())->getPointerTo();
  llvm::Value *TypedExnLoc = CGF.Builder.CreateBitCast(ExnLoc, Ty);

  // FIXME: this isn't quite right!  If there's a final unelided call
  // to a copy constructor, then according to [except.terminate]p1 we
  // must call std::terminate() if that constructor throws, because
  // technically that copy occurs after the exception expression is
  // evaluated but before the exception is caught.  But the best way
  // to handle that is to teach EmitAggExpr to do the final copy
  // differently if it can't be elided.
  CGF.EmitAnyExprToMem(E, TypedExnLoc, /*Volatile*/ false, /*IsInit*/ true);

  // Initialization succeeded: clear the flag so the cleanup does nothing.
  CGF.Builder.CreateStore(llvm::ConstantInt::getFalse(CGF.getLLVMContext()),
                          ShouldFreeVar);

  // Technically, the exception object is like a temporary; it has to
  // be cleaned up when its full-expression is complete.
  // Unfortunately, the AST represents full-expressions by creating a
  // ExprWithCleanups, which it only does when there are actually
  // temporaries.
  //
  // If any cleanups have been added since we pushed ours, they must
  // be from temporaries;  this will get popped at the same time.
  // Otherwise we need to pop ours off.  FIXME: this is very brittle.
  if (Cleanup == CGF.EHStack.stable_begin())
    CGF.PopCleanupBlock();
}

/// Lazily create the single per-function alloca that holds the
/// in-flight exception pointer (i8*).
llvm::Value *CodeGenFunction::getExceptionSlot() {
  if (!ExceptionSlot) {
    const llvm::Type *i8p = llvm::Type::getInt8PtrTy(getLLVMContext());
    ExceptionSlot = CreateTempAlloca(i8p, "exn.slot");
  }
  return ExceptionSlot;
}

/// Emit a 'throw' expression: either a rethrow (no operand) via
/// __cxa_rethrow, or allocate/initialize/throw via __cxa_allocate_exception
/// and __cxa_throw.
void CodeGenFunction::EmitCXXThrowExpr(const CXXThrowExpr *E) {
  if (!E->getSubExpr()) {
    // 'throw;' — rethrow the current exception.
    if (getInvokeDest()) {
      Builder.CreateInvoke(getReThrowFn(*this),
                           getUnreachableBlock(),
                           getInvokeDest())
        ->setDoesNotReturn();
    } else {
      Builder.CreateCall(getReThrowFn(*this))->setDoesNotReturn();
      Builder.CreateUnreachable();
    }

    // throw is an expression, and the expression emitters expect us
    // to leave ourselves at a valid insertion point.
    EmitBlock(createBasicBlock("throw.cont"));

    return;
  }

  QualType ThrowType = E->getSubExpr()->getType();

  // Now allocate the exception object.
  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
  uint64_t TypeSize = getContext().getTypeSizeInChars(ThrowType).getQuantity();

  llvm::Constant *AllocExceptionFn = getAllocateExceptionFn(*this);
  llvm::CallInst *ExceptionPtr =
    Builder.CreateCall(AllocExceptionFn,
                       llvm::ConstantInt::get(SizeTy, TypeSize),
                       "exception");
  ExceptionPtr->setDoesNotThrow();

  EmitAnyExprToExn(*this, E->getSubExpr(), ExceptionPtr);

  // Now throw the exception.
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
  llvm::Constant *TypeInfo = CGM.GetAddrOfRTTIDescriptor(ThrowType,
                                                         /*ForEH=*/true);

  // The address of the destructor.  If the exception type has a
  // trivial destructor (or isn't a record), we just pass null.
  llvm::Constant *Dtor = 0;
  if (const RecordType *RecordTy = ThrowType->getAs<RecordType>()) {
    CXXRecordDecl *Record = cast<CXXRecordDecl>(RecordTy->getDecl());
    if (!Record->hasTrivialDestructor()) {
      CXXDestructorDecl *DtorD = Record->getDestructor();
      Dtor = CGM.GetAddrOfCXXDestructor(DtorD, Dtor_Complete);
      Dtor = llvm::ConstantExpr::getBitCast(Dtor, Int8PtrTy);
    }
  }
  if (!Dtor) Dtor = llvm::Constant::getNullValue(Int8PtrTy);

  if (getInvokeDest()) {
    llvm::InvokeInst *ThrowCall =
      Builder.CreateInvoke3(getThrowFn(*this),
                            getUnreachableBlock(), getInvokeDest(),
                            ExceptionPtr, TypeInfo, Dtor);
    ThrowCall->setDoesNotReturn();
  } else {
    llvm::CallInst *ThrowCall =
      Builder.CreateCall3(getThrowFn(*this), ExceptionPtr, TypeInfo, Dtor);
    ThrowCall->setDoesNotReturn();
    Builder.CreateUnreachable();
  }

  // throw is an expression, and the expression emitters expect us
  // to leave ourselves at a valid insertion point.
  EmitBlock(createBasicBlock("throw.cont"));
}

/// Push an EH filter scope for a function's exception specification,
/// filling it with the RTTI of each listed exception type.
void CodeGenFunction::EmitStartEHSpec(const Decl *D) {
  if (!Exceptions)
    return;

  const FunctionDecl* FD = dyn_cast_or_null<FunctionDecl>(D);
  if (FD == 0)
    return;
  const FunctionProtoType *Proto = FD->getType()->getAs<FunctionProtoType>();
  if (Proto == 0)
    return;

  assert(!Proto->hasAnyExceptionSpec() && "function with parameter pack");

  if (!Proto->hasExceptionSpec())
    return;

  unsigned NumExceptions = Proto->getNumExceptions();
  EHFilterScope *Filter = EHStack.pushFilter(NumExceptions);

  for (unsigned I = 0; I != NumExceptions; ++I) {
    QualType Ty = Proto->getExceptionType(I);
    QualType ExceptType = Ty.getNonReferenceType().getUnqualifiedType();
    llvm::Value *EHType = CGM.GetAddrOfRTTIDescriptor(ExceptType,
                                                      /*ForEH=*/true);
    Filter->setFilter(I, EHType);
  }
}

/// Pop the filter scope pushed by EmitStartEHSpec, under the same
/// conditions it was pushed.
void CodeGenFunction::EmitEndEHSpec(const Decl *D) {
  if (!Exceptions)
    return;

  const FunctionDecl* FD = dyn_cast_or_null<FunctionDecl>(D);
  if (FD == 0)
    return;
  const FunctionProtoType *Proto = FD->getType()->getAs<FunctionProtoType>();
  if (Proto == 0)
    return;

  if (!Proto->hasExceptionSpec())
    return;

  EHStack.popFilter();
}

/// Emit a C++ try statement: push the catch scope, emit the body, then
/// emit the handlers.
void CodeGenFunction::EmitCXXTryStmt(const CXXTryStmt &S) {
  EnterCXXTryStmt(S);
  EmitStmt(S.getTryBlock());
  ExitCXXTryStmt(S);
}

/// Push a catch scope describing the handlers of a try statement.
void CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
  unsigned NumHandlers = S.getNumHandlers();
  EHCatchScope *CatchScope = EHStack.pushCatch(NumHandlers);

  for (unsigned I = 0; I != NumHandlers; ++I) {
    const CXXCatchStmt *C = S.getHandler(I);

    llvm::BasicBlock *Handler = createBasicBlock("catch");
    if (C->getExceptionDecl()) {
      // FIXME: Dropping the reference type on the type info makes it
      // impossible to correctly implement catch-by-reference
      // semantics for pointers.  Unfortunately, this is what all
      // existing compilers do, and it's not clear that the standard
      // personality routine is capable of doing this right.  See C++ DR 388:
      //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#388
      QualType CaughtType = C->getCaughtType();
      CaughtType = CaughtType.getNonReferenceType().getUnqualifiedType();

      llvm::Value *TypeInfo = 0;
      if (CaughtType->isObjCObjectPointerType())
        TypeInfo = CGM.getObjCRuntime().GetEHType(CaughtType);
      else
        TypeInfo = CGM.GetAddrOfRTTIDescriptor(CaughtType, /*ForEH=*/true);
      CatchScope->setHandler(I, TypeInfo, Handler);
    } else {
      // No exception decl indicates '...', a catch-all.
      CatchScope->setCatchAllHandler(I, Handler);
    }
  }
}

/// Check whether this is a non-EH scope, i.e. a scope which doesn't
/// affect exception handling.  Currently, the only non-EH scopes are
/// normal-only cleanup scopes.
static bool isNonEHScope(const EHScope &S) {
  switch (S.getKind()) {
  case EHScope::Cleanup:
    return !cast<EHCleanupScope>(S).isEHCleanup();
  case EHScope::Filter:
  case EHScope::Catch:
  case EHScope::Terminate:
    return false;
  }

  // Suppress warning.
  return false;
}

/// Return (building and caching if needed) the landing pad that invokes
/// emitted at the current point of the function should unwind to.
llvm::BasicBlock *CodeGenFunction::getInvokeDestImpl() {
  assert(EHStack.requiresLandingPad());
  assert(!EHStack.empty());

  if (!Exceptions)
    return 0;

  // Check the innermost scope for a cached landing pad.  If this is
  // a non-EH cleanup, we'll check enclosing scopes in EmitLandingPad.
  llvm::BasicBlock *LP = EHStack.begin()->getCachedLandingPad();
  if (LP) return LP;

  // Build the landing pad for this scope.
  LP = EmitLandingPad();
  assert(LP);

  // Cache the landing pad on the innermost scope.  If this is a
  // non-EH scope, cache the landing pad on the enclosing scope, too.
  for (EHScopeStack::iterator ir = EHStack.begin(); true; ++ir) {
    ir->setCachedLandingPad(LP);
    if (!isNonEHScope(*ir)) break;
  }

  return LP;
}

llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
  assert(EHStack.requiresLandingPad());

  // This function contains a hack to work around a design flaw in
  // LLVM's EH IR which breaks semantics after inlining.  This same
  // hack is implemented in llvm-gcc.
  //
  // The LLVM EH abstraction is basically a thin veneer over the
  // traditional GCC zero-cost design: for each range of instructions
  // in the function, there is (at most) one "landing pad" with an
  // associated chain of EH actions.  A language-specific personality
  // function interprets this chain of actions and (1) decides whether
  // or not to resume execution at the landing pad and (2) if so,
  // provides an integer indicating why it's stopping.  In LLVM IR,
  // the association of a landing pad with a range of instructions is
  // achieved via an invoke instruction, the chain of actions becomes
  // the arguments to the @llvm.eh.selector call, and the selector
  // call returns the integer indicator.  Other than the required
  // presence of two intrinsic function calls in the landing pad,
  // the IR exactly describes the layout of the output code.
  //
  // A principal advantage of this design is that it is completely
  // language-agnostic; in theory, the LLVM optimizers can treat
  // landing pads neutrally, and targets need only know how to lower
  // the intrinsics to have a functioning exceptions system (assuming
  // that platform exceptions follow something approximately like the
  // GCC design).  Unfortunately, landing pads cannot be combined in a
  // language-agnostic way: given selectors A and B, there is no way
  // to make a single landing pad which faithfully represents the
  // semantics of propagating an exception first through A, then
  // through B, without knowing how the personality will interpret the
  // (lowered form of the) selectors.  This means that inlining has no
  // choice but to crudely chain invokes (i.e., to ignore invokes in
  // the inlined function, but to turn all unwindable calls into
  // invokes), which is only semantically valid if every unwind stops
  // at every landing pad.
  //
  // Therefore, the invoke-inline hack is to guarantee that every
  // landing pad has a catch-all.
  const bool UseInvokeInlineHack = true;

  for (EHScopeStack::iterator ir = EHStack.begin(); ; ) {
    assert(ir != EHStack.end() &&
           "stack requiring landing pad is nothing but non-EH scopes?");

    // If this is a terminate scope, just use the singleton terminate
    // landing pad.
    if (isa<EHTerminateScope>(*ir))
      return getTerminateLandingPad();

    // If this isn't an EH scope, iterate; otherwise break out.
    if (!isNonEHScope(*ir)) break;
    ++ir;

    // We haven't checked this scope for a cached landing pad yet.
    if (llvm::BasicBlock *LP = ir->getCachedLandingPad())
      return LP;
  }

  // Save the current IR generation state.
  CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

  // NOTE(review): 'CGF' is not a declared name inside this
  // CodeGenFunction member (other members here call CGM./getLangOptions()
  // directly); 'CGF.CGM.getLangOptions()' looks like it should be
  // 'CGM.getLangOptions()' — confirm against the class definition.
  const EHPersonality &Personality =
    EHPersonality::get(CGF.CGM.getLangOptions());

  // Create and configure the landing pad.
  llvm::BasicBlock *LP = createBasicBlock("lpad");
  EmitBlock(LP);

  // Save the exception pointer.  It's safe to use a single exception
  // pointer per function because EH cleanups can never have nested
  // try/catches.
  llvm::CallInst *Exn =
    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_exception), "exn");
  Exn->setDoesNotThrow();
  Builder.CreateStore(Exn, getExceptionSlot());

  // Build the selector arguments.
  llvm::SmallVector<llvm::Value*, 8> EHSelector;
  EHSelector.push_back(Exn);
  EHSelector.push_back(getOpaquePersonalityFn(CGM, Personality));

  // Accumulate all the handlers in scope.
  llvm::DenseMap<llvm::Value*, UnwindDest> EHHandlers;
  UnwindDest CatchAll;
  bool HasEHCleanup = false;
  bool HasEHFilter = false;
  llvm::SmallVector<llvm::Value*, 8> EHFilters;
  for (EHScopeStack::iterator I = EHStack.begin(), E = EHStack.end();
       I != E; ++I) {

    switch (I->getKind()) {
    case EHScope::Cleanup:
      if (!HasEHCleanup)
        HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup();
      // We otherwise don't care about cleanups.
      continue;

    case EHScope::Filter: {
      assert(I.next() == EHStack.end() && "EH filter is not end of EH stack");
      assert(!CatchAll.isValid() && "EH filter reached after catch-all");

      // Filter scopes get added to the selector in weird ways.
      EHFilterScope &Filter = cast<EHFilterScope>(*I);
      HasEHFilter = true;

      // Add all the filter values which we aren't already explicitly
      // catching.
      for (unsigned I = 0, E = Filter.getNumFilters(); I != E; ++I) {
        llvm::Value *FV = Filter.getFilter(I);
        if (!EHHandlers.count(FV))
          EHFilters.push_back(FV);
      }
      goto done;
    }

    case EHScope::Terminate:
      // Terminate scopes are basically catch-alls.
      assert(!CatchAll.isValid());
      CatchAll = UnwindDest(getTerminateHandler(),
                            EHStack.getEnclosingEHCleanup(I),
                            cast<EHTerminateScope>(*I).getDestIndex());
      goto done;

    case EHScope::Catch:
      break;
    }

    EHCatchScope &Catch = cast<EHCatchScope>(*I);
    for (unsigned HI = 0, HE = Catch.getNumHandlers(); HI != HE; ++HI) {
      EHCatchScope::Handler Handler = Catch.getHandler(HI);

      // Catch-all.  We should only have one of these per catch.
      if (!Handler.Type) {
        assert(!CatchAll.isValid());
        CatchAll = UnwindDest(Handler.Block,
                              EHStack.getEnclosingEHCleanup(I),
                              Handler.Index);
        continue;
      }

      // Check whether we already have a handler for this type.
      UnwindDest &Dest = EHHandlers[Handler.Type];
      if (Dest.isValid()) continue;

      EHSelector.push_back(Handler.Type);
      Dest = UnwindDest(Handler.Block,
                        EHStack.getEnclosingEHCleanup(I),
                        Handler.Index);
    }

    // Stop if we found a catch-all.
    if (CatchAll.isValid()) break;
  }

 done:
  unsigned LastToEmitInLoop = EHSelector.size();

  // If we have a catch-all, add null to the selector.
  if (CatchAll.isValid()) {
    // NOTE(review): 'getCatchAllValue(CGF)' — here and twice below —
    // passes an undeclared 'CGF'; inside a member function this likely
    // should be 'getCatchAllValue(*this)'/'getCleanupValue(*this)'.
    // Confirm before relying on this code compiling.
    EHSelector.push_back(getCatchAllValue(CGF));

  // If we have an EH filter, we need to add those handlers in the
  // right place in the selector, which is to say, at the end.
  } else if (HasEHFilter) {
    // Create a filter expression: an integer constant saying how many
    // filters there are (+1 to avoid ambiguity with 0 for cleanup),
    // followed by the filter types.  The personality routine only
    // lands here if the filter doesn't match.
    EHSelector.push_back(llvm::ConstantInt::get(Builder.getInt32Ty(),
                                                EHFilters.size() + 1));
    EHSelector.append(EHFilters.begin(), EHFilters.end());

    // Also check whether we need a cleanup.
    if (UseInvokeInlineHack || HasEHCleanup)
      EHSelector.push_back(UseInvokeInlineHack
                           ? getCatchAllValue(CGF)
                           : getCleanupValue(CGF));

  // Otherwise, signal that we at least have cleanups.
  } else if (UseInvokeInlineHack || HasEHCleanup) {
    EHSelector.push_back(UseInvokeInlineHack
                         ? getCatchAllValue(CGF)
                         : getCleanupValue(CGF));
  } else {
    assert(LastToEmitInLoop > 2);
    LastToEmitInLoop--;
  }

  assert(EHSelector.size() >= 3 && "selector call has only two arguments!");

  // Tell the backend how to generate the landing pad.
  llvm::CallInst *Selection =
    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_selector),
                       EHSelector.begin(), EHSelector.end(), "eh.selector");
  Selection->setDoesNotThrow();

  // Select the right handler.
  llvm::Value *llvm_eh_typeid_for =
    CGM.getIntrinsic(llvm::Intrinsic::eh_typeid_for);

  // The results of llvm_eh_typeid_for aren't reliable --- at least
  // not locally --- so we basically have to do this as an 'if' chain.
  // We walk through the first N-1 catch clauses, testing and chaining,
  // and then fall into the final clause (which is either a cleanup, a
  // filter (possibly with a cleanup), a catch-all, or another catch).
  for (unsigned I = 2; I != LastToEmitInLoop; ++I) {
    llvm::Value *Type = EHSelector[I];
    UnwindDest Dest = EHHandlers[Type];
    assert(Dest.isValid() && "no handler entry for value in selector?");

    // Figure out where to branch on a match.  As a debug code-size
    // optimization, if the scope depth matches the innermost cleanup,
    // we branch directly to the catch handler.
    llvm::BasicBlock *Match = Dest.getBlock();
    bool MatchNeedsCleanup =
      Dest.getScopeDepth() != EHStack.getInnermostEHCleanup();
    if (MatchNeedsCleanup)
      Match = createBasicBlock("eh.match");

    llvm::BasicBlock *Next = createBasicBlock("eh.next");

    // Check whether the exception matches.
986 llvm::CallInst *Id 987 = Builder.CreateCall(llvm_eh_typeid_for, 988 Builder.CreateBitCast(Type, CGM.PtrToInt8Ty)); 989 Id->setDoesNotThrow(); 990 Builder.CreateCondBr(Builder.CreateICmpEQ(Selection, Id), 991 Match, Next); 992 993 // Emit match code if necessary. 994 if (MatchNeedsCleanup) { 995 EmitBlock(Match); 996 EmitBranchThroughEHCleanup(Dest); 997 } 998 999 // Continue to the next match. 1000 EmitBlock(Next); 1001 } 1002 1003 // Emit the final case in the selector. 1004 // This might be a catch-all.... 1005 if (CatchAll.isValid()) { 1006 assert(isa<llvm::ConstantPointerNull>(EHSelector.back())); 1007 EmitBranchThroughEHCleanup(CatchAll); 1008 1009 // ...or an EH filter... 1010 } else if (HasEHFilter) { 1011 llvm::Value *SavedSelection = Selection; 1012 1013 // First, unwind out to the outermost scope if necessary. 1014 if (EHStack.hasEHCleanups()) { 1015 // The end here might not dominate the beginning, so we might need to 1016 // save the selector if we need it. 1017 llvm::AllocaInst *SelectorVar = 0; 1018 if (HasEHCleanup) { 1019 SelectorVar = CreateTempAlloca(Builder.getInt32Ty(), "selector.var"); 1020 Builder.CreateStore(Selection, SelectorVar); 1021 } 1022 1023 llvm::BasicBlock *CleanupContBB = createBasicBlock("ehspec.cleanup.cont"); 1024 EmitBranchThroughEHCleanup(UnwindDest(CleanupContBB, EHStack.stable_end(), 1025 EHStack.getNextEHDestIndex())); 1026 EmitBlock(CleanupContBB); 1027 1028 if (HasEHCleanup) 1029 SavedSelection = Builder.CreateLoad(SelectorVar, "ehspec.saved-selector"); 1030 } 1031 1032 // If there was a cleanup, we'll need to actually check whether we 1033 // landed here because the filter triggered. 
1034 if (UseInvokeInlineHack || HasEHCleanup) { 1035 llvm::BasicBlock *RethrowBB = createBasicBlock("cleanup"); 1036 llvm::BasicBlock *UnexpectedBB = createBasicBlock("ehspec.unexpected"); 1037 1038 llvm::Constant *Zero = llvm::ConstantInt::get(Builder.getInt32Ty(), 0); 1039 llvm::Value *FailsFilter = 1040 Builder.CreateICmpSLT(SavedSelection, Zero, "ehspec.fails"); 1041 Builder.CreateCondBr(FailsFilter, UnexpectedBB, RethrowBB); 1042 1043 // The rethrow block is where we land if this was a cleanup. 1044 // TODO: can this be _Unwind_Resume if the InvokeInlineHack is off? 1045 EmitBlock(RethrowBB); 1046 Builder.CreateCall(getUnwindResumeOrRethrowFn(), 1047 Builder.CreateLoad(getExceptionSlot())) 1048 ->setDoesNotReturn(); 1049 Builder.CreateUnreachable(); 1050 1051 EmitBlock(UnexpectedBB); 1052 } 1053 1054 // Call __cxa_call_unexpected. This doesn't need to be an invoke 1055 // because __cxa_call_unexpected magically filters exceptions 1056 // according to the last landing pad the exception was thrown 1057 // into. Seriously. 1058 Builder.CreateCall(getUnexpectedFn(*this), 1059 Builder.CreateLoad(getExceptionSlot())) 1060 ->setDoesNotReturn(); 1061 Builder.CreateUnreachable(); 1062 1063 // ...or a normal catch handler... 1064 } else if (!UseInvokeInlineHack && !HasEHCleanup) { 1065 llvm::Value *Type = EHSelector.back(); 1066 EmitBranchThroughEHCleanup(EHHandlers[Type]); 1067 1068 // ...or a cleanup. 1069 } else { 1070 EmitBranchThroughEHCleanup(getRethrowDest()); 1071 } 1072 1073 // Restore the old IR generation state. 1074 Builder.restoreIP(SavedIP); 1075 1076 return LP; 1077} 1078 1079namespace { 1080 /// A cleanup to call __cxa_end_catch. In many cases, the caught 1081 /// exception type lets us state definitively that the thrown exception 1082 /// type does not have a destructor. In particular: 1083 /// - Catch-alls tell us nothing, so we have to conservatively 1084 /// assume that the thrown exception might have a destructor. 
1085 /// - Catches by reference behave according to their base types. 1086 /// - Catches of non-record types will only trigger for exceptions 1087 /// of non-record types, which never have destructors. 1088 /// - Catches of record types can trigger for arbitrary subclasses 1089 /// of the caught type, so we have to assume the actual thrown 1090 /// exception type might have a throwing destructor, even if the 1091 /// caught type's destructor is trivial or nothrow. 1092 struct CallEndCatch : EHScopeStack::Cleanup { 1093 CallEndCatch(bool MightThrow) : MightThrow(MightThrow) {} 1094 bool MightThrow; 1095 1096 void Emit(CodeGenFunction &CGF, bool IsForEH) { 1097 if (!MightThrow) { 1098 CGF.Builder.CreateCall(getEndCatchFn(CGF))->setDoesNotThrow(); 1099 return; 1100 } 1101 1102 CGF.EmitCallOrInvoke(getEndCatchFn(CGF), 0, 0); 1103 } 1104 }; 1105} 1106 1107/// Emits a call to __cxa_begin_catch and enters a cleanup to call 1108/// __cxa_end_catch. 1109/// 1110/// \param EndMightThrow - true if __cxa_end_catch might throw 1111static llvm::Value *CallBeginCatch(CodeGenFunction &CGF, 1112 llvm::Value *Exn, 1113 bool EndMightThrow) { 1114 llvm::CallInst *Call = CGF.Builder.CreateCall(getBeginCatchFn(CGF), Exn); 1115 Call->setDoesNotThrow(); 1116 1117 CGF.EHStack.pushCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow); 1118 1119 return Call; 1120} 1121 1122/// A "special initializer" callback for initializing a catch 1123/// parameter during catch initialization. 1124static void InitCatchParam(CodeGenFunction &CGF, 1125 const VarDecl &CatchParam, 1126 llvm::Value *ParamAddr) { 1127 // Load the exception from where the landing pad saved it. 
  llvm::Value *Exn = CGF.Builder.CreateLoad(CGF.getExceptionSlot(), "exn");

  CanQualType CatchType =
    CGF.CGM.getContext().getCanonicalType(CatchParam.getType());
  const llvm::Type *LLVMCatchTy = CGF.ConvertTypeForMem(CatchType);

  // If we're catching by reference, we can just cast the object
  // pointer to the appropriate pointer.
  if (isa<ReferenceType>(CatchType)) {
    QualType CaughtType = cast<ReferenceType>(CatchType)->getPointeeType();
    // Only record types can have a destructor, so only they can make
    // __cxa_end_catch throw.
    bool EndCatchMightThrow = CaughtType->isRecordType();

    // __cxa_begin_catch returns the adjusted object pointer.
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, EndCatchMightThrow);

    // We have no way to tell the personality function that we're
    // catching by reference, so if we're catching a pointer,
    // __cxa_begin_catch will actually return that pointer by value.
    if (const PointerType *PT = dyn_cast<PointerType>(CaughtType)) {
      QualType PointeeType = PT->getPointeeType();

      // When catching by reference, generally we should just ignore
      // this by-value pointer and use the exception object instead.
      if (!PointeeType->isRecordType()) {

        // Exn points to the struct _Unwind_Exception header, which
        // we have to skip past in order to reach the exception data.
        unsigned HeaderSize =
          CGF.CGM.getTargetCodeGenInfo().getSizeOfUnwindException();
        AdjustedExn = CGF.Builder.CreateConstGEP1_32(Exn, HeaderSize);

      // However, if we're catching a pointer-to-record type that won't
      // work, because the personality function might have adjusted
      // the pointer.  There's actually no way for us to fully satisfy
      // the language/ABI contract here:  we can't use Exn because it
      // might have the wrong adjustment, but we can't use the by-value
      // pointer because it's off by a level of abstraction.
      //
      // The current solution is to dump the adjusted pointer into an
      // alloca, which breaks language semantics (because changing the
      // pointer doesn't change the exception) but at least works.
      // The better solution would be to filter out non-exact matches
      // and rethrow them, but this is tricky because the rethrow
      // really needs to be catchable by other sites at this landing
      // pad.  The best solution is to fix the personality function.
      } else {
        // Pull the pointer for the reference type off.
        const llvm::Type *PtrTy =
          cast<llvm::PointerType>(LLVMCatchTy)->getElementType();

        // Create the temporary and write the adjusted pointer into it.
        llvm::Value *ExnPtrTmp = CGF.CreateTempAlloca(PtrTy, "exn.byref.tmp");
        llvm::Value *Casted = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);
        CGF.Builder.CreateStore(Casted, ExnPtrTmp);

        // Bind the reference to the temporary.
        AdjustedExn = ExnPtrTmp;
      }
    }

    llvm::Value *ExnCast =
      CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.byref");
    CGF.Builder.CreateStore(ExnCast, ParamAddr);
    return;
  }

  // Non-aggregates (plus complexes).
  bool IsComplex = false;
  if (!CGF.hasAggregateLLVMType(CatchType) ||
      (IsComplex = CatchType->isAnyComplexType())) {
    // Scalars and complexes never need a destructor, so end-catch
    // cannot throw.
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, false);

    // If the catch type is a pointer type, __cxa_begin_catch returns
    // the pointer by value.
    if (CatchType->hasPointerRepresentation()) {
      llvm::Value *CastExn =
        CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.casted");
      CGF.Builder.CreateStore(CastExn, ParamAddr);
      return;
    }

    // Otherwise, it returns a pointer into the exception object.

    const llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok
    llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);

    if (IsComplex) {
      CGF.StoreComplexToAddr(CGF.LoadComplexFromAddr(Cast, /*volatile*/ false),
                             ParamAddr, /*volatile*/ false);
    } else {
      unsigned Alignment =
        CGF.getContext().getDeclAlign(&CatchParam).getQuantity();
      llvm::Value *ExnLoad = CGF.Builder.CreateLoad(Cast, "exn.scalar");
      CGF.EmitStoreOfScalar(ExnLoad, ParamAddr, /*volatile*/ false, Alignment,
                            CatchType);
    }
    return;
  }

  // FIXME: this *really* needs to be done via a proper, Sema-emitted
  // initializer expression.

  CXXRecordDecl *RD = CatchType.getTypePtr()->getAsCXXRecordDecl();
  assert(RD && "aggregate catch type was not a record!");

  const llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok

  // Trivially-copyable aggregates can be copied with a memcpy-style
  // aggregate copy straight out of the exception object.
  if (RD->hasTrivialCopyConstructor()) {
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, true);
    llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);
    CGF.EmitAggregateCopy(ParamAddr, Cast, CatchType);
    return;
  }

  // We have to call __cxa_get_exception_ptr to get the adjusted
  // pointer before copying.
  llvm::CallInst *AdjustedExn =
    CGF.Builder.CreateCall(getGetExceptionPtrFn(CGF), Exn);
  AdjustedExn->setDoesNotThrow();
  llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);

  CXXConstructorDecl *CD = RD->getCopyConstructor(CGF.getContext(), 0);
  assert(CD && "record has no copy constructor!");
  llvm::Value *CopyCtor = CGF.CGM.GetAddrOfCXXConstructor(CD, Ctor_Complete);

  // Build the argument list for the copy constructor: 'this' is the
  // catch variable, the single source argument is the exception data.
  CallArgList CallArgs;
  CallArgs.push_back(std::make_pair(RValue::get(ParamAddr),
                                    CD->getThisType(CGF.getContext())));
  CallArgs.push_back(std::make_pair(RValue::get(Cast),
                                    CD->getParamDecl(0)->getType()));

  const FunctionProtoType *FPT
    = CD->getType()->getAs<FunctionProtoType>();

  // Call the copy ctor in a terminate scope: if the copy constructor
  // itself throws, we must call std::terminate.
  CGF.EHStack.pushTerminate();
  CGF.EmitCall(CGF.CGM.getTypes().getFunctionInfo(CallArgs, FPT),
               CopyCtor, ReturnValueSlot(), CallArgs, CD);
  CGF.EHStack.popTerminate();

  // Finally we can call __cxa_begin_catch.
  CallBeginCatch(CGF, Exn, true);
}

/// Begins a catch statement by initializing the catch variable and
/// calling __cxa_begin_catch.
static void BeginCatch(CodeGenFunction &CGF,
                       const CXXCatchStmt *S) {
  // We have to be very careful with the ordering of cleanups here:
  //   C++ [except.throw]p4:
  //     The destruction [of the exception temporary] occurs
  //     immediately after the destruction of the object declared in
  //     the exception-declaration in the handler.
  //
  // So the precise ordering is:
  //   1.  Construct catch variable.
  //   2.  __cxa_begin_catch
  //   3.  Enter __cxa_end_catch cleanup
  //   4.  Enter dtor cleanup
  //
  // We do this by initializing the exception variable with a
  // "special initializer", InitCatchParam.
  //   Delegation sequence:
  //    - ExitCXXTryStmt opens a RunCleanupsScope
  //    - EmitLocalBlockVarDecl creates the variable and debug info
  //      - InitCatchParam initializes the variable from the exception
  //      - CallBeginCatch calls __cxa_begin_catch
  //      - CallBeginCatch enters the __cxa_end_catch cleanup
  //    - EmitLocalBlockVarDecl enters the variable destructor cleanup
  //   - EmitCXXTryStmt emits the code for the catch body
  //   - EmitCXXTryStmt closes the RunCleanupsScope

  VarDecl *CatchParam = S->getExceptionDecl();
  // A catch-all (or a catch with no named declaration) still needs
  // the begin/end-catch bracket, just no variable initialization.
  if (!CatchParam) {
    llvm::Value *Exn = CGF.Builder.CreateLoad(CGF.getExceptionSlot(), "exn");
    CallBeginCatch(CGF, Exn, true);
    return;
  }

  // Emit the local.
  CGF.EmitAutoVarDecl(*CatchParam, &InitCatchParam);
}

namespace {
  /// A cleanup which calls __cxa_rethrow; used for the implicit
  /// rethrow at the end of a constructor/destructor function-try-block
  /// handler.
  struct CallRethrow : EHScopeStack::Cleanup {
    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      CGF.EmitCallOrInvoke(getReThrowFn(CGF), 0, 0);
    }
  };
}

void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
  unsigned NumHandlers = S.getNumHandlers();
  EHCatchScope &CatchScope = cast<EHCatchScope>(*EHStack.begin());
  assert(CatchScope.getNumHandlers() == NumHandlers);

  // Copy the handler blocks off before we pop the EH stack.  Emitting
  // the handlers might scribble on this memory.
  llvm::SmallVector<EHCatchScope::Handler, 8> Handlers(NumHandlers);
  memcpy(Handlers.data(), CatchScope.begin(),
         NumHandlers * sizeof(EHCatchScope::Handler));
  EHStack.popCatch();

  // The fall-through block.
  llvm::BasicBlock *ContBB = createBasicBlock("try.cont");

  // We just emitted the body of the try; jump to the continue block.
  if (HaveInsertPoint())
    Builder.CreateBr(ContBB);

  // Determine if we need an implicit rethrow for all these catch handlers.
  // Per [except.handle]p9/p10, flowing off the end of a handler of a
  // ctor/dtor function-try-block rethrows the exception.
  bool ImplicitRethrow = false;
  if (IsFnTryBlock)
    ImplicitRethrow = isa<CXXDestructorDecl>(CurCodeDecl) ||
                      isa<CXXConstructorDecl>(CurCodeDecl);

  for (unsigned I = 0; I != NumHandlers; ++I) {
    llvm::BasicBlock *CatchBlock = Handlers[I].Block;
    EmitBlock(CatchBlock);

    // Catch the exception if this isn't a catch-all.
    const CXXCatchStmt *C = S.getHandler(I);

    // Enter a cleanup scope, including the catch variable and the
    // end-catch.
    RunCleanupsScope CatchScope(*this);

    // Initialize the catch variable and set up the cleanups.
    BeginCatch(*this, C);

    // If there's an implicit rethrow, push a normal "cleanup" to call
    // __cxa_rethrow.  This needs to happen before __cxa_end_catch is
    // called, and so it is pushed after BeginCatch.
    if (ImplicitRethrow)
      EHStack.pushCleanup<CallRethrow>(NormalCleanup);

    // Perform the body of the catch.
    EmitStmt(C->getHandlerBlock());

    // Fall out through the catch cleanups.
    CatchScope.ForceCleanup();

    // Branch out of the try.
    if (HaveInsertPoint())
      Builder.CreateBr(ContBB);
  }

  EmitBlock(ContBB);
}

namespace {
  /// A cleanup for a @finally block: conditionally calls the given
  /// end-catch function, depending on whether the finally is being
  /// run for an exceptional edge (ForEHVar is an i1 flag variable).
  struct CallEndCatchForFinally : EHScopeStack::Cleanup {
    llvm::Value *ForEHVar;
    llvm::Value *EndCatchFn;
    CallEndCatchForFinally(llvm::Value *ForEHVar, llvm::Value *EndCatchFn)
      : ForEHVar(ForEHVar), EndCatchFn(EndCatchFn) {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      llvm::BasicBlock *EndCatchBB = CGF.createBasicBlock("finally.endcatch");
      llvm::BasicBlock *CleanupContBB =
        CGF.createBasicBlock("finally.cleanup.cont");

      // Only call the end-catch function if we entered on an EH edge.
      llvm::Value *ShouldEndCatch =
        CGF.Builder.CreateLoad(ForEHVar, "finally.endcatch");
      CGF.Builder.CreateCondBr(ShouldEndCatch, EndCatchBB, CleanupContBB);
      CGF.EmitBlock(EndCatchBB);
      CGF.EmitCallOrInvoke(EndCatchFn, 0, 0); // catch-all, so might throw
      CGF.EmitBlock(CleanupContBB);
    }
  };

  /// A cleanup which emits the body of a @finally block, then — if
  /// the finally was reached on an exceptional edge — rethrows the
  /// saved exception.
  struct PerformFinally : EHScopeStack::Cleanup {
    const Stmt *Body;
    llvm::Value *ForEHVar;
    llvm::Value *EndCatchFn;
    llvm::Value *RethrowFn;
    llvm::Value *SavedExnVar;

    PerformFinally(const Stmt *Body, llvm::Value *ForEHVar,
                   llvm::Value *EndCatchFn,
                   llvm::Value *RethrowFn, llvm::Value *SavedExnVar)
      : Body(Body), ForEHVar(ForEHVar), EndCatchFn(EndCatchFn),
        RethrowFn(RethrowFn), SavedExnVar(SavedExnVar) {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      // Enter a cleanup to call the end-catch function if one was provided.
      if (EndCatchFn)
        CGF.EHStack.pushCleanup<CallEndCatchForFinally>(NormalAndEHCleanup,
                                                        ForEHVar, EndCatchFn);

      // Save the current cleanup destination in case there are
      // cleanups in the finally block.
      llvm::Value *SavedCleanupDest =
        CGF.Builder.CreateLoad(CGF.getNormalCleanupDestSlot(),
                               "cleanup.dest.saved");

      // Emit the finally block.
      CGF.EmitStmt(Body);

      // If the end of the finally is reachable, check whether this was
      // for EH.  If so, rethrow.
      if (CGF.HaveInsertPoint()) {
        llvm::BasicBlock *RethrowBB = CGF.createBasicBlock("finally.rethrow");
        llvm::BasicBlock *ContBB = CGF.createBasicBlock("finally.cont");

        llvm::Value *ShouldRethrow =
          CGF.Builder.CreateLoad(ForEHVar, "finally.shouldthrow");
        CGF.Builder.CreateCondBr(ShouldRethrow, RethrowBB, ContBB);

        CGF.EmitBlock(RethrowBB);
        // The rethrow function may or may not take the exception
        // pointer; SavedExnVar is only set up when it does.
        if (SavedExnVar) {
          llvm::Value *Args[] = { CGF.Builder.CreateLoad(SavedExnVar) };
          CGF.EmitCallOrInvoke(RethrowFn, Args, Args+1);
        } else {
          CGF.EmitCallOrInvoke(RethrowFn, 0, 0);
        }
        CGF.Builder.CreateUnreachable();

        CGF.EmitBlock(ContBB);

        // Restore the cleanup destination.
        CGF.Builder.CreateStore(SavedCleanupDest,
                                CGF.getNormalCleanupDestSlot());
      }

      // Leave the end-catch cleanup.  As an optimization, pretend that
      // the fallthrough path was inaccessible; we've dynamically proven
      // that we're not in the EH case along that path.
      if (EndCatchFn) {
        CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();
        CGF.PopCleanupBlock();
        CGF.Builder.restoreIP(SavedIP);
      }

      // Now make sure we actually have an insertion point or the
      // cleanup gods will hate us.
      CGF.EnsureInsertPoint();
    }
  };
}

/// Enters a finally block for an implementation using zero-cost
/// exceptions.  This is mostly general, but hard-codes some
/// language/ABI-specific behavior in the catch-all sections.
1471CodeGenFunction::FinallyInfo 1472CodeGenFunction::EnterFinallyBlock(const Stmt *Body, 1473 llvm::Constant *BeginCatchFn, 1474 llvm::Constant *EndCatchFn, 1475 llvm::Constant *RethrowFn) { 1476 assert((BeginCatchFn != 0) == (EndCatchFn != 0) && 1477 "begin/end catch functions not paired"); 1478 assert(RethrowFn && "rethrow function is required"); 1479 1480 // The rethrow function has one of the following two types: 1481 // void (*)() 1482 // void (*)(void*) 1483 // In the latter case we need to pass it the exception object. 1484 // But we can't use the exception slot because the @finally might 1485 // have a landing pad (which would overwrite the exception slot). 1486 const llvm::FunctionType *RethrowFnTy = 1487 cast<llvm::FunctionType>( 1488 cast<llvm::PointerType>(RethrowFn->getType()) 1489 ->getElementType()); 1490 llvm::Value *SavedExnVar = 0; 1491 if (RethrowFnTy->getNumParams()) 1492 SavedExnVar = CreateTempAlloca(Builder.getInt8PtrTy(), "finally.exn"); 1493 1494 // A finally block is a statement which must be executed on any edge 1495 // out of a given scope. Unlike a cleanup, the finally block may 1496 // contain arbitrary control flow leading out of itself. In 1497 // addition, finally blocks should always be executed, even if there 1498 // are no catch handlers higher on the stack. Therefore, we 1499 // surround the protected scope with a combination of a normal 1500 // cleanup (to catch attempts to break out of the block via normal 1501 // control flow) and an EH catch-all (semantically "outside" any try 1502 // statement to which the finally block might have been attached). 1503 // The finally block itself is generated in the context of a cleanup 1504 // which conditionally leaves the catch-all. 1505 1506 FinallyInfo Info; 1507 1508 // Jump destination for performing the finally block on an exception 1509 // edge. We'll never actually reach this block, so unreachable is 1510 // fine. 
1511 JumpDest RethrowDest = getJumpDestInCurrentScope(getUnreachableBlock()); 1512 1513 // Whether the finally block is being executed for EH purposes. 1514 llvm::AllocaInst *ForEHVar = CreateTempAlloca(CGF.Builder.getInt1Ty(), 1515 "finally.for-eh"); 1516 InitTempAlloca(ForEHVar, llvm::ConstantInt::getFalse(getLLVMContext())); 1517 1518 // Enter a normal cleanup which will perform the @finally block. 1519 EHStack.pushCleanup<PerformFinally>(NormalCleanup, Body, 1520 ForEHVar, EndCatchFn, 1521 RethrowFn, SavedExnVar); 1522 1523 // Enter a catch-all scope. 1524 llvm::BasicBlock *CatchAllBB = createBasicBlock("finally.catchall"); 1525 CGBuilderTy::InsertPoint SavedIP = Builder.saveIP(); 1526 Builder.SetInsertPoint(CatchAllBB); 1527 1528 // If there's a begin-catch function, call it. 1529 if (BeginCatchFn) { 1530 Builder.CreateCall(BeginCatchFn, Builder.CreateLoad(getExceptionSlot())) 1531 ->setDoesNotThrow(); 1532 } 1533 1534 // If we need to remember the exception pointer to rethrow later, do so. 1535 if (SavedExnVar) { 1536 llvm::Value *SavedExn = Builder.CreateLoad(getExceptionSlot()); 1537 Builder.CreateStore(SavedExn, SavedExnVar); 1538 } 1539 1540 // Tell the finally block that we're in EH. 1541 Builder.CreateStore(llvm::ConstantInt::getTrue(getLLVMContext()), ForEHVar); 1542 1543 // Thread a jump through the finally cleanup. 1544 EmitBranchThroughCleanup(RethrowDest); 1545 1546 Builder.restoreIP(SavedIP); 1547 1548 EHCatchScope *CatchScope = EHStack.pushCatch(1); 1549 CatchScope->setCatchAllHandler(0, CatchAllBB); 1550 1551 return Info; 1552} 1553 1554void CodeGenFunction::ExitFinallyBlock(FinallyInfo &Info) { 1555 // Leave the finally catch-all. 1556 EHCatchScope &Catch = cast<EHCatchScope>(*EHStack.begin()); 1557 llvm::BasicBlock *CatchAllBB = Catch.getHandler(0).Block; 1558 EHStack.popCatch(); 1559 1560 // And leave the normal cleanup. 
1561 PopCleanupBlock(); 1562 1563 CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP(); 1564 EmitBlock(CatchAllBB, true); 1565 1566 Builder.restoreIP(SavedIP); 1567} 1568 1569llvm::BasicBlock *CodeGenFunction::getTerminateLandingPad() { 1570 if (TerminateLandingPad) 1571 return TerminateLandingPad; 1572 1573 CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP(); 1574 1575 // This will get inserted at the end of the function. 1576 TerminateLandingPad = createBasicBlock("terminate.lpad"); 1577 Builder.SetInsertPoint(TerminateLandingPad); 1578 1579 // Tell the backend that this is a landing pad. 1580 llvm::CallInst *Exn = 1581 Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_exception), "exn"); 1582 Exn->setDoesNotThrow(); 1583 1584 const EHPersonality &Personality = EHPersonality::get(CGM.getLangOptions()); 1585 1586 // Tell the backend what the exception table should be: 1587 // nothing but a catch-all. 1588 llvm::Value *Args[3] = { Exn, getOpaquePersonalityFn(CGM, Personality), 1589 getCatchAllValue(*this) }; 1590 Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_selector), 1591 Args, Args+3, "eh.selector") 1592 ->setDoesNotThrow(); 1593 1594 llvm::CallInst *TerminateCall = Builder.CreateCall(getTerminateFn(*this)); 1595 TerminateCall->setDoesNotReturn(); 1596 TerminateCall->setDoesNotThrow(); 1597 CGF.Builder.CreateUnreachable(); 1598 1599 // Restore the saved insertion state. 1600 Builder.restoreIP(SavedIP); 1601 1602 return TerminateLandingPad; 1603} 1604 1605llvm::BasicBlock *CodeGenFunction::getTerminateHandler() { 1606 if (TerminateHandler) 1607 return TerminateHandler; 1608 1609 CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP(); 1610 1611 // Set up the terminate handler. This block is inserted at the very 1612 // end of the function by FinishFunction. 
1613 TerminateHandler = createBasicBlock("terminate.handler"); 1614 Builder.SetInsertPoint(TerminateHandler); 1615 llvm::CallInst *TerminateCall = Builder.CreateCall(getTerminateFn(*this)); 1616 TerminateCall->setDoesNotReturn(); 1617 TerminateCall->setDoesNotThrow(); 1618 Builder.CreateUnreachable(); 1619 1620 // Restore the saved insertion state. 1621 Builder.restoreIP(SavedIP); 1622 1623 return TerminateHandler; 1624} 1625 1626CodeGenFunction::UnwindDest CodeGenFunction::getRethrowDest() { 1627 if (RethrowBlock.isValid()) return RethrowBlock; 1628 1629 CGBuilderTy::InsertPoint SavedIP = Builder.saveIP(); 1630 1631 // We emit a jump to a notional label at the outermost unwind state. 1632 llvm::BasicBlock *Unwind = createBasicBlock("eh.resume"); 1633 Builder.SetInsertPoint(Unwind); 1634 1635 const EHPersonality &Personality = EHPersonality::get(CGM.getLangOptions()); 1636 1637 // This can always be a call because we necessarily didn't find 1638 // anything on the EH stack which needs our help. 1639 llvm::StringRef RethrowName = Personality.getCatchallRethrowFnName(); 1640 llvm::Constant *RethrowFn; 1641 if (!RethrowName.empty()) 1642 RethrowFn = getCatchallRethrowFn(*this, RethrowName); 1643 else 1644 RethrowFn = getUnwindResumeOrRethrowFn(); 1645 1646 Builder.CreateCall(RethrowFn, Builder.CreateLoad(getExceptionSlot())) 1647 ->setDoesNotReturn(); 1648 Builder.CreateUnreachable(); 1649 1650 Builder.restoreIP(SavedIP); 1651 1652 RethrowBlock = UnwindDest(Unwind, EHStack.stable_end(), 0); 1653 return RethrowBlock; 1654} 1655 1656EHScopeStack::Cleanup::~Cleanup() { 1657 llvm_unreachable("Cleanup is indestructable"); 1658} 1659