// CGException.cpp revision f1549f66a8216a78112286e3978cea2c29d6334c
//===--- CGException.cpp - Emit LLVM Code for C++ exceptions --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ exception related code generation.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/StmtCXX.h"

#include "llvm/Intrinsics.h"
#include "llvm/Support/CallSite.h"

#include "CodeGenFunction.h"
#include "CGException.h"

using namespace clang;
using namespace CodeGen;

/// Push an entry of the given size onto this protected-scope stack.
/// Storage is carved out of an internally managed buffer that is filled
/// from the top down: StartOfData moves toward StartOfBuffer as entries
/// are pushed, so begin() (== StartOfData) is always the innermost scope.
char *EHScopeStack::allocate(size_t Size) {
  if (!StartOfBuffer) {
    // First allocation: start at 1K and double until the request fits.
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    // Not enough headroom below the live data: grow the buffer and copy
    // the existing entries to the top end of the new allocation so that
    // all previously returned offsets-from-the-end stay valid.
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

/// Returns the innermost EH cleanup scope enclosing (and including) the
/// given scope.  An EHCleanupScope caches its enclosing EH cleanup, so
/// the walk stops at the first cleanup scope it finds.
EHScopeStack::stable_iterator
EHScopeStack::getEnclosingEHCleanup(iterator it) const {
  assert(it != end());
  do {
    if (isa<EHCleanupScope>(*it)) {
      if (cast<EHCleanupScope>(*it).isEHCleanup())
        return stabilize(it);
      return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
    }
    ++it;
  } while (it != end());
  return stable_end();
}


/// Push a cleanup scope.  A null NormalEntry (resp. EHEntry) indicates a
/// cleanup that does not fire on normal (resp. EH) exits; the innermost-
/// cleanup links are only updated for the kinds of exit actually present.
void EHScopeStack::pushCleanup(llvm::BasicBlock *NormalEntry,
                               llvm::BasicBlock *NormalExit,
                               llvm::BasicBlock *EHEntry,
                               llvm::BasicBlock *EHExit) {
  char *Buffer = allocate(EHCleanupScope::getSize());
  new (Buffer) EHCleanupScope(BranchFixups.size(),
                              InnermostNormalCleanup,
                              InnermostEHCleanup,
                              NormalEntry, NormalExit, EHEntry, EHExit);
  if (NormalEntry)
    InnermostNormalCleanup = stable_begin();
  if (EHEntry)
    InnermostEHCleanup = stable_begin();
}

/// Pop the innermost scope, which must be a cleanup scope, restoring the
/// enclosing-cleanup links it saved when it was pushed.
void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
  StartOfData += EHCleanupScope::getSize();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

/// Push an exception-specification filter scope with room for NumFilters
/// type entries; the caller fills them in via EHFilterScope::setFilter.
EHFilterScope *EHScopeStack::pushFilter(unsigned NumFilters) {
  char *Buffer = allocate(EHFilterScope::getSizeForNumFilters(NumFilters));
  CatchDepth++;
  return new (Buffer) EHFilterScope(NumFilters);
}

/// Pop the innermost scope, which must be a filter scope.
void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when not empty");

  EHFilterScope &Filter = cast<EHFilterScope>(*begin());
  StartOfData += EHFilterScope::getSizeForNumFilters(Filter.getNumFilters());

  assert(CatchDepth > 0 && "mismatched filter push/pop");
  CatchDepth--;
}

/// Push a catch scope with room for NumHandlers handlers; the caller
/// fills them in via EHCatchScope::setHandler / setCatchAllHandler.
EHCatchScope *EHScopeStack::pushCatch(unsigned NumHandlers) {
  char *Buffer = allocate(EHCatchScope::getSizeForNumHandlers(NumHandlers));
  CatchDepth++;
  return new (Buffer) EHCatchScope(NumHandlers);
}

/// Push a terminate scope: any exception unwinding through it calls
/// std::terminate.
void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  CatchDepth++;
  new (Buffer) EHTerminateScope();
}

/// Remove any 'null' fixups on the stack.  However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place.  We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup;  otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == 0)
    BranchFixups.pop_back();
}

/// Mark any branch fixups targeting the given block as resolved (by
/// nulling their destination) and trim resolved fixups off the stack.
void EHScopeStack::resolveBranchFixups(llvm::BasicBlock *Dest) {
  assert(Dest && "null block passed to resolveBranchFixups");

  if (BranchFixups.empty()) return;
  assert(hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  for (unsigned I = 0, E = BranchFixups.size(); I != E; ++I)
    if (BranchFixups[I].Destination == Dest)
      BranchFixups[I].Destination = 0;

  popNullFixups();
}

static llvm::Constant *getAllocateExceptionFn(CodeGenFunction &CGF) {
  // void *__cxa_allocate_exception(size_t thrown_size);
  const llvm::Type *SizeTy = CGF.ConvertType(CGF.getContext().getSizeType());
  std::vector<const llvm::Type*> Args(1, SizeTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getInt8PtrTy(CGF.getLLVMContext()),
                            Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_allocate_exception");
}

static llvm::Constant *getFreeExceptionFn(CodeGenFunction &CGF) {
  // void __cxa_free_exception(void *thrown_exception);
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()),
                            Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_free_exception");
}

static llvm::Constant *getThrowFn(CodeGenFunction &CGF) {
  // void __cxa_throw(void *thrown_exception, std::type_info *tinfo,
  //                  void (*dest) (void *));

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(3, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()),
                            Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_throw");
}

static llvm::Constant *getReThrowFn(CodeGenFunction &CGF) {
  // void __cxa_rethrow();

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()), false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_rethrow");
}

static llvm::Constant *getGetExceptionPtrFn(CodeGenFunction &CGF) {
  // void *__cxa_get_exception_ptr(void*);
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(Int8PtrTy, Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_get_exception_ptr");
}

static llvm::Constant *getBeginCatchFn(CodeGenFunction &CGF) {
  // void *__cxa_begin_catch(void*);

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(Int8PtrTy, Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_begin_catch");
}

static llvm::Constant *getEndCatchFn(CodeGenFunction &CGF) {
  // void __cxa_end_catch();

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()), false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_end_catch");
}

static llvm::Constant *getUnexpectedFn(CodeGenFunction &CGF) {
  // void __cxa_call_unexpected(void *thrown_exception);

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()),
                            Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_call_unexpected");
}

llvm::Constant *CodeGenFunction::getUnwindResumeOrRethrowFn() {
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
  std::vector<const llvm::Type*> Args(1, Int8PtrTy);

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(getLLVMContext()), Args,
                            false);

  if (CGM.getLangOptions().SjLjExceptions)
    return CGM.CreateRuntimeFunction(FTy, "_Unwind_SjLj_Resume");
  return CGM.CreateRuntimeFunction(FTy, "_Unwind_Resume_or_Rethrow");
}

static llvm::Constant *getTerminateFn(CodeGenFunction &CGF) {
  // void __terminate();

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(CGF.getLLVMContext()), false);

  // In C++ call std::terminate; in other languages just abort.
  return CGF.CGM.CreateRuntimeFunction(FTy,
      CGF.CGM.getLangOptions().CPlusPlus ? "_ZSt9terminatev" : "abort");
}

static const char *getCPersonalityFn(CodeGenFunction &CGF) {
  return "__gcc_personality_v0";
}

static const char *getObjCPersonalityFn(CodeGenFunction &CGF) {
  if (CGF.CGM.getLangOptions().NeXTRuntime) {
    if (CGF.CGM.getLangOptions().ObjCNonFragileABI)
      return "__objc_personality_v0";
    else
      return getCPersonalityFn(CGF);
  } else {
    return "__gnu_objc_personality_v0";
  }
}

static const char *getCXXPersonalityFn(CodeGenFunction &CGF) {
  if (CGF.CGM.getLangOptions().SjLjExceptions)
    return "__gxx_personality_sj0";
  else
    return "__gxx_personality_v0";
}

/// Determines the personality function to use when both C++
/// and Objective-C exceptions are being caught.
308static const char *getObjCXXPersonalityFn(CodeGenFunction &CGF) { 309 // The ObjC personality defers to the C++ personality for non-ObjC 310 // handlers. Unlike the C++ case, we use the same personality 311 // function on targets using (backend-driven) SJLJ EH. 312 if (CGF.CGM.getLangOptions().NeXTRuntime) { 313 if (CGF.CGM.getLangOptions().ObjCNonFragileABI) 314 return "__objc_personality_v0"; 315 316 // In the fragile ABI, just use C++ exception handling and hope 317 // they're not doing crazy exception mixing. 318 else 319 return getCXXPersonalityFn(CGF); 320 } 321 322 // I'm pretty sure the GNU runtime doesn't support mixed EH. 323 // TODO: we don't necessarily need mixed EH here; remember what 324 // kind of exceptions we actually try to catch in this function. 325 CGF.CGM.ErrorUnsupported(CGF.CurCodeDecl, 326 "the GNU Objective C runtime does not support " 327 "catching C++ and Objective C exceptions in the " 328 "same function"); 329 // Use the C++ personality just to avoid returning null. 330 return getCXXPersonalityFn(CGF); 331} 332 333static llvm::Constant *getPersonalityFn(CodeGenFunction &CGF) { 334 const char *Name; 335 const LangOptions &Opts = CGF.CGM.getLangOptions(); 336 if (Opts.CPlusPlus && Opts.ObjC1) 337 Name = getObjCXXPersonalityFn(CGF); 338 else if (Opts.CPlusPlus) 339 Name = getCXXPersonalityFn(CGF); 340 else if (Opts.ObjC1) 341 Name = getObjCPersonalityFn(CGF); 342 else 343 Name = getCPersonalityFn(CGF); 344 345 llvm::Constant *Personality = 346 CGF.CGM.CreateRuntimeFunction(llvm::FunctionType::get( 347 llvm::Type::getInt32Ty( 348 CGF.CGM.getLLVMContext()), 349 true), 350 Name); 351 return llvm::ConstantExpr::getBitCast(Personality, CGF.CGM.PtrToInt8Ty); 352} 353 354/// Returns the value to inject into a selector to indicate the 355/// presence of a catch-all. 356static llvm::Constant *getCatchAllValue(CodeGenFunction &CGF) { 357 // Possibly we should use @llvm.eh.catch.all.value here. 
358 return llvm::ConstantPointerNull::get(CGF.CGM.PtrToInt8Ty); 359} 360 361/// Returns the value to inject into a selector to indicate the 362/// presence of a cleanup. 363static llvm::Constant *getCleanupValue(CodeGenFunction &CGF) { 364 return llvm::ConstantInt::get(CGF.Builder.getInt32Ty(), 0); 365} 366 367// Emits an exception expression into the given location. This 368// differs from EmitAnyExprToMem only in that, if a final copy-ctor 369// call is required, an exception within that copy ctor causes 370// std::terminate to be invoked. 371static void EmitAnyExprToExn(CodeGenFunction &CGF, const Expr *E, 372 llvm::Value *ExnLoc) { 373 // We want to release the allocated exception object if this 374 // expression throws. We do this by pushing an EH-only cleanup 375 // block which, furthermore, deactivates itself after the expression 376 // is complete. 377 llvm::AllocaInst *ShouldFreeVar = 378 CGF.CreateTempAlloca(llvm::Type::getInt1Ty(CGF.getLLVMContext()), 379 "should-free-exnobj.var"); 380 CGF.InitTempAlloca(ShouldFreeVar, 381 llvm::ConstantInt::getFalse(CGF.getLLVMContext())); 382 383 // A variable holding the exception pointer. This is necessary 384 // because the throw expression does not necessarily dominate the 385 // cleanup, for example if it appears in a conditional expression. 386 llvm::AllocaInst *ExnLocVar = 387 CGF.CreateTempAlloca(ExnLoc->getType(), "exnobj.var"); 388 389 // Make sure the exception object is cleaned up if there's an 390 // exception during initialization. 391 // FIXME: StmtExprs probably force this to include a non-EH 392 // handler. 
393 { 394 CodeGenFunction::CleanupBlock Cleanup(CGF, CodeGenFunction::EHCleanup); 395 llvm::BasicBlock *FreeBB = CGF.createBasicBlock("free-exnobj"); 396 llvm::BasicBlock *DoneBB = CGF.createBasicBlock("free-exnobj.done"); 397 398 llvm::Value *ShouldFree = CGF.Builder.CreateLoad(ShouldFreeVar, 399 "should-free-exnobj"); 400 CGF.Builder.CreateCondBr(ShouldFree, FreeBB, DoneBB); 401 CGF.EmitBlock(FreeBB); 402 llvm::Value *ExnLocLocal = CGF.Builder.CreateLoad(ExnLocVar, "exnobj"); 403 CGF.Builder.CreateCall(getFreeExceptionFn(CGF), ExnLocLocal) 404 ->setDoesNotThrow(); 405 CGF.EmitBlock(DoneBB); 406 } 407 EHScopeStack::stable_iterator Cleanup = CGF.EHStack.stable_begin(); 408 409 CGF.Builder.CreateStore(ExnLoc, ExnLocVar); 410 CGF.Builder.CreateStore(llvm::ConstantInt::getTrue(CGF.getLLVMContext()), 411 ShouldFreeVar); 412 413 // __cxa_allocate_exception returns a void*; we need to cast this 414 // to the appropriate type for the object. 415 const llvm::Type *Ty = CGF.ConvertType(E->getType())->getPointerTo(); 416 llvm::Value *TypedExnLoc = CGF.Builder.CreateBitCast(ExnLoc, Ty); 417 418 // FIXME: this isn't quite right! If there's a final unelided call 419 // to a copy constructor, then according to [except.terminate]p1 we 420 // must call std::terminate() if that constructor throws, because 421 // technically that copy occurs after the exception expression is 422 // evaluated but before the exception is caught. But the best way 423 // to handle that is to teach EmitAggExpr to do the final copy 424 // differently if it can't be elided. 425 CGF.EmitAnyExprToMem(E, TypedExnLoc, /*Volatile*/ false); 426 427 CGF.Builder.CreateStore(llvm::ConstantInt::getFalse(CGF.getLLVMContext()), 428 ShouldFreeVar); 429 430 // Technically, the exception object is like a temporary; it has to 431 // be cleaned up when its full-expression is complete. 
432 // Unfortunately, the AST represents full-expressions by creating a 433 // CXXExprWithTemporaries, which it only does when there are actually 434 // temporaries. 435 // 436 // If any cleanups have been added since we pushed ours, they must 437 // be from temporaries; this will get popped at the same time. 438 // Otherwise we need to pop ours off. FIXME: this is very brittle. 439 if (Cleanup == CGF.EHStack.stable_begin()) 440 CGF.PopCleanupBlock(); 441} 442 443llvm::Value *CodeGenFunction::getExceptionSlot() { 444 if (!ExceptionSlot) { 445 const llvm::Type *i8p = llvm::Type::getInt8PtrTy(getLLVMContext()); 446 ExceptionSlot = CreateTempAlloca(i8p, "exn.slot"); 447 } 448 return ExceptionSlot; 449} 450 451void CodeGenFunction::EmitCXXThrowExpr(const CXXThrowExpr *E) { 452 if (!E->getSubExpr()) { 453 if (getInvokeDest()) { 454 Builder.CreateInvoke(getReThrowFn(*this), 455 getUnreachableBlock(), 456 getInvokeDest()) 457 ->setDoesNotReturn(); 458 } else { 459 Builder.CreateCall(getReThrowFn(*this))->setDoesNotReturn(); 460 Builder.CreateUnreachable(); 461 } 462 463 // Clear the insertion point to indicate we are in unreachable code. 464 Builder.ClearInsertionPoint(); 465 return; 466 } 467 468 QualType ThrowType = E->getSubExpr()->getType(); 469 470 // Now allocate the exception object. 471 const llvm::Type *SizeTy = ConvertType(getContext().getSizeType()); 472 uint64_t TypeSize = getContext().getTypeSizeInChars(ThrowType).getQuantity(); 473 474 llvm::Constant *AllocExceptionFn = getAllocateExceptionFn(*this); 475 llvm::CallInst *ExceptionPtr = 476 Builder.CreateCall(AllocExceptionFn, 477 llvm::ConstantInt::get(SizeTy, TypeSize), 478 "exception"); 479 ExceptionPtr->setDoesNotThrow(); 480 481 EmitAnyExprToExn(*this, E->getSubExpr(), ExceptionPtr); 482 483 // Now throw the exception. 
484 const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext()); 485 llvm::Constant *TypeInfo = CGM.GetAddrOfRTTIDescriptor(ThrowType, true); 486 487 // The address of the destructor. If the exception type has a 488 // trivial destructor (or isn't a record), we just pass null. 489 llvm::Constant *Dtor = 0; 490 if (const RecordType *RecordTy = ThrowType->getAs<RecordType>()) { 491 CXXRecordDecl *Record = cast<CXXRecordDecl>(RecordTy->getDecl()); 492 if (!Record->hasTrivialDestructor()) { 493 CXXDestructorDecl *DtorD = Record->getDestructor(); 494 Dtor = CGM.GetAddrOfCXXDestructor(DtorD, Dtor_Complete); 495 Dtor = llvm::ConstantExpr::getBitCast(Dtor, Int8PtrTy); 496 } 497 } 498 if (!Dtor) Dtor = llvm::Constant::getNullValue(Int8PtrTy); 499 500 if (getInvokeDest()) { 501 llvm::InvokeInst *ThrowCall = 502 Builder.CreateInvoke3(getThrowFn(*this), 503 getUnreachableBlock(), getInvokeDest(), 504 ExceptionPtr, TypeInfo, Dtor); 505 ThrowCall->setDoesNotReturn(); 506 } else { 507 llvm::CallInst *ThrowCall = 508 Builder.CreateCall3(getThrowFn(*this), ExceptionPtr, TypeInfo, Dtor); 509 ThrowCall->setDoesNotReturn(); 510 Builder.CreateUnreachable(); 511 } 512 513 // Clear the insertion point to indicate we are in unreachable code. 514 Builder.ClearInsertionPoint(); 515 516 // FIXME: For now, emit a dummy basic block because expr emitters in generally 517 // are not ready to handle emitting expressions at unreachable points. 
518 EnsureInsertPoint(); 519} 520 521void CodeGenFunction::EmitStartEHSpec(const Decl *D) { 522 if (!Exceptions) 523 return; 524 525 const FunctionDecl* FD = dyn_cast_or_null<FunctionDecl>(D); 526 if (FD == 0) 527 return; 528 const FunctionProtoType *Proto = FD->getType()->getAs<FunctionProtoType>(); 529 if (Proto == 0) 530 return; 531 532 assert(!Proto->hasAnyExceptionSpec() && "function with parameter pack"); 533 534 if (!Proto->hasExceptionSpec()) 535 return; 536 537 unsigned NumExceptions = Proto->getNumExceptions(); 538 EHFilterScope *Filter = EHStack.pushFilter(NumExceptions); 539 540 for (unsigned I = 0; I != NumExceptions; ++I) { 541 QualType Ty = Proto->getExceptionType(I); 542 QualType ExceptType = Ty.getNonReferenceType().getUnqualifiedType(); 543 llvm::Value *EHType = CGM.GetAddrOfRTTIDescriptor(ExceptType, true); 544 Filter->setFilter(I, EHType); 545 } 546} 547 548void CodeGenFunction::EmitEndEHSpec(const Decl *D) { 549 if (!Exceptions) 550 return; 551 552 const FunctionDecl* FD = dyn_cast_or_null<FunctionDecl>(D); 553 if (FD == 0) 554 return; 555 const FunctionProtoType *Proto = FD->getType()->getAs<FunctionProtoType>(); 556 if (Proto == 0) 557 return; 558 559 if (!Proto->hasExceptionSpec()) 560 return; 561 562 EHStack.popFilter(); 563} 564 565void CodeGenFunction::EmitCXXTryStmt(const CXXTryStmt &S) { 566 CXXTryStmtInfo Info = EnterCXXTryStmt(S); 567 EmitStmt(S.getTryBlock()); 568 ExitCXXTryStmt(S, Info); 569} 570 571CodeGenFunction::CXXTryStmtInfo 572CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S) { 573 unsigned NumHandlers = S.getNumHandlers(); 574 EHCatchScope *CatchScope = EHStack.pushCatch(NumHandlers); 575 576 for (unsigned I = 0; I != NumHandlers; ++I) { 577 const CXXCatchStmt *C = S.getHandler(I); 578 579 llvm::BasicBlock *Handler = createBasicBlock("catch"); 580 if (C->getExceptionDecl()) { 581 // FIXME: Dropping the reference type on the type into makes it 582 // impossible to correctly implement catch-by-reference 583 // semantics 
for pointers. Unfortunately, this is what all 584 // existing compilers do, and it's not clear that the standard 585 // personality routine is capable of doing this right. See C++ DR 388: 586 // http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#388 587 QualType CaughtType = C->getCaughtType(); 588 CaughtType = CaughtType.getNonReferenceType().getUnqualifiedType(); 589 llvm::Value *TypeInfo = CGM.GetAddrOfRTTIDescriptor(CaughtType, true); 590 CatchScope->setHandler(I, TypeInfo, Handler); 591 } else { 592 // No exception decl indicates '...', a catch-all. 593 CatchScope->setCatchAllHandler(I, Handler); 594 } 595 } 596 597 return CXXTryStmtInfo(); 598} 599 600/// Check whether this is a non-EH scope, i.e. a scope which doesn't 601/// affect exception handling. Currently, the only non-EH scopes are 602/// normal-only cleanup scopes. 603static bool isNonEHScope(const EHScope &S) { 604 return isa<EHCleanupScope>(S) && !cast<EHCleanupScope>(S).isEHCleanup(); 605} 606 607llvm::BasicBlock *CodeGenFunction::getInvokeDestImpl() { 608 assert(EHStack.requiresLandingPad()); 609 assert(!EHStack.empty()); 610 611 // Check the innermost scope for a cached landing pad. If this is 612 // a non-EH cleanup, we'll check enclosing scopes in EmitLandingPad. 613 llvm::BasicBlock *LP = EHStack.begin()->getCachedLandingPad(); 614 if (LP) return LP; 615 616 // Build the landing pad for this scope. 617 LP = EmitLandingPad(); 618 assert(LP); 619 620 // Cache the landing pad on the innermost scope. If this is a 621 // non-EH scope, cache the landing pad on the enclosing scope, too. 622 for (EHScopeStack::iterator ir = EHStack.begin(); true; ++ir) { 623 ir->setCachedLandingPad(LP); 624 if (!isNonEHScope(*ir)) break; 625 } 626 627 return LP; 628} 629 630llvm::BasicBlock *CodeGenFunction::EmitLandingPad() { 631 assert(EHStack.requiresLandingPad()); 632 633 // This function contains a hack to work around a design flaw in 634 // LLVM's EH IR which breaks semantics after inlining. 
This same 635 // hack is implemented in llvm-gcc. 636 // 637 // The LLVM EH abstraction is basically a thin veneer over the 638 // traditional GCC zero-cost design: for each range of instructions 639 // in the function, there is (at most) one "landing pad" with an 640 // associated chain of EH actions. A language-specific personality 641 // function interprets this chain of actions and (1) decides whether 642 // or not to resume execution at the landing pad and (2) if so, 643 // provides an integer indicating why it's stopping. In LLVM IR, 644 // the association of a landing pad with a range of instructions is 645 // achieved via an invoke instruction, the chain of actions becomes 646 // the arguments to the @llvm.eh.selector call, and the selector 647 // call returns the integer indicator. Other than the required 648 // presence of two intrinsic function calls in the landing pad, 649 // the IR exactly describes the layout of the output code. 650 // 651 // A principal advantage of this design is that it is completely 652 // language-agnostic; in theory, the LLVM optimizers can treat 653 // landing pads neutrally, and targets need only know how to lower 654 // the intrinsics to have a functioning exceptions system (assuming 655 // that platform exceptions follow something approximately like the 656 // GCC design). Unfortunately, landing pads cannot be combined in a 657 // language-agnostic way: given selectors A and B, there is no way 658 // to make a single landing pad which faithfully represents the 659 // semantics of propagating an exception first through A, then 660 // through B, without knowing how the personality will interpret the 661 // (lowered form of the) selectors. This means that inlining has no 662 // choice but to crudely chain invokes (i.e., to ignore invokes in 663 // the inlined function, but to turn all unwindable calls into 664 // invokes), which is only semantically valid if every unwind stops 665 // at every landing pad. 
666 // 667 // Therefore, the invoke-inline hack is to guarantee that every 668 // landing pad has a catch-all. 669 const bool UseInvokeInlineHack = true; 670 671 for (EHScopeStack::iterator ir = EHStack.begin(); ; ) { 672 assert(ir != EHStack.end() && 673 "stack requiring landing pad is nothing but non-EH scopes?"); 674 675 // If this is a terminate scope, just use the singleton terminate 676 // landing pad. 677 if (isa<EHTerminateScope>(*ir)) 678 return getTerminateLandingPad(); 679 680 // If this isn't an EH scope, iterate; otherwise break out. 681 if (!isNonEHScope(*ir)) break; 682 ++ir; 683 684 // We haven't checked this scope for a cached landing pad yet. 685 if (llvm::BasicBlock *LP = ir->getCachedLandingPad()) 686 return LP; 687 } 688 689 // Save the current IR generation state. 690 CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP(); 691 692 // Create and configure the landing pad. 693 llvm::BasicBlock *LP = createBasicBlock("lpad"); 694 EmitBlock(LP); 695 696 // Save the exception pointer. It's safe to use a single exception 697 // pointer per function because EH cleanups can never have nested 698 // try/catches. 699 llvm::CallInst *Exn = 700 Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_exception), "exn"); 701 Exn->setDoesNotThrow(); 702 Builder.CreateStore(Exn, getExceptionSlot()); 703 704 // Build the selector arguments. 705 llvm::SmallVector<llvm::Value*, 8> EHSelector; 706 EHSelector.push_back(Exn); 707 EHSelector.push_back(getPersonalityFn(*this)); 708 709 // Accumulate all the handlers in scope. 
710 llvm::DenseMap<llvm::Value*, JumpDest> EHHandlers; 711 JumpDest CatchAll; 712 bool HasEHCleanup = false; 713 bool HasEHFilter = false; 714 llvm::SmallVector<llvm::Value*, 8> EHFilters; 715 for (EHScopeStack::iterator I = EHStack.begin(), E = EHStack.end(); 716 I != E; ++I) { 717 718 switch (I->getKind()) { 719 case EHScope::Cleanup: 720 if (!HasEHCleanup) 721 HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup(); 722 // We otherwise don't care about cleanups. 723 continue; 724 725 case EHScope::Filter: { 726 assert(I.next() == EHStack.end() && "EH filter is not end of EH stack"); 727 assert(!CatchAll.Block && "EH filter reached after catch-all"); 728 729 // Filter scopes get added to the selector in wierd ways. 730 EHFilterScope &Filter = cast<EHFilterScope>(*I); 731 HasEHFilter = true; 732 733 // Add all the filter values which we aren't already explicitly 734 // catching. 735 for (unsigned I = 0, E = Filter.getNumFilters(); I != E; ++I) { 736 llvm::Value *FV = Filter.getFilter(I); 737 if (!EHHandlers.count(FV)) 738 EHFilters.push_back(FV); 739 } 740 goto done; 741 } 742 743 case EHScope::Terminate: 744 // Terminate scopes are basically catch-alls. 745 assert(!CatchAll.Block); 746 CatchAll.Block = getTerminateHandler(); 747 CatchAll.ScopeDepth = EHStack.getEnclosingEHCleanup(I); 748 goto done; 749 750 case EHScope::Catch: 751 break; 752 } 753 754 EHCatchScope &Catch = cast<EHCatchScope>(*I); 755 for (unsigned HI = 0, HE = Catch.getNumHandlers(); HI != HE; ++HI) { 756 EHCatchScope::Handler Handler = Catch.getHandler(HI); 757 758 // Catch-all. We should only have one of these per catch. 759 if (!Handler.Type) { 760 assert(!CatchAll.Block); 761 CatchAll.Block = Handler.Block; 762 CatchAll.ScopeDepth = EHStack.getEnclosingEHCleanup(I); 763 continue; 764 } 765 766 // Check whether we already have a handler for this type. 
      // NOTE(review): this fragment continues the landing-pad emitter, whose
      // opening (including the enclosing loops over the EH scope stack and
      // over each catch scope's handlers) begins before this chunk.

      // Lazily create the dispatch destination for this catch type.  If an
      // inner scope already claimed this type, this outer handler can never
      // be selected, so skip it.
      JumpDest &Dest = EHHandlers[Handler.Type];
      if (Dest.Block) continue;

      // Record the type in the selector call and remember where to branch
      // (and through which cleanups) when it matches.
      EHSelector.push_back(Handler.Type);
      Dest.Block = Handler.Block;
      Dest.ScopeDepth = EHStack.getEnclosingEHCleanup(I);
    }

    // Stop if we found a catch-all.
    if (CatchAll.Block) break;
  }

 done:
  // Number of selector entries to dispatch in the typeid if-chain below.
  // Entries 0-1 were set up before this fragment (presumably the exception
  // pointer and the personality function, given the assert message below);
  // the final entry is handled specially after the chain.
  unsigned LastToEmitInLoop = EHSelector.size();

  // If we have a catch-all, add null to the selector.
  if (CatchAll.Block) {
    EHSelector.push_back(getCatchAllValue(CGF));

  // If we have an EH filter, we need to add those handlers in the
  // right place in the selector, which is to say, at the end.
  } else if (HasEHFilter) {
    // Create a filter expression: an integer constant saying how many
    // filters there are (+1 to avoid ambiguity with 0 for cleanup),
    // followed by the filter types.  The personality routine only
    // lands here if the filter doesn't match.
    EHSelector.push_back(llvm::ConstantInt::get(Builder.getInt32Ty(),
                                                EHFilters.size() + 1));
    EHSelector.append(EHFilters.begin(), EHFilters.end());

    // Also check whether we need a cleanup.
    if (UseInvokeInlineHack || HasEHCleanup)
      EHSelector.push_back(UseInvokeInlineHack
                           ? getCatchAllValue(CGF)
                           : getCleanupValue(CGF));

  // Otherwise, signal that we at least have cleanups.
  } else if (UseInvokeInlineHack || HasEHCleanup) {
    EHSelector.push_back(UseInvokeInlineHack
                         ? getCatchAllValue(CGF)
                         : getCleanupValue(CGF));
  } else {
    // No catch-all, no filter, no cleanup: the last catch clause becomes
    // the fall-through case of the if-chain instead of being tested.
    assert(LastToEmitInLoop > 2);
    LastToEmitInLoop--;
  }

  assert(EHSelector.size() >= 3 && "selector call has only two arguments!");

  // Tell the backend how to generate the landing pad.
  llvm::CallInst *Selection =
    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_selector),
                       EHSelector.begin(), EHSelector.end(), "eh.selector");
  Selection->setDoesNotThrow();

  // Select the right handler.
  llvm::Value *llvm_eh_typeid_for =
    CGM.getIntrinsic(llvm::Intrinsic::eh_typeid_for);

  // The results of llvm_eh_typeid_for aren't reliable --- at least
  // not locally --- so we basically have to do this as an 'if' chain.
  // We walk through the first N-1 catch clauses, testing and chaining,
  // and then fall into the final clause (which is either a cleanup, a
  // filter (possibly with a cleanup), a catch-all, or another catch).
  for (unsigned I = 2; I != LastToEmitInLoop; ++I) {
    llvm::Value *Type = EHSelector[I];
    JumpDest Dest = EHHandlers[Type];
    assert(Dest.Block && "no handler entry for value in selector?");

    // Figure out where to branch on a match.  As a debug code-size
    // optimization, if the scope depth matches the innermost cleanup,
    // we branch directly to the catch handler.
    llvm::BasicBlock *Match = Dest.Block;
    bool MatchNeedsCleanup = Dest.ScopeDepth != EHStack.getInnermostEHCleanup();
    if (MatchNeedsCleanup)
      Match = createBasicBlock("eh.match");

    llvm::BasicBlock *Next = createBasicBlock("eh.next");

    // Check whether the exception matches.
    llvm::CallInst *Id
      = Builder.CreateCall(llvm_eh_typeid_for,
                           Builder.CreateBitCast(Type, CGM.PtrToInt8Ty));
    Id->setDoesNotThrow();
    Builder.CreateCondBr(Builder.CreateICmpEQ(Selection, Id),
                         Match, Next);

    // Emit match code if necessary.  The match block threads through the
    // intervening EH cleanups before reaching the actual handler.
    if (MatchNeedsCleanup) {
      EmitBlock(Match);
      EmitBranchThroughEHCleanup(Dest);
    }

    // Continue to the next match.
    EmitBlock(Next);
  }

  // Emit the final case in the selector.
  // This might be a catch-all....
  if (CatchAll.Block) {
    assert(isa<llvm::ConstantPointerNull>(EHSelector.back()));
    EmitBranchThroughEHCleanup(CatchAll);

  // ...or an EH filter...
  } else if (HasEHFilter) {
    llvm::Value *SavedSelection = Selection;

    // First, unwind out to the outermost scope if necessary.
    if (EHStack.hasEHCleanups()) {
      // The end here might not dominate the beginning, so we might need to
      // save the selector if we need it.
      llvm::AllocaInst *SelectorVar = 0;
      if (HasEHCleanup) {
        SelectorVar = CreateTempAlloca(Builder.getInt32Ty(), "selector.var");
        Builder.CreateStore(Selection, SelectorVar);
      }

      llvm::BasicBlock *CleanupContBB = createBasicBlock("ehspec.cleanup.cont");
      EmitBranchThroughEHCleanup(JumpDest(CleanupContBB, EHStack.stable_end()));
      EmitBlock(CleanupContBB);

      // Reload the selector after the cleanups; the original SSA value may
      // not dominate this point.
      if (HasEHCleanup)
        SavedSelection = Builder.CreateLoad(SelectorVar, "ehspec.saved-selector");
    }

    // If there was a cleanup, we'll need to actually check whether we
    // landed here because the filter triggered.  A negative selector value
    // indicates a filter failure (see 'ehspec.fails' below).
    if (UseInvokeInlineHack || HasEHCleanup) {
      llvm::BasicBlock *RethrowBB = createBasicBlock("cleanup");
      llvm::BasicBlock *UnexpectedBB = createBasicBlock("ehspec.unexpected");

      llvm::Constant *Zero = llvm::ConstantInt::get(Builder.getInt32Ty(), 0);
      llvm::Value *FailsFilter =
        Builder.CreateICmpSLT(SavedSelection, Zero, "ehspec.fails");
      Builder.CreateCondBr(FailsFilter, UnexpectedBB, RethrowBB);

      // The rethrow block is where we land if this was a cleanup.
      // TODO: can this be _Unwind_Resume if the InvokeInlineHack is off?
      EmitBlock(RethrowBB);
      Builder.CreateCall(getUnwindResumeOrRethrowFn(),
                         Builder.CreateLoad(getExceptionSlot()))
        ->setDoesNotReturn();
      Builder.CreateUnreachable();

      EmitBlock(UnexpectedBB);
    }

    // Call __cxa_call_unexpected.  This doesn't need to be an invoke
    // because __cxa_call_unexpected magically filters exceptions
    // according to the last landing pad the exception was thrown
    // into.  Seriously.
    Builder.CreateCall(getUnexpectedFn(*this),
                       Builder.CreateLoad(getExceptionSlot()))
      ->setDoesNotReturn();
    Builder.CreateUnreachable();

  // ...or a normal catch handler...
  } else if (!UseInvokeInlineHack && !HasEHCleanup) {
    llvm::Value *Type = EHSelector.back();
    EmitBranchThroughEHCleanup(EHHandlers[Type]);

  // ...or a cleanup.
  } else {
    // We emit a jump to a notional label at the outermost unwind state.
    llvm::BasicBlock *Unwind = createBasicBlock("eh.resume");
    JumpDest Dest(Unwind, EHStack.stable_end());
    EmitBranchThroughEHCleanup(Dest);

    // The unwind block.  We have to reload the exception here because
    // we might have unwound through arbitrary blocks, so the landing
    // pad might not dominate.
    EmitBlock(Unwind);

    // This can always be a call because we necessarily didn't find
    // anything on the EH stack which needs our help.
    Builder.CreateCall(getUnwindResumeOrRethrowFn(),
                       Builder.CreateLoad(getExceptionSlot()))
      ->setDoesNotReturn();
    Builder.CreateUnreachable();
  }

  // Restore the old IR generation state.
  Builder.restoreIP(SavedIP);

  return LP;
}

/// Emits a call to __cxa_begin_catch and enters a cleanup to call
/// __cxa_end_catch.
///
/// \param Exn the exception pointer loaded from the exception slot
/// \return the result of __cxa_begin_catch, i.e. the adjusted pointer to
///   the exception object (or the pointer by value for pointer catches)
static llvm::Value *CallBeginCatch(CodeGenFunction &CGF, llvm::Value *Exn) {
  llvm::CallInst *Call = CGF.Builder.CreateCall(getBeginCatchFn(CGF), Exn);
  Call->setDoesNotThrow();

  {
    // Entering this scope pushes a cleanup that will run __cxa_end_catch on
    // both normal and EH exits from the protected region.
    CodeGenFunction::CleanupBlock EndCatchCleanup(CGF,
                                     CodeGenFunction::NormalAndEHCleanup);

    // __cxa_end_catch never throws, so this can just be a call.
    CGF.Builder.CreateCall(getEndCatchFn(CGF))->setDoesNotThrow();
  }

  return Call;
}

/// A "special initializer" callback for initializing a catch
/// parameter during catch initialization.
///
/// \param CatchParam the variable declared in the exception-declaration
/// \param ParamAddr the alloca (or equivalent) for that variable
static void InitCatchParam(CodeGenFunction &CGF,
                           const VarDecl &CatchParam,
                           llvm::Value *ParamAddr) {
  // Load the exception from where the landing pad saved it.
  llvm::Value *Exn = CGF.Builder.CreateLoad(CGF.getExceptionSlot(), "exn");

  CanQualType CatchType =
    CGF.CGM.getContext().getCanonicalType(CatchParam.getType());
  const llvm::Type *LLVMCatchTy = CGF.ConvertTypeForMem(CatchType);

  // If we're catching by reference, we can just cast the object
  // pointer to the appropriate pointer.
  if (isa<ReferenceType>(CatchType)) {
    // __cxa_begin_catch returns the adjusted object pointer.
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn);
    llvm::Value *ExnCast =
      CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.byref");
    CGF.Builder.CreateStore(ExnCast, ParamAddr);
    return;
  }

  // Non-aggregates (plus complexes).
  // Note: the assignment to IsComplex inside the condition only runs when
  // the type IS an aggregate LLVM type; complexes take the same path as
  // scalars except for the load/store at the end.
  bool IsComplex = false;
  if (!CGF.hasAggregateLLVMType(CatchType) ||
      (IsComplex = CatchType->isAnyComplexType())) {
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn);

    // If the catch type is a pointer type, __cxa_begin_catch returns
    // the pointer by value.
    if (CatchType->hasPointerRepresentation()) {
      llvm::Value *CastExn =
        CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.casted");
      CGF.Builder.CreateStore(CastExn, ParamAddr);
      return;
    }

    // Otherwise, it returns a pointer into the exception object.

    const llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok
    llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);

    if (IsComplex) {
      CGF.StoreComplexToAddr(CGF.LoadComplexFromAddr(Cast, /*volatile*/ false),
                             ParamAddr, /*volatile*/ false);
    } else {
      llvm::Value *ExnLoad = CGF.Builder.CreateLoad(Cast, "exn.scalar");
      CGF.EmitStoreOfScalar(ExnLoad, ParamAddr, /*volatile*/ false, CatchType);
    }
    return;
  }

  // FIXME: this *really* needs to be done via a proper, Sema-emitted
  // initializer expression.

  CXXRecordDecl *RD = CatchType.getTypePtr()->getAsCXXRecordDecl();
  assert(RD && "aggregate catch type was not a record!");

  const llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok

  // Trivially-copyable records can be initialized with a memcpy-style
  // aggregate copy from the in-flight exception object.
  if (RD->hasTrivialCopyConstructor()) {
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn);
    llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);
    CGF.EmitAggregateCopy(ParamAddr, Cast, CatchType);
    return;
  }

  // We have to call __cxa_get_exception_ptr to get the adjusted
  // pointer before copying.
  llvm::CallInst *AdjustedExn =
    CGF.Builder.CreateCall(getGetExceptionPtrFn(CGF), Exn);
  AdjustedExn->setDoesNotThrow();
  llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);

  CXXConstructorDecl *CD = RD->getCopyConstructor(CGF.getContext(), 0);
  assert(CD && "record has no copy constructor!");
  llvm::Value *CopyCtor = CGF.CGM.GetAddrOfCXXConstructor(CD, Ctor_Complete);

  CallArgList CallArgs;
  CallArgs.push_back(std::make_pair(RValue::get(ParamAddr),
                                    CD->getThisType(CGF.getContext())));
  CallArgs.push_back(std::make_pair(RValue::get(Cast),
                                    CD->getParamDecl(0)->getType()));

  const FunctionProtoType *FPT
    = CD->getType()->getAs<FunctionProtoType>();

  // Call the copy ctor in a terminate scope: an exception thrown from the
  // copy constructor during exception handling must call std::terminate.
  CGF.EHStack.pushTerminate();
  CGF.EmitCall(CGF.CGM.getTypes().getFunctionInfo(CallArgs, FPT),
               CopyCtor, ReturnValueSlot(), CallArgs, CD);
  CGF.EHStack.popTerminate();

  // Finally we can call __cxa_begin_catch.
  CallBeginCatch(CGF, Exn);
}

/// Begins a catch statement by initializing the catch variable and
/// calling __cxa_begin_catch.
static void BeginCatch(CodeGenFunction &CGF,
                       const CXXCatchStmt *S) {
  // We have to be very careful with the ordering of cleanups here:
  //   C++ [except.throw]p4:
  //     The destruction [of the exception temporary] occurs
  //     immediately after the destruction of the object declared in
  //     the exception-declaration in the handler.
  //
  // So the precise ordering is:
  //   1.  Construct catch variable.
  //   2.  __cxa_begin_catch
  //   3.  Enter __cxa_end_catch cleanup
  //   4.  Enter dtor cleanup
  //
  // We do this by initializing the exception variable with a
  // "special initializer", InitCatchParam.  Delegation sequence:
  //   - ExitCXXTryStmt opens a RunCleanupsScope
  //     - EmitLocalBlockVarDecl creates the variable and debug info
  //       - InitCatchParam initializes the variable from the exception
  //         - CallBeginCatch calls __cxa_begin_catch
  //         - CallBeginCatch enters the __cxa_end_catch cleanup
  //     - EmitLocalBlockVarDecl enters the variable destructor cleanup
  //   - EmitCXXTryStmt emits the code for the catch body
  //   - EmitCXXTryStmt close the RunCleanupsScope

  VarDecl *CatchParam = S->getExceptionDecl();
  // catch (...) or catch with no named variable: just begin the catch,
  // there is no parameter to initialize.
  if (!CatchParam) {
    llvm::Value *Exn = CGF.Builder.CreateLoad(CGF.getExceptionSlot(), "exn");
    CallBeginCatch(CGF, Exn);
    return;
  }

  // Emit the local.  InitCatchParam is invoked as the "special initializer"
  // described above.
  CGF.EmitLocalBlockVarDecl(*CatchParam, &InitCatchParam);
}

/// Emits the handlers of a C++ try statement and the fall-through
/// continuation, after the try body has already been emitted.
void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S,
                                     CXXTryStmtInfo TryInfo) {
  unsigned NumHandlers = S.getNumHandlers();
  EHCatchScope &CatchScope = cast<EHCatchScope>(*EHStack.begin());
  assert(CatchScope.getNumHandlers() == NumHandlers);

  // Copy the handler blocks off before we pop the EH stack.  Emitting
  // the handlers might scribble on this memory.
  llvm::SmallVector<EHCatchScope::Handler, 8> Handlers(NumHandlers);
  memcpy(Handlers.data(), CatchScope.begin(),
         NumHandlers * sizeof(EHCatchScope::Handler));
  EHStack.popCatch();

  // The fall-through block.
  llvm::BasicBlock *ContBB = createBasicBlock("try.cont");

  // We just emitted the body of the try; jump to the continue block.
  if (HaveInsertPoint())
    Builder.CreateBr(ContBB);

  for (unsigned I = 0; I != NumHandlers; ++I) {
    llvm::BasicBlock *CatchBlock = Handlers[I].Block;
    EmitBlock(CatchBlock);

    // Catch the exception if this isn't a catch-all.
    const CXXCatchStmt *C = S.getHandler(I);

    // Enter a cleanup scope, including the catch variable and the
    // end-catch.
    // NOTE(review): this RunCleanupsScope shadows the EHCatchScope
    // reference of the same name above; the latter is dead by this
    // point (popCatch may have invalidated it), but the shadowing is
    // easy to misread.
    RunCleanupsScope CatchScope(*this);

    // Initialize the catch variable and set up the cleanups.
    BeginCatch(*this, C);

    // Perform the body of the catch.
    EmitStmt(C->getHandlerBlock());

    // Fall out through the catch cleanups.
    CatchScope.ForceCleanup();

    // Branch out of the try.
    if (HaveInsertPoint())
      Builder.CreateBr(ContBB);
  }

  EmitBlock(ContBB);
}

/// Enters a finally block for an implementation using zero-cost
/// exceptions.  This is mostly general, but hard-codes some
/// language/ABI-specific behavior in the catch-all sections.
///
/// \param Body the statement to run on every exit edge
/// \param BeginCatchFn / EndCatchFn optional paired functions bracketing
///   the catch-all (e.g. __cxa_begin_catch / __cxa_end_catch)
/// \param RethrowFn required; either void() or void(void*) --- in the
///   latter case the saved exception pointer is passed to it
CodeGenFunction::FinallyInfo
CodeGenFunction::EnterFinallyBlock(const Stmt *Body,
                                   llvm::Constant *BeginCatchFn,
                                   llvm::Constant *EndCatchFn,
                                   llvm::Constant *RethrowFn) {
  assert((BeginCatchFn != 0) == (EndCatchFn != 0) &&
         "begin/end catch functions not paired");
  assert(RethrowFn && "rethrow function is required");

  // The rethrow function has one of the following two types:
  //   void (*)()
  //   void (*)(void*)
  // In the latter case we need to pass it the exception object.
  // But we can't use the exception slot because the @finally might
  // have a landing pad (which would overwrite the exception slot).
  const llvm::FunctionType *RethrowFnTy =
    cast<llvm::FunctionType>(
      cast<llvm::PointerType>(RethrowFn->getType())
        ->getElementType());
  llvm::Value *SavedExnVar = 0;
  if (RethrowFnTy->getNumParams())
    SavedExnVar = CreateTempAlloca(Builder.getInt8PtrTy(), "finally.exn");

  // A finally block is a statement which must be executed on any edge
  // out of a given scope.  Unlike a cleanup, the finally block may
  // contain arbitrary control flow leading out of itself.  In
  // addition, finally blocks should always be executed, even if there
  // are no catch handlers higher on the stack.  Therefore, we
  // surround the protected scope with a combination of a normal
  // cleanup (to catch attempts to break out of the block via normal
  // control flow) and an EH catch-all (semantically "outside" any try
  // statement to which the finally block might have been attached).
  // The finally block itself is generated in the context of a cleanup
  // which conditionally leaves the catch-all.

  // NOTE(review): Info is returned default-constructed; its fields are
  // declared elsewhere --- confirm the caller does not expect them to be
  // populated here.
  FinallyInfo Info;

  // Jump destination for performing the finally block on an exception
  // edge.  We'll never actually reach this block, so unreachable is
  // fine.
  JumpDest RethrowDest = getJumpDestInCurrentScope(getUnreachableBlock());

  // Whether the finally block is being executed for EH purposes.
  // NOTE(review): 'CGF.Builder' here (and the 'CGF' argument below) appear
  // inside a CodeGenFunction member function, where the rest of this file
  // uses plain 'Builder' / '*this' --- verify this resolves as intended.
  llvm::AllocaInst *ForEHVar = CreateTempAlloca(CGF.Builder.getInt1Ty(),
                                                "finally.for-eh");
  InitTempAlloca(ForEHVar, llvm::ConstantInt::getFalse(getLLVMContext()));

  // Enter a normal cleanup which will perform the @finally block.
  {
    CodeGenFunction::CleanupBlock
      NormalCleanup(*this, CodeGenFunction::NormalCleanup);

    // Enter a cleanup to call the end-catch function if one was provided.
    if (EndCatchFn) {
      CodeGenFunction::CleanupBlock
        FinallyExitCleanup(CGF, CodeGenFunction::NormalAndEHCleanup);

      llvm::BasicBlock *EndCatchBB = createBasicBlock("finally.endcatch");
      llvm::BasicBlock *CleanupContBB = createBasicBlock("finally.cleanup.cont");

      // Only call the end-catch function if we actually entered the
      // catch-all (i.e. this is an EH edge).
      llvm::Value *ShouldEndCatch =
        Builder.CreateLoad(ForEHVar, "finally.endcatch");
      Builder.CreateCondBr(ShouldEndCatch, EndCatchBB, CleanupContBB);
      EmitBlock(EndCatchBB);
      Builder.CreateCall(EndCatchFn)->setDoesNotThrow();
      EmitBlock(CleanupContBB);
    }

    // Emit the finally block.
    EmitStmt(Body);

    // If the end of the finally is reachable, check whether this was
    // for EH.  If so, rethrow.
    if (HaveInsertPoint()) {
      llvm::BasicBlock *RethrowBB = createBasicBlock("finally.rethrow");
      llvm::BasicBlock *ContBB = createBasicBlock("finally.cont");

      llvm::Value *ShouldRethrow =
        Builder.CreateLoad(ForEHVar, "finally.shouldthrow");
      Builder.CreateCondBr(ShouldRethrow, RethrowBB, ContBB);

      EmitBlock(RethrowBB);
      if (SavedExnVar) {
        // Rethrow function takes the exception pointer.
        llvm::Value *Args[] = { Builder.CreateLoad(SavedExnVar) };
        EmitCallOrInvoke(RethrowFn, Args, Args+1);
      } else {
        EmitCallOrInvoke(RethrowFn, 0, 0);
      }
      Builder.CreateUnreachable();

      EmitBlock(ContBB);
    }

    // Leave the end-catch cleanup.  As an optimization, pretend that
    // the fallthrough path was inaccessible; we've dynamically proven
    // that we're not in the EH case along that path.
    if (EndCatchFn) {
      CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
      PopCleanupBlock();
      Builder.restoreIP(SavedIP);
    }

    // Now make sure we actually have an insertion point or the
    // cleanup gods will hate us.
    EnsureInsertPoint();
  }

  // Enter a catch-all scope.
  llvm::BasicBlock *CatchAllBB = createBasicBlock("finally.catchall");
  CGBuilderTy::InsertPoint SavedIP = Builder.saveIP();
  Builder.SetInsertPoint(CatchAllBB);

  // If there's a begin-catch function, call it.
  if (BeginCatchFn) {
    Builder.CreateCall(BeginCatchFn, Builder.CreateLoad(getExceptionSlot()))
      ->setDoesNotThrow();
  }

  // If we need to remember the exception pointer to rethrow later, do so.
  if (SavedExnVar) {
    llvm::Value *SavedExn = Builder.CreateLoad(getExceptionSlot());
    Builder.CreateStore(SavedExn, SavedExnVar);
  }

  // Tell the finally block that we're in EH.
  Builder.CreateStore(llvm::ConstantInt::getTrue(getLLVMContext()), ForEHVar);

  // Thread a jump through the finally cleanup.
  EmitBranchThroughCleanup(RethrowDest);

  Builder.restoreIP(SavedIP);

  // Push the catch-all scope so the protected code's landing pads route
  // exceptions into CatchAllBB.
  EHCatchScope *CatchScope = EHStack.pushCatch(1);
  CatchScope->setCatchAllHandler(0, CatchAllBB);

  return Info;
}

/// Leaves a finally block entered with EnterFinallyBlock: pops the
/// catch-all scope and the normal cleanup, then emits the catch-all
/// block at the end of the function.
/// NOTE(review): the Info parameter is not read in this body.
void CodeGenFunction::ExitFinallyBlock(FinallyInfo &Info) {
  // Leave the finally catch-all.
  EHCatchScope &Catch = cast<EHCatchScope>(*EHStack.begin());
  llvm::BasicBlock *CatchAllBB = Catch.getHandler(0).Block;
  EHStack.popCatch();

  // And leave the normal cleanup.
  PopCleanupBlock();

  // Emit the catch-all block out of line without disturbing the current
  // insertion point.
  CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
  EmitBlock(CatchAllBB, true);

  Builder.restoreIP(SavedIP);
}

/// Lazily creates (and caches) the landing pad used in contexts where
/// any exception must immediately terminate: a catch-all pad that
/// calls the terminate function.
llvm::BasicBlock *CodeGenFunction::getTerminateLandingPad() {
  if (TerminateLandingPad)
    return TerminateLandingPad;

  CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

  // This will get inserted at the end of the function.
  TerminateLandingPad = createBasicBlock("terminate.lpad");
  Builder.SetInsertPoint(TerminateLandingPad);

  // Tell the backend that this is a landing pad.
  llvm::CallInst *Exn =
    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_exception), "exn");
  Exn->setDoesNotThrow();

  // Tell the backend what the exception table should be:
  // nothing but a catch-all.
  llvm::Value *Args[3] = { Exn, getPersonalityFn(*this),
                           getCatchAllValue(*this) };
  Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::eh_selector),
                     Args, Args+3, "eh.selector")
    ->setDoesNotThrow();

  llvm::CallInst *TerminateCall = Builder.CreateCall(getTerminateFn(*this));
  TerminateCall->setDoesNotReturn();
  TerminateCall->setDoesNotThrow();
  // NOTE(review): 'CGF.Builder' inside a member function --- the sibling
  // getTerminateHandler() below uses plain 'Builder' for the same call;
  // verify this resolves (and consider making the two consistent).
  CGF.Builder.CreateUnreachable();

  // Restore the saved insertion state.
  Builder.restoreIP(SavedIP);

  return TerminateLandingPad;
}

/// Lazily creates (and caches) a block that simply calls the terminate
/// function; unlike getTerminateLandingPad, this is not a landing pad
/// (no eh.exception/eh.selector calls).
llvm::BasicBlock *CodeGenFunction::getTerminateHandler() {
  if (TerminateHandler)
    return TerminateHandler;

  CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

  // Set up the terminate handler.  This block is inserted at the very
  // end of the function by FinishFunction.
  TerminateHandler = createBasicBlock("terminate.handler");
  Builder.SetInsertPoint(TerminateHandler);
  llvm::CallInst *TerminateCall = Builder.CreateCall(getTerminateFn(*this));
  TerminateCall->setDoesNotReturn();
  TerminateCall->setDoesNotThrow();
  Builder.CreateUnreachable();

  // Restore the saved insertion state.
  Builder.restoreIP(SavedIP);

  return TerminateHandler;
}

/// RAII-style helper: on construction, redirects the builder into a fresh
/// "cleanup" block; the destructor pushes the written cleanup onto the EH
/// stack and restores the saved insertion point.
CodeGenFunction::CleanupBlock::CleanupBlock(CodeGenFunction &CGF,
                                            CleanupKind Kind)
  : CGF(CGF), SavedIP(CGF.Builder.saveIP()), NormalCleanupExitBB(0) {
  llvm::BasicBlock *EntryBB = CGF.createBasicBlock("cleanup");
  CGF.Builder.SetInsertPoint(EntryBB);

  switch (Kind) {
  case NormalAndEHCleanup:
    // One block serves as both the normal and the EH entry.
    NormalCleanupEntryBB = EHCleanupEntryBB = EntryBB;
    break;

  case NormalCleanup:
    NormalCleanupEntryBB = EntryBB;
    EHCleanupEntryBB = 0;
    break;

  case EHCleanup:
    NormalCleanupEntryBB = 0;
    EHCleanupEntryBB = EntryBB;
    // Exceptions escaping an EH cleanup must terminate; matched by
    // popTerminate() in the destructor.
    CGF.EHStack.pushTerminate();
    break;
  }
}

/// Switches a cleanup that started as normal-only into writing a separate
/// EH cleanup: records the normal exit block and redirects the builder
/// into a fresh EH entry block.
void CodeGenFunction::CleanupBlock::beginEHCleanup() {
  assert(EHCleanupEntryBB == 0 && "already started an EH cleanup");
  NormalCleanupExitBB = CGF.Builder.GetInsertBlock();
  assert(NormalCleanupExitBB && "end of normal cleanup is unreachable");

  EHCleanupEntryBB = CGF.createBasicBlock("eh.cleanup");
  CGF.Builder.SetInsertPoint(EHCleanupEntryBB);
  CGF.EHStack.pushTerminate();
}

/// Finishes the cleanup: records the exit block(s), pops the terminate
/// scope if a distinct EH cleanup was written, pushes the cleanup onto
/// the EH stack, and restores the original insertion point.
CodeGenFunction::CleanupBlock::~CleanupBlock() {
  llvm::BasicBlock *EHCleanupExitBB = 0;

  // If we're currently writing the EH cleanup...
  if (EHCleanupEntryBB) {
    // Set the EH cleanup exit block.
    EHCleanupExitBB = CGF.Builder.GetInsertBlock();
    assert(EHCleanupExitBB && "end of EH cleanup is unreachable");

    // If we're actually writing both at once, set the normal exit, too.
    if (EHCleanupEntryBB == NormalCleanupEntryBB)
      NormalCleanupExitBB = EHCleanupExitBB;

    // Otherwise, we must have pushed a terminate handler.
    else
      CGF.EHStack.popTerminate();

  // Otherwise, just set the normal cleanup exit block.
  } else {
    NormalCleanupExitBB = CGF.Builder.GetInsertBlock();
    assert(NormalCleanupExitBB && "end of normal cleanup is unreachable");
  }

  CGF.EHStack.pushCleanup(NormalCleanupEntryBB, NormalCleanupExitBB,
                          EHCleanupEntryBB, EHCleanupExitBB);

  CGF.Builder.restoreIP(SavedIP);
}