CGCXX.cpp revision 5a0f49ebc83e7fe0da07b9964c44b0a7fae270cb
//===--- CGCXX.cpp - Emit LLVM Code for C++ code --------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation.
//
//===----------------------------------------------------------------------===//

// We might split this into multiple files if it gets too unwieldy

#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "Mangle.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclObjC.h"
#include "llvm/ADT/StringExtras.h"
using namespace clang;
using namespace CodeGen;

/// EmitCXXGlobalDtorRegistration - Register the destructor Dtor to be run on
/// the global object at DeclPtr when the program exits, by emitting a call to
/// __cxa_atexit(dtor, object, __dso_handle) as specified by the Itanium C++
/// ABI.
void
CodeGenFunction::EmitCXXGlobalDtorRegistration(const CXXDestructorDecl *Dtor,
                                               llvm::Constant *DeclPtr) {
  // FIXME: This is ABI dependent and we use the Itanium ABI.

  const llvm::Type *Int8PtrTy =
    llvm::PointerType::getUnqual(llvm::Type::getInt8Ty(VMContext));

  std::vector<const llvm::Type *> Params;
  Params.push_back(Int8PtrTy);

  // Get the destructor function type: void (*)(void *).
  const llvm::Type *DtorFnTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(VMContext), Params, false);
  DtorFnTy = llvm::PointerType::getUnqual(DtorFnTy);

  Params.clear();
  Params.push_back(DtorFnTy);
  Params.push_back(Int8PtrTy);
  Params.push_back(Int8PtrTy);

  // Get the __cxa_atexit function type
  // extern "C" int __cxa_atexit ( void (*f)(void *), void *p, void *d );
  const llvm::FunctionType *AtExitFnTy =
    llvm::FunctionType::get(ConvertType(getContext().IntTy), Params, false);

  llvm::Constant *AtExitFn = CGM.CreateRuntimeFunction(AtExitFnTy,
                                                       "__cxa_atexit");

  // __dso_handle identifies this DSO to the runtime so its dtors can be run
  // at unload time rather than only at exit.
  llvm::Constant *Handle = CGM.CreateRuntimeVariable(Int8PtrTy,
                                                     "__dso_handle");

  llvm::Constant *DtorFn = CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete);

  llvm::Value *Args[3] = { llvm::ConstantExpr::getBitCast(DtorFn, DtorFnTy),
                           llvm::ConstantExpr::getBitCast(DeclPtr, Int8PtrTy),
                           llvm::ConstantExpr::getBitCast(Handle, Int8PtrTy) };
  Builder.CreateCall(AtExitFn, &Args[0], llvm::array_endof(Args));
}

/// EmitCXXGlobalVarDeclInit - Emit code that evaluates D's initializer and
/// stores the result into the global at DeclPtr, dispatching on whether the
/// type is scalar, complex, or aggregate.  For class types with a non-trivial
/// destructor, also registers the destructor to run at program exit.
/// Reference-typed globals are not yet supported.
void CodeGenFunction::EmitCXXGlobalVarDeclInit(const VarDecl &D,
                                               llvm::Constant *DeclPtr) {
  assert(D.hasGlobalStorage() &&
         "VarDecl must have global storage!");

  const Expr *Init = D.getInit();
  QualType T = D.getType();

  if (T->isReferenceType()) {
    ErrorUnsupported(Init, "global variable that binds to a reference");
  } else if (!hasAggregateLLVMType(T)) {
    llvm::Value *V = EmitScalarExpr(Init);
    EmitStoreOfScalar(V, DeclPtr, T.isVolatileQualified(), T);
  } else if (T->isAnyComplexType()) {
    EmitComplexExprIntoAddr(Init, DeclPtr, T.isVolatileQualified());
  } else {
    EmitAggExpr(Init, DeclPtr, T.isVolatileQualified());

    // If this global has a class type with a non-trivial destructor, arrange
    // for the destructor to be run at exit.
    if (const RecordType *RT = T->getAs<RecordType>()) {
      CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
      if (!RD->hasTrivialDestructor())
        EmitCXXGlobalDtorRegistration(RD->getDestructor(getContext()), DeclPtr);
    }
  }
}

/// EmitCXXGlobalInitFunc - Synthesize a single internal-linkage function that
/// runs the initializers for every deferred global C++ variable recorded in
/// CXXGlobalInits, and register it as a global constructor.  Does nothing if
/// there are no such variables.
void
CodeGenModule::EmitCXXGlobalInitFunc() {
  if (CXXGlobalInits.empty())
    return;

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(VMContext),
                            false);

  // Create our global initialization function.
  // FIXME: Should this be tweakable by targets?
  llvm::Function *Fn =
    llvm::Function::Create(FTy, llvm::GlobalValue::InternalLinkage,
                           "__cxx_global_initialization", &TheModule);

  CodeGenFunction(*this).GenerateCXXGlobalInitFunc(Fn,
                                                   &CXXGlobalInits[0],
                                                   CXXGlobalInits.size());
  AddGlobalCtor(Fn);
}

/// GenerateCXXGlobalInitFunc - Emit the body of Fn: one global-variable
/// initialization per entry in Decls, emitted in order.
void CodeGenFunction::GenerateCXXGlobalInitFunc(llvm::Function *Fn,
                                                const VarDecl **Decls,
                                                unsigned NumDecls) {
  StartFunction(GlobalDecl(), getContext().VoidTy, Fn, FunctionArgList(),
                SourceLocation());

  for (unsigned i = 0; i != NumDecls; ++i) {
    const VarDecl *D = Decls[i];

    llvm::Constant *DeclPtr = CGM.GetAddrOfGlobalVar(D);
    EmitCXXGlobalVarDeclInit(*D, DeclPtr);
  }
  FinishFunction();
}

/// EmitStaticCXXBlockVarDeclInit - Emit guarded, run-once initialization for
/// the function-local static variable GV: an i64 guard variable is created,
/// its first byte is tested, and the initializer only runs when the guard is
/// still zero.  Not thread safe (see the assert and FIXME below).
void
CodeGenFunction::EmitStaticCXXBlockVarDeclInit(const VarDecl &D,
                                               llvm::GlobalVariable *GV) {
  // FIXME: This should use __cxa_guard_{acquire,release}?

  assert(!getContext().getLangOptions().ThreadsafeStatics &&
         "thread safe statics are currently not supported!");

  llvm::SmallString<256> GuardVName;
  llvm::raw_svector_ostream GuardVOut(GuardVName);
  mangleGuardVariable(&D, getContext(), GuardVOut);

  // Create the guard variable.  It gets the same linkage as the variable it
  // guards so the pair stays consistent across translation units.
  llvm::GlobalValue *GuardV =
    new llvm::GlobalVariable(CGM.getModule(),
                             llvm::Type::getInt64Ty(VMContext), false,
                             GV->getLinkage(),
                             llvm::Constant::getNullValue(
                               llvm::Type::getInt64Ty(VMContext)),
                             GuardVName.str());

  // Load the first byte of the guard variable.
  const llvm::Type *PtrTy =
    llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  llvm::Value *V = Builder.CreateLoad(Builder.CreateBitCast(GuardV, PtrTy),
                                      "tmp");

  // Compare it against 0.
  llvm::Value *nullValue =
    llvm::Constant::getNullValue(llvm::Type::getInt8Ty(VMContext));
  llvm::Value *ICmp = Builder.CreateICmpEQ(V, nullValue, "tobool");

  llvm::BasicBlock *InitBlock = createBasicBlock("init");
  llvm::BasicBlock *EndBlock = createBasicBlock("init.end");

  // If the guard variable is 0, jump to the initializer code.
  Builder.CreateCondBr(ICmp, InitBlock, EndBlock);

  EmitBlock(InitBlock);

  EmitCXXGlobalVarDeclInit(D, GV);

  // Mark the variable as initialized by setting the guard byte to 1.
  Builder.CreateStore(llvm::ConstantInt::get(llvm::Type::getInt8Ty(VMContext),
                                             1),
                      Builder.CreateBitCast(GuardV, PtrTy));

  EmitBlock(EndBlock);
}

/// EmitCXXMemberCall - Emit a call to the instance method MD through the
/// function pointer Callee, passing This as the implicit object argument
/// followed by the explicit arguments [ArgBeg, ArgEnd).  Calls to trivial
/// destructors are elided entirely.
RValue CodeGenFunction::EmitCXXMemberCall(const CXXMethodDecl *MD,
                                          llvm::Value *Callee,
                                          llvm::Value *This,
                                          CallExpr::const_arg_iterator ArgBeg,
                                          CallExpr::const_arg_iterator ArgEnd) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  // A call to a trivial destructor requires no code generation.
  if (const CXXDestructorDecl *Destructor = dyn_cast<CXXDestructorDecl>(MD))
    if (Destructor->isTrivial())
      return RValue::get(0);

  const FunctionProtoType *FPT = MD->getType()->getAsFunctionProtoType();

  CallArgList Args;

  // Push the this ptr.
  Args.push_back(std::make_pair(RValue::get(This),
                                MD->getThisType(getContext())));

  // And the rest of the call args
  EmitCallArgs(Args, FPT, ArgBeg, ArgEnd);

  QualType ResultType = MD->getType()->getAsFunctionType()->getResultType();
  return EmitCall(CGM.getTypes().getFunctionInfo(ResultType, Args),
                  Callee, Args, MD);
}

/// EmitCXXMemberCallExpr - Emit a member function call obj.f(...) or
/// obj->f(...).  Virtual functions called without explicit qualification are
/// dispatched through the vtable; everything else is a direct call.
RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE) {
  const MemberExpr *ME = cast<MemberExpr>(CE->getCallee());
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());

  const FunctionProtoType *FPT = MD->getType()->getAsFunctionProtoType();

  const llvm::Type *Ty =
    CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                   FPT->isVariadic());
  llvm::Value *This;

  // For '->' the object pointer is the scalar value of the base; for '.' it
  // is the address of the base lvalue.
  if (ME->isArrow())
    This = EmitScalarExpr(ME->getBase());
  else {
    LValue BaseLV = EmitLValue(ME->getBase());
    This = BaseLV.getAddress();
  }

  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  llvm::Value *Callee;
  if (MD->isVirtual() && !ME->hasQualifier())
    Callee = BuildVirtualCall(MD, This, Ty);
  else if (const CXXDestructorDecl *Destructor
             = dyn_cast<CXXDestructorDecl>(MD))
    Callee = CGM.GetAddrOfFunction(GlobalDecl(Destructor, Dtor_Complete), Ty);
  else
    Callee = CGM.GetAddrOfFunction(MD, Ty);

  return EmitCXXMemberCall(MD, Callee, This,
                           CE->arg_begin(), CE->arg_end());
}

/// EmitCXXOperatorMemberCallExpr - Emit a call to an overloaded operator
/// declared as a member function.  Arg 0 is the object; the remaining args
/// are the operator's explicit arguments.  Trivial copy assignment is
/// lowered directly to an aggregate copy instead of a call.
RValue
CodeGenFunction::EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                               const CXXMethodDecl *MD) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  if (MD->isCopyAssignment()) {
    const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(MD->getDeclContext());
    if (ClassDecl->hasTrivialCopyAssignment()) {
      assert(!ClassDecl->hasUserDeclaredCopyAssignment() &&
             "EmitCXXOperatorMemberCallExpr - user declared copy assignment");
      llvm::Value *This = EmitLValue(E->getArg(0)).getAddress();
      llvm::Value *Src = EmitLValue(E->getArg(1)).getAddress();
      QualType Ty = E->getType();
      EmitAggregateCopy(This, Src, Ty);
      return RValue::get(This);
    }
  }

  const FunctionProtoType *FPT = MD->getType()->getAsFunctionProtoType();
  const llvm::Type *Ty =
    CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                   FPT->isVariadic());
  llvm::Constant *Callee = CGM.GetAddrOfFunction(MD, Ty);

  llvm::Value *This = EmitLValue(E->getArg(0)).getAddress();

  return EmitCXXMemberCall(MD, Callee, This,
                           E->arg_begin() + 1, E->arg_end());
}

/// LoadCXXThis - Load the 'this' pointer of the current C++ instance method
/// from its local declaration slot.
llvm::Value *CodeGenFunction::LoadCXXThis() {
  assert(isa<CXXMethodDecl>(CurFuncDecl) &&
         "Must be in a C++ member function decl to load 'this'");
  assert(cast<CXXMethodDecl>(CurFuncDecl)->isInstance() &&
         "Must be in a C++ member function decl to load 'this'");

  // FIXME: What if we're inside a block?
  // ans: See how CodeGenFunction::LoadObjCSelf() uses
  // CodeGenFunction::BlockForwardSelf() for how to do this.
  return Builder.CreateLoad(LocalDeclMap[CXXThisDecl], "this");
}

/// EmitCXXAggrConstructorCall - This routine essentially creates a (nested)
/// for-loop to call the default constructor on individual members of the
/// array. 'Array' is the array type, 'This' is llvm pointer of the start
/// of the array and 'D' is the default costructor Decl for elements of the
/// array. It is assumed that all relevant checks have been made by the
/// caller.
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            const ArrayType *Array,
                                            llvm::Value *This) {
  const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
  assert(CA && "Do we support VLA for construction ?");

  // Create a temporary for the loop index and initialize it with 0.
  llvm::Value *IndexPtr = CreateTempAlloca(llvm::Type::getInt64Ty(VMContext),
                                           "loop.index");
  llvm::Value* zeroConstant =
    llvm::Constant::getNullValue(llvm::Type::getInt64Ty(VMContext));
  Builder.CreateStore(zeroConstant, IndexPtr, false);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index < number-of-elements fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = getContext().getConstantArrayElementCount(CA);
  llvm::Value * NumElementsPtr =
    llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), NumElements);
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElementsPtr,
                                              "isless");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the constructor call on the array element.
  Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx");
  EmitCXXConstructorCall(D, Ctor_Complete, Address, 0, 0);

  EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = Builder.CreateLoad(IndexPtr);
  NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
  Builder.CreateStore(NextVal, IndexPtr, false);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitCXXAggrDestructorCall - calls the default destructor on array
/// elements in reverse order of construction.
void
CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                           const ArrayType *Array,
                                           llvm::Value *This) {
  const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
  assert(CA && "Do we support VLA for destruction ?");
  llvm::Value *One = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
                                            1);
  uint64_t ElementCount = getContext().getConstantArrayElementCount(CA);
  // Create a temporary for the loop index and initialize it with count of
  // array elements.  The loop counts down from ElementCount to 0 so elements
  // are destroyed in reverse order of construction.
  llvm::Value *IndexPtr = CreateTempAlloca(llvm::Type::getInt64Ty(VMContext),
                                           "loop.index");
  // Index = ElementCount;
  llvm::Value* UpperCount =
    llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), ElementCount);
  Builder.CreateStore(UpperCount, IndexPtr, false);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index != 0 fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value* zeroConstant =
    llvm::Constant::getNullValue(llvm::Type::getInt64Ty(VMContext));
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsNE = Builder.CreateICmpNE(Counter, zeroConstant,
                                           "isne");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsNE, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the destructor call on the array element at
  // index Counter - 1 (the index holds the count of still-live elements).
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One);
  llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx");
  EmitCXXDestructorCall(D, Dtor_Complete, Address);

  EmitBlock(ContinueBlock);

  // Emit the decrement of the loop counter.
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One, "dec");
  Builder.CreateStore(Counter, IndexPtr, false);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitCXXConstructorCall - Emit a call to the constructor D (of kind Type)
/// on the object at This, with arguments [ArgBeg, ArgEnd).  A trivial copy
/// constructor is lowered to a plain aggregate copy of its source argument.
void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isCopyConstructor(getContext())) {
    const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(D->getDeclContext());
    if (ClassDecl->hasTrivialCopyConstructor()) {
      assert(!ClassDecl->hasUserDeclaredCopyConstructor() &&
             "EmitCXXConstructorCall - user declared copy constructor");
      const Expr *E = (*ArgBeg);
      QualType Ty = E->getType();
      llvm::Value *Src = EmitLValue(E).getAddress();
      EmitAggregateCopy(This, Src, Ty);
      return;
    }
  }

  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  EmitCXXMemberCall(D, Callee, This, ArgBeg, ArgEnd);
}

/// EmitCXXDestructorCall - Emit a call to the destructor D (of kind Type) on
/// the object at This.  Destructors take no explicit arguments.
void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *D,
                                            CXXDtorType Type,
                                            llvm::Value *This) {
  llvm::Value *Callee = CGM.GetAddrOfCXXDestructor(D, Type);

  EmitCXXMemberCall(D, Callee, This, 0, 0);
}

/// EmitCXXConstructExpr - Emit the construction of an object of class type
/// into the memory at Dest.  Trivial default construction emits nothing, and
/// an elidable copy construction is replaced by evaluating the source
/// directly into Dest.
void
CodeGenFunction::EmitCXXConstructExpr(llvm::Value *Dest,
                                      const CXXConstructExpr *E) {
  assert(Dest && "Must have a destination!");

  const CXXRecordDecl *RD =
    cast<CXXRecordDecl>(E->getType()->getAs<RecordType>()->getDecl());
  if (RD->hasTrivialConstructor())
    return;

  // Code gen optimization to eliminate copy constructor and return
  // its first argument instead.
  if (getContext().getLangOptions().ElideConstructors && E->isElidable()) {
    CXXConstructExpr::const_arg_iterator i = E->arg_begin();
    EmitAggExpr((*i), Dest, false);
    return;
  }
  // Call the constructor.
  EmitCXXConstructorCall(E->getConstructor(), Ctor_Complete, Dest,
                         E->arg_begin(), E->arg_end());
}

/// EmitCXXNewExpr - Emit a 'new' expression: a call to the chosen operator
/// new (with the allocation size followed by any placement arguments),
/// followed by initialization of the allocated storage — a store/copy for
/// POD types or a constructor call otherwise.  If the allocation function
/// has an empty exception specification, the result is null-checked and
/// initialization is skipped on the null path.  Array new is unsupported.
llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  if (E->isArray()) {
    ErrorUnsupported(E, "new[] expression");
    return llvm::UndefValue::get(ConvertType(E->getType()));
  }

  QualType AllocType = E->getAllocatedType();
  FunctionDecl *NewFD = E->getOperatorNew();
  const FunctionProtoType *NewFTy = NewFD->getType()->getAsFunctionProtoType();

  CallArgList NewArgs;

  // The allocation size is the first argument.  getTypeSize is in bits,
  // hence the division by 8.
  QualType SizeTy = getContext().getSizeType();
  llvm::Value *AllocSize =
    llvm::ConstantInt::get(ConvertType(SizeTy),
                           getContext().getTypeSize(AllocType) / 8);

  NewArgs.push_back(std::make_pair(RValue::get(AllocSize), SizeTy));

  // Emit the rest of the arguments.
  // FIXME: Ideally, this should just use EmitCallArgs.
  CXXNewExpr::const_arg_iterator NewArg = E->placement_arg_begin();

  // First, use the types from the function type.
  // We start at 1 here because the first argument (the allocation size)
  // has already been emitted.
  for (unsigned i = 1, e = NewFTy->getNumArgs(); i != e; ++i, ++NewArg) {
    QualType ArgType = NewFTy->getArgType(i);

    assert(getContext().getCanonicalType(ArgType.getNonReferenceType()).
           getTypePtr() ==
           getContext().getCanonicalType(NewArg->getType()).getTypePtr() &&
           "type mismatch in call argument!");

    NewArgs.push_back(std::make_pair(EmitCallArg(*NewArg, ArgType),
                                     ArgType));

  }

  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((NewArg == E->placement_arg_end() || NewFTy->isVariadic()) &&
         "Extra arguments in non-variadic function!");

  // If we still have any arguments, emit them using the type of the argument.
  for (CXXNewExpr::const_arg_iterator NewArgEnd = E->placement_arg_end();
       NewArg != NewArgEnd; ++NewArg) {
    QualType ArgType = NewArg->getType();
    NewArgs.push_back(std::make_pair(EmitCallArg(*NewArg, ArgType),
                                     ArgType));
  }

  // Emit the call to new.
  RValue RV =
    EmitCall(CGM.getTypes().getFunctionInfo(NewFTy->getResultType(), NewArgs),
             CGM.GetAddrOfFunction(NewFD), NewArgs, NewFD);

  // If an allocation function is declared with an empty exception specification
  // it returns null to indicate failure to allocate storage. [expr.new]p13.
  // (We don't need to check for null when there's no new initializer and
  // we're allocating a POD type).
  bool NullCheckResult = NewFTy->hasEmptyExceptionSpec() &&
    !(AllocType->isPODType() && !E->hasInitializer());

  llvm::BasicBlock *NewNull = 0;
  llvm::BasicBlock *NewNotNull = 0;
  llvm::BasicBlock *NewEnd = 0;

  llvm::Value *NewPtr = RV.getScalarVal();

  if (NullCheckResult) {
    NewNull = createBasicBlock("new.null");
    NewNotNull = createBasicBlock("new.notnull");
    NewEnd = createBasicBlock("new.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(NewPtr,
                           llvm::Constant::getNullValue(NewPtr->getType()),
                           "isnull");

    Builder.CreateCondBr(IsNull, NewNull, NewNotNull);
    EmitBlock(NewNotNull);
  }

  NewPtr = Builder.CreateBitCast(NewPtr, ConvertType(E->getType()));

  if (AllocType->isPODType()) {
    // POD types are initialized by storing/copying the single initializer
    // value, if any (e.g. 'new int(5)').
    if (E->getNumConstructorArgs() > 0) {
      assert(E->getNumConstructorArgs() == 1 &&
             "Can only have one argument to initializer of POD type.");

      const Expr *Init = E->getConstructorArg(0);

      if (!hasAggregateLLVMType(AllocType))
        Builder.CreateStore(EmitScalarExpr(Init), NewPtr);
      else if (AllocType->isAnyComplexType())
        EmitComplexExprIntoAddr(Init, NewPtr, AllocType.isVolatileQualified());
      else
        EmitAggExpr(Init, NewPtr, AllocType.isVolatileQualified());
    }
  } else {
    // Call the constructor.
    CXXConstructorDecl *Ctor = E->getConstructor();

    EmitCXXConstructorCall(Ctor, Ctor_Complete, NewPtr,
                           E->constructor_arg_begin(),
                           E->constructor_arg_end());
  }

  if (NullCheckResult) {
    // Merge the not-null and null paths; the result is a phi of the
    // initialized pointer and null.
    Builder.CreateBr(NewEnd);
    EmitBlock(NewNull);
    Builder.CreateBr(NewEnd);
    EmitBlock(NewEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(NewPtr->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(NewPtr, NewNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(NewPtr->getType()), NewNull);

    NewPtr = PHI;
  }

  return NewPtr;
}

/// EmitCXXDeleteExpr - Emit a 'delete' expression: null-check the operand,
/// call the destructor if the pointee's class type has a non-trivial one,
/// then call the chosen operator delete.  Array delete and deletion through
/// a virtual destructor are unsupported.
void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {
  if (E->isArrayForm()) {
    ErrorUnsupported(E, "delete[] expression");
    return;
  };

  QualType DeleteTy =
    E->getArgument()->getType()->getAs<PointerType>()->getPointeeType();

  llvm::Value *Ptr = EmitScalarExpr(E->getArgument());

  // Null check the pointer.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull =
    Builder.CreateICmpEQ(Ptr, llvm::Constant::getNullValue(Ptr->getType()),
                         "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  // Call the destructor if necessary.
  if (const RecordType *RT = DeleteTy->getAs<RecordType>()) {
    if (CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(RT->getDecl())) {
      if (!RD->hasTrivialDestructor()) {
        const CXXDestructorDecl *Dtor = RD->getDestructor(getContext());
        if (Dtor->isVirtual()) {
          ErrorUnsupported(E, "delete expression with virtual destructor");
          return;
        }

        EmitCXXDestructorCall(Dtor, Dtor_Complete, Ptr);
      }
    }
  }

  // Call delete.
  FunctionDecl *DeleteFD = E->getOperatorDelete();
  const FunctionProtoType *DeleteFTy =
    DeleteFD->getType()->getAsFunctionProtoType();

  CallArgList DeleteArgs;

  // operator delete's first parameter takes the pointer being freed.
  QualType ArgTy = DeleteFTy->getArgType(0);
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.push_back(std::make_pair(RValue::get(DeletePtr), ArgTy));

  // Emit the call to delete.
  EmitCall(CGM.getTypes().getFunctionInfo(DeleteFTy->getResultType(),
                                          DeleteArgs),
           CGM.GetAddrOfFunction(DeleteFD),
           DeleteArgs, DeleteFD);

  EmitBlock(DeleteEnd);
}

/// EmitCXXConstructors - Emit both the complete-object and base-object
/// variants of the constructor D.
void CodeGenModule::EmitCXXConstructors(const CXXConstructorDecl *D) {
  EmitGlobal(GlobalDecl(D, Ctor_Complete));
  EmitGlobal(GlobalDecl(D, Ctor_Base));
}

/// EmitCXXConstructor - Emit the body of the Type variant of constructor D
/// and set its definition attributes.
void CodeGenModule::EmitCXXConstructor(const CXXConstructorDecl *D,
                                       CXXCtorType Type) {

  llvm::Function *Fn = GetAddrOfCXXConstructor(D, Type);

  CodeGenFunction(*this).GenerateCode(GlobalDecl(D, Type), Fn);

  SetFunctionDefinitionAttributes(D, Fn);
  SetLLVMFunctionAttributesForDefinition(D, Fn);
}

/// GetAddrOfCXXConstructor - Return the llvm::Function for the Type variant
/// of constructor D, creating a declaration under its mangled name if needed.
llvm::Function *
CodeGenModule::GetAddrOfCXXConstructor(const CXXConstructorDecl *D,
                                       CXXCtorType Type) {
  const llvm::FunctionType *FTy =
    getTypes().GetFunctionType(getTypes().getFunctionInfo(D), false);

  const char *Name = getMangledCXXCtorName(D, Type);
  return cast<llvm::Function>(
                      GetOrCreateLLVMFunction(Name, FTy, GlobalDecl(D, Type)));
}

/// getMangledCXXCtorName - Return the uniqued mangled name for the Type
/// variant of constructor D.
const char *CodeGenModule::getMangledCXXCtorName(const CXXConstructorDecl *D,
                                                 CXXCtorType Type) {
  llvm::SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  mangleCXXCtor(D, Type, Context, Out);

  Name += '\0';
  return UniqueMangledName(Name.begin(), Name.end());
}

/// EmitCXXDestructors - Emit both the complete-object and base-object
/// variants of the destructor D.
void CodeGenModule::EmitCXXDestructors(const CXXDestructorDecl *D) {
  EmitCXXDestructor(D, Dtor_Complete);
  EmitCXXDestructor(D, Dtor_Base);
}

/// EmitCXXDestructor - Emit the body of the Type variant of destructor D
/// and set its definition attributes.
void
CodeGenModule::EmitCXXDestructor(const CXXDestructorDecl *D,
                                 CXXDtorType Type) {
  llvm::Function *Fn = GetAddrOfCXXDestructor(D, Type);

  CodeGenFunction(*this).GenerateCode(GlobalDecl(D, Type), Fn);

  SetFunctionDefinitionAttributes(D, Fn);
  SetLLVMFunctionAttributesForDefinition(D, Fn);
}

/// GetAddrOfCXXDestructor - Return the llvm::Function for the Type variant
/// of destructor D, creating a declaration under its mangled name if needed.
llvm::Function *
CodeGenModule::GetAddrOfCXXDestructor(const CXXDestructorDecl *D,
                                      CXXDtorType Type) {
  const llvm::FunctionType *FTy =
    getTypes().GetFunctionType(getTypes().getFunctionInfo(D), false);

  const char *Name = getMangledCXXDtorName(D, Type);
  return cast<llvm::Function>(
                      GetOrCreateLLVMFunction(Name, FTy, GlobalDecl(D, Type)));
}

/// getMangledCXXDtorName - Return the uniqued mangled name for the Type
/// variant of destructor D.
const char *CodeGenModule::getMangledCXXDtorName(const CXXDestructorDecl *D,
                                                 CXXDtorType Type) {
  llvm::SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  mangleCXXDtor(D, Type, Context, Out);

  Name += '\0';
  return UniqueMangledName(Name.begin(), Name.end());
}

/// GenerateRtti - Return an i8* constant for the RTTI descriptor of class RD.
/// Returns null when RTTI is disabled.  The descriptor contents are not yet
/// implemented (both slots are emitted as null — see FIXMEs below).
llvm::Constant *CodeGenModule::GenerateRtti(const CXXRecordDecl *RD) {
  llvm::Type *Ptr8Ty;
  Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  llvm::Constant *Rtti = llvm::Constant::getNullValue(Ptr8Ty);

  if (!getContext().getLangOptions().Rtti)
    return Rtti;

  llvm::SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  QualType ClassTy;
  ClassTy = getContext().getTagDeclType(RD);
  mangleCXXRtti(ClassTy, getContext(), Out);
  llvm::GlobalVariable::LinkageTypes linktype;
  linktype = llvm::GlobalValue::WeakAnyLinkage;
  std::vector<llvm::Constant *> info;
  // assert(0 && "FIXME: implement rtti descriptor");
  // FIXME: descriptor
  info.push_back(llvm::Constant::getNullValue(Ptr8Ty));
  // assert(0 && "FIXME: implement rtti ts");
  // FIXME: TS
  info.push_back(llvm::Constant::getNullValue(Ptr8Ty));

  llvm::Constant *C;
  llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty,
                                               info.size());
  C = llvm::ConstantArray::get(type, info);
  Rtti = new llvm::GlobalVariable(getModule(), type, true, linktype, C,
                                  Out.str());
  Rtti = llvm::ConstantExpr::getBitCast(Rtti, Ptr8Ty);
  return Rtti;
}

/// VtableBuilder - Incrementally builds the flat constant initializer for a
/// class's vtable (Itanium-style layout: vbase offsets, vcall offsets,
/// offset-to-top, RTTI pointer, then virtual function pointers), recording
/// vtable indices for later virtual dispatch codegen.
/// NOTE(review): class definition continues beyond this file chunk.
class VtableBuilder {
public:
  /// Index_t - Vtable index type.
  typedef uint64_t Index_t;
private:
  // methods - The caller-owned output vector the finished vtable entries are
  // appended to.
  std::vector<llvm::Constant *> &methods;
  // submethods - Entries for the table currently being composed; spliced
  // into 'methods' by end().
  std::vector<llvm::Constant *> submethods;
  llvm::Type *Ptr8Ty;
  /// Class - The most derived class that this vtable is being built for.
  const CXXRecordDecl *Class;
  /// BLayout - Layout for the most derived class that this vtable is being
  /// built for.
  const ASTRecordLayout &BLayout;
  llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
  // SeenVBase - Virtual bases already given an offset entry in the table
  // currently being built; reset by StartNewTable().
  llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
  llvm::Constant *rtti;
  llvm::LLVMContext &VMContext;
  CodeGenModule &CGM;  // Per-module state.
  /// Index - Maps a method decl into a vtable index. Useful for virtual
  /// dispatch codegen.
  llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
  // VCall/VCallOffset - Bookkeeping for vcall-offset slots of methods that
  // are (morally) virtual; see OverrideMethod and AddMethod.
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCall;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCallOffset;
  // CallOffset - (non-virtual offset, virtual offset) adjustment pair.
  typedef std::pair<Index_t, Index_t> CallOffset;
  typedef llvm::DenseMap<const CXXMethodDecl *, CallOffset> Thunks_t;
  Thunks_t Thunks;
  typedef llvm::DenseMap<const CXXMethodDecl *,
                         std::pair<CallOffset, CallOffset> > CovariantThunks_t;
  CovariantThunks_t CovariantThunks;
  // VCalls - Pending vcall offset values for the current table, emitted in
  // reverse order by end().
  std::vector<Index_t> VCalls;
  typedef CXXRecordDecl::method_iterator method_iter;
  // FIXME: Linkage should follow vtable
  const bool Extern;
  const uint32_t LLVMPointerWidth;
  Index_t extra;
public:
  VtableBuilder(std::vector<llvm::Constant *> &meth,
                const CXXRecordDecl *c,
                CodeGenModule &cgm)
    : methods(meth), Class(c), BLayout(cgm.getContext().getASTRecordLayout(c)),
      rtti(cgm.GenerateRtti(c)), VMContext(cgm.getModule().getContext()),
      CGM(cgm), Extern(true),
      LLVMPointerWidth(cgm.getContext().Target.getPointerWidth(0)) {
    Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  }

  llvm::DenseMap<const CXXMethodDecl *, Index_t> &getIndex() { return Index; }

  // wrap - Encode an integer offset as an i8* vtable entry via inttoptr.
  llvm::Constant *wrap(Index_t i) {
    llvm::Constant *m;
    m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), i);
    return llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
  }

  // wrap - Bitcast a constant (e.g. a function address) to i8* for the table.
  llvm::Constant *wrap(llvm::Constant *m) {
    return llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
  }

  /// GenerateVBaseOffsets - Recursively collect, for each virtual base not
  /// yet seen in the current table, its offset relative to Offset (both in
  /// bytes; Offset is given in bits, hence the /8).
  void GenerateVBaseOffsets(std::vector<llvm::Constant *> &offsets,
                            const CXXRecordDecl *RD, uint64_t Offset) {
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (i->isVirtual() && !SeenVBase.count(Base)) {
        SeenVBase.insert(Base);
        int64_t BaseOffset = -(Offset/8) +
          BLayout.getVBaseClassOffset(Base)/8;
        llvm::Constant *m = wrap(BaseOffset);
        // (0?700:0) is a disabled debugging bias; currently a no-op.
        m = wrap((0?700:0) + BaseOffset);
        offsets.push_back(m);
      }
      GenerateVBaseOffsets(offsets, Base, Offset);
    }
  }

  // StartNewTable - Reset per-table state before composing another vtable.
  void StartNewTable() {
    SeenVBase.clear();
  }

  /// OverrideMethod - If MD overrides a method that already occupies a slot
  /// in submethods, replace that slot with m, record MD's index, and set up
  /// any required this-adjusting or covariant-return thunks.  Returns true
  /// if an existing slot was reused.
  bool OverrideMethod(const CXXMethodDecl *MD, llvm::Constant *m,
                      bool MorallyVirtual, Index_t Offset) {
    typedef CXXMethodDecl::method_iterator meth_iter;

    // FIXME: Don't like the nested loops. For very large inheritance
    // heirarchies we could have a table on the side with the final overridder
    // and just replace each instance of an overridden method once. Would be
    // nice to measure the cost/benefit on real code.

    for (meth_iter mi = MD->begin_overridden_methods(),
           e = MD->end_overridden_methods();
         mi != e; ++mi) {
      const CXXMethodDecl *OMD = *mi;
      llvm::Constant *om;
      om = CGM.GetAddrOfFunction(OMD, Ptr8Ty);
      om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);

      for (Index_t i = 0, e = submethods.size();
           i != e; ++i) {
        // FIXME: begin_overridden_methods might be too lax, covariance */
        if (submethods[i] != om)
          continue;
        // Compare canonical return types to detect covariant returns.
        QualType nc_oret = OMD->getType()->getAsFunctionType()->getResultType();
        CanQualType oret = CGM.getContext().getCanonicalType(nc_oret);
        QualType nc_ret = MD->getType()->getAsFunctionType()->getResultType();
        CanQualType ret = CGM.getContext().getCanonicalType(nc_ret);
        CallOffset ReturnOffset = std::make_pair(0, 0);
        if (oret != ret) {
          // FIXME: calculate offsets for covariance
          ReturnOffset = std::make_pair(42,42);
        }
        Index[MD] = i;
        submethods[i] = m;

        Thunks.erase(OMD);
        if (MorallyVirtual) {
          // Reuse the overridden method's vcall slot, or allocate one.
          Index_t &idx = VCall[OMD];
          if (idx == 0) {
            VCallOffset[MD] = Offset/8;
            idx = VCalls.size()+1;
            VCalls.push_back(0);
          } else {
            VCallOffset[MD] = VCallOffset[OMD];
            VCalls[idx-1] = -VCallOffset[OMD] + Offset/8;
          }
          VCall[MD] = idx;
          CallOffset ThisOffset;
          // FIXME: calculate non-virtual offset
          ThisOffset = std::make_pair(0, -((idx+extra+2)*LLVMPointerWidth/8));
          if (ReturnOffset.first || ReturnOffset.second)
            CovariantThunks[MD] = std::make_pair(ThisOffset, ReturnOffset);
          else
            Thunks[MD] = ThisOffset;
          return true;
        }
#if 0
        // FIXME: finish off
        int64_t O = VCallOffset[OMD] - Offset/8;
        if (O) {
          Thunks[MD] = std::make_pair(O, 0);
        }
#endif
        return true;
      }
    }

    return false;
  }

  /// InstallThunks - Replace the recorded slots of thunked methods with
  /// generated thunk functions, then clear the pending thunk maps.
  void InstallThunks() {
    for (Thunks_t::iterator i = Thunks.begin(), e = Thunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      Index_t nv_O = i->second.first;
      Index_t v_O = i->second.second;
      submethods[idx] = CGM.BuildThunk(MD, Extern, nv_O, v_O);
    }
    Thunks.clear();
    for (CovariantThunks_t::iterator i = CovariantThunks.begin(),
           e = CovariantThunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      Index_t nv_t = i->second.first.first;
      Index_t v_t = i->second.first.second;
      Index_t nv_r = i->second.second.first;
      Index_t v_r = i->second.second.second;
      submethods[idx] = CGM.BuildCovariantThunk(MD, Extern, nv_t, v_t, nv_r,
                                                v_r);
    }
    CovariantThunks.clear();
  }

  /// OverrideMethods - Apply OverrideMethod for every virtual method of
  /// every (class, offset) pair along Path, walking the path in reverse.
  void OverrideMethods(std::vector<std::pair<const CXXRecordDecl *,
                       int64_t> > *Path, bool MorallyVirtual) {
    for (std::vector<std::pair<const CXXRecordDecl *,
           int64_t> >::reverse_iterator i = Path->rbegin(),
           e = Path->rend(); i != e; ++i) {
      const CXXRecordDecl *RD = i->first;
      int64_t Offset = i->second;
      for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
           ++mi)
        if (mi->isVirtual()) {
          const CXXMethodDecl *MD = *mi;
          llvm::Constant *m = wrap(CGM.GetAddrOfFunction(MD));
          OverrideMethod(MD, m, MorallyVirtual, Offset);
        }
    }
  }

  /// AddMethod - Place MD in the table: reuse an overridden method's slot if
  /// possible, otherwise append a new slot (and, for morally-virtual
  /// methods, allocate a vcall offset slot).
  void AddMethod(const CXXMethodDecl *MD, bool MorallyVirtual,
                 Index_t Offset) {
    llvm::Constant *m = 0;
    if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
      m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
    else
      m = wrap(CGM.GetAddrOfFunction(MD));

    // If we can find a previously allocated slot for this, reuse it.
    if (OverrideMethod(MD, m, MorallyVirtual, Offset))
      return;

    // else allocate a new slot.
    Index[MD] = submethods.size();
    submethods.push_back(m);
    if (MorallyVirtual) {
      VCallOffset[MD] = Offset/8;
      Index_t &idx = VCall[MD];
      // Allocate the first one, after that, we reuse the previous one.
      if (idx == 0) {
        idx = VCalls.size()+1;
        VCalls.push_back(0);
      }
    }
  }

  /// AddMethods - AddMethod for every virtual method declared in RD.
  void AddMethods(const CXXRecordDecl *RD, bool MorallyVirtual,
                  Index_t Offset) {
    for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
         ++mi)
      if (mi->isVirtual())
        AddMethod(*mi, MorallyVirtual, Offset);
  }

  /// NonVirtualBases - Generate a separate (secondary) vtable for each
  /// non-virtual base that is not the (non-virtual) primary base.
  void NonVirtualBases(const CXXRecordDecl *RD, const ASTRecordLayout &Layout,
                       const CXXRecordDecl *PrimaryBase,
                       bool PrimaryBaseWasVirtual, bool MorallyVirtual,
                       int64_t Offset) {
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      if (i->isVirtual())
        continue;
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
        uint64_t o = Offset + Layout.getBaseClassOffset(Base);
        StartNewTable();
        std::vector<std::pair<const CXXRecordDecl *,
          int64_t> > S;
        S.push_back(std::make_pair(RD, Offset));
        GenerateVtableForBase(Base, MorallyVirtual, o, false, &S);
      }
    }
  }

  /// end - Finish the current table: flush vbase offsets and vcall offsets,
  /// emit offset-to-top and the RTTI pointer, splice in the thunk-patched
  /// method entries, then recurse into non-virtual bases.  Returns the
  /// table's address point (the index just past the RTTI entry).
  Index_t end(const CXXRecordDecl *RD, std::vector<llvm::Constant *> &offsets,
              const ASTRecordLayout &Layout,
              const CXXRecordDecl *PrimaryBase,
              bool PrimaryBaseWasVirtual, bool MorallyVirtual,
              int64_t Offset, bool ForVirtualBase) {
    StartNewTable();
    extra = 0;
    // FIXME: Cleanup.
    if (!ForVirtualBase) {
      // then virtual base offsets...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
    }

    // The vcalls come first...
    // (0?600:0) is a disabled debugging bias; currently a no-op.
    for (std::vector<Index_t>::reverse_iterator i=VCalls.rbegin(),
           e=VCalls.rend();
         i != e; ++i)
      methods.push_back(wrap((0?600:0) + *i));
    VCalls.clear();

    if (ForVirtualBase) {
      // then virtual base offsets...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
    }

    // Offset-to-top, then the RTTI pointer; the address point follows them.
    methods.push_back(wrap(-(Offset/8)));
    methods.push_back(rtti);
    Index_t AddressPoint = methods.size();

    InstallThunks();
    methods.insert(methods.end(), submethods.begin(), submethods.end());
    submethods.clear();

    // and then the non-virtual bases.
    NonVirtualBases(RD, Layout, PrimaryBase, PrimaryBaseWasVirtual,
                    MorallyVirtual, Offset);
    return AddressPoint;
  }

  /// Primaries - Recursively add the virtual methods along the chain of
  /// primary bases (nearest-base methods first, so derived overrides are
  /// applied on top), noting virtual primaries in IndirectPrimary.
  /// NOTE(review): method continues beyond this file chunk.
  void Primaries(const CXXRecordDecl *RD, bool MorallyVirtual, int64_t Offset) {
    if (!RD->isDynamicClass())
      return;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    // vtables are composed from the chain of primaries.
    if (PrimaryBase) {
      if (PrimaryBaseWasVirtual)
        IndirectPrimary.insert(PrimaryBase);
      Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
    }

    // And add the virtuals for the class to the primary vtable.
1048 AddMethods(RD, MorallyVirtual, Offset); 1049 } 1050 1051 int64_t GenerateVtableForBase(const CXXRecordDecl *RD, 1052 bool MorallyVirtual = false, int64_t Offset = 0, 1053 bool ForVirtualBase = false, 1054 std::vector<std::pair<const CXXRecordDecl *, 1055 int64_t> > *Path = 0) { 1056 if (!RD->isDynamicClass()) 1057 return 0; 1058 1059 const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD); 1060 const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase(); 1061 const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual(); 1062 1063 std::vector<llvm::Constant *> offsets; 1064 extra = 0; 1065 GenerateVBaseOffsets(offsets, RD, Offset); 1066 if (ForVirtualBase) 1067 extra = offsets.size(); 1068 1069 // vtables are composed from the chain of primaries. 1070 if (PrimaryBase) { 1071 if (PrimaryBaseWasVirtual) 1072 IndirectPrimary.insert(PrimaryBase); 1073 Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset); 1074 } 1075 1076 // And add the virtuals for the class to the primary vtable. 
1077 AddMethods(RD, MorallyVirtual, Offset); 1078 1079 if (Path) 1080 OverrideMethods(Path, MorallyVirtual); 1081 1082 return end(RD, offsets, Layout, PrimaryBase, PrimaryBaseWasVirtual, 1083 MorallyVirtual, Offset, ForVirtualBase); 1084 } 1085 1086 void GenerateVtableForVBases(const CXXRecordDecl *RD, 1087 int64_t Offset = 0, 1088 std::vector<std::pair<const CXXRecordDecl *, 1089 int64_t> > *Path = 0) { 1090 bool alloc = false; 1091 if (Path == 0) { 1092 alloc = true; 1093 Path = new std::vector<std::pair<const CXXRecordDecl *, 1094 int64_t> >; 1095 } 1096 // FIXME: We also need to override using all paths to a virtual base, 1097 // right now, we just process the first path 1098 Path->push_back(std::make_pair(RD, Offset)); 1099 for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(), 1100 e = RD->bases_end(); i != e; ++i) { 1101 const CXXRecordDecl *Base = 1102 cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl()); 1103 if (i->isVirtual() && !IndirectPrimary.count(Base)) { 1104 // Mark it so we don't output it twice. 1105 IndirectPrimary.insert(Base); 1106 StartNewTable(); 1107 int64_t BaseOffset = BLayout.getVBaseClassOffset(Base); 1108 GenerateVtableForBase(Base, true, BaseOffset, true, Path); 1109 } 1110 int64_t BaseOffset = Offset; 1111 if (i->isVirtual()) 1112 BaseOffset = BLayout.getVBaseClassOffset(Base); 1113 if (Base->getNumVBases()) 1114 GenerateVtableForVBases(Base, BaseOffset, Path); 1115 } 1116 Path->pop_back(); 1117 if (alloc) 1118 delete Path; 1119 } 1120}; 1121 1122class VtableInfo { 1123public: 1124 typedef VtableBuilder::Index_t Index_t; 1125private: 1126 CodeGenModule &CGM; // Per-module state. 1127 /// Index_t - Vtable index type. 1128 typedef llvm::DenseMap<const CXXMethodDecl *, Index_t> ElTy; 1129 typedef llvm::DenseMap<const CXXRecordDecl *, ElTy *> MapTy; 1130 // FIXME: Move to Context. 
1131 static MapTy IndexFor; 1132public: 1133 VtableInfo(CodeGenModule &cgm) : CGM(cgm) { } 1134 void register_index(const CXXRecordDecl *RD, const ElTy &e) { 1135 assert(IndexFor.find(RD) == IndexFor.end() && "Don't compute vtbl twice"); 1136 // We own a copy of this, it will go away shortly. 1137 new ElTy (e); 1138 IndexFor[RD] = new ElTy (e); 1139 } 1140 Index_t lookup(const CXXMethodDecl *MD) { 1141 const CXXRecordDecl *RD = MD->getParent(); 1142 MapTy::iterator I = IndexFor.find(RD); 1143 if (I == IndexFor.end()) { 1144 std::vector<llvm::Constant *> methods; 1145 VtableBuilder b(methods, RD, CGM); 1146 b.GenerateVtableForBase(RD); 1147 b.GenerateVtableForVBases(RD); 1148 register_index(RD, b.getIndex()); 1149 I = IndexFor.find(RD); 1150 } 1151 assert(I->second->find(MD)!=I->second->end() && "Can't find vtable index"); 1152 return (*I->second)[MD]; 1153 } 1154}; 1155 1156// FIXME: Move to Context. 1157VtableInfo::MapTy VtableInfo::IndexFor; 1158 1159llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) { 1160 llvm::SmallString<256> OutName; 1161 llvm::raw_svector_ostream Out(OutName); 1162 QualType ClassTy; 1163 ClassTy = getContext().getTagDeclType(RD); 1164 mangleCXXVtable(ClassTy, getContext(), Out); 1165 llvm::GlobalVariable::LinkageTypes linktype; 1166 linktype = llvm::GlobalValue::WeakAnyLinkage; 1167 std::vector<llvm::Constant *> methods; 1168 llvm::Type *Ptr8Ty=llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),0); 1169 int64_t AddressPoint; 1170 1171 VtableBuilder b(methods, RD, CGM); 1172 1173 // First comes the vtables for all the non-virtual bases... 1174 AddressPoint = b.GenerateVtableForBase(RD); 1175 1176 // then the vtables for all the virtual bases. 
1177 b.GenerateVtableForVBases(RD); 1178 1179 llvm::Constant *C; 1180 llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size()); 1181 C = llvm::ConstantArray::get(type, methods); 1182 llvm::Value *vtable = new llvm::GlobalVariable(CGM.getModule(), type, true, 1183 linktype, C, Out.str()); 1184 vtable = Builder.CreateBitCast(vtable, Ptr8Ty); 1185 vtable = Builder.CreateGEP(vtable, 1186 llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), 1187 AddressPoint*LLVMPointerWidth/8)); 1188 return vtable; 1189} 1190 1191// FIXME: move to Context 1192static VtableInfo *vtableinfo; 1193 1194llvm::Constant *CodeGenFunction::GenerateThunk(llvm::Function *Fn, 1195 const CXXMethodDecl *MD, 1196 bool Extern, int64_t nv, 1197 int64_t v) { 1198 QualType R = MD->getType()->getAsFunctionType()->getResultType(); 1199 1200 FunctionArgList Args; 1201 ImplicitParamDecl *ThisDecl = 1202 ImplicitParamDecl::Create(getContext(), 0, SourceLocation(), 0, 1203 MD->getThisType(getContext())); 1204 Args.push_back(std::make_pair(ThisDecl, ThisDecl->getType())); 1205 for (FunctionDecl::param_const_iterator i = MD->param_begin(), 1206 e = MD->param_end(); 1207 i != e; ++i) { 1208 ParmVarDecl *D = *i; 1209 Args.push_back(std::make_pair(D, D->getType())); 1210 } 1211 IdentifierInfo *II 1212 = &CGM.getContext().Idents.get("__thunk_named_foo_"); 1213 FunctionDecl *FD = FunctionDecl::Create(getContext(), 1214 getContext().getTranslationUnitDecl(), 1215 SourceLocation(), II, R, 0, 1216 Extern 1217 ? 
FunctionDecl::Extern 1218 : FunctionDecl::Static, 1219 false, true); 1220 StartFunction(FD, R, Fn, Args, SourceLocation()); 1221 // FIXME: generate body 1222 FinishFunction(); 1223 return Fn; 1224} 1225 1226llvm::Constant *CodeGenFunction::GenerateCovariantThunk(llvm::Function *Fn, 1227 const CXXMethodDecl *MD, 1228 bool Extern, 1229 int64_t nv_t, 1230 int64_t v_t, 1231 int64_t nv_r, 1232 int64_t v_r) { 1233 QualType R = MD->getType()->getAsFunctionType()->getResultType(); 1234 1235 FunctionArgList Args; 1236 ImplicitParamDecl *ThisDecl = 1237 ImplicitParamDecl::Create(getContext(), 0, SourceLocation(), 0, 1238 MD->getThisType(getContext())); 1239 Args.push_back(std::make_pair(ThisDecl, ThisDecl->getType())); 1240 for (FunctionDecl::param_const_iterator i = MD->param_begin(), 1241 e = MD->param_end(); 1242 i != e; ++i) { 1243 ParmVarDecl *D = *i; 1244 Args.push_back(std::make_pair(D, D->getType())); 1245 } 1246 IdentifierInfo *II 1247 = &CGM.getContext().Idents.get("__thunk_named_foo_"); 1248 FunctionDecl *FD = FunctionDecl::Create(getContext(), 1249 getContext().getTranslationUnitDecl(), 1250 SourceLocation(), II, R, 0, 1251 Extern 1252 ? 
FunctionDecl::Extern 1253 : FunctionDecl::Static, 1254 false, true); 1255 StartFunction(FD, R, Fn, Args, SourceLocation()); 1256 // FIXME: generate body 1257 FinishFunction(); 1258 return Fn; 1259} 1260 1261llvm::Constant *CodeGenModule::BuildThunk(const CXXMethodDecl *MD, bool Extern, 1262 int64_t nv, int64_t v) { 1263 llvm::SmallString<256> OutName; 1264 llvm::raw_svector_ostream Out(OutName); 1265 mangleThunk(MD, nv, v, getContext(), Out); 1266 llvm::GlobalVariable::LinkageTypes linktype; 1267 linktype = llvm::GlobalValue::WeakAnyLinkage; 1268 if (!Extern) 1269 linktype = llvm::GlobalValue::InternalLinkage; 1270 llvm::Type *Ptr8Ty=llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),0); 1271 const FunctionProtoType *FPT = MD->getType()->getAsFunctionProtoType(); 1272 const llvm::FunctionType *FTy = 1273 getTypes().GetFunctionType(getTypes().getFunctionInfo(MD), 1274 FPT->isVariadic()); 1275 1276 llvm::Function *Fn = llvm::Function::Create(FTy, linktype, Out.str(), 1277 &getModule()); 1278 CodeGenFunction(*this).GenerateThunk(Fn, MD, Extern, nv, v); 1279 // Fn = Builder.CreateBitCast(Fn, Ptr8Ty); 1280 llvm::Constant *m = llvm::ConstantExpr::getBitCast(Fn, Ptr8Ty); 1281 return m; 1282} 1283 1284llvm::Constant *CodeGenModule::BuildCovariantThunk(const CXXMethodDecl *MD, 1285 bool Extern, int64_t nv_t, 1286 int64_t v_t, int64_t nv_r, 1287 int64_t v_r) { 1288 llvm::SmallString<256> OutName; 1289 llvm::raw_svector_ostream Out(OutName); 1290 mangleCovariantThunk(MD, nv_t, v_t, nv_r, v_r, getContext(), Out); 1291 llvm::GlobalVariable::LinkageTypes linktype; 1292 linktype = llvm::GlobalValue::WeakAnyLinkage; 1293 if (!Extern) 1294 linktype = llvm::GlobalValue::InternalLinkage; 1295 llvm::Type *Ptr8Ty=llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),0); 1296 const FunctionProtoType *FPT = MD->getType()->getAsFunctionProtoType(); 1297 const llvm::FunctionType *FTy = 1298 getTypes().GetFunctionType(getTypes().getFunctionInfo(MD), 1299 FPT->isVariadic()); 1300 1301 
llvm::Function *Fn = llvm::Function::Create(FTy, linktype, Out.str(), 1302 &getModule()); 1303 CodeGenFunction(*this).GenerateCovariantThunk(Fn, MD, Extern, nv_t, v_t, nv_r, 1304 v_r); 1305 // Fn = Builder.CreateBitCast(Fn, Ptr8Ty); 1306 llvm::Constant *m = llvm::ConstantExpr::getBitCast(Fn, Ptr8Ty); 1307 return m; 1308} 1309 1310llvm::Value * 1311CodeGenFunction::BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *&This, 1312 const llvm::Type *Ty) { 1313 // FIXME: If we know the dynamic type, we don't have to do a virtual dispatch. 1314 1315 // FIXME: move to Context 1316 if (vtableinfo == 0) 1317 vtableinfo = new VtableInfo(CGM); 1318 1319 VtableInfo::Index_t Idx = vtableinfo->lookup(MD); 1320 1321 Ty = llvm::PointerType::get(Ty, 0); 1322 Ty = llvm::PointerType::get(Ty, 0); 1323 Ty = llvm::PointerType::get(Ty, 0); 1324 llvm::Value *vtbl = Builder.CreateBitCast(This, Ty); 1325 vtbl = Builder.CreateLoad(vtbl); 1326 llvm::Value *vfn = Builder.CreateConstInBoundsGEP1_64(vtbl, 1327 Idx, "vfn"); 1328 vfn = Builder.CreateLoad(vfn); 1329 return vfn; 1330} 1331 1332/// EmitClassAggrMemberwiseCopy - This routine generates code to copy a class 1333/// array of objects from SrcValue to DestValue. Copying can be either a bitwise 1334/// copy or via a copy constructor call. 1335// FIXME. Consolidate this with EmitCXXAggrConstructorCall. 1336void CodeGenFunction::EmitClassAggrMemberwiseCopy(llvm::Value *Dest, 1337 llvm::Value *Src, 1338 const ArrayType *Array, 1339 const CXXRecordDecl *BaseClassDecl, 1340 QualType Ty) { 1341 const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array); 1342 assert(CA && "VLA cannot be copied over"); 1343 bool BitwiseCopy = BaseClassDecl->hasTrivialCopyConstructor(); 1344 1345 // Create a temporary for the loop index and initialize it with 0. 
1346 llvm::Value *IndexPtr = CreateTempAlloca(llvm::Type::getInt64Ty(VMContext), 1347 "loop.index"); 1348 llvm::Value* zeroConstant = 1349 llvm::Constant::getNullValue(llvm::Type::getInt64Ty(VMContext)); 1350 Builder.CreateStore(zeroConstant, IndexPtr, false); 1351 // Start the loop with a block that tests the condition. 1352 llvm::BasicBlock *CondBlock = createBasicBlock("for.cond"); 1353 llvm::BasicBlock *AfterFor = createBasicBlock("for.end"); 1354 1355 EmitBlock(CondBlock); 1356 1357 llvm::BasicBlock *ForBody = createBasicBlock("for.body"); 1358 // Generate: if (loop-index < number-of-elements fall to the loop body, 1359 // otherwise, go to the block after the for-loop. 1360 uint64_t NumElements = getContext().getConstantArrayElementCount(CA); 1361 llvm::Value * NumElementsPtr = 1362 llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), NumElements); 1363 llvm::Value *Counter = Builder.CreateLoad(IndexPtr); 1364 llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElementsPtr, 1365 "isless"); 1366 // If the condition is true, execute the body. 1367 Builder.CreateCondBr(IsLess, ForBody, AfterFor); 1368 1369 EmitBlock(ForBody); 1370 llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc"); 1371 // Inside the loop body, emit the constructor call on the array element. 1372 Counter = Builder.CreateLoad(IndexPtr); 1373 Src = Builder.CreateInBoundsGEP(Src, Counter, "srcaddress"); 1374 Dest = Builder.CreateInBoundsGEP(Dest, Counter, "destaddress"); 1375 if (BitwiseCopy) 1376 EmitAggregateCopy(Dest, Src, Ty); 1377 else if (CXXConstructorDecl *BaseCopyCtor = 1378 BaseClassDecl->getCopyConstructor(getContext(), 0)) { 1379 llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(BaseCopyCtor, 1380 Ctor_Complete); 1381 CallArgList CallArgs; 1382 // Push the this (Dest) ptr. 1383 CallArgs.push_back(std::make_pair(RValue::get(Dest), 1384 BaseCopyCtor->getThisType(getContext()))); 1385 1386 // Push the Src ptr. 
1387 CallArgs.push_back(std::make_pair(RValue::get(Src), 1388 BaseCopyCtor->getParamDecl(0)->getType())); 1389 QualType ResultType = 1390 BaseCopyCtor->getType()->getAsFunctionType()->getResultType(); 1391 EmitCall(CGM.getTypes().getFunctionInfo(ResultType, CallArgs), 1392 Callee, CallArgs, BaseCopyCtor); 1393 } 1394 EmitBlock(ContinueBlock); 1395 1396 // Emit the increment of the loop counter. 1397 llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1); 1398 Counter = Builder.CreateLoad(IndexPtr); 1399 NextVal = Builder.CreateAdd(Counter, NextVal, "inc"); 1400 Builder.CreateStore(NextVal, IndexPtr, false); 1401 1402 // Finally, branch back up to the condition for the next iteration. 1403 EmitBranch(CondBlock); 1404 1405 // Emit the fall-through block. 1406 EmitBlock(AfterFor, true); 1407} 1408 1409/// EmitClassAggrCopyAssignment - This routine generates code to assign a class 1410/// array of objects from SrcValue to DestValue. Assignment can be either a 1411/// bitwise assignment or via a copy assignment operator function call. 1412/// FIXME. This can be consolidated with EmitClassAggrMemberwiseCopy 1413void CodeGenFunction::EmitClassAggrCopyAssignment(llvm::Value *Dest, 1414 llvm::Value *Src, 1415 const ArrayType *Array, 1416 const CXXRecordDecl *BaseClassDecl, 1417 QualType Ty) { 1418 const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array); 1419 assert(CA && "VLA cannot be asssigned"); 1420 bool BitwiseAssign = BaseClassDecl->hasTrivialCopyAssignment(); 1421 1422 // Create a temporary for the loop index and initialize it with 0. 1423 llvm::Value *IndexPtr = CreateTempAlloca(llvm::Type::getInt64Ty(VMContext), 1424 "loop.index"); 1425 llvm::Value* zeroConstant = 1426 llvm::Constant::getNullValue(llvm::Type::getInt64Ty(VMContext)); 1427 Builder.CreateStore(zeroConstant, IndexPtr, false); 1428 // Start the loop with a block that tests the condition. 
1429 llvm::BasicBlock *CondBlock = createBasicBlock("for.cond"); 1430 llvm::BasicBlock *AfterFor = createBasicBlock("for.end"); 1431 1432 EmitBlock(CondBlock); 1433 1434 llvm::BasicBlock *ForBody = createBasicBlock("for.body"); 1435 // Generate: if (loop-index < number-of-elements fall to the loop body, 1436 // otherwise, go to the block after the for-loop. 1437 uint64_t NumElements = getContext().getConstantArrayElementCount(CA); 1438 llvm::Value * NumElementsPtr = 1439 llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), NumElements); 1440 llvm::Value *Counter = Builder.CreateLoad(IndexPtr); 1441 llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElementsPtr, 1442 "isless"); 1443 // If the condition is true, execute the body. 1444 Builder.CreateCondBr(IsLess, ForBody, AfterFor); 1445 1446 EmitBlock(ForBody); 1447 llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc"); 1448 // Inside the loop body, emit the assignment operator call on array element. 1449 Counter = Builder.CreateLoad(IndexPtr); 1450 Src = Builder.CreateInBoundsGEP(Src, Counter, "srcaddress"); 1451 Dest = Builder.CreateInBoundsGEP(Dest, Counter, "destaddress"); 1452 const CXXMethodDecl *MD = 0; 1453 if (BitwiseAssign) 1454 EmitAggregateCopy(Dest, Src, Ty); 1455 else { 1456 bool hasCopyAssign = BaseClassDecl->hasConstCopyAssignment(getContext(), 1457 MD); 1458 assert(hasCopyAssign && "EmitClassAggrCopyAssignment - No user assign"); 1459 (void)hasCopyAssign; 1460 const FunctionProtoType *FPT = MD->getType()->getAsFunctionProtoType(); 1461 const llvm::Type *LTy = 1462 CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD), 1463 FPT->isVariadic()); 1464 llvm::Constant *Callee = CGM.GetAddrOfFunction(MD, LTy); 1465 1466 CallArgList CallArgs; 1467 // Push the this (Dest) ptr. 1468 CallArgs.push_back(std::make_pair(RValue::get(Dest), 1469 MD->getThisType(getContext()))); 1470 1471 // Push the Src ptr. 
1472 CallArgs.push_back(std::make_pair(RValue::get(Src), 1473 MD->getParamDecl(0)->getType())); 1474 QualType ResultType = MD->getType()->getAsFunctionType()->getResultType(); 1475 EmitCall(CGM.getTypes().getFunctionInfo(ResultType, CallArgs), 1476 Callee, CallArgs, MD); 1477 } 1478 EmitBlock(ContinueBlock); 1479 1480 // Emit the increment of the loop counter. 1481 llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1); 1482 Counter = Builder.CreateLoad(IndexPtr); 1483 NextVal = Builder.CreateAdd(Counter, NextVal, "inc"); 1484 Builder.CreateStore(NextVal, IndexPtr, false); 1485 1486 // Finally, branch back up to the condition for the next iteration. 1487 EmitBranch(CondBlock); 1488 1489 // Emit the fall-through block. 1490 EmitBlock(AfterFor, true); 1491} 1492 1493/// EmitClassMemberwiseCopy - This routine generates code to copy a class 1494/// object from SrcValue to DestValue. Copying can be either a bitwise copy 1495/// or via a copy constructor call. 1496void CodeGenFunction::EmitClassMemberwiseCopy( 1497 llvm::Value *Dest, llvm::Value *Src, 1498 const CXXRecordDecl *ClassDecl, 1499 const CXXRecordDecl *BaseClassDecl, QualType Ty) { 1500 if (ClassDecl) { 1501 Dest = GetAddressCXXOfBaseClass(Dest, ClassDecl, BaseClassDecl, 1502 /*NullCheckValue=*/false); 1503 Src = GetAddressCXXOfBaseClass(Src, ClassDecl, BaseClassDecl, 1504 /*NullCheckValue=*/false); 1505 } 1506 if (BaseClassDecl->hasTrivialCopyConstructor()) { 1507 EmitAggregateCopy(Dest, Src, Ty); 1508 return; 1509 } 1510 1511 if (CXXConstructorDecl *BaseCopyCtor = 1512 BaseClassDecl->getCopyConstructor(getContext(), 0)) { 1513 llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(BaseCopyCtor, 1514 Ctor_Complete); 1515 CallArgList CallArgs; 1516 // Push the this (Dest) ptr. 1517 CallArgs.push_back(std::make_pair(RValue::get(Dest), 1518 BaseCopyCtor->getThisType(getContext()))); 1519 1520 // Push the Src ptr. 
1521 CallArgs.push_back(std::make_pair(RValue::get(Src), 1522 BaseCopyCtor->getParamDecl(0)->getType())); 1523 QualType ResultType = 1524 BaseCopyCtor->getType()->getAsFunctionType()->getResultType(); 1525 EmitCall(CGM.getTypes().getFunctionInfo(ResultType, CallArgs), 1526 Callee, CallArgs, BaseCopyCtor); 1527 } 1528} 1529 1530/// EmitClassCopyAssignment - This routine generates code to copy assign a class 1531/// object from SrcValue to DestValue. Assignment can be either a bitwise 1532/// assignment of via an assignment operator call. 1533// FIXME. Consolidate this with EmitClassMemberwiseCopy as they share a lot. 1534void CodeGenFunction::EmitClassCopyAssignment( 1535 llvm::Value *Dest, llvm::Value *Src, 1536 const CXXRecordDecl *ClassDecl, 1537 const CXXRecordDecl *BaseClassDecl, 1538 QualType Ty) { 1539 if (ClassDecl) { 1540 Dest = GetAddressCXXOfBaseClass(Dest, ClassDecl, BaseClassDecl, 1541 /*NullCheckValue=*/false); 1542 Src = GetAddressCXXOfBaseClass(Src, ClassDecl, BaseClassDecl, 1543 /*NullCheckValue=*/false); 1544 } 1545 if (BaseClassDecl->hasTrivialCopyAssignment()) { 1546 EmitAggregateCopy(Dest, Src, Ty); 1547 return; 1548 } 1549 1550 const CXXMethodDecl *MD = 0; 1551 bool ConstCopyAssignOp = BaseClassDecl->hasConstCopyAssignment(getContext(), 1552 MD); 1553 assert(ConstCopyAssignOp && "EmitClassCopyAssignment - missing copy assign"); 1554 (void)ConstCopyAssignOp; 1555 1556 const FunctionProtoType *FPT = MD->getType()->getAsFunctionProtoType(); 1557 const llvm::Type *LTy = 1558 CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD), 1559 FPT->isVariadic()); 1560 llvm::Constant *Callee = CGM.GetAddrOfFunction(MD, LTy); 1561 1562 CallArgList CallArgs; 1563 // Push the this (Dest) ptr. 1564 CallArgs.push_back(std::make_pair(RValue::get(Dest), 1565 MD->getThisType(getContext()))); 1566 1567 // Push the Src ptr. 
1568 CallArgs.push_back(std::make_pair(RValue::get(Src), 1569 MD->getParamDecl(0)->getType())); 1570 QualType ResultType = 1571 MD->getType()->getAsFunctionType()->getResultType(); 1572 EmitCall(CGM.getTypes().getFunctionInfo(ResultType, CallArgs), 1573 Callee, CallArgs, MD); 1574} 1575 1576/// SynthesizeDefaultConstructor - synthesize a default constructor 1577void 1578CodeGenFunction::SynthesizeDefaultConstructor(GlobalDecl GD, 1579 const FunctionDecl *FD, 1580 llvm::Function *Fn, 1581 const FunctionArgList &Args) { 1582 const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(GD.getDecl()); 1583 1584 StartFunction(GD, FD->getResultType(), Fn, Args, SourceLocation()); 1585 EmitCtorPrologue(Ctor); 1586 FinishFunction(); 1587} 1588 1589/// SynthesizeCXXCopyConstructor - This routine implicitly defines body of a copy 1590/// constructor, in accordance with section 12.8 (p7 and p8) of C++03 1591/// The implicitly-defined copy constructor for class X performs a memberwise 1592/// copy of its subobjects. The order of copying is the same as the order 1593/// of initialization of bases and members in a user-defined constructor 1594/// Each subobject is copied in the manner appropriate to its type: 1595/// if the subobject is of class type, the copy constructor for the class is 1596/// used; 1597/// if the subobject is an array, each element is copied, in the manner 1598/// appropriate to the element type; 1599/// if the subobject is of scalar type, the built-in assignment operator is 1600/// used. 
1601/// Virtual base class subobjects shall be copied only once by the 1602/// implicitly-defined copy constructor 1603 1604void CodeGenFunction::SynthesizeCXXCopyConstructor(GlobalDecl GD, 1605 const FunctionDecl *FD, 1606 llvm::Function *Fn, 1607 const FunctionArgList &Args) { 1608 const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(GD.getDecl()); 1609 const CXXRecordDecl *ClassDecl = Ctor->getParent(); 1610 assert(!ClassDecl->hasUserDeclaredCopyConstructor() && 1611 "SynthesizeCXXCopyConstructor - copy constructor has definition already"); 1612 StartFunction(GD, Ctor->getResultType(), Fn, Args, SourceLocation()); 1613 1614 FunctionArgList::const_iterator i = Args.begin(); 1615 const VarDecl *ThisArg = i->first; 1616 llvm::Value *ThisObj = GetAddrOfLocalVar(ThisArg); 1617 llvm::Value *LoadOfThis = Builder.CreateLoad(ThisObj, "this"); 1618 const VarDecl *SrcArg = (i+1)->first; 1619 llvm::Value *SrcObj = GetAddrOfLocalVar(SrcArg); 1620 llvm::Value *LoadOfSrc = Builder.CreateLoad(SrcObj); 1621 1622 for (CXXRecordDecl::base_class_const_iterator Base = ClassDecl->bases_begin(); 1623 Base != ClassDecl->bases_end(); ++Base) { 1624 // FIXME. 
copy constrution of virtual base NYI 1625 if (Base->isVirtual()) 1626 continue; 1627 1628 CXXRecordDecl *BaseClassDecl 1629 = cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl()); 1630 EmitClassMemberwiseCopy(LoadOfThis, LoadOfSrc, ClassDecl, BaseClassDecl, 1631 Base->getType()); 1632 } 1633 1634 for (CXXRecordDecl::field_iterator Field = ClassDecl->field_begin(), 1635 FieldEnd = ClassDecl->field_end(); 1636 Field != FieldEnd; ++Field) { 1637 QualType FieldType = getContext().getCanonicalType((*Field)->getType()); 1638 const ConstantArrayType *Array = 1639 getContext().getAsConstantArrayType(FieldType); 1640 if (Array) 1641 FieldType = getContext().getBaseElementType(FieldType); 1642 1643 if (const RecordType *FieldClassType = FieldType->getAs<RecordType>()) { 1644 CXXRecordDecl *FieldClassDecl 1645 = cast<CXXRecordDecl>(FieldClassType->getDecl()); 1646 LValue LHS = EmitLValueForField(LoadOfThis, *Field, false, 0); 1647 LValue RHS = EmitLValueForField(LoadOfSrc, *Field, false, 0); 1648 if (Array) { 1649 const llvm::Type *BasePtr = ConvertType(FieldType); 1650 BasePtr = llvm::PointerType::getUnqual(BasePtr); 1651 llvm::Value *DestBaseAddrPtr = 1652 Builder.CreateBitCast(LHS.getAddress(), BasePtr); 1653 llvm::Value *SrcBaseAddrPtr = 1654 Builder.CreateBitCast(RHS.getAddress(), BasePtr); 1655 EmitClassAggrMemberwiseCopy(DestBaseAddrPtr, SrcBaseAddrPtr, Array, 1656 FieldClassDecl, FieldType); 1657 } 1658 else 1659 EmitClassMemberwiseCopy(LHS.getAddress(), RHS.getAddress(), 1660 0 /*ClassDecl*/, FieldClassDecl, FieldType); 1661 continue; 1662 } 1663 // Do a built-in assignment of scalar data members. 
1664 LValue LHS = EmitLValueForField(LoadOfThis, *Field, false, 0); 1665 LValue RHS = EmitLValueForField(LoadOfSrc, *Field, false, 0); 1666 RValue RVRHS = EmitLoadOfLValue(RHS, FieldType); 1667 EmitStoreThroughLValue(RVRHS, LHS, FieldType); 1668 } 1669 FinishFunction(); 1670} 1671 1672/// SynthesizeCXXCopyAssignment - Implicitly define copy assignment operator. 1673/// Before the implicitly-declared copy assignment operator for a class is 1674/// implicitly defined, all implicitly- declared copy assignment operators for 1675/// its direct base classes and its nonstatic data members shall have been 1676/// implicitly defined. [12.8-p12] 1677/// The implicitly-defined copy assignment operator for class X performs 1678/// memberwise assignment of its subob- jects. The direct base classes of X are 1679/// assigned first, in the order of their declaration in 1680/// the base-specifier-list, and then the immediate nonstatic data members of X 1681/// are assigned, in the order in which they were declared in the class 1682/// definition.Each subobject is assigned in the manner appropriate to its type: 1683/// if the subobject is of class type, the copy assignment operator for the 1684/// class is used (as if by explicit qualification; that is, ignoring any 1685/// possible virtual overriding functions in more derived classes); 1686/// 1687/// if the subobject is an array, each element is assigned, in the manner 1688/// appropriate to the element type; 1689/// 1690/// if the subobject is of scalar type, the built-in assignment operator is 1691/// used. 
/// SynthesizeCXXCopyAssignment - Emit the body of an implicitly-defined
/// copy assignment operator for class CD's parent class.  The generated
/// code copy-assigns each direct non-virtual base, then each non-static
/// data member (member-wise), and finally returns *this.
/// NOTE(review): virtual bases are skipped entirely (see FIXME below), so
/// classes with virtual bases are not fully copied yet.
void CodeGenFunction::SynthesizeCXXCopyAssignment(const CXXMethodDecl *CD,
                                                  const FunctionDecl *FD,
                                                  llvm::Function *Fn,
                                                  const FunctionArgList &Args) {

  const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(CD->getDeclContext());
  assert(!ClassDecl->hasUserDeclaredCopyAssignment() &&
         "SynthesizeCXXCopyAssignment - copy assignment has user declaration");
  StartFunction(FD, FD->getResultType(), Fn, Args, SourceLocation());

  // The first argument is 'this', the second is the source object; both are
  // passed indirectly, so load the actual object pointers from their slots.
  FunctionArgList::const_iterator i = Args.begin();
  const VarDecl *ThisArg = i->first;
  llvm::Value *ThisObj = GetAddrOfLocalVar(ThisArg);
  llvm::Value *LoadOfThis = Builder.CreateLoad(ThisObj, "this");
  const VarDecl *SrcArg = (i+1)->first;
  llvm::Value *SrcObj = GetAddrOfLocalVar(SrcArg);
  llvm::Value *LoadOfSrc = Builder.CreateLoad(SrcObj);

  // Copy-assign each direct, non-virtual base class subobject.
  for (CXXRecordDecl::base_class_const_iterator Base = ClassDecl->bases_begin();
       Base != ClassDecl->bases_end(); ++Base) {
    // FIXME. copy assignment of virtual base NYI
    if (Base->isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl
      = cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
    EmitClassCopyAssignment(LoadOfThis, LoadOfSrc, ClassDecl, BaseClassDecl,
                            Base->getType());
  }

  // Copy-assign each non-static data member.
  for (CXXRecordDecl::field_iterator Field = ClassDecl->field_begin(),
       FieldEnd = ClassDecl->field_end();
       Field != FieldEnd; ++Field) {
    QualType FieldType = getContext().getCanonicalType((*Field)->getType());
    // For array members, assignment is performed element-wise on the
    // underlying element type.
    const ConstantArrayType *Array =
      getContext().getAsConstantArrayType(FieldType);
    if (Array)
      FieldType = getContext().getBaseElementType(FieldType);

    if (const RecordType *FieldClassType = FieldType->getAs<RecordType>()) {
      // Class-type member: invoke its copy assignment (element-wise for
      // arrays, after casting the addresses to element-pointer type).
      CXXRecordDecl *FieldClassDecl
        = cast<CXXRecordDecl>(FieldClassType->getDecl());
      LValue LHS = EmitLValueForField(LoadOfThis, *Field, false, 0);
      LValue RHS = EmitLValueForField(LoadOfSrc, *Field, false, 0);
      if (Array) {
        const llvm::Type *BasePtr = ConvertType(FieldType);
        BasePtr = llvm::PointerType::getUnqual(BasePtr);
        llvm::Value *DestBaseAddrPtr =
          Builder.CreateBitCast(LHS.getAddress(), BasePtr);
        llvm::Value *SrcBaseAddrPtr =
          Builder.CreateBitCast(RHS.getAddress(), BasePtr);
        EmitClassAggrCopyAssignment(DestBaseAddrPtr, SrcBaseAddrPtr, Array,
                                    FieldClassDecl, FieldType);
      }
      else
        EmitClassCopyAssignment(LHS.getAddress(), RHS.getAddress(),
                                0 /*ClassDecl*/, FieldClassDecl, FieldType);
      continue;
    }
    // Do a built-in assignment of scalar data members.
    LValue LHS = EmitLValueForField(LoadOfThis, *Field, false, 0);
    LValue RHS = EmitLValueForField(LoadOfSrc, *Field, false, 0);
    RValue RVRHS = EmitLoadOfLValue(RHS, FieldType);
    EmitStoreThroughLValue(RVRHS, LHS, FieldType);
  }

  // return *this;
  Builder.CreateStore(LoadOfThis, ReturnValue);

  FinishFunction();
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
/// FIXME: This needs to take a CXXCtorType.
1767void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD) { 1768 const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(CD->getDeclContext()); 1769 // FIXME: Add vbase initialization 1770 llvm::Value *LoadOfThis = 0; 1771 1772 for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(), 1773 E = CD->init_end(); 1774 B != E; ++B) { 1775 CXXBaseOrMemberInitializer *Member = (*B); 1776 if (Member->isBaseInitializer()) { 1777 LoadOfThis = LoadCXXThis(); 1778 Type *BaseType = Member->getBaseClass(); 1779 CXXRecordDecl *BaseClassDecl = 1780 cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl()); 1781 llvm::Value *V = GetAddressCXXOfBaseClass(LoadOfThis, ClassDecl, 1782 BaseClassDecl, 1783 /*NullCheckValue=*/false); 1784 EmitCXXConstructorCall(Member->getConstructor(), 1785 Ctor_Complete, V, 1786 Member->const_arg_begin(), 1787 Member->const_arg_end()); 1788 } else { 1789 // non-static data member initilaizers. 1790 FieldDecl *Field = Member->getMember(); 1791 QualType FieldType = getContext().getCanonicalType((Field)->getType()); 1792 const ConstantArrayType *Array = 1793 getContext().getAsConstantArrayType(FieldType); 1794 if (Array) 1795 FieldType = getContext().getBaseElementType(FieldType); 1796 1797 LoadOfThis = LoadCXXThis(); 1798 LValue LHS; 1799 if (FieldType->isReferenceType()) { 1800 // FIXME: This is really ugly; should be refactored somehow 1801 unsigned idx = CGM.getTypes().getLLVMFieldNo(Field); 1802 llvm::Value *V = Builder.CreateStructGEP(LoadOfThis, idx, "tmp"); 1803 LHS = LValue::MakeAddr(V, FieldType.getCVRQualifiers(), 1804 QualType::GCNone, FieldType.getAddressSpace()); 1805 } else { 1806 LHS = EmitLValueForField(LoadOfThis, Field, false, 0); 1807 } 1808 if (FieldType->getAs<RecordType>()) { 1809 if (!Field->isAnonymousStructOrUnion()) { 1810 assert(Member->getConstructor() && 1811 "EmitCtorPrologue - no constructor to initialize member"); 1812 if (Array) { 1813 const llvm::Type *BasePtr = ConvertType(FieldType); 1814 
BasePtr = llvm::PointerType::getUnqual(BasePtr); 1815 llvm::Value *BaseAddrPtr = 1816 Builder.CreateBitCast(LHS.getAddress(), BasePtr); 1817 EmitCXXAggrConstructorCall(Member->getConstructor(), 1818 Array, BaseAddrPtr); 1819 } 1820 else 1821 EmitCXXConstructorCall(Member->getConstructor(), 1822 Ctor_Complete, LHS.getAddress(), 1823 Member->const_arg_begin(), 1824 Member->const_arg_end()); 1825 continue; 1826 } 1827 else { 1828 // Initializing an anonymous union data member. 1829 FieldDecl *anonMember = Member->getAnonUnionMember(); 1830 LHS = EmitLValueForField(LHS.getAddress(), anonMember, 1831 /*IsUnion=*/true, 0); 1832 FieldType = anonMember->getType(); 1833 } 1834 } 1835 1836 assert(Member->getNumArgs() == 1 && "Initializer count must be 1 only"); 1837 Expr *RhsExpr = *Member->arg_begin(); 1838 RValue RHS; 1839 if (FieldType->isReferenceType()) 1840 RHS = EmitReferenceBindingToExpr(RhsExpr, FieldType, 1841 /*IsInitializer=*/true); 1842 else 1843 RHS = RValue::get(EmitScalarExpr(RhsExpr, true)); 1844 EmitStoreThroughLValue(RHS, LHS, FieldType); 1845 } 1846 } 1847 1848 if (!CD->getNumBaseOrMemberInitializers() && !CD->isTrivial()) { 1849 // Nontrivial default constructor with no initializer list. It may still 1850 // have bases classes and/or contain non-static data members which require 1851 // construction. 1852 for (CXXRecordDecl::base_class_const_iterator Base = 1853 ClassDecl->bases_begin(); 1854 Base != ClassDecl->bases_end(); ++Base) { 1855 // FIXME. 
copy assignment of virtual base NYI 1856 if (Base->isVirtual()) 1857 continue; 1858 1859 CXXRecordDecl *BaseClassDecl 1860 = cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl()); 1861 if (BaseClassDecl->hasTrivialConstructor()) 1862 continue; 1863 if (CXXConstructorDecl *BaseCX = 1864 BaseClassDecl->getDefaultConstructor(getContext())) { 1865 LoadOfThis = LoadCXXThis(); 1866 llvm::Value *V = GetAddressCXXOfBaseClass(LoadOfThis, ClassDecl, 1867 BaseClassDecl, 1868 /*NullCheckValue=*/false); 1869 EmitCXXConstructorCall(BaseCX, Ctor_Complete, V, 0, 0); 1870 } 1871 } 1872 1873 for (CXXRecordDecl::field_iterator Field = ClassDecl->field_begin(), 1874 FieldEnd = ClassDecl->field_end(); 1875 Field != FieldEnd; ++Field) { 1876 QualType FieldType = getContext().getCanonicalType((*Field)->getType()); 1877 const ConstantArrayType *Array = 1878 getContext().getAsConstantArrayType(FieldType); 1879 if (Array) 1880 FieldType = getContext().getBaseElementType(FieldType); 1881 if (!FieldType->getAs<RecordType>() || Field->isAnonymousStructOrUnion()) 1882 continue; 1883 const RecordType *ClassRec = FieldType->getAs<RecordType>(); 1884 CXXRecordDecl *MemberClassDecl = 1885 dyn_cast<CXXRecordDecl>(ClassRec->getDecl()); 1886 if (!MemberClassDecl || MemberClassDecl->hasTrivialConstructor()) 1887 continue; 1888 if (CXXConstructorDecl *MamberCX = 1889 MemberClassDecl->getDefaultConstructor(getContext())) { 1890 LoadOfThis = LoadCXXThis(); 1891 LValue LHS = EmitLValueForField(LoadOfThis, *Field, false, 0); 1892 if (Array) { 1893 const llvm::Type *BasePtr = ConvertType(FieldType); 1894 BasePtr = llvm::PointerType::getUnqual(BasePtr); 1895 llvm::Value *BaseAddrPtr = 1896 Builder.CreateBitCast(LHS.getAddress(), BasePtr); 1897 EmitCXXAggrConstructorCall(MamberCX, Array, BaseAddrPtr); 1898 } 1899 else 1900 EmitCXXConstructorCall(MamberCX, Ctor_Complete, LHS.getAddress(), 1901 0, 0); 1902 } 1903 } 1904 } 1905 1906 // Initialize the vtable pointer 1907 if 
(ClassDecl->isDynamicClass()) { 1908 if (!LoadOfThis) 1909 LoadOfThis = LoadCXXThis(); 1910 llvm::Value *VtableField; 1911 llvm::Type *Ptr8Ty, *PtrPtr8Ty; 1912 Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0); 1913 PtrPtr8Ty = llvm::PointerType::get(Ptr8Ty, 0); 1914 VtableField = Builder.CreateBitCast(LoadOfThis, PtrPtr8Ty); 1915 llvm::Value *vtable = GenerateVtable(ClassDecl); 1916 Builder.CreateStore(vtable, VtableField); 1917 } 1918} 1919 1920/// EmitDtorEpilogue - Emit all code that comes at the end of class's 1921/// destructor. This is to call destructors on members and base classes 1922/// in reverse order of their construction. 1923/// FIXME: This needs to take a CXXDtorType. 1924void CodeGenFunction::EmitDtorEpilogue(const CXXDestructorDecl *DD) { 1925 const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(DD->getDeclContext()); 1926 assert(!ClassDecl->getNumVBases() && 1927 "FIXME: Destruction of virtual bases not supported"); 1928 (void)ClassDecl; // prevent warning. 

  // First emit the destructions recorded on the destructor itself
  // (its base-or-member destruction list), members and bases alike.
  for (CXXDestructorDecl::destr_const_iterator *B = DD->destr_begin(),
       *E = DD->destr_end(); B != E; ++B) {
    uintptr_t BaseOrMember = (*B);
    if (DD->isMemberToDestroy(BaseOrMember)) {
      // Destroy a non-static data member; array members are destroyed
      // element-wise on the underlying element type.
      FieldDecl *FD = DD->getMemberToDestroy(BaseOrMember);
      QualType FieldType = getContext().getCanonicalType((FD)->getType());
      const ConstantArrayType *Array =
        getContext().getAsConstantArrayType(FieldType);
      if (Array)
        FieldType = getContext().getBaseElementType(FieldType);
      const RecordType *RT = FieldType->getAs<RecordType>();
      CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
      if (FieldClassDecl->hasTrivialDestructor())
        continue;
      llvm::Value *LoadOfThis = LoadCXXThis();
      LValue LHS = EmitLValueForField(LoadOfThis, FD, false, 0);
      if (Array) {
        const llvm::Type *BasePtr = ConvertType(FieldType);
        BasePtr = llvm::PointerType::getUnqual(BasePtr);
        llvm::Value *BaseAddrPtr =
          Builder.CreateBitCast(LHS.getAddress(), BasePtr);
        EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(getContext()),
                                  Array, BaseAddrPtr);
      }
      else
        EmitCXXDestructorCall(FieldClassDecl->getDestructor(getContext()),
                              Dtor_Complete, LHS.getAddress());
    } else {
      // Destroy a direct base-class subobject.
      const RecordType *RT =
        DD->getAnyBaseClassToDestroy(BaseOrMember)->getAs<RecordType>();
      CXXRecordDecl *BaseClassDecl = cast<CXXRecordDecl>(RT->getDecl());
      if (BaseClassDecl->hasTrivialDestructor())
        continue;
      llvm::Value *V = GetAddressCXXOfBaseClass(LoadCXXThis(),
                                                ClassDecl, BaseClassDecl,
                                                /*NullCheckValue=*/false);
      EmitCXXDestructorCall(BaseClassDecl->getDestructor(getContext()),
                            Dtor_Complete, V);
    }
  }
  if (DD->getNumBaseOrMemberDestructions() || DD->isTrivial())
    return;
  // Case of destructor synthesis with fields and base classes
  // which have non-trivial destructors. They must be destructed in
  // reverse order of their construction.
  llvm::SmallVector<FieldDecl *, 16> DestructedFields;

  // Collect class-type members with non-trivial destructors, in
  // declaration (i.e. construction) order.
  for (CXXRecordDecl::field_iterator Field = ClassDecl->field_begin(),
       FieldEnd = ClassDecl->field_end();
       Field != FieldEnd; ++Field) {
    QualType FieldType = getContext().getCanonicalType((*Field)->getType());
    if (getContext().getAsConstantArrayType(FieldType))
      FieldType = getContext().getBaseElementType(FieldType);
    if (const RecordType *RT = FieldType->getAs<RecordType>()) {
      CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
      if (FieldClassDecl->hasTrivialDestructor())
        continue;
      DestructedFields.push_back(*Field);
    }
  }
  // Destroy the collected members in reverse order of construction.
  if (!DestructedFields.empty())
    for (int i = DestructedFields.size() -1; i >= 0; --i) {
      FieldDecl *Field = DestructedFields[i];
      QualType FieldType = Field->getType();
      const ConstantArrayType *Array =
        getContext().getAsConstantArrayType(FieldType);
      if (Array)
        FieldType = getContext().getBaseElementType(FieldType);
      const RecordType *RT = FieldType->getAs<RecordType>();
      CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
      llvm::Value *LoadOfThis = LoadCXXThis();
      LValue LHS = EmitLValueForField(LoadOfThis, Field, false, 0);
      if (Array) {
        const llvm::Type *BasePtr = ConvertType(FieldType);
        BasePtr = llvm::PointerType::getUnqual(BasePtr);
        llvm::Value *BaseAddrPtr =
          Builder.CreateBitCast(LHS.getAddress(), BasePtr);
        EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(getContext()),
                                  Array, BaseAddrPtr);
      }
      else
        EmitCXXDestructorCall(FieldClassDecl->getDestructor(getContext()),
                              Dtor_Complete, LHS.getAddress());
    }

  // Collect direct, non-virtual bases with non-trivial destructors, in
  // declaration (i.e. construction) order.
  llvm::SmallVector<CXXRecordDecl*, 4> DestructedBases;
  for (CXXRecordDecl::base_class_const_iterator Base = ClassDecl->bases_begin();
       Base != ClassDecl->bases_end(); ++Base) {
    // FIXME: destruction of virtual base NYI
    if (Base->isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl
      = cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
    if (BaseClassDecl->hasTrivialDestructor())
      continue;
    DestructedBases.push_back(BaseClassDecl);
  }
  if (DestructedBases.empty())
    return;
  // Destroy the collected bases in reverse order of construction.
  for (int i = DestructedBases.size() -1; i >= 0; --i) {
    CXXRecordDecl *BaseClassDecl = DestructedBases[i];
    llvm::Value *V = GetAddressCXXOfBaseClass(LoadCXXThis(),
                                              ClassDecl,BaseClassDecl,
                                              /*NullCheckValue=*/false);
    EmitCXXDestructorCall(BaseClassDecl->getDestructor(getContext()),
                          Dtor_Complete, V);
  }
}

/// SynthesizeDefaultDestructor - Emit the body of an implicitly-defined
/// destructor for Dtor's parent class: start the function, emit the
/// destructor epilogue (member/base destruction), and finish.
void CodeGenFunction::SynthesizeDefaultDestructor(GlobalDecl GD,
                                                  const FunctionDecl *FD,
                                                  llvm::Function *Fn,
                                                  const FunctionArgList &Args) {

  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(GD.getDecl());

  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  assert(!ClassDecl->hasUserDeclaredDestructor() &&
         "SynthesizeDefaultDestructor - destructor has user declaration");
  (void) ClassDecl;

  StartFunction(GD, Dtor->getResultType(), Fn, Args, SourceLocation());
  EmitDtorEpilogue(Dtor);
  FinishFunction();
}