CGCXX.cpp revision edee783ed3edcd7fd988b9fb1dc58361a7e8f8a0
//===--- CGCXX.cpp - Emit LLVM Code for C++ code --------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation.
//
//===----------------------------------------------------------------------===//

// We might split this into multiple files if it gets too unwieldy

#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "Mangle.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/StmtCXX.h"
#include "llvm/ADT/StringExtras.h"
using namespace clang;
using namespace CodeGen;

void
CodeGenFunction::EmitCXXGlobalDtorRegistration(const CXXDestructorDecl *Dtor,
                                               llvm::Constant *DeclPtr) {
  const llvm::Type *Int8PtrTy =
    llvm::Type::getInt8Ty(VMContext)->getPointerTo();

  std::vector<const llvm::Type *> Params;
  Params.push_back(Int8PtrTy);

  // Get the destructor function type
  const llvm::Type *DtorFnTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(VMContext), Params, false);
  DtorFnTy = llvm::PointerType::getUnqual(DtorFnTy);

  Params.clear();
  Params.push_back(DtorFnTy);
  Params.push_back(Int8PtrTy);
  Params.push_back(Int8PtrTy);

  // Get the __cxa_atexit function type
  // extern "C" int __cxa_atexit ( void (*f)(void *), void *p, void *d );
  const llvm::FunctionType *AtExitFnTy =
    llvm::FunctionType::get(ConvertType(getContext().IntTy), Params, false);

  llvm::Constant *AtExitFn = CGM.CreateRuntimeFunction(AtExitFnTy,
                                                       "__cxa_atexit");

  llvm::Constant *Handle = CGM.CreateRuntimeVariable(Int8PtrTy,
                                                     "__dso_handle");

  llvm::Constant *DtorFn = CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete);

  llvm::Value *Args[3] = { llvm::ConstantExpr::getBitCast(DtorFn, DtorFnTy),
                           llvm::ConstantExpr::getBitCast(DeclPtr, Int8PtrTy),
                           llvm::ConstantExpr::getBitCast(Handle, Int8PtrTy) };
  Builder.CreateCall(AtExitFn, &Args[0], llvm::array_endof(Args));
}

void CodeGenFunction::EmitCXXGlobalVarDeclInit(const VarDecl &D,
                                               llvm::Constant *DeclPtr) {
  assert(D.hasGlobalStorage() &&
         "VarDecl must have global storage!");

  const Expr *Init = D.getInit();
  QualType T = D.getType();

  if (T->isReferenceType()) {
    ErrorUnsupported(Init, "global variable that binds to a reference");
  } else if (!hasAggregateLLVMType(T)) {
    llvm::Value *V = EmitScalarExpr(Init);
    EmitStoreOfScalar(V, DeclPtr, T.isVolatileQualified(), T);
  } else if (T->isAnyComplexType()) {
    EmitComplexExprIntoAddr(Init, DeclPtr, T.isVolatileQualified());
  } else {
    EmitAggExpr(Init, DeclPtr, T.isVolatileQualified());

    if (const RecordType *RT = T->getAs<RecordType>()) {
      CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
      if (!RD->hasTrivialDestructor())
        EmitCXXGlobalDtorRegistration(RD->getDestructor(getContext()),
                                      DeclPtr);
    }
  }
}

void
CodeGenModule::EmitCXXGlobalInitFunc() {
  if (CXXGlobalInits.empty())
    return;

  const llvm::FunctionType *FTy =
    llvm::FunctionType::get(llvm::Type::getVoidTy(VMContext), false);

  // Create our global initialization function.
  // FIXME: Should this be tweakable by targets?
  llvm::Function *Fn =
    llvm::Function::Create(FTy, llvm::GlobalValue::InternalLinkage,
                           "__cxx_global_initialization", &TheModule);

  CodeGenFunction(*this).GenerateCXXGlobalInitFunc(Fn,
                                                   &CXXGlobalInits[0],
                                                   CXXGlobalInits.size());
  AddGlobalCtor(Fn);
}

void CodeGenFunction::GenerateCXXGlobalInitFunc(llvm::Function *Fn,
                                                const VarDecl **Decls,
                                                unsigned NumDecls) {
  StartFunction(GlobalDecl(), getContext().VoidTy, Fn, FunctionArgList(),
                SourceLocation());

  for (unsigned i = 0; i != NumDecls; ++i) {
    const VarDecl *D = Decls[i];

    llvm::Constant *DeclPtr = CGM.GetAddrOfGlobalVar(D);
    EmitCXXGlobalVarDeclInit(*D, DeclPtr);
  }
  FinishFunction();
}

void
CodeGenFunction::EmitStaticCXXBlockVarDeclInit(const VarDecl &D,
                                               llvm::GlobalVariable *GV) {
  // FIXME: This should use __cxa_guard_{acquire,release}?

  assert(!getContext().getLangOptions().ThreadsafeStatics &&
         "thread safe statics are currently not supported!");

  llvm::SmallString<256> GuardVName;
  llvm::raw_svector_ostream GuardVOut(GuardVName);
  mangleGuardVariable(&D, getContext(), GuardVOut);

  // Create the guard variable.
  llvm::GlobalValue *GuardV =
    new llvm::GlobalVariable(CGM.getModule(),
                             llvm::Type::getInt64Ty(VMContext), false,
                             GV->getLinkage(),
                             llvm::Constant::getNullValue(
                               llvm::Type::getInt64Ty(VMContext)),
                             GuardVName.str());

  // Load the first byte of the guard variable.
  const llvm::Type *PtrTy =
    llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  llvm::Value *V = Builder.CreateLoad(Builder.CreateBitCast(GuardV, PtrTy),
                                      "tmp");

  // Compare it against 0.
  llvm::Value *nullValue =
    llvm::Constant::getNullValue(llvm::Type::getInt8Ty(VMContext));
  llvm::Value *ICmp = Builder.CreateICmpEQ(V, nullValue, "tobool");

  llvm::BasicBlock *InitBlock = createBasicBlock("init");
  llvm::BasicBlock *EndBlock = createBasicBlock("init.end");

  // If the guard variable is 0, jump to the initializer code.
  Builder.CreateCondBr(ICmp, InitBlock, EndBlock);

  EmitBlock(InitBlock);

  EmitCXXGlobalVarDeclInit(D, GV);

  Builder.CreateStore(llvm::ConstantInt::get(llvm::Type::getInt8Ty(VMContext),
                                             1),
                      Builder.CreateBitCast(GuardV, PtrTy));

  EmitBlock(EndBlock);
}

RValue CodeGenFunction::EmitCXXMemberCall(const CXXMethodDecl *MD,
                                          llvm::Value *Callee,
                                          llvm::Value *This,
                                          CallExpr::const_arg_iterator ArgBeg,
                                          CallExpr::const_arg_iterator ArgEnd) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  // A call to a trivial destructor requires no code generation.
  if (const CXXDestructorDecl *Destructor = dyn_cast<CXXDestructorDecl>(MD))
    if (Destructor->isTrivial())
      return RValue::get(0);

  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.push_back(std::make_pair(RValue::get(This),
                                MD->getThisType(getContext())));

  // And the rest of the call args
  EmitCallArgs(Args, FPT, ArgBeg, ArgEnd);

  QualType ResultType = MD->getType()->getAs<FunctionType>()->getResultType();
  return EmitCall(CGM.getTypes().getFunctionInfo(ResultType, Args),
                  Callee, Args, MD);
}

RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE) {
  if (isa<BinaryOperator>(CE->getCallee()))
    return EmitCXXMemberPointerCallExpr(CE);

  const MemberExpr *ME = cast<MemberExpr>(CE->getCallee());
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());

  if (MD->isStatic()) {
    // The method is static, emit it as we would a regular call.
    llvm::Value *Callee = CGM.GetAddrOfFunction(MD);
    return EmitCall(Callee, getContext().getPointerType(MD->getType()),
                    CE->arg_begin(), CE->arg_end(), 0);
  }

  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();

  const llvm::Type *Ty =
    CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                   FPT->isVariadic());
  llvm::Value *This;

  if (ME->isArrow())
    This = EmitScalarExpr(ME->getBase());
  else {
    LValue BaseLV = EmitLValue(ME->getBase());
    This = BaseLV.getAddress();
  }

  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  llvm::Value *Callee;
  if (MD->isVirtual() && !ME->hasQualifier())
    // FIXME: push getCanonicalDecl as a conversion using the static type
    // system (CanCXXMethodDecl).
    Callee = BuildVirtualCall(MD->getCanonicalDecl(), This, Ty);
  else if (const CXXDestructorDecl *Destructor
             = dyn_cast<CXXDestructorDecl>(MD))
    Callee = CGM.GetAddrOfFunction(GlobalDecl(Destructor, Dtor_Complete), Ty);
  else
    Callee = CGM.GetAddrOfFunction(MD, Ty);

  return EmitCXXMemberCall(MD, Callee, This,
                           CE->arg_begin(), CE->arg_end());
}

RValue
CodeGenFunction::EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E) {
  const BinaryOperator *BO = cast<BinaryOperator>(E->getCallee());
  const DeclRefExpr *BaseExpr = cast<DeclRefExpr>(BO->getLHS());
  const DeclRefExpr *MemFn = cast<DeclRefExpr>(BO->getRHS());

  const MemberPointerType *MPT = MemFn->getType()->getAs<MemberPointerType>();
  const FunctionProtoType *FPT =
    MPT->getPointeeType()->getAs<FunctionProtoType>();
  const CXXRecordDecl *RD =
    cast<CXXRecordDecl>(cast<RecordType>(MPT->getClass())->getDecl());

  const llvm::FunctionType *FTy =
    CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(RD, FPT),
                                   FPT->isVariadic());

  const llvm::Type *Int8PtrTy =
    llvm::Type::getInt8Ty(VMContext)->getPointerTo();

  // Get the member function pointer.
  llvm::Value *MemFnPtr =
    CreateTempAlloca(ConvertType(MemFn->getType()), "mem.fn");
  EmitAggExpr(MemFn, MemFnPtr, /*VolatileDest=*/false);

  // Emit the 'this' pointer.
  llvm::Value *This;

  if (BO->getOpcode() == BinaryOperator::PtrMemI)
    This = EmitScalarExpr(BaseExpr);
  else
    This = EmitLValue(BaseExpr).getAddress();

  // Adjust it.
  llvm::Value *Adj = Builder.CreateStructGEP(MemFnPtr, 1);
  Adj = Builder.CreateLoad(Adj, "mem.fn.adj");

  llvm::Value *Ptr = Builder.CreateBitCast(This, Int8PtrTy, "ptr");
  Ptr = Builder.CreateGEP(Ptr, Adj, "adj");

  This = Builder.CreateBitCast(Ptr, This->getType(), "this");

  llvm::Value *FnPtr = Builder.CreateStructGEP(MemFnPtr, 0, "mem.fn.ptr");

  const llvm::Type *PtrDiffTy = ConvertType(getContext().getPointerDiffType());

  llvm::Value *FnAsInt = Builder.CreateLoad(FnPtr, "fn");

  // If the LSB in the function pointer is 1, the function pointer points to
  // a virtual function.
  llvm::Value *IsVirtual
    = Builder.CreateAnd(FnAsInt, llvm::ConstantInt::get(PtrDiffTy, 1),
                        "and");

  IsVirtual = Builder.CreateTrunc(IsVirtual,
                                  llvm::Type::getInt1Ty(VMContext));

  llvm::BasicBlock *FnVirtual = createBasicBlock("fn.virtual");
  llvm::BasicBlock *FnNonVirtual = createBasicBlock("fn.nonvirtual");
  llvm::BasicBlock *FnEnd = createBasicBlock("fn.end");

  Builder.CreateCondBr(IsVirtual, FnVirtual, FnNonVirtual);
  EmitBlock(FnVirtual);

  const llvm::Type *VTableTy =
    FTy->getPointerTo()->getPointerTo()->getPointerTo();

  llvm::Value *VTable = Builder.CreateBitCast(This, VTableTy);
  VTable = Builder.CreateLoad(VTable);

  VTable = Builder.CreateGEP(VTable, FnAsInt, "fn");

  // Since the function pointer is 1 plus the virtual table offset, we
  // subtract 1 by using a GEP.
  VTable = Builder.CreateConstGEP1_64(VTable, -1);

  llvm::Value *VirtualFn = Builder.CreateLoad(VTable, "virtualfn");

  EmitBranch(FnEnd);
  EmitBlock(FnNonVirtual);

  // If the function is not virtual, just load the pointer.
  llvm::Value *NonVirtualFn = Builder.CreateLoad(FnPtr, "fn");
  NonVirtualFn = Builder.CreateIntToPtr(NonVirtualFn, FTy->getPointerTo());

  EmitBlock(FnEnd);

  llvm::PHINode *Callee = Builder.CreatePHI(FTy->getPointerTo());
  Callee->reserveOperandSpace(2);
  Callee->addIncoming(VirtualFn, FnVirtual);
  Callee->addIncoming(NonVirtualFn, FnNonVirtual);

  CallArgList Args;

  QualType ThisType =
    getContext().getPointerType(getContext().getTagDeclType(RD));

  // Push the this ptr.
  Args.push_back(std::make_pair(RValue::get(This), ThisType));

  // And the rest of the call args
  EmitCallArgs(Args, FPT, E->arg_begin(), E->arg_end());
  QualType ResultType = BO->getType()->getAs<FunctionType>()->getResultType();
  return EmitCall(CGM.getTypes().getFunctionInfo(ResultType, Args),
                  Callee, Args, 0);
}

RValue
CodeGenFunction::EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                               const CXXMethodDecl *MD) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  if (MD->isCopyAssignment()) {
    const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(MD->getDeclContext());
    if (ClassDecl->hasTrivialCopyAssignment()) {
      assert(!ClassDecl->hasUserDeclaredCopyAssignment() &&
             "EmitCXXOperatorMemberCallExpr - user declared copy assignment");
      llvm::Value *This = EmitLValue(E->getArg(0)).getAddress();
      llvm::Value *Src = EmitLValue(E->getArg(1)).getAddress();
      QualType Ty = E->getType();
      EmitAggregateCopy(This, Src, Ty);
      return RValue::get(This);
    }
  }

  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  const llvm::Type *Ty =
    CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                   FPT->isVariadic());
  llvm::Constant *Callee = CGM.GetAddrOfFunction(MD, Ty);

  llvm::Value *This = EmitLValue(E->getArg(0)).getAddress();

  return EmitCXXMemberCall(MD, Callee, This,
                           E->arg_begin() + 1, E->arg_end());
}

llvm::Value *CodeGenFunction::LoadCXXThis() {
  assert(isa<CXXMethodDecl>(CurFuncDecl) &&
         "Must be in a C++ member function decl to load 'this'");
  assert(cast<CXXMethodDecl>(CurFuncDecl)->isInstance() &&
         "Must be in a C++ member function decl to load 'this'");

  // FIXME: What if we're inside a block?
  // ans: See how CodeGenFunction::LoadObjCSelf() uses
  // CodeGenFunction::BlockForwardSelf() for how to do this.
  return Builder.CreateLoad(LocalDeclMap[CXXThisDecl], "this");
}

/// EmitCXXAggrConstructorCall - This routine essentially creates a (nested)
/// for-loop to call the default constructor on individual members of the
/// array.
/// 'D' is the default constructor for elements of the array, 'ArrayTy' is the
/// array type and 'ArrayPtr' points to the beginning of the array.
/// It is assumed that all relevant checks have been made by the caller.
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            const ConstantArrayType *ArrayTy,
                                            llvm::Value *ArrayPtr) {
  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
  llvm::Value *NumElements =
    llvm::ConstantInt::get(SizeTy,
                           getContext().getConstantArrayElementCount(ArrayTy));

  EmitCXXAggrConstructorCall(D, NumElements, ArrayPtr);
}

void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            llvm::Value *NumElements,
                                            llvm::Value *ArrayPtr) {
  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

  // Create a temporary for the loop index and initialize it with 0.
  llvm::Value *IndexPtr = CreateTempAlloca(SizeTy, "loop.index");
  llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
  Builder.CreateStore(Zero, IndexPtr, false);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElements, "isless");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the constructor call on the array element.
  Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *Address = Builder.CreateInBoundsGEP(ArrayPtr, Counter,
                                                   "arrayidx");
  EmitCXXConstructorCall(D, Ctor_Complete, Address, 0, 0);

  EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(SizeTy, 1);
  Counter = Builder.CreateLoad(IndexPtr);
  NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
  Builder.CreateStore(NextVal, IndexPtr, false);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitCXXAggrDestructorCall - calls the default destructor on array
/// elements in reverse order of construction.
void
CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                           const ArrayType *Array,
                                           llvm::Value *This) {
  const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
  assert(CA && "Do we support VLA for destruction ?");
  llvm::Value *One = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
                                            1);
  uint64_t ElementCount = getContext().getConstantArrayElementCount(CA);
  // Create a temporary for the loop index and initialize it with count of
  // array elements.
  llvm::Value *IndexPtr = CreateTempAlloca(llvm::Type::getInt64Ty(VMContext),
                                           "loop.index");
  // Index = ElementCount;
  llvm::Value *UpperCount =
    llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), ElementCount);
  Builder.CreateStore(UpperCount, IndexPtr, false);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index != 0) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value *zeroConstant =
    llvm::Constant::getNullValue(llvm::Type::getInt64Ty(VMContext));
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsNE = Builder.CreateICmpNE(Counter, zeroConstant,
                                           "isne");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsNE, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the destructor call on the array element.
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One);
  llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx");
  EmitCXXDestructorCall(D, Dtor_Complete, Address);

  EmitBlock(ContinueBlock);

  // Emit the decrement of the loop counter.
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One, "dec");
  Builder.CreateStore(Counter, IndexPtr, false);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isCopyConstructor(getContext())) {
    const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(D->getDeclContext());
    if (ClassDecl->hasTrivialCopyConstructor()) {
      assert(!ClassDecl->hasUserDeclaredCopyConstructor() &&
             "EmitCXXConstructorCall - user declared copy constructor");
      const Expr *E = (*ArgBeg);
      QualType Ty = E->getType();
      llvm::Value *Src = EmitLValue(E).getAddress();
      EmitAggregateCopy(This, Src, Ty);
      return;
    }
  }

  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  EmitCXXMemberCall(D, Callee, This, ArgBeg, ArgEnd);
}

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *D,
                                            CXXDtorType Type,
                                            llvm::Value *This) {
  llvm::Value *Callee = CGM.GetAddrOfCXXDestructor(D, Type);

  EmitCXXMemberCall(D, Callee, This, 0, 0);
}

void
CodeGenFunction::EmitCXXConstructExpr(llvm::Value *Dest,
                                      const CXXConstructExpr *E) {
  assert(Dest && "Must have a destination!");

  const CXXRecordDecl *RD =
    cast<CXXRecordDecl>(E->getType()->getAs<RecordType>()->getDecl());
  if (RD->hasTrivialConstructor())
    return;

  // Code gen optimization to eliminate copy constructor and return
  // its first argument instead.
  if (getContext().getLangOptions().ElideConstructors && E->isElidable()) {
    CXXConstructExpr::const_arg_iterator i = E->arg_begin();
    EmitAggExpr((*i), Dest, false);
    return;
  }
  // Call the constructor.
  EmitCXXConstructorCall(E->getConstructor(), Ctor_Complete, Dest,
                         E->arg_begin(), E->arg_end());
}

void CodeGenModule::EmitCXXConstructors(const CXXConstructorDecl *D) {
  EmitGlobal(GlobalDecl(D, Ctor_Complete));
  EmitGlobal(GlobalDecl(D, Ctor_Base));
}

void CodeGenModule::EmitCXXConstructor(const CXXConstructorDecl *D,
                                       CXXCtorType Type) {
  llvm::Function *Fn = GetAddrOfCXXConstructor(D, Type);

  CodeGenFunction(*this).GenerateCode(GlobalDecl(D, Type), Fn);

  SetFunctionDefinitionAttributes(D, Fn);
  SetLLVMFunctionAttributesForDefinition(D, Fn);
}

llvm::Function *
CodeGenModule::GetAddrOfCXXConstructor(const CXXConstructorDecl *D,
                                       CXXCtorType Type) {
  const llvm::FunctionType *FTy =
    getTypes().GetFunctionType(getTypes().getFunctionInfo(D), false);

  const char *Name = getMangledCXXCtorName(D, Type);
  return cast<llvm::Function>(
    GetOrCreateLLVMFunction(Name, FTy, GlobalDecl(D, Type)));
}

const char *CodeGenModule::getMangledCXXCtorName(const CXXConstructorDecl *D,
                                                 CXXCtorType Type) {
  llvm::SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  mangleCXXCtor(D, Type, Context, Out);

  Name += '\0';
  return UniqueMangledName(Name.begin(), Name.end());
}

void CodeGenModule::EmitCXXDestructors(const CXXDestructorDecl *D) {
  EmitCXXDestructor(D, Dtor_Complete);
  EmitCXXDestructor(D, Dtor_Base);
}

void CodeGenModule::EmitCXXDestructor(const CXXDestructorDecl *D,
                                      CXXDtorType Type) {
  llvm::Function *Fn = GetAddrOfCXXDestructor(D, Type);

  CodeGenFunction(*this).GenerateCode(GlobalDecl(D, Type), Fn);

  SetFunctionDefinitionAttributes(D, Fn);
  SetLLVMFunctionAttributesForDefinition(D, Fn);
}

llvm::Function *
CodeGenModule::GetAddrOfCXXDestructor(const CXXDestructorDecl *D,
                                      CXXDtorType Type) {
  const llvm::FunctionType *FTy =
    getTypes().GetFunctionType(getTypes().getFunctionInfo(D), false);

  const char *Name = getMangledCXXDtorName(D, Type);
  return cast<llvm::Function>(
    GetOrCreateLLVMFunction(Name, FTy, GlobalDecl(D, Type)));
}

const char *CodeGenModule::getMangledCXXDtorName(const CXXDestructorDecl *D,
                                                 CXXDtorType Type) {
  llvm::SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  mangleCXXDtor(D, Type, Context, Out);

  Name += '\0';
  return UniqueMangledName(Name.begin(), Name.end());
}

llvm::Constant *CodeGenModule::GenerateRtti(const CXXRecordDecl *RD) {
  llvm::Type *Ptr8Ty;
  Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  llvm::Constant *Rtti = llvm::Constant::getNullValue(Ptr8Ty);

  if (!getContext().getLangOptions().Rtti)
    return Rtti;

  llvm::SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  QualType ClassTy;
  ClassTy = getContext().getTagDeclType(RD);
  mangleCXXRtti(ClassTy, getContext(), Out);
  llvm::GlobalVariable::LinkageTypes linktype;
  linktype = llvm::GlobalValue::WeakAnyLinkage;
  std::vector<llvm::Constant *> info;
  // assert(0 && "FIXME: implement rtti descriptor");
  // FIXME: descriptor
  info.push_back(llvm::Constant::getNullValue(Ptr8Ty));
  // assert(0 && "FIXME: implement rtti ts");
  // FIXME: TS
  info.push_back(llvm::Constant::getNullValue(Ptr8Ty));

  llvm::Constant *C;
  llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, info.size());
  C = llvm::ConstantArray::get(type, info);
  Rtti = new
    llvm::GlobalVariable(getModule(), type, true, linktype, C, Out.str());
  Rtti = llvm::ConstantExpr::getBitCast(Rtti, Ptr8Ty);
  return Rtti;
}

class VtableBuilder {
public:
  /// Index_t - Vtable index type.
  typedef uint64_t Index_t;
private:
  std::vector<llvm::Constant *> &methods;
  std::vector<llvm::Constant *> submethods;
  llvm::Type *Ptr8Ty;
  /// Class - The most derived class that this vtable is being built for.
  const CXXRecordDecl *Class;
  /// BLayout - Layout for the most derived class that this vtable is being
  /// built for.
  const ASTRecordLayout &BLayout;
  llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
  llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
  llvm::Constant *rtti;
  llvm::LLVMContext &VMContext;
  CodeGenModule &CGM;  // Per-module state.
  /// Index - Maps a method decl into a vtable index. Useful for virtual
  /// dispatch codegen.
  llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCall;
  llvm::DenseMap<const CXXMethodDecl *, Index_t> VCallOffset;
  llvm::DenseMap<const CXXRecordDecl *, Index_t> VBIndex;
  typedef std::pair<Index_t, Index_t> CallOffset;
  typedef llvm::DenseMap<const CXXMethodDecl *, CallOffset> Thunks_t;
  Thunks_t Thunks;
  typedef llvm::DenseMap<const CXXMethodDecl *,
                         std::pair<CallOffset, CallOffset> > CovariantThunks_t;
  CovariantThunks_t CovariantThunks;
  std::vector<Index_t> VCalls;
  typedef CXXRecordDecl::method_iterator method_iter;
  // FIXME: Linkage should follow vtable
  const bool Extern;
  const uint32_t LLVMPointerWidth;
  Index_t extra;
public:
  VtableBuilder(std::vector<llvm::Constant *> &meth,
                const CXXRecordDecl *c,
                CodeGenModule &cgm)
    : methods(meth), Class(c), BLayout(cgm.getContext().getASTRecordLayout(c)),
      rtti(cgm.GenerateRtti(c)), VMContext(cgm.getModule().getContext()),
      CGM(cgm), Extern(true),
      LLVMPointerWidth(cgm.getContext().Target.getPointerWidth(0)) {
    Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
  }

  llvm::DenseMap<const CXXMethodDecl *, Index_t> &getIndex() { return Index; }
  llvm::DenseMap<const CXXRecordDecl *, Index_t> &getVBIndex()
    { return VBIndex; }

  llvm::Constant *wrap(Index_t i) {
    llvm::Constant *m;
    m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), i);
    return llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
  }

  llvm::Constant *wrap(llvm::Constant *m) {
    return llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
  }

  void GenerateVBaseOffsets(std::vector<llvm::Constant *> &offsets,
                            const CXXRecordDecl *RD, uint64_t Offset) {
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (i->isVirtual() && !SeenVBase.count(Base)) {
        SeenVBase.insert(Base);
        int64_t BaseOffset = -(Offset/8) + BLayout.getVBaseClassOffset(Base)/8;
        llvm::Constant *m = wrap(BaseOffset);
        m = wrap((0?700:0) + BaseOffset);
        VBIndex[Base] = -(offsets.size()*LLVMPointerWidth/8)
          - 3*LLVMPointerWidth/8;
        offsets.push_back(m);
      }
      GenerateVBaseOffsets(offsets, Base, Offset);
    }
  }

  void StartNewTable() {
    SeenVBase.clear();
  }

  Index_t VBlookup(CXXRecordDecl *D, CXXRecordDecl *B);

  /// getVbaseOffset - Returns the index into the vtable for the virtual base
  /// offset for the given (B) virtual base of the derived class D.
  Index_t getVbaseOffset(QualType qB, QualType qD) {
    qD = qD->getAs<PointerType>()->getPointeeType();
    qB = qB->getAs<PointerType>()->getPointeeType();
    CXXRecordDecl *D = cast<CXXRecordDecl>(qD->getAs<RecordType>()->getDecl());
    CXXRecordDecl *B = cast<CXXRecordDecl>(qB->getAs<RecordType>()->getDecl());
    if (D != Class)
      return VBlookup(D, B);
    llvm::DenseMap<const CXXRecordDecl *, Index_t>::iterator i;
    i = VBIndex.find(B);
    if (i != VBIndex.end())
      return i->second;
    // FIXME: temporal botch, is this data here, by the time we need it?

    assert(false && "FIXME: Locate the containing virtual base first");
    return 0;
  }

  bool OverrideMethod(const CXXMethodDecl *MD, llvm::Constant *m,
                      bool MorallyVirtual, Index_t Offset) {
    typedef CXXMethodDecl::method_iterator meth_iter;

    // FIXME: Don't like the nested loops. For very large inheritance
    // hierarchies we could have a table on the side with the final overrider
    // and just replace each instance of an overridden method once. Would be
    // nice to measure the cost/benefit on real code.

    for (meth_iter mi = MD->begin_overridden_methods(),
           e = MD->end_overridden_methods();
         mi != e; ++mi) {
      const CXXMethodDecl *OMD = *mi;
      llvm::Constant *om;
      om = CGM.GetAddrOfFunction(OMD, Ptr8Ty);
      om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);

      for (Index_t i = 0, e = submethods.size();
           i != e; ++i) {
        // FIXME: begin_overridden_methods might be too lax, covariance
        if (submethods[i] != om)
          continue;
        QualType nc_oret =
          OMD->getType()->getAs<FunctionType>()->getResultType();
        CanQualType oret = CGM.getContext().getCanonicalType(nc_oret);
        QualType nc_ret = MD->getType()->getAs<FunctionType>()->getResultType();
        CanQualType ret = CGM.getContext().getCanonicalType(nc_ret);
        CallOffset ReturnOffset = std::make_pair(0, 0);
        if (oret != ret) {
          // FIXME: calculate offsets for covariance
          ReturnOffset = std::make_pair(42, getVbaseOffset(oret, ret));
        }
        Index[MD] = i;
        submethods[i] = m;

        Thunks.erase(OMD);
        if (MorallyVirtual) {
          Index_t &idx = VCall[OMD];
          if (idx == 0) {
            VCallOffset[MD] = Offset/8;
            idx = VCalls.size()+1;
            VCalls.push_back(0);
          } else {
            VCallOffset[MD] = VCallOffset[OMD];
            VCalls[idx-1] = -VCallOffset[OMD] + Offset/8;
          }
          VCall[MD] = idx;
          CallOffset ThisOffset;
          // FIXME: calculate non-virtual offset
          ThisOffset = std::make_pair(0, -((idx+extra+2)*LLVMPointerWidth/8));
          if (ReturnOffset.first || ReturnOffset.second)
            CovariantThunks[MD] = std::make_pair(ThisOffset, ReturnOffset);
          else
            Thunks[MD] = ThisOffset;
          return true;
        }
#if 0
        // FIXME: finish off
        int64_t O = VCallOffset[OMD] - Offset/8;
        if (O) {
          Thunks[MD] = std::make_pair(O, 0);
        }
#endif
        return true;
      }
    }

    return false;
  }

  void InstallThunks() {
    for (Thunks_t::iterator i = Thunks.begin(), e = Thunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      Index_t nv_O = i->second.first;
      Index_t v_O = i->second.second;
      submethods[idx] = CGM.BuildThunk(MD, Extern, nv_O, v_O);
    }
    Thunks.clear();
    for (CovariantThunks_t::iterator i = CovariantThunks.begin(),
           e = CovariantThunks.end();
         i != e; ++i) {
      const CXXMethodDecl *MD = i->first;
      Index_t idx = Index[MD];
      Index_t nv_t = i->second.first.first;
      Index_t v_t = i->second.first.second;
      Index_t nv_r = i->second.second.first;
      Index_t v_r = i->second.second.second;
      submethods[idx] = CGM.BuildCovariantThunk(MD, Extern, nv_t, v_t, nv_r,
                                                v_r);
    }
    CovariantThunks.clear();
  }

  void OverrideMethods(std::vector<std::pair<const CXXRecordDecl *,
                       int64_t> > *Path, bool MorallyVirtual) {
    for (std::vector<std::pair<const CXXRecordDecl *,
           int64_t> >::reverse_iterator i = Path->rbegin(),
           e = Path->rend(); i != e; ++i) {
      const CXXRecordDecl *RD = i->first;
      int64_t Offset = i->second;
      for (method_iter mi = RD->method_begin(), me = RD->method_end();
           mi != me; ++mi) {
        if (!mi->isVirtual())
          continue;

        const CXXMethodDecl *MD = *mi;
        llvm::Constant *m = 0;
        if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
          m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
        else {
          const FunctionProtoType *FPT =
            MD->getType()->getAs<FunctionProtoType>();
          const llvm::Type *Ty =
            CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                           FPT->isVariadic());

          m = wrap(CGM.GetAddrOfFunction(MD, Ty));
        }

        OverrideMethod(MD, m, MorallyVirtual, Offset);
      }
    }
  }

  void AddMethod(const CXXMethodDecl *MD, bool MorallyVirtual, Index_t Offset) {
    llvm::Constant *m = 0;
    if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
      m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
    else {
      const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
      const llvm::Type *Ty =
        CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                       FPT->isVariadic());

      m = wrap(CGM.GetAddrOfFunction(MD, Ty));
    }

    // If we can find a previously allocated slot for this, reuse it.
    if (OverrideMethod(MD, m, MorallyVirtual, Offset))
      return;

    // else allocate a new slot.
    Index[MD] = submethods.size();
    submethods.push_back(m);
    if (MorallyVirtual) {
      VCallOffset[MD] = Offset/8;
      Index_t &idx = VCall[MD];
      // Allocate the first one, after that, we reuse the previous one.
      if (idx == 0) {
        idx = VCalls.size()+1;
        VCalls.push_back(0);
      }
    }
  }

  void AddMethods(const CXXRecordDecl *RD, bool MorallyVirtual,
                  Index_t Offset) {
    for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
         ++mi)
      if (mi->isVirtual())
        AddMethod(*mi, MorallyVirtual, Offset);
  }

  void NonVirtualBases(const CXXRecordDecl *RD, const ASTRecordLayout &Layout,
                       const CXXRecordDecl *PrimaryBase,
                       bool PrimaryBaseWasVirtual, bool MorallyVirtual,
                       int64_t Offset) {
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      if (i->isVirtual())
        continue;
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
        uint64_t o = Offset + Layout.getBaseClassOffset(Base);
        StartNewTable();
        std::vector<std::pair<const CXXRecordDecl *, int64_t> > S;
        S.push_back(std::make_pair(RD, Offset));
        GenerateVtableForBase(Base, MorallyVirtual, o, false, &S);
      }
    }
  }

  Index_t end(const CXXRecordDecl *RD, std::vector<llvm::Constant *> &offsets,
              const ASTRecordLayout &Layout,
              const CXXRecordDecl *PrimaryBase,
              bool PrimaryBaseWasVirtual, bool MorallyVirtual,
              int64_t Offset, bool ForVirtualBase) {
    StartNewTable();
    extra = 0;
    // FIXME: Cleanup.
    if (!ForVirtualBase) {
      // then virtual base offsets...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
    }

    // The vcalls come first...
    for (std::vector<Index_t>::reverse_iterator i = VCalls.rbegin(),
           e = VCalls.rend();
         i != e; ++i)
      methods.push_back(wrap((0?600:0) + *i));
    VCalls.clear();

    if (ForVirtualBase) {
      // then virtual base offsets...
      for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
             e = offsets.rend(); i != e; ++i)
        methods.push_back(*i);
    }

    methods.push_back(wrap(-(Offset/8)));
    methods.push_back(rtti);
    Index_t AddressPoint = methods.size();

    InstallThunks();
    methods.insert(methods.end(), submethods.begin(), submethods.end());
    submethods.clear();

    // and then the non-virtual bases.
    NonVirtualBases(RD, Layout, PrimaryBase, PrimaryBaseWasVirtual,
                    MorallyVirtual, Offset);
    return AddressPoint;
  }

  void Primaries(const CXXRecordDecl *RD, bool MorallyVirtual, int64_t Offset) {
    if (!RD->isDynamicClass())
      return;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    // vtables are composed from the chain of primaries.
    if (PrimaryBase) {
      if (PrimaryBaseWasVirtual)
        IndirectPrimary.insert(PrimaryBase);
      Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
    }

    // And add the virtuals for the class to the primary vtable.
    AddMethods(RD, MorallyVirtual, Offset);
  }

  int64_t GenerateVtableForBase(const CXXRecordDecl *RD,
                                bool MorallyVirtual = false, int64_t Offset = 0,
                                bool ForVirtualBase = false,
                                std::vector<std::pair<const CXXRecordDecl *,
                                int64_t> > *Path = 0) {
    if (!RD->isDynamicClass())
      return 0;

    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

    std::vector<llvm::Constant *> offsets;
    extra = 0;
    GenerateVBaseOffsets(offsets, RD, Offset);
    if (ForVirtualBase)
      extra = offsets.size();

    // vtables are composed from the chain of primaries.
    if (PrimaryBase) {
      if (PrimaryBaseWasVirtual)
        IndirectPrimary.insert(PrimaryBase);
      Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
    }

    // And add the virtuals for the class to the primary vtable.
    AddMethods(RD, MorallyVirtual, Offset);

    if (Path)
      OverrideMethods(Path, MorallyVirtual);

    return end(RD, offsets, Layout, PrimaryBase, PrimaryBaseWasVirtual,
               MorallyVirtual, Offset, ForVirtualBase);
  }

  void GenerateVtableForVBases(const CXXRecordDecl *RD,
                               int64_t Offset = 0,
                               std::vector<std::pair<const CXXRecordDecl *,
                               int64_t> > *Path = 0) {
    bool alloc = false;
    if (Path == 0) {
      alloc = true;
      Path = new std::vector<std::pair<const CXXRecordDecl *, int64_t> >;
    }
    // FIXME: We also need to override using all paths to a virtual base,
    // right now, we just process the first path
    Path->push_back(std::make_pair(RD, Offset));
    for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
           e = RD->bases_end(); i != e; ++i) {
      const CXXRecordDecl *Base =
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (i->isVirtual() && !IndirectPrimary.count(Base)) {
        // Mark it so we don't output it twice.
        IndirectPrimary.insert(Base);
        StartNewTable();
        int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
        GenerateVtableForBase(Base, true, BaseOffset, true, Path);
      }
      int64_t BaseOffset = Offset;
      if (i->isVirtual())
        BaseOffset = BLayout.getVBaseClassOffset(Base);
      if (Base->getNumVBases())
        GenerateVtableForVBases(Base, BaseOffset, Path);
    }
    Path->pop_back();
    if (alloc)
      delete Path;
  }
};

class VtableInfo {
public:
  typedef VtableBuilder::Index_t Index_t;
private:
  CodeGenModule &CGM;  // Per-module state.
  /// Index_t - Vtable index type.
  typedef llvm::DenseMap<const CXXMethodDecl *, Index_t> ElTy;
  typedef llvm::DenseMap<const CXXRecordDecl *, ElTy *> MapTy;
  // FIXME: Move to Context.
  static MapTy IndexFor;

  typedef llvm::DenseMap<const CXXRecordDecl *, Index_t> VBElTy;
  typedef llvm::DenseMap<const CXXRecordDecl *, VBElTy *> VBMapTy;
  // FIXME: Move to Context.
  static VBMapTy VBIndexFor;
public:
  VtableInfo(CodeGenModule &cgm) : CGM(cgm) { }
  void RegisterIndex(const CXXRecordDecl *RD, const ElTy &e) {
    assert(IndexFor.find(RD) == IndexFor.end() && "Don't compute vtbl twice");
    // We own a copy of this, it will go away shortly.
    IndexFor[RD] = new ElTy(e);
  }
  void RegisterVBIndex(const CXXRecordDecl *RD, const VBElTy &e) {
    assert(VBIndexFor.find(RD) == VBIndexFor.end() &&
           "Don't compute vtbl twice");
    // We own a copy of this, it will go away shortly.
    VBIndexFor[RD] = new VBElTy(e);
  }
  Index_t lookup(const CXXMethodDecl *MD) {
    const CXXRecordDecl *RD = MD->getParent();
    MapTy::iterator I = IndexFor.find(RD);
    if (I == IndexFor.end()) {
      std::vector<llvm::Constant *> methods;
      // FIXME: This seems expensive. Can we do a partial job to get
      // just this data?
      VtableBuilder b(methods, RD, CGM);
      b.GenerateVtableForBase(RD);
      b.GenerateVtableForVBases(RD);
      RegisterIndex(RD, b.getIndex());
      I = IndexFor.find(RD);
    }
    assert(I->second->find(MD) != I->second->end() && "Can't find vtable index");
    return (*I->second)[MD];
  }
  Index_t VBlookup(const CXXRecordDecl *RD, const CXXRecordDecl *BD) {
    VBMapTy::iterator I = VBIndexFor.find(RD);
    if (I == VBIndexFor.end()) {
      std::vector<llvm::Constant *> methods;
      // FIXME: This seems expensive. Can we do a partial job to get
      // just this data?
      VtableBuilder b(methods, RD, CGM);
      b.GenerateVtableForBase(RD);
      b.GenerateVtableForVBases(RD);
      RegisterVBIndex(RD, b.getVBIndex());
      I = VBIndexFor.find(RD);
    }
    assert(I->second->find(BD) != I->second->end() && "Can't find vtable index");
    return (*I->second)[BD];
  }
};

// FIXME: move to Context
static VtableInfo *vtableinfo;

VtableBuilder::Index_t VtableBuilder::VBlookup(CXXRecordDecl *D,
                                               CXXRecordDecl *B) {
  if (vtableinfo == 0)
    vtableinfo = new VtableInfo(CGM);

  return vtableinfo->VBlookup(D, B);
}

// FIXME: Move to Context.
VtableInfo::MapTy VtableInfo::IndexFor;

// FIXME: Move to Context.
VtableInfo::VBMapTy VtableInfo::VBIndexFor;

llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
  llvm::SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  QualType ClassTy;
  ClassTy = getContext().getTagDeclType(RD);
  mangleCXXVtable(ClassTy, getContext(), Out);
  llvm::GlobalVariable::LinkageTypes linktype;
  linktype = llvm::GlobalValue::WeakAnyLinkage;
  std::vector<llvm::Constant *> methods;
  llvm::Type *Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),
                                              0);
  int64_t AddressPoint;

  VtableBuilder b(methods, RD, CGM);

  // First come the vtables for all the non-virtual bases...
  AddressPoint = b.GenerateVtableForBase(RD);

  // then the vtables for all the virtual bases.
  b.GenerateVtableForVBases(RD);

  llvm::Constant *C;
  llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size());
  C = llvm::ConstantArray::get(type, methods);
  llvm::Value *vtable = new llvm::GlobalVariable(CGM.getModule(), type, true,
                                                 linktype, C, Out.str());
  vtable = Builder.CreateBitCast(vtable, Ptr8Ty);
  vtable = Builder.CreateGEP(vtable,
                             llvm::ConstantInt::get(
                               llvm::Type::getInt64Ty(VMContext),
                               AddressPoint*LLVMPointerWidth/8));
  return vtable;
}

llvm::Constant *CodeGenFunction::GenerateThunk(llvm::Function *Fn,
                                               const CXXMethodDecl *MD,
                                               bool Extern, int64_t nv,
                                               int64_t v) {
  QualType R = MD->getType()->getAs<FunctionType>()->getResultType();

  FunctionArgList Args;
  ImplicitParamDecl *ThisDecl =
    ImplicitParamDecl::Create(getContext(), 0, SourceLocation(), 0,
                              MD->getThisType(getContext()));
  Args.push_back(std::make_pair(ThisDecl, ThisDecl->getType()));
  for (FunctionDecl::param_const_iterator i = MD->param_begin(),
         e = MD->param_end();
       i != e; ++i) {
    ParmVarDecl *D = *i;
    Args.push_back(std::make_pair(D, D->getType()));
  }
  IdentifierInfo *II
    = &CGM.getContext().Idents.get("__thunk_named_foo_");
  FunctionDecl *FD = FunctionDecl::Create(getContext(),
                                          getContext().getTranslationUnitDecl(),
                                          SourceLocation(), II, R, 0,
                                          Extern
                                            ? FunctionDecl::Extern
                                            : FunctionDecl::Static,
                                          false, true);
  StartFunction(FD, R, Fn, Args, SourceLocation());
  // FIXME: generate body
  FinishFunction();
  return Fn;
}

llvm::Constant *CodeGenFunction::GenerateCovariantThunk(llvm::Function *Fn,
                                                        const CXXMethodDecl *MD,
                                                        bool Extern,
                                                        int64_t nv_t,
                                                        int64_t v_t,
                                                        int64_t nv_r,
                                                        int64_t v_r) {
  QualType R = MD->getType()->getAs<FunctionType>()->getResultType();

  FunctionArgList Args;
  ImplicitParamDecl *ThisDecl =
    ImplicitParamDecl::Create(getContext(), 0, SourceLocation(), 0,
                              MD->getThisType(getContext()));
  Args.push_back(std::make_pair(ThisDecl, ThisDecl->getType()));
  for (FunctionDecl::param_const_iterator i = MD->param_begin(),
         e = MD->param_end();
       i != e; ++i) {
    ParmVarDecl *D = *i;
    Args.push_back(std::make_pair(D, D->getType()));
  }
  IdentifierInfo *II
    = &CGM.getContext().Idents.get("__thunk_named_foo_");
  FunctionDecl *FD = FunctionDecl::Create(getContext(),
                                          getContext().getTranslationUnitDecl(),
                                          SourceLocation(), II, R, 0,
                                          Extern
                                            ? FunctionDecl::Extern
                                            : FunctionDecl::Static,
                                          false, true);
  StartFunction(FD, R, Fn, Args, SourceLocation());
  // FIXME: generate body
  FinishFunction();
  return Fn;
}

llvm::Constant *CodeGenModule::BuildThunk(const CXXMethodDecl *MD, bool Extern,
                                          int64_t nv, int64_t v) {
  llvm::SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  mangleThunk(MD, nv, v, getContext(), Out);
  llvm::GlobalVariable::LinkageTypes linktype;
  linktype = llvm::GlobalValue::WeakAnyLinkage;
  if (!Extern)
    linktype = llvm::GlobalValue::InternalLinkage;
  llvm::Type *Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),
                                              0);
  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  const llvm::FunctionType *FTy =
    getTypes().GetFunctionType(getTypes().getFunctionInfo(MD),
                               FPT->isVariadic());

  llvm::Function *Fn = llvm::Function::Create(FTy, linktype, Out.str(),
                                              &getModule());
  CodeGenFunction(*this).GenerateThunk(Fn, MD, Extern, nv, v);
  // Fn = Builder.CreateBitCast(Fn, Ptr8Ty);
  llvm::Constant *m = llvm::ConstantExpr::getBitCast(Fn, Ptr8Ty);
  return m;
}

llvm::Constant *CodeGenModule::BuildCovariantThunk(const CXXMethodDecl *MD,
                                                   bool Extern, int64_t nv_t,
                                                   int64_t v_t, int64_t nv_r,
                                                   int64_t v_r) {
  llvm::SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  mangleCovariantThunk(MD, nv_t, v_t, nv_r, v_r, getContext(), Out);
  llvm::GlobalVariable::LinkageTypes linktype;
  linktype = llvm::GlobalValue::WeakAnyLinkage;
  if (!Extern)
    linktype = llvm::GlobalValue::InternalLinkage;
  llvm::Type *Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),
                                              0);
  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  const llvm::FunctionType *FTy =
    getTypes().GetFunctionType(getTypes().getFunctionInfo(MD),
                               FPT->isVariadic());

  llvm::Function *Fn = llvm::Function::Create(FTy, linktype, Out.str(),
                                              &getModule());
  CodeGenFunction(*this).GenerateCovariantThunk(Fn, MD, Extern, nv_t, v_t, nv_r,
                                                v_r);
  // Fn = Builder.CreateBitCast(Fn, Ptr8Ty);
  llvm::Constant *m = llvm::ConstantExpr::getBitCast(Fn, Ptr8Ty);
  return m;
}

llvm::Value *
CodeGenFunction::GetVirtualCXXBaseClassOffset(llvm::Value *This,
                                              const CXXRecordDecl *ClassDecl,
                                              const CXXRecordDecl *BaseClassDecl) {
  // FIXME: move to Context
  if (vtableinfo == 0)
    vtableinfo = new VtableInfo(CGM);

  const llvm::Type *Int8PtrTy =
    llvm::Type::getInt8Ty(VMContext)->getPointerTo();

  llvm::Value *VTablePtr = Builder.CreateBitCast(This,
                                                 Int8PtrTy->getPointerTo());
  VTablePtr = Builder.CreateLoad(VTablePtr, "vtable");

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr,
                               vtableinfo->VBlookup(ClassDecl, BaseClassDecl),
                               "vbase.offset.ptr");
  const llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}

llvm::Value *
CodeGenFunction::BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *&This,
                                  const llvm::Type *Ty) {
  // FIXME: If we know the dynamic type, we don't have to do a virtual dispatch.
  uint64_t Index = CGM.GetVtableIndex(MD);

  Ty = llvm::PointerType::get(Ty, 0);
  Ty = llvm::PointerType::get(Ty, 0);
  Ty = llvm::PointerType::get(Ty, 0);
  llvm::Value *vtbl = Builder.CreateBitCast(This, Ty);
  vtbl = Builder.CreateLoad(vtbl);
  llvm::Value *vfn = Builder.CreateConstInBoundsGEP1_64(vtbl,
                                                        Index, "vfn");
  vfn = Builder.CreateLoad(vfn);
  return vfn;
}

uint64_t CodeGenModule::GetVtableIndex(const CXXMethodDecl *MD) {
  // FIXME: move to CodeGenModule.
  if (vtableinfo == 0)
    vtableinfo = new VtableInfo(*this);

  return vtableinfo->lookup(MD);
}

/// EmitClassAggrMemberwiseCopy - This routine generates code to copy a class
/// array of objects from SrcValue to DestValue. Copying can be done either as
/// a bitwise copy or via a copy constructor call.
// FIXME. Consolidate this with EmitCXXAggrConstructorCall.
void CodeGenFunction::EmitClassAggrMemberwiseCopy(llvm::Value *Dest,
                                                  llvm::Value *Src,
                                                  const ArrayType *Array,
                                                  const CXXRecordDecl *BaseClassDecl,
                                                  QualType Ty) {
  const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
  assert(CA && "VLA cannot be copied over");
  bool BitwiseCopy = BaseClassDecl->hasTrivialCopyConstructor();

  // Create a temporary for the loop index and initialize it with 0.
  llvm::Value *IndexPtr = CreateTempAlloca(llvm::Type::getInt64Ty(VMContext),
                                           "loop.index");
  llvm::Value *zeroConstant =
    llvm::Constant::getNullValue(llvm::Type::getInt64Ty(VMContext));
  Builder.CreateStore(zeroConstant, IndexPtr, false);
  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = getContext().getConstantArrayElementCount(CA);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), NumElements);
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElementsPtr,
                                              "isless");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the constructor call on the array element.
  Counter = Builder.CreateLoad(IndexPtr);
  Src = Builder.CreateInBoundsGEP(Src, Counter, "srcaddress");
  Dest = Builder.CreateInBoundsGEP(Dest, Counter, "destaddress");
  if (BitwiseCopy)
    EmitAggregateCopy(Dest, Src, Ty);
  else if (CXXConstructorDecl *BaseCopyCtor =
             BaseClassDecl->getCopyConstructor(getContext(), 0)) {
    llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(BaseCopyCtor,
                                                      Ctor_Complete);
    CallArgList CallArgs;
    // Push the this (Dest) ptr.
    CallArgs.push_back(std::make_pair(RValue::get(Dest),
                                      BaseCopyCtor->getThisType(getContext())));

    // Push the Src ptr.
    CallArgs.push_back(std::make_pair(RValue::get(Src),
                                      BaseCopyCtor->getParamDecl(0)->getType()));
    QualType ResultType =
      BaseCopyCtor->getType()->getAs<FunctionType>()->getResultType();
    EmitCall(CGM.getTypes().getFunctionInfo(ResultType, CallArgs),
             Callee, CallArgs, BaseCopyCtor);
  }
  EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = Builder.CreateLoad(IndexPtr);
  NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
  Builder.CreateStore(NextVal, IndexPtr, false);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitClassAggrCopyAssignment - This routine generates code to assign a class
/// array of objects from SrcValue to DestValue. Assignment can be done either
/// as a bitwise assignment or via a copy assignment operator function call.
/// FIXME. This can be consolidated with EmitClassAggrMemberwiseCopy
void CodeGenFunction::EmitClassAggrCopyAssignment(llvm::Value *Dest,
                                                  llvm::Value *Src,
                                                  const ArrayType *Array,
                                                  const CXXRecordDecl *BaseClassDecl,
                                                  QualType Ty) {
  const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
  assert(CA && "VLA cannot be assigned");
  bool BitwiseAssign = BaseClassDecl->hasTrivialCopyAssignment();

  // Create a temporary for the loop index and initialize it with 0.
  llvm::Value *IndexPtr = CreateTempAlloca(llvm::Type::getInt64Ty(VMContext),
                                           "loop.index");
  llvm::Value *zeroConstant =
    llvm::Constant::getNullValue(llvm::Type::getInt64Ty(VMContext));
  Builder.CreateStore(zeroConstant, IndexPtr, false);
  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = getContext().getConstantArrayElementCount(CA);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), NumElements);
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElementsPtr,
                                              "isless");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the assignment operator call on array element.
  Counter = Builder.CreateLoad(IndexPtr);
  Src = Builder.CreateInBoundsGEP(Src, Counter, "srcaddress");
  Dest = Builder.CreateInBoundsGEP(Dest, Counter, "destaddress");
  const CXXMethodDecl *MD = 0;
  if (BitwiseAssign)
    EmitAggregateCopy(Dest, Src, Ty);
  else {
    bool hasCopyAssign = BaseClassDecl->hasConstCopyAssignment(getContext(),
                                                               MD);
    assert(hasCopyAssign && "EmitClassAggrCopyAssignment - No user assign");
    (void)hasCopyAssign;
    const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
    const llvm::Type *LTy =
      CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                     FPT->isVariadic());
    llvm::Constant *Callee = CGM.GetAddrOfFunction(MD, LTy);

    CallArgList CallArgs;
    // Push the this (Dest) ptr.
    CallArgs.push_back(std::make_pair(RValue::get(Dest),
                                      MD->getThisType(getContext())));

    // Push the Src ptr.
    CallArgs.push_back(std::make_pair(RValue::get(Src),
                                      MD->getParamDecl(0)->getType()));
    QualType ResultType = MD->getType()->getAs<FunctionType>()->getResultType();
    EmitCall(CGM.getTypes().getFunctionInfo(ResultType, CallArgs),
             Callee, CallArgs, MD);
  }
  EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = Builder.CreateLoad(IndexPtr);
  NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
  Builder.CreateStore(NextVal, IndexPtr, false);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitClassMemberwiseCopy - This routine generates code to copy a class
/// object from SrcValue to DestValue. Copying can be done either as a bitwise
/// copy or via a copy constructor call.
void CodeGenFunction::EmitClassMemberwiseCopy(
    llvm::Value *Dest, llvm::Value *Src,
    const CXXRecordDecl *ClassDecl,
    const CXXRecordDecl *BaseClassDecl, QualType Ty) {
  if (ClassDecl) {
    Dest = GetAddressCXXOfBaseClass(Dest, ClassDecl, BaseClassDecl,
                                    /*NullCheckValue=*/false);
    Src = GetAddressCXXOfBaseClass(Src, ClassDecl, BaseClassDecl,
                                   /*NullCheckValue=*/false);
  }
  if (BaseClassDecl->hasTrivialCopyConstructor()) {
    EmitAggregateCopy(Dest, Src, Ty);
    return;
  }

  if (CXXConstructorDecl *BaseCopyCtor =
        BaseClassDecl->getCopyConstructor(getContext(), 0)) {
    llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(BaseCopyCtor,
                                                      Ctor_Complete);
    CallArgList CallArgs;
    // Push the this (Dest) ptr.
    CallArgs.push_back(std::make_pair(RValue::get(Dest),
                                      BaseCopyCtor->getThisType(getContext())));

    // Push the Src ptr.
    CallArgs.push_back(std::make_pair(RValue::get(Src),
                                      BaseCopyCtor->getParamDecl(0)->getType()));
    QualType ResultType =
      BaseCopyCtor->getType()->getAs<FunctionType>()->getResultType();
    EmitCall(CGM.getTypes().getFunctionInfo(ResultType, CallArgs),
             Callee, CallArgs, BaseCopyCtor);
  }
}

/// EmitClassCopyAssignment - This routine generates code to copy assign a
/// class object from SrcValue to DestValue. Assignment can be done either as
/// a bitwise assignment or via an assignment operator call.
// FIXME. Consolidate this with EmitClassMemberwiseCopy as they share a lot.

/// SynthesizeDefaultConstructor - Synthesize a default constructor.
void
CodeGenFunction::SynthesizeDefaultConstructor(const CXXConstructorDecl *Ctor,
                                              CXXCtorType Type,
                                              llvm::Function *Fn,
                                              const FunctionArgList &Args) {
  StartFunction(GlobalDecl(Ctor, Type), Ctor->getResultType(), Fn, Args,
                SourceLocation());
  EmitCtorPrologue(Ctor, Type);
  FinishFunction();
}

/// SynthesizeCXXCopyConstructor - This routine implicitly defines the body of
/// a copy constructor, in accordance with section 12.8 (p7 and p8) of C++03.
/// The implicitly-defined copy constructor for class X performs a memberwise
/// copy of its subobjects. The order of copying is the same as the order of
/// initialization of bases and members in a user-defined constructor.
/// Each subobject is copied in the manner appropriate to its type:
///  - if the subobject is of class type, the copy constructor for the class
///    is used;
///  - if the subobject is an array, each element is copied, in the manner
///    appropriate to the element type;
///  - if the subobject is of scalar type, the built-in assignment operator is
///    used.
/// Virtual base class subobjects shall be copied only once by the
/// implicitly-defined copy constructor.

void
CodeGenFunction::SynthesizeCXXCopyConstructor(const CXXConstructorDecl *Ctor,
                                              CXXCtorType Type,
                                              llvm::Function *Fn,
                                              const FunctionArgList &Args) {
  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  assert(!ClassDecl->hasUserDeclaredCopyConstructor() &&
         "SynthesizeCXXCopyConstructor - copy constructor has definition already");
  StartFunction(GlobalDecl(Ctor, Type), Ctor->getResultType(), Fn, Args,
                SourceLocation());

  FunctionArgList::const_iterator i = Args.begin();
  const VarDecl *ThisArg = i->first;
  llvm::Value *ThisObj = GetAddrOfLocalVar(ThisArg);
  llvm::Value *LoadOfThis = Builder.CreateLoad(ThisObj, "this");
  const VarDecl *SrcArg = (i+1)->first;
  llvm::Value *SrcObj = GetAddrOfLocalVar(SrcArg);
  llvm::Value *LoadOfSrc = Builder.CreateLoad(SrcObj);

  for (CXXRecordDecl::base_class_const_iterator Base = ClassDecl->bases_begin();
       Base != ClassDecl->bases_end(); ++Base) {
    // FIXME: copy construction of virtual bases NYI.
    if (Base->isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl
      = cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
    EmitClassMemberwiseCopy(LoadOfThis, LoadOfSrc, ClassDecl, BaseClassDecl,
                            Base->getType());
  }

  for (CXXRecordDecl::field_iterator Field = ClassDecl->field_begin(),
       FieldEnd = ClassDecl->field_end();
       Field != FieldEnd; ++Field) {
    QualType FieldType = getContext().getCanonicalType((*Field)->getType());
    const ConstantArrayType *Array =
      getContext().getAsConstantArrayType(FieldType);
    if (Array)
      FieldType = getContext().getBaseElementType(FieldType);

    if (const RecordType *FieldClassType = FieldType->getAs<RecordType>()) {
      CXXRecordDecl *FieldClassDecl
        = cast<CXXRecordDecl>(FieldClassType->getDecl());
      LValue LHS = EmitLValueForField(LoadOfThis, *Field, false, 0);
      LValue RHS = EmitLValueForField(LoadOfSrc, *Field, false, 0);
      if (Array) {
        const llvm::Type *BasePtr = ConvertType(FieldType);
        BasePtr = llvm::PointerType::getUnqual(BasePtr);
        llvm::Value *DestBaseAddrPtr =
          Builder.CreateBitCast(LHS.getAddress(), BasePtr);
        llvm::Value *SrcBaseAddrPtr =
          Builder.CreateBitCast(RHS.getAddress(), BasePtr);
        EmitClassAggrMemberwiseCopy(DestBaseAddrPtr, SrcBaseAddrPtr, Array,
                                    FieldClassDecl, FieldType);
      }
      else
        EmitClassMemberwiseCopy(LHS.getAddress(), RHS.getAddress(),
                                0 /*ClassDecl*/, FieldClassDecl, FieldType);
      continue;
    }
    // Do a built-in assignment of scalar data members.
    LValue LHS = EmitLValueForField(LoadOfThis, *Field, false, 0);
    LValue RHS = EmitLValueForField(LoadOfSrc, *Field, false, 0);
    RValue RVRHS = EmitLoadOfLValue(RHS, FieldType);
    EmitStoreThroughLValue(RVRHS, LHS, FieldType);
  }
  FinishFunction();
}
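
// Illustrative sketch of the routine above (hypothetical classes, not part of
// clang): for
//
//   struct Base { Base(const Base &); };
//   struct X : Base {
//     Base B;
//     Base Arr[4];
//     int  N;
//   };
//
// X has no user-declared copy constructor, so SynthesizeCXXCopyConstructor
// emits X::X(const X &Src) roughly as:
//   - Base(Src)         via EmitClassMemberwiseCopy on the base subobject,
//   - B(Src.B)          via EmitClassMemberwiseCopy on the field,
//   - Arr from Src.Arr  via EmitClassAggrMemberwiseCopy, element by element,
//   - N = Src.N         via a scalar load/store.
// Virtual bases are currently skipped (see the FIXME above).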

/// SynthesizeCXXCopyAssignment - Implicitly define copy assignment operator.
/// Before the implicitly-declared copy assignment operator for a class is
/// implicitly defined, all implicitly-declared copy assignment operators for
/// its direct base classes and its nonstatic data members shall have been
/// implicitly defined. [12.8-p12]
/// The implicitly-defined copy assignment operator for class X performs
/// memberwise assignment of its subobjects. The direct base classes of X are
/// assigned first, in the order of their declaration in the
/// base-specifier-list, and then the immediate nonstatic data members of X
/// are assigned, in the order in which they were declared in the class
/// definition. Each subobject is assigned in the manner appropriate to its
/// type:
///  - if the subobject is of class type, the copy assignment operator for the
///    class is used (as if by explicit qualification; that is, ignoring any
///    possible virtual overriding functions in more derived classes);
///  - if the subobject is an array, each element is assigned, in the manner
///    appropriate to the element type;
///  - if the subobject is of scalar type, the built-in assignment operator is
///    used.
void CodeGenFunction::SynthesizeCXXCopyAssignment(const CXXMethodDecl *CD,
                                                  llvm::Function *Fn,
                                                  const FunctionArgList &Args) {

  const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(CD->getDeclContext());
  assert(!ClassDecl->hasUserDeclaredCopyAssignment() &&
         "SynthesizeCXXCopyAssignment - copy assignment has user declaration");
  StartFunction(CD, CD->getResultType(), Fn, Args, SourceLocation());

  FunctionArgList::const_iterator i = Args.begin();
  const VarDecl *ThisArg = i->first;
  llvm::Value *ThisObj = GetAddrOfLocalVar(ThisArg);
  llvm::Value *LoadOfThis = Builder.CreateLoad(ThisObj, "this");
  const VarDecl *SrcArg = (i+1)->first;
  llvm::Value *SrcObj = GetAddrOfLocalVar(SrcArg);
  llvm::Value *LoadOfSrc = Builder.CreateLoad(SrcObj);

  for (CXXRecordDecl::base_class_const_iterator Base = ClassDecl->bases_begin();
       Base != ClassDecl->bases_end(); ++Base) {
    // FIXME: copy assignment of virtual bases NYI.
    if (Base->isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl
      = cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
    EmitClassCopyAssignment(LoadOfThis, LoadOfSrc, ClassDecl, BaseClassDecl,
                            Base->getType());
  }

  for (CXXRecordDecl::field_iterator Field = ClassDecl->field_begin(),
       FieldEnd = ClassDecl->field_end();
       Field != FieldEnd; ++Field) {
    QualType FieldType = getContext().getCanonicalType((*Field)->getType());
    const ConstantArrayType *Array =
      getContext().getAsConstantArrayType(FieldType);
    if (Array)
      FieldType = getContext().getBaseElementType(FieldType);

    if (const RecordType *FieldClassType = FieldType->getAs<RecordType>()) {
      CXXRecordDecl *FieldClassDecl
        = cast<CXXRecordDecl>(FieldClassType->getDecl());
      LValue LHS = EmitLValueForField(LoadOfThis, *Field, false, 0);
      LValue RHS = EmitLValueForField(LoadOfSrc, *Field, false, 0);
      if (Array) {
        const llvm::Type *BasePtr = ConvertType(FieldType);
        BasePtr = llvm::PointerType::getUnqual(BasePtr);
        llvm::Value *DestBaseAddrPtr =
          Builder.CreateBitCast(LHS.getAddress(), BasePtr);
        llvm::Value *SrcBaseAddrPtr =
          Builder.CreateBitCast(RHS.getAddress(), BasePtr);
        EmitClassAggrCopyAssignment(DestBaseAddrPtr, SrcBaseAddrPtr, Array,
                                    FieldClassDecl, FieldType);
      }
      else
        EmitClassCopyAssignment(LHS.getAddress(), RHS.getAddress(),
                                0 /*ClassDecl*/, FieldClassDecl, FieldType);
      continue;
    }
    // Do a built-in assignment of scalar data members.
    LValue LHS = EmitLValueForField(LoadOfThis, *Field, false, 0);
    LValue RHS = EmitLValueForField(LoadOfSrc, *Field, false, 0);
    RValue RVRHS = EmitLoadOfLValue(RHS, FieldType);
    EmitStoreThroughLValue(RVRHS, LHS, FieldType);
  }

  // return *this;
  Builder.CreateStore(LoadOfThis, ReturnValue);

  FinishFunction();
}
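
// Illustrative sketch of the routine above (hypothetical classes, not part of
// clang): for
//
//   struct Base { Base &operator=(const Base &); };
//   struct X : Base {
//     Base B;
//     Base Arr[2];
//     int  N;
//   };
//
// X has no user-declared copy assignment operator, so
// SynthesizeCXXCopyAssignment emits X &X::operator=(const X &Src) roughly as:
//   - Base::operator=(Src)  via EmitClassCopyAssignment on the base subobject,
//   - B = Src.B             via EmitClassCopyAssignment on the field,
//   - Arr from Src.Arr      via EmitClassAggrCopyAssignment, element by element,
//   - N = Src.N             via a scalar load/store,
// and finally stores 'this' into the return slot to implement 'return *this;'.
// Virtual bases are currently skipped (see the FIXME above).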

/// EmitCtorPrologue - This routine generates the code necessary to initialize
/// base classes and non-static data members belonging to this constructor.
/// FIXME: This needs to take a CXXCtorType.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType) {
  const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(CD->getDeclContext());
  // FIXME: Add vbase initialization.
  llvm::Value *LoadOfThis = 0;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXBaseOrMemberInitializer *Member = (*B);
    if (Member->isBaseInitializer()) {
      LoadOfThis = LoadCXXThis();
      Type *BaseType = Member->getBaseClass();
      CXXRecordDecl *BaseClassDecl =
        cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());
      llvm::Value *V = GetAddressCXXOfBaseClass(LoadOfThis, ClassDecl,
                                                BaseClassDecl,
                                                /*NullCheckValue=*/false);
      EmitCXXConstructorCall(Member->getConstructor(),
                             CtorType, V,
                             Member->const_arg_begin(),
                             Member->const_arg_end());
    } else {
      // Non-static data member initializers.
      FieldDecl *Field = Member->getMember();
      QualType FieldType = getContext().getCanonicalType((Field)->getType());
      const ConstantArrayType *Array =
        getContext().getAsConstantArrayType(FieldType);
      if (Array)
        FieldType = getContext().getBaseElementType(FieldType);

      LoadOfThis = LoadCXXThis();
      LValue LHS;
      if (FieldType->isReferenceType()) {
        // FIXME: This is really ugly; should be refactored somehow.
        unsigned idx = CGM.getTypes().getLLVMFieldNo(Field);
        llvm::Value *V = Builder.CreateStructGEP(LoadOfThis, idx, "tmp");
        assert(!FieldType.getObjCGCAttr() && "fields cannot have GC attrs");
        LHS = LValue::MakeAddr(V, MakeQualifiers(FieldType));
      } else {
        LHS = EmitLValueForField(LoadOfThis, Field, false, 0);
      }
      if (FieldType->getAs<RecordType>()) {
        if (!Field->isAnonymousStructOrUnion()) {
          assert(Member->getConstructor() &&
                 "EmitCtorPrologue - no constructor to initialize member");
          if (Array) {
            const llvm::Type *BasePtr = ConvertType(FieldType);
            BasePtr = llvm::PointerType::getUnqual(BasePtr);
            llvm::Value *BaseAddrPtr =
              Builder.CreateBitCast(LHS.getAddress(), BasePtr);
            EmitCXXAggrConstructorCall(Member->getConstructor(),
                                       Array, BaseAddrPtr);
          }
          else
            EmitCXXConstructorCall(Member->getConstructor(),
                                   Ctor_Complete, LHS.getAddress(),
                                   Member->const_arg_begin(),
                                   Member->const_arg_end());
          continue;
        }
        else {
          // Initializing an anonymous union data member.
          FieldDecl *anonMember = Member->getAnonUnionMember();
          LHS = EmitLValueForField(LHS.getAddress(), anonMember,
                                   /*IsUnion=*/true, 0);
          FieldType = anonMember->getType();
        }
      }

      assert(Member->getNumArgs() == 1 && "Initializer count must be 1 only");
      Expr *RhsExpr = *Member->arg_begin();
      RValue RHS;
      if (FieldType->isReferenceType())
        RHS = EmitReferenceBindingToExpr(RhsExpr, FieldType,
                                         /*IsInitializer=*/true);
      else
        RHS = RValue::get(EmitScalarExpr(RhsExpr, true));
      EmitStoreThroughLValue(RHS, LHS, FieldType);
    }
  }

  if (!CD->getNumBaseOrMemberInitializers() && !CD->isTrivial()) {
    // Nontrivial default constructor with no initializer list. It may still
    // have base classes and/or contain non-static data members which require
    // construction.
    for (CXXRecordDecl::base_class_const_iterator Base =
         ClassDecl->bases_begin();
         Base != ClassDecl->bases_end(); ++Base) {
      // FIXME: construction of virtual bases NYI.
      if (Base->isVirtual())
        continue;

      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
      if (BaseClassDecl->hasTrivialConstructor())
        continue;
      if (CXXConstructorDecl *BaseCX =
            BaseClassDecl->getDefaultConstructor(getContext())) {
        LoadOfThis = LoadCXXThis();
        llvm::Value *V = GetAddressCXXOfBaseClass(LoadOfThis, ClassDecl,
                                                  BaseClassDecl,
                                                  /*NullCheckValue=*/false);
        EmitCXXConstructorCall(BaseCX, Ctor_Complete, V, 0, 0);
      }
    }

    for (CXXRecordDecl::field_iterator Field = ClassDecl->field_begin(),
         FieldEnd = ClassDecl->field_end();
         Field != FieldEnd; ++Field) {
      QualType FieldType = getContext().getCanonicalType((*Field)->getType());
      const ConstantArrayType *Array =
        getContext().getAsConstantArrayType(FieldType);
      if (Array)
        FieldType = getContext().getBaseElementType(FieldType);
      if (!FieldType->getAs<RecordType>() || Field->isAnonymousStructOrUnion())
        continue;
      const RecordType *ClassRec = FieldType->getAs<RecordType>();
      CXXRecordDecl *MemberClassDecl =
        dyn_cast<CXXRecordDecl>(ClassRec->getDecl());
      if (!MemberClassDecl || MemberClassDecl->hasTrivialConstructor())
        continue;
      if (CXXConstructorDecl *MemberCX =
            MemberClassDecl->getDefaultConstructor(getContext())) {
        LoadOfThis = LoadCXXThis();
        LValue LHS = EmitLValueForField(LoadOfThis, *Field, false, 0);
        if (Array) {
          const llvm::Type *BasePtr = ConvertType(FieldType);
          BasePtr = llvm::PointerType::getUnqual(BasePtr);
          llvm::Value *BaseAddrPtr =
            Builder.CreateBitCast(LHS.getAddress(), BasePtr);
          EmitCXXAggrConstructorCall(MemberCX, Array, BaseAddrPtr);
        }
        else
          EmitCXXConstructorCall(MemberCX, Ctor_Complete, LHS.getAddress(),
                                 0, 0);
      }
    }
  }

  // Initialize the vtable pointer.
  if (ClassDecl->isDynamicClass()) {
    if (!LoadOfThis)
      LoadOfThis = LoadCXXThis();
    llvm::Value *VtableField;
    llvm::Type *Ptr8Ty, *PtrPtr8Ty;
    Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
    PtrPtr8Ty = llvm::PointerType::get(Ptr8Ty, 0);
    VtableField = Builder.CreateBitCast(LoadOfThis, PtrPtr8Ty);
    llvm::Value *vtable = GenerateVtable(ClassDecl);
    Builder.CreateStore(vtable, VtableField);
  }
}
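
// Illustrative sketch of the routine above (hypothetical classes, not part of
// clang): for
//
//   struct Member { Member(int); };
//   struct X {
//     Member M;
//     int   &R;
//     virtual void f();
//     X(int i, int &r) : M(i), R(r) {}
//   };
//
// EmitCtorPrologue for X's constructor emits, in order:
//   - the call Member::Member(i) on the address of the field M,
//   - the binding of the reference member R to 'r',
//   - the store of X's vtable pointer into the object,
// before the (empty) constructor body runs. When a non-trivial constructor
// has no initializer list at all, the second half of the routine instead
// default-constructs each base and member that requires construction.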

/// EmitDtorEpilogue - Emit all code that comes at the end of a class's
/// destructor. It calls destructors on members and base classes in reverse
/// order of their construction.
/// FIXME: This needs to take a CXXDtorType.
void CodeGenFunction::EmitDtorEpilogue(const CXXDestructorDecl *DD,
                                       CXXDtorType DtorType) {
  const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(DD->getDeclContext());
  assert(!ClassDecl->getNumVBases() &&
         "FIXME: Destruction of virtual bases not supported");
  (void)ClassDecl;  // prevent warning.

  for (CXXDestructorDecl::destr_const_iterator *B = DD->destr_begin(),
       *E = DD->destr_end(); B != E; ++B) {
    uintptr_t BaseOrMember = (*B);
    if (DD->isMemberToDestroy(BaseOrMember)) {
      FieldDecl *FD = DD->getMemberToDestroy(BaseOrMember);
      QualType FieldType = getContext().getCanonicalType((FD)->getType());
      const ConstantArrayType *Array =
        getContext().getAsConstantArrayType(FieldType);
      if (Array)
        FieldType = getContext().getBaseElementType(FieldType);
      const RecordType *RT = FieldType->getAs<RecordType>();
      CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
      if (FieldClassDecl->hasTrivialDestructor())
        continue;
      llvm::Value *LoadOfThis = LoadCXXThis();
      LValue LHS = EmitLValueForField(LoadOfThis, FD, false, 0);
      if (Array) {
        const llvm::Type *BasePtr = ConvertType(FieldType);
        BasePtr = llvm::PointerType::getUnqual(BasePtr);
        llvm::Value *BaseAddrPtr =
          Builder.CreateBitCast(LHS.getAddress(), BasePtr);
        EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(getContext()),
                                  Array, BaseAddrPtr);
      }
      else
        EmitCXXDestructorCall(FieldClassDecl->getDestructor(getContext()),
                              Dtor_Complete, LHS.getAddress());
    } else {
      const RecordType *RT =
        DD->getAnyBaseClassToDestroy(BaseOrMember)->getAs<RecordType>();
      CXXRecordDecl *BaseClassDecl = cast<CXXRecordDecl>(RT->getDecl());
      if (BaseClassDecl->hasTrivialDestructor())
        continue;
      llvm::Value *V = GetAddressCXXOfBaseClass(LoadCXXThis(),
                                                ClassDecl, BaseClassDecl,
                                                /*NullCheckValue=*/false);
      EmitCXXDestructorCall(BaseClassDecl->getDestructor(getContext()),
                            DtorType, V);
    }
  }
  if (DD->getNumBaseOrMemberDestructions() || DD->isTrivial())
    return;
  // Case of destructor synthesis with fields and base classes
  // which have non-trivial destructors. They must be destructed in
  // reverse order of their construction.
  llvm::SmallVector<FieldDecl *, 16> DestructedFields;

  for (CXXRecordDecl::field_iterator Field = ClassDecl->field_begin(),
       FieldEnd = ClassDecl->field_end();
       Field != FieldEnd; ++Field) {
    QualType FieldType = getContext().getCanonicalType((*Field)->getType());
    if (getContext().getAsConstantArrayType(FieldType))
      FieldType = getContext().getBaseElementType(FieldType);
    if (const RecordType *RT = FieldType->getAs<RecordType>()) {
      CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
      if (FieldClassDecl->hasTrivialDestructor())
        continue;
      DestructedFields.push_back(*Field);
    }
  }
  if (!DestructedFields.empty())
    for (int i = DestructedFields.size() - 1; i >= 0; --i) {
      FieldDecl *Field = DestructedFields[i];
      QualType FieldType = Field->getType();
      const ConstantArrayType *Array =
        getContext().getAsConstantArrayType(FieldType);
      if (Array)
        FieldType = getContext().getBaseElementType(FieldType);
      const RecordType *RT = FieldType->getAs<RecordType>();
      CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
      llvm::Value *LoadOfThis = LoadCXXThis();
      LValue LHS = EmitLValueForField(LoadOfThis, Field, false, 0);
      if (Array) {
        const llvm::Type *BasePtr = ConvertType(FieldType);
        BasePtr = llvm::PointerType::getUnqual(BasePtr);
        llvm::Value *BaseAddrPtr =
          Builder.CreateBitCast(LHS.getAddress(), BasePtr);
        EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(getContext()),
                                  Array, BaseAddrPtr);
      }
      else
        EmitCXXDestructorCall(FieldClassDecl->getDestructor(getContext()),
                              Dtor_Complete, LHS.getAddress());
    }

  llvm::SmallVector<CXXRecordDecl*, 4> DestructedBases;
  for (CXXRecordDecl::base_class_const_iterator Base = ClassDecl->bases_begin();
       Base != ClassDecl->bases_end(); ++Base) {
    // FIXME: destruction of virtual bases NYI.
    if (Base->isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl
      = cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
    if (BaseClassDecl->hasTrivialDestructor())
      continue;
    DestructedBases.push_back(BaseClassDecl);
  }
  if (DestructedBases.empty())
    return;
  for (int i = DestructedBases.size() - 1; i >= 0; --i) {
    CXXRecordDecl *BaseClassDecl = DestructedBases[i];
    llvm::Value *V = GetAddressCXXOfBaseClass(LoadCXXThis(),
                                              ClassDecl, BaseClassDecl,
                                              /*NullCheckValue=*/false);
    EmitCXXDestructorCall(BaseClassDecl->getDestructor(getContext()),
                          Dtor_Complete, V);
  }
}
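
// Illustrative sketch of the routine above (hypothetical classes, not part of
// clang): for
//
//   struct Base   { ~Base(); };
//   struct Member { ~Member(); };
//   struct X : Base { Member M1; Member M2; ~X() {} };
//
// EmitDtorEpilogue appends to X::~X(), after the user-written body, roughly:
//   - M2.~Member();
//   - M1.~Member();
//   - Base::~Base();
// i.e. members first and then bases, each in reverse order of construction.
// For a synthesized destructor the same ordering is recomputed here from the
// class's fields and bases instead of the destructor's
// destr_begin()/destr_end() list.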

void CodeGenFunction::SynthesizeDefaultDestructor(const CXXDestructorDecl *Dtor,
                                                  CXXDtorType DtorType,
                                                  llvm::Function *Fn,
                                                  const FunctionArgList &Args) {

  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  assert(!ClassDecl->hasUserDeclaredDestructor() &&
         "SynthesizeDefaultDestructor - destructor has user declaration");
  (void)ClassDecl;

  StartFunction(GlobalDecl(Dtor, DtorType), Dtor->getResultType(), Fn, Args,
                SourceLocation());
  EmitDtorEpilogue(Dtor, DtorType);
  FinishFunction();
}

// FIXME: Move this to CGCXXStmt.cpp
void CodeGenFunction::EmitCXXTryStmt(const CXXTryStmt &S) {
  // FIXME: We need to do more here.
  EmitStmt(S.getTryBlock());
}