CGVTables.cpp revision 9063302a82423cb83f002257a416741850739a70
//===--- CGVTables.cpp - Emit LLVM Code for C++ vtables -------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of virtual tables.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGCXXABI.h"
#include "CodeGenModule.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Format.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include <algorithm>
#include <cstdio>

using namespace clang;
using namespace CodeGen;

CodeGenVTables::CodeGenVTables(CodeGenModule &CGM)
  : CGM(CGM), VTContext(CGM.getContext()) { }

llvm::Constant *CodeGenModule::GetAddrOfThunk(GlobalDecl GD,
                                              const ThunkInfo &Thunk) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());

  // Compute the mangled name.
  SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  if (const CXXDestructorDecl* DD = dyn_cast<CXXDestructorDecl>(MD))
    getCXXABI().getMangleContext().mangleCXXDtorThunk(DD, GD.getDtorType(),
                                                      Thunk.This, Out);
  else
    getCXXABI().getMangleContext().mangleThunk(MD, Thunk, Out);
  Out.flush();

  llvm::Type *Ty = getTypes().GetFunctionTypeForVTable(GD);
  return GetOrCreateLLVMFunction(Name, Ty, GD, /*ForVTable=*/true);
}

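// Adjust a pointer by a non-virtual byte offset and/or a virtual offset that is
// loaded from the object's vtable. For 'this' adjustments the non-virtual part
// is applied before the virtual part; for return adjustments the order is
// reversed. If both offsets are zero, the pointer is returned unchanged.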
static llvm::Value *PerformTypeAdjustment(CodeGenFunction &CGF,
                                          llvm::Value *Ptr,
                                          int64_t NonVirtualAdjustment,
                                          int64_t VirtualAdjustment,
                                          bool IsReturnAdjustment) {
  if (!NonVirtualAdjustment && !VirtualAdjustment)
    return Ptr;

  llvm::Type *Int8PtrTy = CGF.Int8PtrTy;
  llvm::Value *V = CGF.Builder.CreateBitCast(Ptr, Int8PtrTy);

  if (NonVirtualAdjustment && !IsReturnAdjustment) {
    // Perform the non-virtual adjustment for a base-to-derived cast.
    V = CGF.Builder.CreateConstInBoundsGEP1_64(V, NonVirtualAdjustment);
  }

  if (VirtualAdjustment) {
    llvm::Type *PtrDiffTy =
      CGF.ConvertType(CGF.getContext().getPointerDiffType());

    // Perform the virtual adjustment.
    llvm::Value *VTablePtrPtr =
      CGF.Builder.CreateBitCast(V, Int8PtrTy->getPointerTo());

    llvm::Value *VTablePtr = CGF.Builder.CreateLoad(VTablePtrPtr);

    llvm::Value *OffsetPtr =
      CGF.Builder.CreateConstInBoundsGEP1_64(VTablePtr, VirtualAdjustment);

    OffsetPtr = CGF.Builder.CreateBitCast(OffsetPtr, PtrDiffTy->getPointerTo());

    // Load the adjustment offset from the vtable.
    llvm::Value *Offset = CGF.Builder.CreateLoad(OffsetPtr);

    // Adjust our pointer.
    V = CGF.Builder.CreateInBoundsGEP(V, Offset);
  }

  if (NonVirtualAdjustment && IsReturnAdjustment) {
    // Perform the non-virtual adjustment for a derived-to-base cast.
    V = CGF.Builder.CreateConstInBoundsGEP1_64(V, NonVirtualAdjustment);
  }

  // Cast back to the original type.
  return CGF.Builder.CreateBitCast(V, Ptr->getType());
}

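// Compute and apply the visibility for a thunk. We start from the method's own
// visibility; in addition, when the HiddenWeakVTables codegen option is set,
// thunks for weak/linkonce-ODR functions that must be emitted wherever they are
// referenced can themselves be given hidden visibility.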
static void setThunkVisibility(CodeGenModule &CGM, const CXXMethodDecl *MD,
                               const ThunkInfo &Thunk, llvm::Function *Fn) {
  CGM.setGlobalVisibility(Fn, MD);

  if (!CGM.getCodeGenOpts().HiddenWeakVTables)
    return;

  // If the thunk has weak/linkonce linkage, but the function must be
  // emitted in every translation unit that references it, then we can
  // emit its thunks with hidden visibility, since its thunks must be
  // emitted when the function is.

  // This follows CodeGenModule::setTypeVisibility; see the comments
  // there for explanation.

  if ((Fn->getLinkage() != llvm::GlobalVariable::LinkOnceODRLinkage &&
       Fn->getLinkage() != llvm::GlobalVariable::WeakODRLinkage) ||
      Fn->getVisibility() != llvm::GlobalVariable::DefaultVisibility)
    return;

  if (MD->getExplicitVisibility(ValueDecl::VisibilityForValue))
    return;

  switch (MD->getTemplateSpecializationKind()) {
  case TSK_ExplicitInstantiationDefinition:
  case TSK_ExplicitInstantiationDeclaration:
    return;

  case TSK_Undeclared:
    break;

  case TSK_ExplicitSpecialization:
  case TSK_ImplicitInstantiation:
    return;
  }

  // If there's an explicit definition, and that definition is
  // out-of-line, then we can't assume that all users will have a
  // definition to emit.
  const FunctionDecl *Def = 0;
  if (MD->hasBody(Def) && Def->isOutOfLine())
    return;

  Fn->setVisibility(llvm::GlobalValue::HiddenVisibility);
}

#ifndef NDEBUG
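// Check whether two ABI argument classifications are compatible for the
// assertions in GenerateThunk: the ABIArgInfo kinds must match, and the types
// must either be identical or both be pointers / both be references.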
static bool similar(const ABIArgInfo &infoL, CanQualType typeL,
                    const ABIArgInfo &infoR, CanQualType typeR) {
  return (infoL.getKind() == infoR.getKind() &&
          (typeL == typeR ||
           (isa<PointerType>(typeL) && isa<PointerType>(typeR)) ||
           (isa<ReferenceType>(typeL) && isa<ReferenceType>(typeR))));
}
#endif

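// Apply the covariant return adjustment described by Thunk to the value the
// callee returned. Pointer results are null-checked first so that a null
// return value is passed through unchanged; reference results need no check.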
static RValue PerformReturnAdjustment(CodeGenFunction &CGF,
                                      QualType ResultType, RValue RV,
                                      const ThunkInfo &Thunk) {
  // Emit the return adjustment.
  bool NullCheckValue = !ResultType->isReferenceType();

  llvm::BasicBlock *AdjustNull = 0;
  llvm::BasicBlock *AdjustNotNull = 0;
  llvm::BasicBlock *AdjustEnd = 0;

  llvm::Value *ReturnValue = RV.getScalarVal();

  if (NullCheckValue) {
    AdjustNull = CGF.createBasicBlock("adjust.null");
    AdjustNotNull = CGF.createBasicBlock("adjust.notnull");
    AdjustEnd = CGF.createBasicBlock("adjust.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ReturnValue);
    CGF.Builder.CreateCondBr(IsNull, AdjustNull, AdjustNotNull);
    CGF.EmitBlock(AdjustNotNull);
  }

  ReturnValue = PerformTypeAdjustment(CGF, ReturnValue,
                                      Thunk.Return.NonVirtual,
                                      Thunk.Return.VBaseOffsetOffset,
                                      /*IsReturnAdjustment*/true);

  if (NullCheckValue) {
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustNull);
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustEnd);

    llvm::PHINode *PHI = CGF.Builder.CreatePHI(ReturnValue->getType(), 2);
    PHI->addIncoming(ReturnValue, AdjustNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(ReturnValue->getType()),
                     AdjustNull);
    ReturnValue = PHI;
  }

  return RValue::get(ReturnValue);
}

// This function does roughly the same thing as GenerateThunk, but in a
// very different way, so that va_start and va_end work correctly.
// FIXME: This function assumes "this" is the first non-sret LLVM argument of
//        a function, and that there is an alloca built in the entry block
//        for all accesses to "this".
// FIXME: This function assumes there is only one "ret" statement per function.
// FIXME: Cloning isn't correct in the presence of indirect goto!
// FIXME: This implementation of thunks bloats codesize by duplicating the
//        function definition.  There are alternatives:
//        1. Add some sort of stub support to LLVM for cases where we can
//           do a this adjustment, then a sibcall.
//        2. We could transform the definition to take a va_list instead of an
//           actual variable argument list, then have the thunks (including a
//           no-op thunk for the regular definition) call va_start/va_end.
//           There's a bit of per-call overhead for this solution, but it's
//           better for codesize if the definition is long.
void CodeGenFunction::GenerateVarArgsThunk(
                                      llvm::Function *Fn,
                                      const CGFunctionInfo &FnInfo,
                                      GlobalDecl GD, const ThunkInfo &Thunk) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  QualType ResultType = FPT->getResultType();

  // Get the original function
  assert(FnInfo.isVariadic());
  llvm::Type *Ty = CGM.getTypes().GetFunctionType(FnInfo);
  llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
  llvm::Function *BaseFn = cast<llvm::Function>(Callee);

  // Clone to thunk.
  llvm::ValueToValueMapTy VMap;
  llvm::Function *NewFn = llvm::CloneFunction(BaseFn, VMap,
                                              /*ModuleLevelChanges=*/false);
  CGM.getModule().getFunctionList().push_back(NewFn);
  Fn->replaceAllUsesWith(NewFn);
  NewFn->takeName(Fn);
  Fn->eraseFromParent();
  Fn = NewFn;

  // "Initialize" CGF (minimally).
  CurFn = Fn;

  // Get the "this" value
  llvm::Function::arg_iterator AI = Fn->arg_begin();
  if (CGM.ReturnTypeUsesSRet(FnInfo))
    ++AI;

  // Find the first store of "this", which will be to the alloca associated
  // with "this".
  llvm::Value *ThisPtr = &*AI;
  llvm::BasicBlock *EntryBB = Fn->begin();
  llvm::Instruction *ThisStore = 0;
  for (llvm::BasicBlock::iterator I = EntryBB->begin(), E = EntryBB->end();
       I != E; I++) {
    if (isa<llvm::StoreInst>(I) && I->getOperand(0) == ThisPtr) {
      ThisStore = cast<llvm::StoreInst>(I);
      break;
    }
  }
  assert(ThisStore && "Store of this should be in entry block?");
  // Adjust "this", if necessary.
  Builder.SetInsertPoint(ThisStore);
  llvm::Value *AdjustedThisPtr =
    PerformTypeAdjustment(*this, ThisPtr,
                          Thunk.This.NonVirtual,
                          Thunk.This.VCallOffsetOffset,
                          /*IsReturnAdjustment*/false);
  ThisStore->setOperand(0, AdjustedThisPtr);

  if (!Thunk.Return.isEmpty()) {
    // Fix up the returned value, if necessary.
    for (llvm::Function::iterator I = Fn->begin(), E = Fn->end(); I != E; I++) {
      llvm::Instruction *T = I->getTerminator();
      if (isa<llvm::ReturnInst>(T)) {
        RValue RV = RValue::get(T->getOperand(0));
        T->eraseFromParent();
        Builder.SetInsertPoint(&*I);
        RV = PerformReturnAdjustment(*this, ResultType, RV, Thunk);
        Builder.CreateRet(RV.getScalarVal());
        break;
      }
    }
  }
}

void CodeGenFunction::GenerateThunk(llvm::Function *Fn,
                                    const CGFunctionInfo &FnInfo,
                                    GlobalDecl GD, const ThunkInfo &Thunk) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  QualType ThisType = MD->getThisType(getContext());
  QualType ResultType =
    CGM.getCXXABI().HasThisReturn(GD) ? ThisType : FPT->getResultType();

  FunctionArgList FunctionArgs;

  // FIXME: It would be nice if more of this code could be shared with
  // CodeGenFunction::GenerateCode.

  // Create the implicit 'this' parameter declaration.
  CurGD = GD;
  CGM.getCXXABI().BuildInstanceFunctionParams(*this, ResultType, FunctionArgs);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *Param = *I;

    FunctionArgs.push_back(Param);
  }

  // Initialize debug info if needed.
  maybeInitializeDebugInfo();

  StartFunction(GlobalDecl(), ResultType, Fn, FnInfo, FunctionArgs,
                SourceLocation());

  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
  CXXThisValue = CXXABIThisValue;

  // Adjust the 'this' pointer if necessary.
  llvm::Value *AdjustedThisPtr =
    PerformTypeAdjustment(*this, LoadCXXThis(),
                          Thunk.This.NonVirtual,
                          Thunk.This.VCallOffsetOffset,
                          /*IsReturnAdjustment*/false);

  CallArgList CallArgs;

  // Add our adjusted 'this' pointer.
  CallArgs.add(RValue::get(AdjustedThisPtr), ThisType);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  // Get our callee.
  llvm::Type *Ty =
    CGM.getTypes().GetFunctionType(CGM.getTypes().arrangeGlobalDeclaration(GD));
  llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);

#ifndef NDEBUG
  const CGFunctionInfo &CallFnInfo =
    CGM.getTypes().arrangeCXXMethodCall(CallArgs, FPT,
                                       RequiredArgs::forPrototypePlus(FPT, 1));
  assert(CallFnInfo.getRegParm() == FnInfo.getRegParm() &&
         CallFnInfo.isNoReturn() == FnInfo.isNoReturn() &&
         CallFnInfo.getCallingConvention() == FnInfo.getCallingConvention());
  assert(isa<CXXDestructorDecl>(MD) || // ignore dtor return types
         similar(CallFnInfo.getReturnInfo(), CallFnInfo.getReturnType(),
                 FnInfo.getReturnInfo(), FnInfo.getReturnType()));
  assert(CallFnInfo.arg_size() == FnInfo.arg_size());
  for (unsigned i = 0, e = FnInfo.arg_size(); i != e; ++i)
    assert(similar(CallFnInfo.arg_begin()[i].info,
                   CallFnInfo.arg_begin()[i].type,
                   FnInfo.arg_begin()[i].info, FnInfo.arg_begin()[i].type));
#endif

  // Determine whether we have a return value slot to use.
  ReturnValueSlot Slot;
  if (!ResultType->isVoidType() &&
      FnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      !hasScalarEvaluationKind(CurFnInfo->getReturnType()))
    Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified());

  // Now emit our call.
  RValue RV = EmitCall(FnInfo, Callee, Slot, CallArgs, MD);

  if (!Thunk.Return.isEmpty())
    RV = PerformReturnAdjustment(*this, ResultType, RV, Thunk);

  if (!ResultType->isVoidType() && Slot.isNull())
    CGM.getCXXABI().EmitReturnFromThunk(*this, RV, ResultType);

  // Disable the final ARC autorelease.
  AutoreleaseResult = false;

  FinishFunction();

  // Set the right linkage.
  CGM.setFunctionLinkage(GD, Fn);

  // Set the right visibility.
  setThunkVisibility(CGM, MD, Thunk, Fn);
}

void CodeGenVTables::EmitThunk(GlobalDecl GD, const ThunkInfo &Thunk,
                               bool UseAvailableExternallyLinkage)
{
  const CGFunctionInfo &FnInfo = CGM.getTypes().arrangeGlobalDeclaration(GD);

  // FIXME: re-use FnInfo in this computation.
  llvm::Constant *Entry = CGM.GetAddrOfThunk(GD, Thunk);

  // Strip off a bitcast if we got one back.
  if (llvm::ConstantExpr *CE = dyn_cast<llvm::ConstantExpr>(Entry)) {
    assert(CE->getOpcode() == llvm::Instruction::BitCast);
    Entry = CE->getOperand(0);
  }

  // There's already a declaration with the same name, check if it has the same
  // type or if we need to replace it.
  if (cast<llvm::GlobalValue>(Entry)->getType()->getElementType() !=
      CGM.getTypes().GetFunctionTypeForVTable(GD)) {
    llvm::GlobalValue *OldThunkFn = cast<llvm::GlobalValue>(Entry);

    // If the types mismatch then we have to rewrite the definition.
    assert(OldThunkFn->isDeclaration() &&
           "Shouldn't replace non-declaration");

    // Remove the name from the old thunk function and get a new thunk.
    OldThunkFn->setName(StringRef());
    Entry = CGM.GetAddrOfThunk(GD, Thunk);

    // If needed, replace the old thunk with a bitcast.
    if (!OldThunkFn->use_empty()) {
      llvm::Constant *NewPtrForOldDecl =
        llvm::ConstantExpr::getBitCast(Entry, OldThunkFn->getType());
      OldThunkFn->replaceAllUsesWith(NewPtrForOldDecl);
    }

    // Remove the old thunk.
    OldThunkFn->eraseFromParent();
  }

  llvm::Function *ThunkFn = cast<llvm::Function>(Entry);

  if (!ThunkFn->isDeclaration()) {
    if (UseAvailableExternallyLinkage) {
      // There is already a thunk emitted for this function, do nothing.
      return;
    }

    // If a function has a body, it should have available_externally linkage.
    assert(ThunkFn->hasAvailableExternallyLinkage() &&
           "Function should have available_externally linkage!");

    // Change the linkage.
    CGM.setFunctionLinkage(GD, ThunkFn);
    return;
  }

  CGM.SetLLVMFunctionAttributesForDefinition(GD.getDecl(), ThunkFn);

  if (ThunkFn->isVarArg()) {
    // Varargs thunks are special; we can't just generate a call because
    // we can't copy the varargs.  Our implementation is rather
    // expensive/sucky at the moment, so don't generate the thunk unless
    // we have to.
    // FIXME: Do something better here; GenerateVarArgsThunk is extremely ugly.
    if (!UseAvailableExternallyLinkage)
      CodeGenFunction(CGM).GenerateVarArgsThunk(ThunkFn, FnInfo, GD, Thunk);
  } else {
    // Normal thunk body generation.
    CodeGenFunction(CGM).GenerateThunk(ThunkFn, FnInfo, GD, Thunk);
  }

  if (UseAvailableExternallyLinkage)
    ThunkFn->setLinkage(llvm::GlobalValue::AvailableExternallyLinkage);
}

void CodeGenVTables::MaybeEmitThunkAvailableExternally(GlobalDecl GD,
                                                       const ThunkInfo &Thunk) {
  // We only want to do this when building with optimizations.
  if (!CGM.getCodeGenOpts().OptimizationLevel)
    return;

  // We can't emit thunks for member functions with incomplete types.
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  if (!CGM.getTypes().isFuncTypeConvertible(
                                cast<FunctionType>(MD->getType().getTypePtr())))
    return;

  EmitThunk(GD, Thunk, /*UseAvailableExternallyLinkage=*/true);
}

void CodeGenVTables::EmitThunks(GlobalDecl GD)
{
  const CXXMethodDecl *MD =
    cast<CXXMethodDecl>(GD.getDecl())->getCanonicalDecl();

  // We don't need to generate thunks for the base destructor.
  if (isa<CXXDestructorDecl>(MD) && GD.getDtorType() == Dtor_Base)
    return;

  const VTableContext::ThunkInfoVectorTy *ThunkInfoVector =
    VTContext.getThunkInfo(MD);
  if (!ThunkInfoVector)
    return;

  for (unsigned I = 0, E = ThunkInfoVector->size(); I != E; ++I)
    EmitThunk(GD, (*ThunkInfoVector)[I],
              /*UseAvailableExternallyLinkage=*/false);
}

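/// Build the constant initializer for a vtable (or construction vtable): a flat
/// array of i8* entries covering the offset components (vcall offsets, vbase
/// offsets, offset to top), the RTTI pointer, and the virtual function
/// pointers, substituting thunks and the pure/deleted virtual call handlers
/// where the layout requires them.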
llvm::Constant *
CodeGenVTables::CreateVTableInitializer(const CXXRecordDecl *RD,
                                        const VTableComponent *Components,
                                        unsigned NumComponents,
                                const VTableLayout::VTableThunkTy *VTableThunks,
                                        unsigned NumVTableThunks) {
  SmallVector<llvm::Constant *, 64> Inits;

  llvm::Type *Int8PtrTy = CGM.Int8PtrTy;

  llvm::Type *PtrDiffTy =
    CGM.getTypes().ConvertType(CGM.getContext().getPointerDiffType());

  QualType ClassType = CGM.getContext().getTagDeclType(RD);
  llvm::Constant *RTTI = CGM.GetAddrOfRTTIDescriptor(ClassType);

  unsigned NextVTableThunkIndex = 0;

  llvm::Constant *PureVirtualFn = 0, *DeletedVirtualFn = 0;

  for (unsigned I = 0; I != NumComponents; ++I) {
    VTableComponent Component = Components[I];

    llvm::Constant *Init = 0;

    switch (Component.getKind()) {
    case VTableComponent::CK_VCallOffset:
      Init = llvm::ConstantInt::get(PtrDiffTy,
                                    Component.getVCallOffset().getQuantity());
      Init = llvm::ConstantExpr::getIntToPtr(Init, Int8PtrTy);
      break;
    case VTableComponent::CK_VBaseOffset:
      Init = llvm::ConstantInt::get(PtrDiffTy,
                                    Component.getVBaseOffset().getQuantity());
      Init = llvm::ConstantExpr::getIntToPtr(Init, Int8PtrTy);
      break;
    case VTableComponent::CK_OffsetToTop:
      Init = llvm::ConstantInt::get(PtrDiffTy,
                                    Component.getOffsetToTop().getQuantity());
      Init = llvm::ConstantExpr::getIntToPtr(Init, Int8PtrTy);
      break;
    case VTableComponent::CK_RTTI:
      Init = llvm::ConstantExpr::getBitCast(RTTI, Int8PtrTy);
      break;
    case VTableComponent::CK_FunctionPointer:
    case VTableComponent::CK_CompleteDtorPointer:
    case VTableComponent::CK_DeletingDtorPointer: {
      GlobalDecl GD;

      // Get the right global decl.
      switch (Component.getKind()) {
      default:
        llvm_unreachable("Unexpected vtable component kind");
      case VTableComponent::CK_FunctionPointer:
        GD = Component.getFunctionDecl();
        break;
      case VTableComponent::CK_CompleteDtorPointer:
        GD = GlobalDecl(Component.getDestructorDecl(), Dtor_Complete);
        break;
      case VTableComponent::CK_DeletingDtorPointer:
        GD = GlobalDecl(Component.getDestructorDecl(), Dtor_Deleting);
        break;
      }

      if (cast<CXXMethodDecl>(GD.getDecl())->isPure()) {
        // We have a pure virtual member function.
        if (!PureVirtualFn) {
          llvm::FunctionType *Ty =
            llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
          StringRef PureCallName = CGM.getCXXABI().GetPureVirtualCallName();
          PureVirtualFn = CGM.CreateRuntimeFunction(Ty, PureCallName);
          PureVirtualFn = llvm::ConstantExpr::getBitCast(PureVirtualFn,
                                                         CGM.Int8PtrTy);
        }
        Init = PureVirtualFn;
      } else if (cast<CXXMethodDecl>(GD.getDecl())->isDeleted()) {
        if (!DeletedVirtualFn) {
          llvm::FunctionType *Ty =
            llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
          StringRef DeletedCallName =
            CGM.getCXXABI().GetDeletedVirtualCallName();
          DeletedVirtualFn = CGM.CreateRuntimeFunction(Ty, DeletedCallName);
          DeletedVirtualFn = llvm::ConstantExpr::getBitCast(DeletedVirtualFn,
                                                         CGM.Int8PtrTy);
        }
        Init = DeletedVirtualFn;
      } else {
        // Check if we should use a thunk.
        if (NextVTableThunkIndex < NumVTableThunks &&
            VTableThunks[NextVTableThunkIndex].first == I) {
          const ThunkInfo &Thunk = VTableThunks[NextVTableThunkIndex].second;

          MaybeEmitThunkAvailableExternally(GD, Thunk);
          Init = CGM.GetAddrOfThunk(GD, Thunk);

          NextVTableThunkIndex++;
        } else {
          llvm::Type *Ty = CGM.getTypes().GetFunctionTypeForVTable(GD);

          Init = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
        }

        Init = llvm::ConstantExpr::getBitCast(Init, Int8PtrTy);
      }
      break;
    }

    case VTableComponent::CK_UnusedFunctionPointer:
      Init = llvm::ConstantExpr::getNullValue(Int8PtrTy);
      break;
    }

    Inits.push_back(Init);
  }

  llvm::ArrayType *ArrayType = llvm::ArrayType::get(Int8PtrTy, NumComponents);
  return llvm::ConstantArray::get(ArrayType, Inits);
}

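/// Get the global variable holding the vtable for RD, creating a declaration
/// (and queueing the v-table for possible deferred emission) if it does not
/// exist yet. The initializer is filled in later by EmitVTableDefinition.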
llvm::GlobalVariable *CodeGenVTables::GetAddrOfVTable(const CXXRecordDecl *RD) {
  llvm::GlobalVariable *&VTable = VTables[RD];
  if (VTable)
    return VTable;

  // Queue up this v-table for possible deferred emission.
  CGM.addDeferredVTable(RD);

  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().mangleCXXVTable(RD, Out);
  Out.flush();
  StringRef Name = OutName.str();

  llvm::ArrayType *ArrayType =
    llvm::ArrayType::get(CGM.Int8PtrTy,
                        VTContext.getVTableLayout(RD).getNumVTableComponents());

  VTable =
    CGM.CreateOrReplaceCXXRuntimeVariable(Name, ArrayType,
                                          llvm::GlobalValue::ExternalLinkage);
  VTable->setUnnamedAddr(true);
  return VTable;
}

void
CodeGenVTables::EmitVTableDefinition(llvm::GlobalVariable *VTable,
                                     llvm::GlobalVariable::LinkageTypes Linkage,
                                     const CXXRecordDecl *RD) {
  const VTableLayout &VTLayout = VTContext.getVTableLayout(RD);

  // Create and set the initializer.
  llvm::Constant *Init =
    CreateVTableInitializer(RD,
                            VTLayout.vtable_component_begin(),
                            VTLayout.getNumVTableComponents(),
                            VTLayout.vtable_thunk_begin(),
                            VTLayout.getNumVTableThunks());
  VTable->setInitializer(Init);

  // Set the correct linkage.
  VTable->setLinkage(Linkage);

  // Set the right visibility.
  CGM.setTypeVisibility(VTable, RD, CodeGenModule::TVK_ForVTable);
}

llvm::GlobalVariable *
CodeGenVTables::GenerateConstructionVTable(const CXXRecordDecl *RD,
                                      const BaseSubobject &Base,
                                      bool BaseIsVirtual,
                                   llvm::GlobalVariable::LinkageTypes Linkage,
                                      VTableAddressPointsMapTy& AddressPoints) {
  OwningPtr<VTableLayout> VTLayout(
    VTContext.createConstructionVTableLayout(Base.getBase(),
                                             Base.getBaseOffset(),
                                             BaseIsVirtual, RD));

  // Add the address points.
  AddressPoints = VTLayout->getAddressPoints();

  // Get the mangled construction vtable name.
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().
    mangleCXXCtorVTable(RD, Base.getBaseOffset().getQuantity(), Base.getBase(),
                        Out);
  Out.flush();
  StringRef Name = OutName.str();

  llvm::ArrayType *ArrayType =
    llvm::ArrayType::get(CGM.Int8PtrTy, VTLayout->getNumVTableComponents());

  // Construction vtable symbols are not part of the Itanium ABI, so we cannot
  // guarantee that they actually will be available externally. Instead, when
  // emitting an available_externally VTT, we provide references to an internal
  // linkage construction vtable. The ABI only requires complete-object vtables
  // to be the same for all instances of a type, not construction vtables.
  if (Linkage == llvm::GlobalVariable::AvailableExternallyLinkage)
    Linkage = llvm::GlobalVariable::InternalLinkage;

  // Create the variable that will hold the construction vtable.
  llvm::GlobalVariable *VTable =
    CGM.CreateOrReplaceCXXRuntimeVariable(Name, ArrayType, Linkage);
  CGM.setTypeVisibility(VTable, RD, CodeGenModule::TVK_ForConstructionVTable);

  // V-tables are always unnamed_addr.
  VTable->setUnnamedAddr(true);

  // Create and set the initializer.
  llvm::Constant *Init =
    CreateVTableInitializer(Base.getBase(),
                            VTLayout->vtable_component_begin(),
                            VTLayout->getNumVTableComponents(),
                            VTLayout->vtable_thunk_begin(),
                            VTLayout->getNumVTableThunks());
  VTable->setInitializer(Init);

  return VTable;
}

/// Compute the required linkage of the v-table for the given class.
///
/// Note that we only call this at the end of the translation unit.
llvm::GlobalVariable::LinkageTypes
CodeGenModule::getVTableLinkage(const CXXRecordDecl *RD) {
  if (!RD->isExternallyVisible())
    return llvm::GlobalVariable::InternalLinkage;

  // We're at the end of the translation unit, so the current key
  // function is fully correct.
  if (const CXXMethodDecl *keyFunction = Context.getCurrentKeyFunction(RD)) {
    // If this class has a key function, use that to determine the
    // linkage of the vtable.
    const FunctionDecl *def = 0;
    if (keyFunction->hasBody(def))
      keyFunction = cast<CXXMethodDecl>(def);

    switch (keyFunction->getTemplateSpecializationKind()) {
      case TSK_Undeclared:
      case TSK_ExplicitSpecialization:
        // When compiling with optimizations turned on, we emit all vtables,
        // even if the key function is not defined in the current translation
        // unit. If this is the case, use available_externally linkage.
        if (!def && CodeGenOpts.OptimizationLevel)
          return llvm::GlobalVariable::AvailableExternallyLinkage;

        if (keyFunction->isInlined())
          return !Context.getLangOpts().AppleKext ?
                   llvm::GlobalVariable::LinkOnceODRLinkage :
                   llvm::Function::InternalLinkage;

        return llvm::GlobalVariable::ExternalLinkage;

      case TSK_ImplicitInstantiation:
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::LinkOnceODRLinkage :
                 llvm::Function::InternalLinkage;

      case TSK_ExplicitInstantiationDefinition:
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::WeakODRLinkage :
                 llvm::Function::InternalLinkage;

      case TSK_ExplicitInstantiationDeclaration:
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::AvailableExternallyLinkage :
                 llvm::Function::InternalLinkage;
    }
  }

  // -fapple-kext mode does not support weak linkage, so we must use
  // internal linkage.
  if (Context.getLangOpts().AppleKext)
    return llvm::Function::InternalLinkage;

  switch (RD->getTemplateSpecializationKind()) {
  case TSK_Undeclared:
  case TSK_ExplicitSpecialization:
  case TSK_ImplicitInstantiation:
    return llvm::GlobalVariable::LinkOnceODRLinkage;

  case TSK_ExplicitInstantiationDeclaration:
    return llvm::GlobalVariable::AvailableExternallyLinkage;

  case TSK_ExplicitInstantiationDefinition:
    return llvm::GlobalVariable::WeakODRLinkage;
  }

  llvm_unreachable("Invalid TemplateSpecializationKind!");
}

/// This is a callback from Sema to tell us that it believes that a
/// particular v-table is required to be emitted in this translation
/// unit.
///
/// The reason we don't simply trust this callback is because Sema
/// will happily report that something is used even when it's used
/// only in code that we don't actually have to emit.
///
/// \param isRequired - if true, the v-table is mandatory, e.g.
///   because the translation unit defines the key function
void CodeGenModule::EmitVTable(CXXRecordDecl *theClass, bool isRequired) {
  if (!isRequired) return;

  VTables.GenerateClassData(theClass);
}

void
CodeGenVTables::GenerateClassData(const CXXRecordDecl *RD) {
  // First off, check whether we've already emitted the v-table and
  // associated stuff.
  llvm::GlobalVariable *VTable = GetAddrOfVTable(RD);
  if (VTable->hasInitializer())
    return;

  llvm::GlobalVariable::LinkageTypes Linkage = CGM.getVTableLinkage(RD);
  EmitVTableDefinition(VTable, Linkage, RD);

  if (RD->getNumVBases())
    CGM.getCXXABI().EmitVirtualInheritanceTables(Linkage, RD);

  // If this is the magic class __cxxabiv1::__fundamental_type_info,
  // we will emit the typeinfo for the fundamental types. This is the
  // same behaviour as GCC.
  const DeclContext *DC = RD->getDeclContext();
  if (RD->getIdentifier() &&
      RD->getIdentifier()->isStr("__fundamental_type_info") &&
      isa<NamespaceDecl>(DC) &&
      cast<NamespaceDecl>(DC)->getIdentifier() &&
      cast<NamespaceDecl>(DC)->getIdentifier()->isStr("__cxxabiv1") &&
      DC->getParent()->isTranslationUnit())
    CGM.EmitFundamentalRTTIDescriptors();
}

/// At this point in the translation unit, does it appear that we can
/// rely on the vtable being defined elsewhere in the program?
///
/// The response is really only definitive when called at the end of
/// the translation unit.
///
/// The only semantic restriction here is that the object file should
/// not contain a v-table definition when that v-table is defined
/// strongly elsewhere.  Otherwise, we'd just like to avoid emitting
/// v-tables when unnecessary.
bool CodeGenVTables::isVTableExternal(const CXXRecordDecl *RD) {
  assert(RD->isDynamicClass() && "Non dynamic classes have no VTable.");

  // If we have an explicit instantiation declaration (and not a
  // definition), the v-table is defined elsewhere.
  TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
  if (TSK == TSK_ExplicitInstantiationDeclaration)
    return true;

  // Otherwise, if the class is an instantiated template, the
  // v-table must be defined here.
  if (TSK == TSK_ImplicitInstantiation ||
      TSK == TSK_ExplicitInstantiationDefinition)
    return false;

  // Otherwise, if the class doesn't have a key function (possibly
  // anymore), the v-table must be defined here.
  const CXXMethodDecl *keyFunction = CGM.getContext().getCurrentKeyFunction(RD);
  if (!keyFunction)
    return false;

  // Otherwise, if we don't have a definition of the key function, the
  // v-table must be defined somewhere else.
  return !keyFunction->hasBody();
}

/// Given that we're currently at the end of the translation unit, and
/// we've emitted a reference to the v-table for this class, should
/// we define that v-table?
static bool shouldEmitVTableAtEndOfTranslationUnit(CodeGenModule &CGM,
                                                   const CXXRecordDecl *RD) {
  // If we're building with optimization, we always emit v-tables
  // since that allows for virtual function calls to be devirtualized.
  // If the v-table is defined strongly elsewhere, this definition
  // will be emitted available_externally.
  //
  // However, we don't want to do this in -fapple-kext mode, because
  // kext mode does not permit devirtualization.
  if (CGM.getCodeGenOpts().OptimizationLevel && !CGM.getLangOpts().AppleKext)
    return true;

  return !CGM.getVTables().isVTableExternal(RD);
}

/// Given that at some point we emitted a reference to one or more
/// v-tables, and that we are now at the end of the translation unit,
/// decide whether we should emit them.
void CodeGenModule::EmitDeferredVTables() {
#ifndef NDEBUG
  // Remember the size of DeferredVTables, because we're going to assume
  // that this entire operation doesn't modify it.
  size_t savedSize = DeferredVTables.size();
#endif

  typedef std::vector<const CXXRecordDecl *>::const_iterator const_iterator;
  for (const_iterator i = DeferredVTables.begin(),
                      e = DeferredVTables.end(); i != e; ++i) {
    const CXXRecordDecl *RD = *i;
    if (shouldEmitVTableAtEndOfTranslationUnit(*this, RD))
      VTables.GenerateClassData(RD);
  }

  assert(savedSize == DeferredVTables.size() &&
         "deferred extra v-tables during v-table emission?");
  DeferredVTables.clear();
}
908