CGExprCXX.cpp revision 871d078f5ae5505553c02deeabdd4b83b4820211
//===--- CGExprCXX.cpp - Emit LLVM Code for C++ expressions ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ expressions
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
using namespace clang;
using namespace CodeGen;

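// CalculateCookiePadding - When operator new[] is used for a type whose
// elements must later be destroyed by operator delete[], extra space (an
// "array cookie") is allocated in front of the array to record the element
// count.  This returns the number of bytes of such padding, or 0 when no
// cookie is required (here: a non-class type, or a class with a trivial
// destructor that declares a usual single-argument operator delete[]).
//
// Illustrative example, not part of the original source; it assumes an LP64
// target where sizeof(size_t) == 8:
//
//   struct A { ~A(); };   // non-trivial destructor
//   new A[10];            // cookie padding = max(sizeof(size_t), alignof(A))
//                         //                = 8 bytes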
static uint64_t CalculateCookiePadding(ASTContext &Ctx, QualType ElementType) {
  const RecordType *RT = ElementType->getAs<RecordType>();
  if (!RT)
    return 0;

  const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(RT->getDecl());
  if (!RD)
    return 0;

  // Check if the class has a trivial destructor.
  if (RD->hasTrivialDestructor()) {
    // Check if the usual deallocation function takes two arguments.
    DeclarationName OpName =
      Ctx.DeclarationNames.getCXXOperatorName(OO_Array_Delete);

    DeclContext::lookup_const_iterator Op, OpEnd;
    for (llvm::tie(Op, OpEnd) = RD->lookup(OpName);
         Op != OpEnd; ++Op) {
      CXXMethodDecl *Delete = cast<CXXMethodDecl>(*Op);

      if (Delete->isUsualDeallocationFunction()) {
        // We don't need a cookie.
        if (Delete->getNumParams() == 1)
          return 0;

        assert(Delete->getNumParams() == 2 &&
               "Unexpected deallocation function type!");
        break;
      }
    }
  }

  // Padding is the maximum of sizeof(size_t) and alignof(ElementType)
  return std::max(Ctx.getTypeSize(Ctx.getSizeType()),
                  static_cast<uint64_t>(Ctx.getTypeAlign(ElementType))) / 8;
}

static uint64_t CalculateCookiePadding(ASTContext &Ctx, const CXXNewExpr *E) {
  if (!E->isArray())
    return 0;

  return CalculateCookiePadding(Ctx, E->getAllocatedType());
}

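// EmitCXXNewAllocSize - Compute the number of bytes to request from operator
// new.  For non-array new this is just sizeof(T); for array new it is
// NumElements * sizeof(T) plus any cookie padding, folded to a constant when
// the array bound is a compile-time constant without side effects.
//
// Illustrative example, not part of the original source (it assumes a target
// where sizeof(int) == 4; no cookie is needed for int):
//
//   new int[8];   // allocation size folds to the constant 32
//   new int[n];   // emits a multiply of n by 4 (plus an add for any cookie)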
static llvm::Value *EmitCXXNewAllocSize(CodeGenFunction &CGF,
                                        const CXXNewExpr *E,
                                        llvm::Value *& NumElements) {
  QualType Type = E->getAllocatedType();
  uint64_t TypeSizeInBytes = CGF.getContext().getTypeSize(Type) / 8;
  const llvm::Type *SizeTy = CGF.ConvertType(CGF.getContext().getSizeType());

  if (!E->isArray())
    return llvm::ConstantInt::get(SizeTy, TypeSizeInBytes);

  uint64_t CookiePadding = CalculateCookiePadding(CGF.getContext(), E);

  Expr::EvalResult Result;
  if (E->getArraySize()->Evaluate(Result, CGF.getContext()) &&
      !Result.HasSideEffects && Result.Val.isInt()) {

    uint64_t AllocSize =
      Result.Val.getInt().getZExtValue() * TypeSizeInBytes + CookiePadding;

    NumElements =
      llvm::ConstantInt::get(SizeTy, Result.Val.getInt().getZExtValue());

    return llvm::ConstantInt::get(SizeTy, AllocSize);
  }

  // Emit the array size expression.
  NumElements = CGF.EmitScalarExpr(E->getArraySize());

  // Multiply by the type size.
  llvm::Value *V =
    CGF.Builder.CreateMul(NumElements,
                          llvm::ConstantInt::get(SizeTy, TypeSizeInBytes));

  // And add the cookie padding if necessary.
  if (CookiePadding)
    V = CGF.Builder.CreateAdd(V, llvm::ConstantInt::get(SizeTy, CookiePadding));

  return V;
}

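// EmitNewInitializer - Initialize the newly allocated storage: call the
// constructor for class types (once per element for array new), or emit a
// direct store/copy for POD types with a single initializer argument.
//
// Illustrative example, not part of the original source:
//
//   new int(42);   // scalar POD: a plain store of 42 into the new storage
//   new A(x, y);   // class type: a call to A::A(x, y) on the new storage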
static void EmitNewInitializer(CodeGenFunction &CGF, const CXXNewExpr *E,
                               llvm::Value *NewPtr,
                               llvm::Value *NumElements) {
  if (E->isArray()) {
    if (CXXConstructorDecl *Ctor = E->getConstructor())
      CGF.EmitCXXAggrConstructorCall(Ctor, NumElements, NewPtr,
                                     E->constructor_arg_begin(),
                                     E->constructor_arg_end());
    return;
  }

  QualType AllocType = E->getAllocatedType();

  if (CXXConstructorDecl *Ctor = E->getConstructor()) {
    CGF.EmitCXXConstructorCall(Ctor, Ctor_Complete, NewPtr,
                               E->constructor_arg_begin(),
                               E->constructor_arg_end());
    return;
  }

  // We have a POD type.
  if (E->getNumConstructorArgs() == 0)
    return;

  assert(E->getNumConstructorArgs() == 1 &&
         "Can only have one argument to initializer of POD type.");

  const Expr *Init = E->getConstructorArg(0);

  if (!CGF.hasAggregateLLVMType(AllocType))
    CGF.EmitStoreOfScalar(CGF.EmitScalarExpr(Init), NewPtr,
                          AllocType.isVolatileQualified(), AllocType);
  else if (AllocType->isAnyComplexType())
    CGF.EmitComplexExprIntoAddr(Init, NewPtr,
                                AllocType.isVolatileQualified());
  else
    CGF.EmitAggExpr(Init, NewPtr, AllocType.isVolatileQualified());
}

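// EmitCXXNewExpr - Emit a new-expression: compute the allocation size, call
// the selected operator new with that size plus any placement arguments,
// null-check the result when the allocation function has an empty exception
// specification, store the array cookie if one is needed, and finally run
// the initializer on the allocated storage.
//
// Illustrative example, not part of the original source:
//
//   p = new (std::nothrow) A();
//   // operator new(sizeof(A), std::nothrow) is called, the result is
//   // null-checked, and A::A() runs only on the not-null path; the value of
//   // the whole expression is a phi of the constructed pointer and null.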
llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  QualType AllocType = E->getAllocatedType();
  FunctionDecl *NewFD = E->getOperatorNew();
  const FunctionProtoType *NewFTy =
    NewFD->getType()->getAs<FunctionProtoType>();

  CallArgList NewArgs;

  // The allocation size is the first argument.
  QualType SizeTy = getContext().getSizeType();

  llvm::Value *NumElements = 0;
  llvm::Value *AllocSize = EmitCXXNewAllocSize(*this, E, NumElements);

  NewArgs.push_back(std::make_pair(RValue::get(AllocSize), SizeTy));

  // Emit the rest of the arguments.
  // FIXME: Ideally, this should just use EmitCallArgs.
  CXXNewExpr::const_arg_iterator NewArg = E->placement_arg_begin();

  // First, use the types from the function type.
  // We start at 1 here because the first argument (the allocation size)
  // has already been emitted.
  for (unsigned i = 1, e = NewFTy->getNumArgs(); i != e; ++i, ++NewArg) {
    QualType ArgType = NewFTy->getArgType(i);

    assert(getContext().getCanonicalType(ArgType.getNonReferenceType()).
           getTypePtr() ==
           getContext().getCanonicalType(NewArg->getType()).getTypePtr() &&
           "type mismatch in call argument!");

    NewArgs.push_back(std::make_pair(EmitCallArg(*NewArg, ArgType),
                                     ArgType));
  }

  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((NewArg == E->placement_arg_end() || NewFTy->isVariadic()) &&
         "Extra arguments in non-variadic function!");

  // If we still have any arguments, emit them using the type of the argument.
  for (CXXNewExpr::const_arg_iterator NewArgEnd = E->placement_arg_end();
       NewArg != NewArgEnd; ++NewArg) {
    QualType ArgType = NewArg->getType();
    NewArgs.push_back(std::make_pair(EmitCallArg(*NewArg, ArgType),
                                     ArgType));
  }

  // Emit the call to new.
  RValue RV =
    EmitCall(CGM.getTypes().getFunctionInfo(NewFTy->getResultType(), NewArgs),
             CGM.GetAddrOfFunction(NewFD), NewArgs, NewFD);

  // If an allocation function is declared with an empty exception
  // specification it returns null to indicate failure to allocate storage.
  // [expr.new]p13.
  // (We don't need to check for null when there's no new initializer and
  // we're allocating a POD type).
  bool NullCheckResult = NewFTy->hasEmptyExceptionSpec() &&
    !(AllocType->isPODType() && !E->hasInitializer());

  llvm::BasicBlock *NewNull = 0;
  llvm::BasicBlock *NewNotNull = 0;
  llvm::BasicBlock *NewEnd = 0;

  llvm::Value *NewPtr = RV.getScalarVal();

  if (NullCheckResult) {
    NewNull = createBasicBlock("new.null");
    NewNotNull = createBasicBlock("new.notnull");
    NewEnd = createBasicBlock("new.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(NewPtr,
                           llvm::Constant::getNullValue(NewPtr->getType()),
                           "isnull");

    Builder.CreateCondBr(IsNull, NewNull, NewNotNull);
    EmitBlock(NewNotNull);
  }

  if (uint64_t CookiePadding = CalculateCookiePadding(getContext(), E)) {
    uint64_t CookieOffset =
      CookiePadding - getContext().getTypeSize(SizeTy) / 8;

    llvm::Value *NumElementsPtr =
      Builder.CreateConstInBoundsGEP1_64(NewPtr, CookieOffset);

    NumElementsPtr = Builder.CreateBitCast(NumElementsPtr,
                                           ConvertType(SizeTy)->getPointerTo());
    Builder.CreateStore(NumElements, NumElementsPtr);

    // Now add the padding to the new ptr.
    NewPtr = Builder.CreateConstInBoundsGEP1_64(NewPtr, CookiePadding);
  }

  NewPtr = Builder.CreateBitCast(NewPtr, ConvertType(E->getType()));

  EmitNewInitializer(*this, E, NewPtr, NumElements);

  if (NullCheckResult) {
    Builder.CreateBr(NewEnd);
    NewNotNull = Builder.GetInsertBlock();
    EmitBlock(NewNull);
    Builder.CreateBr(NewEnd);
    EmitBlock(NewEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(NewPtr->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(NewPtr, NewNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(NewPtr->getType()), NewNull);

    NewPtr = PHI;
  }

  return NewPtr;
}


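// GetAllocatedObjectPtrAndNumElements - Given a pointer to the first array
// element, step back over the array cookie to recover both the pointer that
// operator new[] originally returned and the stored element count.
//
// Illustrative sketch of the layout this code assumes, not part of the
// original source (the num-elements field is sizeof(size_t) bytes wide and
// sits at the end of the cookie, immediately before the first element):
//
//   AllocatedObjectPtr                 Ptr
//   |                                  |
//   v                                  v
//   +--------------+-------------------+-----------+-----------+----
//   |   padding    |   num elements    | element 0 | element 1 | ...
//   +--------------+-------------------+-----------+-----------+----
//   |<-------- CookiePadding --------->|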
static std::pair<llvm::Value *, llvm::Value *>
GetAllocatedObjectPtrAndNumElements(CodeGenFunction &CGF,
                                    llvm::Value *Ptr, QualType DeleteTy) {
  QualType SizeTy = CGF.getContext().getSizeType();
  const llvm::Type *SizeLTy = CGF.ConvertType(SizeTy);

  uint64_t DeleteTypeAlign = CGF.getContext().getTypeAlign(DeleteTy);
  uint64_t CookiePadding = std::max(CGF.getContext().getTypeSize(SizeTy),
                                    DeleteTypeAlign) / 8;
  assert(CookiePadding && "CookiePadding should not be 0.");

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  uint64_t CookieOffset =
    CookiePadding - CGF.getContext().getTypeSize(SizeTy) / 8;

  llvm::Value *AllocatedObjectPtr = CGF.Builder.CreateBitCast(Ptr, Int8PtrTy);
  AllocatedObjectPtr =
    CGF.Builder.CreateConstInBoundsGEP1_64(AllocatedObjectPtr,
                                           -CookiePadding);

  llvm::Value *NumElementsPtr =
    CGF.Builder.CreateConstInBoundsGEP1_64(AllocatedObjectPtr,
                                           CookieOffset);
  NumElementsPtr =
    CGF.Builder.CreateBitCast(NumElementsPtr, SizeLTy->getPointerTo());

  llvm::Value *NumElements = CGF.Builder.CreateLoad(NumElementsPtr);
  NumElements =
    CGF.Builder.CreateIntCast(NumElements, SizeLTy, /*isSigned=*/false);

  return std::make_pair(AllocatedObjectPtr, NumElements);
}

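// EmitDeleteCall - Emit a call to the given operator delete.  If the
// deallocation function takes a second size_t parameter, the size of the
// deleted object is passed as well; for array deletes with a cookie, the
// pointer is first adjusted back to the start of the original allocation and
// the size is scaled by the stored element count.
//
// Illustrative example, not part of the original source:
//
//   struct A { ~A(); void operator delete[](void *, size_t); };
//   delete[] p;   // calls A::operator delete[](adjusted ptr,
//                 //                            NumElements * sizeof(A))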
void CodeGenFunction::EmitDeleteCall(const FunctionDecl *DeleteFD,
                                     llvm::Value *Ptr,
                                     QualType DeleteTy) {
  const FunctionProtoType *DeleteFTy =
    DeleteFD->getType()->getAs<FunctionProtoType>();

  CallArgList DeleteArgs;

  // Check if we need to pass the size to the delete operator.
  llvm::Value *Size = 0;
  QualType SizeTy;
  if (DeleteFTy->getNumArgs() == 2) {
    SizeTy = DeleteFTy->getArgType(1);
    uint64_t DeleteTypeSize = getContext().getTypeSize(DeleteTy) / 8;
    Size = llvm::ConstantInt::get(ConvertType(SizeTy), DeleteTypeSize);
  }

  if (DeleteFD->getOverloadedOperator() == OO_Array_Delete &&
      CalculateCookiePadding(getContext(), DeleteTy)) {
    // We need to get the number of elements in the array from the cookie.
    llvm::Value *AllocatedObjectPtr;
    llvm::Value *NumElements;
    llvm::tie(AllocatedObjectPtr, NumElements) =
      GetAllocatedObjectPtrAndNumElements(*this, Ptr, DeleteTy);

    // Multiply the size by the number of elements.
    if (Size)
      Size = Builder.CreateMul(NumElements, Size);

    Ptr = AllocatedObjectPtr;
  }

  QualType ArgTy = DeleteFTy->getArgType(0);
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.push_back(std::make_pair(RValue::get(DeletePtr), ArgTy));

  if (Size)
    DeleteArgs.push_back(std::make_pair(RValue::get(Size), SizeTy));

  // Emit the call to delete.
  EmitCall(CGM.getTypes().getFunctionInfo(DeleteFTy->getResultType(),
                                          DeleteArgs),
           CGM.GetAddrOfFunction(DeleteFD),
           DeleteArgs, DeleteFD);
}

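// EmitCXXDeleteExpr - Emit a delete-expression: null-check the operand, run
// the destructor if the pointee type has a non-trivial one (once per element
// for delete[], or through a virtual call to the deleting destructor when it
// is virtual), and then call operator delete unless the deleting destructor
// has already freed the storage.
//
// Illustrative example, not part of the original source:
//
//   Base *b = ...;   // Base has a virtual destructor
//   delete b;        // virtual call to the deleting destructor; no separate
//                    // operator delete call is emitted here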
void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {
  // Get at the argument before we performed the implicit conversion
  // to void*.
  const Expr *Arg = E->getArgument();
  while (const ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(Arg)) {
    if (ICE->getCastKind() != CastExpr::CK_UserDefinedConversion &&
        ICE->getType()->isVoidPointerType())
      Arg = ICE->getSubExpr();
    else
      break;
  }

  QualType DeleteTy = Arg->getType()->getAs<PointerType>()->getPointeeType();

  llvm::Value *Ptr = EmitScalarExpr(Arg);

  // Null check the pointer.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull =
    Builder.CreateICmpEQ(Ptr, llvm::Constant::getNullValue(Ptr->getType()),
                         "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  bool ShouldCallDelete = true;

  // Call the destructor if necessary.
  if (const RecordType *RT = DeleteTy->getAs<RecordType>()) {
    if (CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(RT->getDecl())) {
      if (!RD->hasTrivialDestructor()) {
        const CXXDestructorDecl *Dtor = RD->getDestructor(getContext());
        if (E->isArrayForm()) {
          llvm::Value *AllocatedObjectPtr;
          llvm::Value *NumElements;
          llvm::tie(AllocatedObjectPtr, NumElements) =
            GetAllocatedObjectPtrAndNumElements(*this, Ptr, DeleteTy);

          EmitCXXAggrDestructorCall(Dtor, NumElements, Ptr);
        } else if (Dtor->isVirtual()) {
          const llvm::Type *Ty =
            CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(Dtor),
                                           /*isVariadic=*/false);

          llvm::Value *Callee = BuildVirtualCall(Dtor, Dtor_Deleting, Ptr, Ty);
          EmitCXXMemberCall(Dtor, Callee, Ptr, 0, 0);

          // The dtor took care of deleting the object.
          ShouldCallDelete = false;
        } else
          EmitCXXDestructorCall(Dtor, Dtor_Complete, Ptr);
      }
    }
  }

  if (ShouldCallDelete)
    EmitDeleteCall(E->getOperatorDelete(), Ptr, DeleteTy);

  EmitBlock(DeleteEnd);
}

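// EmitCXXTypeidExpr - Emit a typeid expression.  For a type operand or a
// non-polymorphic class the std::type_info object is known statically; for a
// polymorphic glvalue the type_info pointer is loaded from the slot just
// before the vtable's address point (index -1), after null-checking a
// dereferenced pointer and calling __cxa_bad_typeid on the null path.
//
// Illustrative example, not part of the original source:
//
//   Base *p = ...;   // Base is polymorphic
//   typeid(*p);      // null-checks p, loads p's vtable, then vtable[-1]
//   typeid(int);     // resolves directly to the type_info for int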
llvm::Value *CodeGenFunction::EmitCXXTypeidExpr(const CXXTypeidExpr *E) {
  QualType Ty = E->getType();
  const llvm::Type *LTy = ConvertType(Ty)->getPointerTo();

  if (E->isTypeOperand())
    return Builder.CreateBitCast(CGM.GetAddrOfRTTI(E->getTypeOperand()), LTy);

  Expr *subE = E->getExprOperand();
  Ty = subE->getType();
  CanQualType CanTy = CGM.getContext().getCanonicalType(Ty);
  Ty = CanTy.getUnqualifiedType().getNonReferenceType();
  if (const RecordType *RT = Ty->getAs<RecordType>()) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (RD->isPolymorphic()) {
      // FIXME: if subE is an lvalue do
      LValue Obj = EmitLValue(subE);
      llvm::Value *This = Obj.getAddress();
      LTy = LTy->getPointerTo()->getPointerTo();
      llvm::Value *V = Builder.CreateBitCast(This, LTy);
      // We need to do a zero check for *p, unless it has NonNullAttr.
      // FIXME: PointerType->hasAttr<NonNullAttr>()
      bool CanBeZero = false;
      if (UnaryOperator *UO = dyn_cast<UnaryOperator>(subE->IgnoreParens()))
        if (UO->getOpcode() == UnaryOperator::Deref)
          CanBeZero = true;
      if (CanBeZero) {
        llvm::BasicBlock *NonZeroBlock = createBasicBlock();
        llvm::BasicBlock *ZeroBlock = createBasicBlock();

        llvm::Value *Zero = llvm::Constant::getNullValue(LTy);
        Builder.CreateCondBr(Builder.CreateICmpNE(V, Zero),
                             NonZeroBlock, ZeroBlock);
        EmitBlock(ZeroBlock);
        /// Call __cxa_bad_typeid
        const llvm::Type *ResultType = llvm::Type::getVoidTy(VMContext);
        const llvm::FunctionType *FTy;
        FTy = llvm::FunctionType::get(ResultType, false);
        llvm::Value *F = CGM.CreateRuntimeFunction(FTy, "__cxa_bad_typeid");
        Builder.CreateCall(F)->setDoesNotReturn();
        Builder.CreateUnreachable();
        EmitBlock(NonZeroBlock);
      }
      V = Builder.CreateLoad(V, "vtable");
      V = Builder.CreateConstInBoundsGEP1_64(V, -1ULL);
      V = Builder.CreateLoad(V);
      return V;
    }
    return Builder.CreateBitCast(CGM.GenerateRTTI(RD), LTy);
  }
  return Builder.CreateBitCast(CGM.GenerateRTTI(Ty), LTy);
}

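// EmitDynamicCast - Emit a dynamic_cast.  A cast to void* just applies the
// "offset to top" value stored at vtable index -2; other casts call the
// Itanium C++ ABI runtime entry point __dynamic_cast and, for reference
// casts, branch to __cxa_bad_cast when the result is null.
//
// Illustrative sketch of the runtime call being built, not part of the
// original source (the declaration paraphrases the Itanium C++ ABI; the real
// type_info parameters are abi::__class_type_info pointers):
//
//   extern "C" void *__dynamic_cast(const void *sub,
//                                   const void *src_type_info,
//                                   const void *dst_type_info,
//                                   ptrdiff_t src2dst_offset);
//
//   Derived *d = dynamic_cast<Derived *>(b);
//   // => __dynamic_cast(b, RTTI for Base, RTTI for Derived, /*hint*/ -1)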
llvm::Value *CodeGenFunction::EmitDynamicCast(llvm::Value *V,
                                              const CXXDynamicCastExpr *DCE) {
  QualType CastTy = DCE->getTypeAsWritten();
  QualType InnerType = CastTy->getPointeeType();
  QualType ArgTy = DCE->getSubExpr()->getType();
  const llvm::Type *LArgTy = ConvertType(ArgTy);
  const llvm::Type *LTy = ConvertType(DCE->getType());

  bool CanBeZero = false;
  bool ToVoid = false;
  bool ThrowOnBad = false;
  if (CastTy->isPointerType()) {
    // FIXME: if PointerType->hasAttr<NonNullAttr>(), we don't set this
    CanBeZero = true;
    if (InnerType->isVoidType())
      ToVoid = true;
  } else {
    LTy = LTy->getPointerTo();
    ThrowOnBad = true;
  }

  CXXRecordDecl *SrcTy;
  QualType Ty = ArgTy;
  if (ArgTy.getTypePtr()->isPointerType()
      || ArgTy.getTypePtr()->isReferenceType())
    Ty = Ty.getTypePtr()->getPointeeType();
  CanQualType CanTy = CGM.getContext().getCanonicalType(Ty);
  Ty = CanTy.getUnqualifiedType();
  SrcTy = cast<CXXRecordDecl>(Ty->getAs<RecordType>()->getDecl());

  llvm::BasicBlock *ContBlock = createBasicBlock();
  llvm::BasicBlock *NullBlock = 0;
  llvm::BasicBlock *NonZeroBlock = 0;
  if (CanBeZero) {
    NonZeroBlock = createBasicBlock();
    NullBlock = createBasicBlock();
    llvm::Value *Zero = llvm::Constant::getNullValue(LArgTy);
    Builder.CreateCondBr(Builder.CreateICmpNE(V, Zero),
                         NonZeroBlock, NullBlock);
    EmitBlock(NonZeroBlock);
  }

  llvm::BasicBlock *BadCastBlock = 0;

  const llvm::Type *PtrDiffTy = ConvertType(getContext().getSizeType());

  // See if this is a dynamic_cast(void*)
  if (ToVoid) {
    llvm::Value *This = V;
    V = Builder.CreateBitCast(This, PtrDiffTy->getPointerTo()->getPointerTo());
    V = Builder.CreateLoad(V, "vtable");
    V = Builder.CreateConstInBoundsGEP1_64(V, -2ULL);
    V = Builder.CreateLoad(V, "offset to top");
    This = Builder.CreateBitCast(This, llvm::Type::getInt8PtrTy(VMContext));
    V = Builder.CreateInBoundsGEP(This, V);
    V = Builder.CreateBitCast(V, LTy);
  } else {
    /// Call __dynamic_cast
    const llvm::Type *ResultType = llvm::Type::getInt8PtrTy(VMContext);
    const llvm::FunctionType *FTy;
    std::vector<const llvm::Type*> ArgTys;
    const llvm::Type *PtrToInt8Ty
      = llvm::Type::getInt8Ty(VMContext)->getPointerTo();
    ArgTys.push_back(PtrToInt8Ty);
    ArgTys.push_back(PtrToInt8Ty);
    ArgTys.push_back(PtrToInt8Ty);
    ArgTys.push_back(PtrDiffTy);
    FTy = llvm::FunctionType::get(ResultType, ArgTys, false);
    CXXRecordDecl *DstTy;
    Ty = CastTy.getTypePtr()->getPointeeType();
    CanTy = CGM.getContext().getCanonicalType(Ty);
    Ty = CanTy.getUnqualifiedType();
    DstTy = cast<CXXRecordDecl>(Ty->getAs<RecordType>()->getDecl());

    // FIXME: Calculate better hint.
    llvm::Value *hint = llvm::ConstantInt::get(PtrDiffTy, -1ULL);
    llvm::Value *SrcArg = CGM.GenerateRTTIRef(SrcTy);
    llvm::Value *DstArg = CGM.GenerateRTTIRef(DstTy);
    V = Builder.CreateBitCast(V, PtrToInt8Ty);
    V = Builder.CreateCall4(CGM.CreateRuntimeFunction(FTy, "__dynamic_cast"),
                            V, SrcArg, DstArg, hint);
    V = Builder.CreateBitCast(V, LTy);

    if (ThrowOnBad) {
      BadCastBlock = createBasicBlock();

      llvm::Value *Zero = llvm::Constant::getNullValue(LTy);
      Builder.CreateCondBr(Builder.CreateICmpNE(V, Zero),
                           ContBlock, BadCastBlock);
      EmitBlock(BadCastBlock);
      /// Call __cxa_bad_cast
      ResultType = llvm::Type::getVoidTy(VMContext);
      const llvm::FunctionType *FBadTy;
      FBadTy = llvm::FunctionType::get(ResultType, false);
      llvm::Value *F = CGM.CreateRuntimeFunction(FBadTy, "__cxa_bad_cast");
      Builder.CreateCall(F)->setDoesNotReturn();
      Builder.CreateUnreachable();
    }
  }

  if (CanBeZero) {
    Builder.CreateBr(ContBlock);
    EmitBlock(NullBlock);
    Builder.CreateBr(ContBlock);
  }
  EmitBlock(ContBlock);
  if (CanBeZero) {
    llvm::PHINode *PHI = Builder.CreatePHI(LTy);
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(V, NonZeroBlock);
    PHI->addIncoming(llvm::Constant::getNullValue(LTy), NullBlock);
    V = PHI;
  }

  return V;
}
