CGExprAgg.cpp revision e9d34dc7afe06c9adaacad7a678a0cbbf749ea75
//===--- CGExprAgg.cpp - Emit LLVM Code from Aggregate Expressions --------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Aggregate Expr nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "CGObjCRuntime.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/StmtVisitor.h"
#include "llvm/Constants.h"
#include "llvm/Function.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Intrinsics.h"
using namespace clang;
using namespace CodeGen;

//===----------------------------------------------------------------------===//
//                        Aggregate Expression Emitter
//===----------------------------------------------------------------------===//

namespace  {
class VISIBILITY_HIDDEN AggExprEmitter : public StmtVisitor<AggExprEmitter> {
  CodeGenFunction &CGF;
  CGBuilderTy &Builder;
  llvm::Value *DestPtr;
  bool VolatileDest;
  bool IgnoreResult;
  bool IsInitializer;
  bool RequiresGCollection;
public:
  AggExprEmitter(CodeGenFunction &cgf, llvm::Value *destPtr, bool v,
                 bool ignore, bool isinit, bool requiresGCollection)
    : CGF(cgf), Builder(CGF.Builder),
      DestPtr(destPtr), VolatileDest(v), IgnoreResult(ignore),
      IsInitializer(isinit), RequiresGCollection(requiresGCollection) {
  }

  //===--------------------------------------------------------------------===//
  //                               Utilities
  //===--------------------------------------------------------------------===//

  /// EmitAggLoadOfLValue - Given an expression with aggregate type that
  /// represents an lvalue, this method emits the address of the lvalue and
  /// then copies the result into DestPtr.
  void EmitAggLoadOfLValue(const Expr *E);

  /// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
  void EmitFinalDestCopy(const Expr *E, LValue Src, bool Ignore = false);
  void EmitFinalDestCopy(const Expr *E, RValue Src, bool Ignore = false);

  //===--------------------------------------------------------------------===//
  //                            Visitor Methods
  //===--------------------------------------------------------------------===//

  void VisitStmt(Stmt *S) {
    CGF.ErrorUnsupported(S, "aggregate expression");
  }
  void VisitParenExpr(ParenExpr *PE) { Visit(PE->getSubExpr()); }
  void VisitUnaryExtension(UnaryOperator *E) { Visit(E->getSubExpr()); }

  // l-values.
  void VisitDeclRefExpr(DeclRefExpr *DRE) { EmitAggLoadOfLValue(DRE); }
  void VisitMemberExpr(MemberExpr *ME) { EmitAggLoadOfLValue(ME); }
  void VisitUnaryDeref(UnaryOperator *E) { EmitAggLoadOfLValue(E); }
  void VisitStringLiteral(StringLiteral *E) { EmitAggLoadOfLValue(E); }
  void VisitCompoundLiteralExpr(CompoundLiteralExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitArraySubscriptExpr(ArraySubscriptExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitBlockDeclRefExpr(const BlockDeclRefExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitPredefinedExpr(const PredefinedExpr *E) {
    EmitAggLoadOfLValue(E);
  }

  // Operators.
  void VisitCastExpr(CastExpr *E);
  void VisitCallExpr(const CallExpr *E);
  void VisitStmtExpr(const StmtExpr *E);
  void VisitBinaryOperator(const BinaryOperator *BO);
  void VisitBinAssign(const BinaryOperator *E);
  void VisitBinComma(const BinaryOperator *E);

  void VisitObjCMessageExpr(ObjCMessageExpr *E);
  void VisitObjCIvarRefExpr(ObjCIvarRefExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitObjCPropertyRefExpr(ObjCPropertyRefExpr *E);
  void VisitObjCImplicitSetterGetterRefExpr(ObjCImplicitSetterGetterRefExpr *E);

  void VisitConditionalOperator(const ConditionalOperator *CO);
  void VisitChooseExpr(const ChooseExpr *CE);
  void VisitInitListExpr(InitListExpr *E);
  void VisitCXXDefaultArgExpr(CXXDefaultArgExpr *DAE) {
    Visit(DAE->getExpr());
  }
  void VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E);
  void VisitCXXConstructExpr(const CXXConstructExpr *E);
  void VisitCXXExprWithTemporaries(CXXExprWithTemporaries *E);

  void VisitVAArgExpr(VAArgExpr *E);

  void EmitInitializationToLValue(Expr *E, LValue Address);
  void EmitNullInitializationToLValue(LValue Address, QualType T);
  //  case Expr::ChooseExprClass:

};
}  // end anonymous namespace.

//===----------------------------------------------------------------------===//
//                                Utilities
//===----------------------------------------------------------------------===//

/// EmitAggLoadOfLValue - Given an expression with aggregate type that
/// represents an lvalue, this method emits the address of the lvalue and
/// then copies the result into DestPtr.
void AggExprEmitter::EmitAggLoadOfLValue(const Expr *E) {
  LValue LV = CGF.EmitLValue(E);
  EmitFinalDestCopy(E, LV);
}

/// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
void AggExprEmitter::EmitFinalDestCopy(const Expr *E, RValue Src, bool Ignore) {
  assert(Src.isAggregate() && "value must be aggregate value!");

  // If the result is ignored, don't copy from the value.
  if (DestPtr == 0) {
    if (!Src.isVolatileQualified() || (IgnoreResult && Ignore))
      return;
    // If the source is volatile, we must read from it; to do that, we need
    // some place to put it.
    DestPtr = CGF.CreateTempAlloca(CGF.ConvertType(E->getType()), "agg.tmp");
  }

  if (RequiresGCollection) {
    CGF.CGM.getObjCRuntime().EmitGCMemmoveCollectable(CGF,
                                              DestPtr, Src.getAggregateAddr(),
                                              E->getType());
    return;
  }
  // If the result of the assignment is used, copy the LHS there also.
  // FIXME: Pass VolatileDest as well.  I think we also need to merge volatile
  // from the source, since we can't eliminate the copy if either operand is
  // volatile, unless the copy is volatile for both source and destination.
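  //
  // For example, given
  //
  //   volatile struct S vs; struct S s;
  //   s = vs;
  //
  // the read of 'vs' cannot be eliminated even though 's' is not volatile.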
  CGF.EmitAggregateCopy(DestPtr, Src.getAggregateAddr(), E->getType(),
                        VolatileDest|Src.isVolatileQualified());
}

/// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
void AggExprEmitter::EmitFinalDestCopy(const Expr *E, LValue Src, bool Ignore) {
  assert(Src.isSimple() && "Can't have aggregate bitfield, vector, etc");

  EmitFinalDestCopy(E, RValue::getAggregate(Src.getAddress(),
                                            Src.isVolatileQualified()),
                    Ignore);
}

//===----------------------------------------------------------------------===//
//                            Visitor Methods
//===----------------------------------------------------------------------===//

void AggExprEmitter::VisitCastExpr(CastExpr *E) {
  switch (E->getCastKind()) {
  default: assert(0 && "Unhandled cast kind!");

  case CastExpr::CK_ToUnion: {
    // GCC union extension
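    // For example:
    //
    //   union U { int i; float f; };
    //   int x = 42;
    //   union U u = (union U)x;   // initializes the 'i' member
    //
    // The destination is reinterpreted as a pointer to the operand's type and
    // the operand is emitted directly into it.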
    QualType PtrTy =
    CGF.getContext().getPointerType(E->getSubExpr()->getType());
    llvm::Value *CastPtr = Builder.CreateBitCast(DestPtr,
                                                 CGF.ConvertType(PtrTy));
    EmitInitializationToLValue(E->getSubExpr(),
                               LValue::MakeAddr(CastPtr, Qualifiers()));
    break;
  }

  // FIXME: Remove the CK_Unknown check here.
  case CastExpr::CK_Unknown:
  case CastExpr::CK_NoOp:
  case CastExpr::CK_UserDefinedConversion:
  case CastExpr::CK_ConstructorConversion:
    assert(CGF.getContext().hasSameUnqualifiedType(E->getSubExpr()->getType(),
                                                   E->getType()) &&
           "Implicit cast types must be compatible");
    Visit(E->getSubExpr());
    break;

  case CastExpr::CK_NullToMemberPointer: {
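    // This lowers a null member function pointer constant, e.g.
    //
    //   struct A { void f(); };
    //   void (A::*mp)() = 0;
    //
    // The aggregate is the { ptr, adj } pair used for member function
    // pointers; null is represented by storing a zero ptrdiff_t into both
    // fields.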
    QualType T = E->getType();
    const llvm::Type *PtrDiffTy =
      CGF.ConvertType(CGF.getContext().getPointerDiffType());

    llvm::Value *NullValue = llvm::Constant::getNullValue(PtrDiffTy);
    llvm::Value *Ptr = Builder.CreateStructGEP(DestPtr, 0, "ptr");
    Builder.CreateStore(NullValue, Ptr, VolatileDest);

    llvm::Value *Adj = Builder.CreateStructGEP(DestPtr, 1, "adj");
    Builder.CreateStore(NullValue, Adj, VolatileDest);

    break;
  }
  }
}

void AggExprEmitter::VisitCallExpr(const CallExpr *E) {
  if (E->getCallReturnType()->isReferenceType()) {
    EmitAggLoadOfLValue(E);
    return;
  }

  RValue RV = CGF.EmitCallExpr(E);
  EmitFinalDestCopy(E, RV);
}

void AggExprEmitter::VisitObjCMessageExpr(ObjCMessageExpr *E) {
  RValue RV = CGF.EmitObjCMessageExpr(E);
  EmitFinalDestCopy(E, RV);
}

void AggExprEmitter::VisitObjCPropertyRefExpr(ObjCPropertyRefExpr *E) {
  RValue RV = CGF.EmitObjCPropertyGet(E);
  EmitFinalDestCopy(E, RV);
}

void AggExprEmitter::VisitObjCImplicitSetterGetterRefExpr(
                                   ObjCImplicitSetterGetterRefExpr *E) {
  RValue RV = CGF.EmitObjCPropertyGet(E);
  EmitFinalDestCopy(E, RV);
}

void AggExprEmitter::VisitBinComma(const BinaryOperator *E) {
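  // The LHS is evaluated only for its side effects and its value is ignored;
  // the RHS is then emitted directly into the destination, e.g. for
  // 's = (f(), g())' only the result of g() reaches 's'.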
  CGF.EmitAnyExpr(E->getLHS(), 0, false, true);
  CGF.EmitAggExpr(E->getRHS(), DestPtr, VolatileDest,
                  /*IgnoreResult=*/false, IsInitializer);
}

void AggExprEmitter::VisitStmtExpr(const StmtExpr *E) {
  CGF.EmitCompoundStmt(*E->getSubStmt(), true, DestPtr, VolatileDest);
}

void AggExprEmitter::VisitBinaryOperator(const BinaryOperator *E) {
  CGF.ErrorUnsupported(E, "aggregate binary expression");
}

void AggExprEmitter::VisitBinAssign(const BinaryOperator *E) {
  // For an assignment to work, the value on the right has
  // to be compatible with the value on the left.
  assert(CGF.getContext().hasSameUnqualifiedType(E->getLHS()->getType(),
                                                 E->getRHS()->getType())
         && "Invalid assignment");
  LValue LHS = CGF.EmitLValue(E->getLHS());

  // We have to special case property setters, otherwise we must have
  // a simple lvalue (no aggregates inside vectors, bitfields).
  if (LHS.isPropertyRef()) {
    llvm::Value *AggLoc = DestPtr;
    if (!AggLoc)
      AggLoc = CGF.CreateTempAlloca(CGF.ConvertType(E->getRHS()->getType()));
    CGF.EmitAggExpr(E->getRHS(), AggLoc, VolatileDest);
    CGF.EmitObjCPropertySet(LHS.getPropertyRefExpr(),
                            RValue::getAggregate(AggLoc, VolatileDest));
  } else if (LHS.isKVCRef()) {
    llvm::Value *AggLoc = DestPtr;
    if (!AggLoc)
      AggLoc = CGF.CreateTempAlloca(CGF.ConvertType(E->getRHS()->getType()));
    CGF.EmitAggExpr(E->getRHS(), AggLoc, VolatileDest);
    CGF.EmitObjCPropertySet(LHS.getKVCRefExpr(),
                            RValue::getAggregate(AggLoc, VolatileDest));
  } else {
    bool RequiresGCollection = false;
    if (CGF.getContext().getLangOptions().NeXTRuntime) {
      QualType LHSTy = E->getLHS()->getType();
      if (const RecordType *FDTTy = LHSTy.getTypePtr()->getAs<RecordType>())
        RequiresGCollection = FDTTy->getDecl()->hasObjectMember();
    }
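    // Under the NeXT runtime, an assignment of a record that contains
    // Objective-C object members, e.g. roughly
    //
    //   struct S { __strong id obj; };
    //   s1 = s2;
    //
    // must go through the collector-aware memmove (EmitGCMemmoveCollectable)
    // so that write barriers are not lost.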
    // Codegen the RHS so that it stores directly into the LHS.
    CGF.EmitAggExpr(E->getRHS(), LHS.getAddress(), LHS.isVolatileQualified(),
                    false, false, RequiresGCollection);
    EmitFinalDestCopy(E, LHS, true);
  }
}

void AggExprEmitter::VisitConditionalOperator(const ConditionalOperator *E) {
  llvm::BasicBlock *LHSBlock = CGF.createBasicBlock("cond.true");
  llvm::BasicBlock *RHSBlock = CGF.createBasicBlock("cond.false");
  llvm::BasicBlock *ContBlock = CGF.createBasicBlock("cond.end");

  llvm::Value *Cond = CGF.EvaluateExprAsBool(E->getCond());
  Builder.CreateCondBr(Cond, LHSBlock, RHSBlock);

  CGF.PushConditionalTempDestruction();
  CGF.EmitBlock(LHSBlock);

  // The GNU extension for a missing LHS (cond ?: rhs) never has aggregate
  // type, since its shared condition/LHS operand must be scalar.
  assert(E->getLHS() && "Must have LHS for aggregate value");

  Visit(E->getLHS());
  CGF.PopConditionalTempDestruction();
  CGF.EmitBranch(ContBlock);

  CGF.PushConditionalTempDestruction();
  CGF.EmitBlock(RHSBlock);

  Visit(E->getRHS());
  CGF.PopConditionalTempDestruction();
  CGF.EmitBranch(ContBlock);

  CGF.EmitBlock(ContBlock);
}

void AggExprEmitter::VisitChooseExpr(const ChooseExpr *CE) {
  Visit(CE->getChosenSubExpr(CGF.getContext()));
}

void AggExprEmitter::VisitVAArgExpr(VAArgExpr *VE) {
  llvm::Value *ArgValue = CGF.EmitVAListRef(VE->getSubExpr());
  llvm::Value *ArgPtr = CGF.EmitVAArg(ArgValue, VE->getType());

  if (!ArgPtr) {
    CGF.ErrorUnsupported(VE, "aggregate va_arg expression");
    return;
  }

  EmitFinalDestCopy(VE, LValue::MakeAddr(ArgPtr, Qualifiers()));
}

void AggExprEmitter::VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E) {
  llvm::Value *Val = DestPtr;

  if (!Val) {
    // Create a temporary variable.
    Val = CGF.CreateTempAlloca(CGF.ConvertTypeForMem(E->getType()), "tmp");

    // FIXME: volatile
    CGF.EmitAggExpr(E->getSubExpr(), Val, false);
  } else
    Visit(E->getSubExpr());

  // Don't make this a live temporary if we're emitting an initializer expr.
  if (!IsInitializer)
    CGF.PushCXXTemporary(E->getTemporary(), Val);
}

void
AggExprEmitter::VisitCXXConstructExpr(const CXXConstructExpr *E) {
  llvm::Value *Val = DestPtr;

  if (!Val) {
    // Create a temporary variable.
    Val = CGF.CreateTempAlloca(CGF.ConvertTypeForMem(E->getType()), "tmp");
  }

  CGF.EmitCXXConstructExpr(Val, E);
}

void AggExprEmitter::VisitCXXExprWithTemporaries(CXXExprWithTemporaries *E) {
  CGF.EmitCXXExprWithTemporaries(E, DestPtr, VolatileDest, IsInitializer);
}

void AggExprEmitter::EmitInitializationToLValue(Expr* E, LValue LV) {
  // FIXME: Ignore result?
  // FIXME: Are initializers affected by volatile?
  if (isa<ImplicitValueInitExpr>(E)) {
    EmitNullInitializationToLValue(LV, E->getType());
  } else if (E->getType()->isComplexType()) {
    CGF.EmitComplexExprIntoAddr(E, LV.getAddress(), false);
  } else if (CGF.hasAggregateLLVMType(E->getType())) {
    CGF.EmitAnyExpr(E, LV.getAddress(), false);
  } else {
    CGF.EmitStoreThroughLValue(CGF.EmitAnyExpr(E), LV, E->getType());
  }
}

void AggExprEmitter::EmitNullInitializationToLValue(LValue LV, QualType T) {
  if (!CGF.hasAggregateLLVMType(T)) {
    // For non-aggregates, we can store zero
    llvm::Value *Null = llvm::Constant::getNullValue(CGF.ConvertType(T));
    CGF.EmitStoreThroughLValue(RValue::get(Null), LV, T);
  } else {
    // Otherwise, just memset the whole thing to zero.  This is legal
    // because in LLVM, all default initializers are guaranteed to have a
    // bit pattern of all zeros.
    // FIXME: That isn't true for member pointers!
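    // (For example, under the Itanium C++ ABI a null pointer to data member
    // is all-ones, not all-zeros, so a zero pattern would instead name the
    // member at offset 0.)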
    // There's a potential optimization opportunity in combining
    // memsets; that would be easy for arrays, but relatively
    // difficult for structures with the current code.
    CGF.EmitMemSetToZero(LV.getAddress(), T);
  }
}

void AggExprEmitter::VisitInitListExpr(InitListExpr *E) {
#if 0
  // FIXME: Disabled while we figure out what to do about
  // test/CodeGen/bitfield.c
  //
  // If we can, prefer a copy from a global; this is a lot less code for long
  // globals, and it's easier for the current optimizers to analyze.
  // FIXME: Should we really be doing this? Should we try to avoid cases where
  // we emit a global with a lot of zeros?  Should we try to avoid short
  // globals?
  if (E->isConstantInitializer(CGF.getContext(), 0)) {
    llvm::Constant* C = CGF.CGM.EmitConstantExpr(E, &CGF);
    llvm::GlobalVariable* GV =
    new llvm::GlobalVariable(C->getType(), true,
                             llvm::GlobalValue::InternalLinkage,
                             C, "", &CGF.CGM.getModule(), 0);
    EmitFinalDestCopy(E, LValue::MakeAddr(GV, 0));
    return;
  }
#endif
  if (E->hadArrayRangeDesignator()) {
    CGF.ErrorUnsupported(E, "GNU array range designator extension");
  }

  // Handle initialization of an array.
  if (E->getType()->isArrayType()) {
    const llvm::PointerType *APType =
      cast<llvm::PointerType>(DestPtr->getType());
    const llvm::ArrayType *AType =
      cast<llvm::ArrayType>(APType->getElementType());

    uint64_t NumInitElements = E->getNumInits();

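    // If the array is initialized from a single expression that already has
    // the array's type, e.g. a string literal in braces:
    //
    //   char s[4] = { "abc" };
    //
    // just copy from that lvalue.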
    if (E->getNumInits() > 0) {
      QualType T1 = E->getType();
      QualType T2 = E->getInit(0)->getType();
      if (CGF.getContext().hasSameUnqualifiedType(T1, T2)) {
        EmitAggLoadOfLValue(E->getInit(0));
        return;
      }
    }

    uint64_t NumArrayElements = AType->getNumElements();
    QualType ElementType = CGF.getContext().getCanonicalType(E->getType());
    ElementType = CGF.getContext().getAsArrayType(ElementType)->getElementType();

    // FIXME: were we intentionally ignoring address spaces and GC attributes?
    Qualifiers Quals = CGF.MakeQualifiers(ElementType);

    for (uint64_t i = 0; i != NumArrayElements; ++i) {
      llvm::Value *NextVal = Builder.CreateStructGEP(DestPtr, i, ".array");
      if (i < NumInitElements)
        EmitInitializationToLValue(E->getInit(i),
                                   LValue::MakeAddr(NextVal, Quals));
      else
        EmitNullInitializationToLValue(LValue::MakeAddr(NextVal, Quals),
                                       ElementType);
    }
    return;
  }

  assert(E->getType()->isRecordType() && "Only support structs/unions here!");

  // Do struct initialization; this code just sets each individual member
  // to the appropriate value.  This makes bitfield support automatic;
  // the disadvantage is that the generated code is more difficult for
  // the optimizer, especially with bitfields.
  unsigned NumInitElements = E->getNumInits();
  RecordDecl *SD = E->getType()->getAs<RecordType>()->getDecl();
  unsigned CurInitVal = 0;

  if (E->getType()->isUnionType()) {
    // Only initialize one field of a union. The field itself is
    // specified by the initializer list.
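    // For example, 'union U { int i; float f; } u = { .f = 1.0f };' only
    // initializes 'f', which Sema records as the initialized field.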
    if (!E->getInitializedFieldInUnion()) {
      // Empty union; we have nothing to do.

#ifndef NDEBUG
      // Make sure that it's really an empty union and not a failure of
      // semantic analysis.
      for (RecordDecl::field_iterator Field = SD->field_begin(),
                                   FieldEnd = SD->field_end();
           Field != FieldEnd; ++Field)
        assert(Field->isUnnamedBitfield() && "Only unnamed bitfields allowed");
#endif
      return;
    }

    // FIXME: volatility
    FieldDecl *Field = E->getInitializedFieldInUnion();
    LValue FieldLoc = CGF.EmitLValueForField(DestPtr, Field, true, 0);

    if (NumInitElements) {
      // Store the initializer into the field
      EmitInitializationToLValue(E->getInit(0), FieldLoc);
    } else {
      // Default-initialize to null
      EmitNullInitializationToLValue(FieldLoc, Field->getType());
    }

    return;
  }

  // Here we iterate over the fields; this makes it simpler to both
  // default-initialize fields and skip over unnamed fields.
  for (RecordDecl::field_iterator Field = SD->field_begin(),
                               FieldEnd = SD->field_end();
       Field != FieldEnd; ++Field) {
    // We're done once we hit the flexible array member
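    // (e.g. 'data' in 'struct S { int len; char data[]; };')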
    if (Field->getType()->isIncompleteArrayType())
      break;

    if (Field->isUnnamedBitfield())
      continue;

    // FIXME: volatility
    LValue FieldLoc = CGF.EmitLValueForField(DestPtr, *Field, false, 0);
    // We never generate write-barriers for initialized fields.
    LValue::SetObjCNonGC(FieldLoc, true);
    if (CurInitVal < NumInitElements) {
      // Store the initializer into the field
      EmitInitializationToLValue(E->getInit(CurInitVal++), FieldLoc);
    } else {
      // We're out of initializers; default-initialize to null
      EmitNullInitializationToLValue(FieldLoc, Field->getType());
    }
  }
}

//===----------------------------------------------------------------------===//
//                        Entry Points into this File
//===----------------------------------------------------------------------===//

/// EmitAggExpr - Emit the computation of the specified expression of aggregate
/// type.  The result is computed into DestPtr.  Note that if DestPtr is null,
/// the value of the aggregate expression is not needed.  If VolatileDest is
/// true, DestPtr cannot be 0.
void CodeGenFunction::EmitAggExpr(const Expr *E, llvm::Value *DestPtr,
                                  bool VolatileDest, bool IgnoreResult,
                                  bool IsInitializer,
                                  bool RequiresGCollection) {
  assert(E && hasAggregateLLVMType(E->getType()) &&
         "Invalid aggregate expression to emit");
  assert ((DestPtr != 0 || VolatileDest == false)
          && "volatile aggregate can't be 0");

  AggExprEmitter(*this, DestPtr, VolatileDest, IgnoreResult, IsInitializer,
                 RequiresGCollection)
    .Visit(const_cast<Expr*>(E));
}

void CodeGenFunction::EmitAggregateClear(llvm::Value *DestPtr, QualType Ty) {
  assert(!Ty->isAnyComplexType() && "Shouldn't happen for complex");

  EmitMemSetToZero(DestPtr, Ty);
}

void CodeGenFunction::EmitAggregateCopy(llvm::Value *DestPtr,
                                        llvm::Value *SrcPtr, QualType Ty,
                                        bool isVolatile) {
  assert(!Ty->isAnyComplexType() && "Shouldn't happen for complex");

  // Aggregate assignment turns into llvm.memcpy.  This is almost valid per
  // C99 6.5.16.1p3, which states "If the value being stored in an object is
  // read from another object that overlaps in any way the storage of the first
  // object, then the overlap shall be exact and the two objects shall have
  // qualified or unqualified versions of a compatible type."
  //
  // memcpy is not defined if the source and destination pointers are exactly
  // equal, but other compilers do this optimization, and almost every memcpy
  // implementation handles this case safely.  If there is a libc that does not
  // safely handle this, we can add a target hook.
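  //
  // For example, a plain struct assignment such as
  //
  //   struct S a, b;
  //   a = b;
  //
  // ends up here and is emitted as an llvm.memcpy of sizeof(struct S) bytes.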
  const llvm::Type *BP =
                llvm::PointerType::getUnqual(llvm::Type::getInt8Ty(VMContext));
  if (DestPtr->getType() != BP)
    DestPtr = Builder.CreateBitCast(DestPtr, BP, "tmp");
  if (SrcPtr->getType() != BP)
    SrcPtr = Builder.CreateBitCast(SrcPtr, BP, "tmp");

  // Get size and alignment info for this aggregate.
  std::pair<uint64_t, unsigned> TypeInfo = getContext().getTypeInfo(Ty);

  // FIXME: Handle variable sized types.
  const llvm::Type *IntPtr =
          llvm::IntegerType::get(VMContext, LLVMPointerWidth);

  // FIXME: If we have a volatile struct, the optimizer can remove what might
  // appear to be `extra' memory ops:
  //
  // volatile struct { int i; } a, b;
  //
  // int main() {
  //   a = b;
  //   a = b;
  // }
  //
  // we need to use a different call here.  We use isVolatile to indicate when
  // either the source or the destination is volatile.
  Builder.CreateCall4(CGM.getMemCpyFn(),
                      DestPtr, SrcPtr,
                      // TypeInfo.first describes size in bits.
                      llvm::ConstantInt::get(IntPtr, TypeInfo.first/8),
                      llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext),
                                             TypeInfo.second/8));
}