RecordLayoutBuilder.cpp revision b2969b1e50580344891a98f5b241f8351fe371cf
1//=== RecordLayoutBuilder.cpp - Helper class for building record layouts ---==//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9
10#include "clang/AST/RecordLayout.h"
11#include "clang/AST/ASTContext.h"
12#include "clang/AST/Attr.h"
13#include "clang/AST/CXXInheritance.h"
14#include "clang/AST/Decl.h"
15#include "clang/AST/DeclCXX.h"
16#include "clang/AST/DeclObjC.h"
17#include "clang/AST/Expr.h"
18#include "clang/Basic/TargetInfo.h"
19#include "clang/Sema/SemaDiagnostic.h"
20#include "llvm/ADT/SmallSet.h"
21#include "llvm/Support/CrashRecoveryContext.h"
22#include "llvm/Support/Format.h"
23#include "llvm/Support/MathExtras.h"
24
25using namespace clang;
26
27namespace {
28
29/// BaseSubobjectInfo - Represents a single base subobject in a complete class.
30/// For a class hierarchy like
31///
32/// class A { };
33/// class B : A { };
34/// class C : A, B { };
35///
36/// The BaseSubobjectInfo graph for C will have three BaseSubobjectInfo
37/// instances, one for B and two for A.
38///
39/// If a base is virtual, it will only have one BaseSubobjectInfo allocated.
40struct BaseSubobjectInfo {
41  /// Class - The class for this base info.
42  const CXXRecordDecl *Class;
43
44  /// IsVirtual - Whether the BaseInfo represents a virtual base or not.
45  bool IsVirtual;
46
47  /// Bases - Information about the base subobjects.
48  SmallVector<BaseSubobjectInfo*, 4> Bases;
49
50  /// PrimaryVirtualBaseInfo - Holds the base info for the primary virtual base
51  /// of this base info (if one exists).
52  BaseSubobjectInfo *PrimaryVirtualBaseInfo;
53
54  /// Derived - The BaseSubobjectInfo of the class that claimed this subobject as its primary virtual base (null if it has not been claimed as one).
55  const BaseSubobjectInfo *Derived;
56};
57
58/// EmptySubobjectMap - Keeps track of which empty subobjects exist at different
59/// offsets while laying out a C++ class.
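///
/// For example (the classes here are illustrative, not from this source):
///
///   struct Empty { };
///   struct A : Empty { };
///   struct B : Empty { A a; };
///
/// The Empty base of B and the Empty base of member 'a' have the same type,
/// so they may not end up at the same offset; the map records which offsets
/// are already occupied by empty subobjects so such conflicts can be detected
/// when placing bases and fields.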
60class EmptySubobjectMap {
61  const ASTContext &Context;
62  uint64_t CharWidth;
63
64  /// Class - The class whose empty entries we're keeping track of.
65  const CXXRecordDecl *Class;
66
67  /// EmptyClassOffsets - A map from offsets to empty record decls.
68  typedef SmallVector<const CXXRecordDecl *, 1> ClassVectorTy;
69  typedef llvm::DenseMap<CharUnits, ClassVectorTy> EmptyClassOffsetsMapTy;
70  EmptyClassOffsetsMapTy EmptyClassOffsets;
71
72  /// MaxEmptyClassOffset - The highest offset known to contain an empty
73  /// base subobject.
74  CharUnits MaxEmptyClassOffset;
75
76  /// ComputeEmptySubobjectSizes - Compute the size of the largest base or
77  /// member subobject that is empty.
78  void ComputeEmptySubobjectSizes();
79
80  void AddSubobjectAtOffset(const CXXRecordDecl *RD, CharUnits Offset);
81
82  void UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
83                                 CharUnits Offset, bool PlacingEmptyBase);
84
85  void UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
86                                  const CXXRecordDecl *Class,
87                                  CharUnits Offset);
88  void UpdateEmptyFieldSubobjects(const FieldDecl *FD, CharUnits Offset);
89
90  /// AnyEmptySubobjectsBeyondOffset - Returns whether there are any empty
91  /// subobjects at or beyond the given offset.
92  bool AnyEmptySubobjectsBeyondOffset(CharUnits Offset) const {
93    return Offset <= MaxEmptyClassOffset;
94  }
95
96  CharUnits
97  getFieldOffset(const ASTRecordLayout &Layout, unsigned FieldNo) const {
98    uint64_t FieldOffset = Layout.getFieldOffset(FieldNo);
99    assert(FieldOffset % CharWidth == 0 &&
100           "Field offset not at char boundary!");
101
102    return Context.toCharUnitsFromBits(FieldOffset);
103  }
104
105protected:
106  bool CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
107                                 CharUnits Offset) const;
108
109  bool CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
110                                     CharUnits Offset);
111
112  bool CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
113                                      const CXXRecordDecl *Class,
114                                      CharUnits Offset) const;
115  bool CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
116                                      CharUnits Offset) const;
117
118public:
119  /// This holds the size of the largest empty subobject (either a base
120  /// or a member). Will be zero if the record being built doesn't contain
121  /// any empty classes.
122  CharUnits SizeOfLargestEmptySubobject;
123
124  EmptySubobjectMap(const ASTContext &Context, const CXXRecordDecl *Class)
125  : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) {
126      ComputeEmptySubobjectSizes();
127  }
128
129  /// CanPlaceBaseAtOffset - Return whether the given base class can be placed
130  /// at the given offset.
131  /// Returns false if placing the record will result in two components
132  /// (direct or indirect) of the same type having the same offset.
133  bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
134                            CharUnits Offset);
135
136  /// CanPlaceFieldAtOffset - Return whether a field can be placed at the given
137  /// offset.
138  bool CanPlaceFieldAtOffset(const FieldDecl *FD, CharUnits Offset);
139};
140
141void EmptySubobjectMap::ComputeEmptySubobjectSizes() {
142  // Check the bases.
143  for (CXXRecordDecl::base_class_const_iterator I = Class->bases_begin(),
144       E = Class->bases_end(); I != E; ++I) {
145    const CXXRecordDecl *BaseDecl =
146      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
147
148    CharUnits EmptySize;
149    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
150    if (BaseDecl->isEmpty()) {
151      // If the class decl is empty, get its size.
152      EmptySize = Layout.getSize();
153    } else {
154      // Otherwise, we get the largest empty subobject for the decl.
155      EmptySize = Layout.getSizeOfLargestEmptySubobject();
156    }
157
158    if (EmptySize > SizeOfLargestEmptySubobject)
159      SizeOfLargestEmptySubobject = EmptySize;
160  }
161
162  // Check the fields.
163  for (CXXRecordDecl::field_iterator I = Class->field_begin(),
164       E = Class->field_end(); I != E; ++I) {
165
166    const RecordType *RT =
167      Context.getBaseElementType(I->getType())->getAs<RecordType>();
168
169    // We only care about record types.
170    if (!RT)
171      continue;
172
173    CharUnits EmptySize;
174    const CXXRecordDecl *MemberDecl = cast<CXXRecordDecl>(RT->getDecl());
175    const ASTRecordLayout &Layout = Context.getASTRecordLayout(MemberDecl);
176    if (MemberDecl->isEmpty()) {
177      // If the class decl is empty, get its size.
178      EmptySize = Layout.getSize();
179    } else {
180      // Otherwise, we get the largest empty subobject for the decl.
181      EmptySize = Layout.getSizeOfLargestEmptySubobject();
182    }
183
184    if (EmptySize > SizeOfLargestEmptySubobject)
185      SizeOfLargestEmptySubobject = EmptySize;
186  }
187}
188
189bool
190EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
191                                             CharUnits Offset) const {
192  // We only need to check empty bases.
193  if (!RD->isEmpty())
194    return true;
195
196  EmptyClassOffsetsMapTy::const_iterator I = EmptyClassOffsets.find(Offset);
197  if (I == EmptyClassOffsets.end())
198    return true;
199
200  const ClassVectorTy& Classes = I->second;
201  if (std::find(Classes.begin(), Classes.end(), RD) == Classes.end())
202    return true;
203
204  // There is already an empty class of the same type at this offset.
205  return false;
206}
207
208void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD,
209                                             CharUnits Offset) {
210  // We only care about empty bases.
211  if (!RD->isEmpty())
212    return;
213
214  // If we have empty structures inside a union, we can assign both
215  // the same offset. Just avoid pushing them twice in the list.
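  // (For instance, in 'union U { Empty a; Empty b; };' both members live at
  // offset 0, but 'Empty' should appear only once in the vector; the names
  // are illustrative.)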
216  ClassVectorTy& Classes = EmptyClassOffsets[Offset];
217  if (std::find(Classes.begin(), Classes.end(), RD) != Classes.end())
218    return;
219
220  Classes.push_back(RD);
221
222  // Update the empty class offset.
223  if (Offset > MaxEmptyClassOffset)
224    MaxEmptyClassOffset = Offset;
225}
226
227bool
228EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
229                                                 CharUnits Offset) {
230  // We don't have to keep looking past the maximum offset that's known to
231  // contain an empty class.
232  if (!AnyEmptySubobjectsBeyondOffset(Offset))
233    return true;
234
235  if (!CanPlaceSubobjectAtOffset(Info->Class, Offset))
236    return false;
237
238  // Traverse all non-virtual bases.
239  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
240  for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
241    BaseSubobjectInfo* Base = Info->Bases[I];
242    if (Base->IsVirtual)
243      continue;
244
245    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
246
247    if (!CanPlaceBaseSubobjectAtOffset(Base, BaseOffset))
248      return false;
249  }
250
251  if (Info->PrimaryVirtualBaseInfo) {
252    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
253
254    if (Info == PrimaryVirtualBaseInfo->Derived) {
255      if (!CanPlaceBaseSubobjectAtOffset(PrimaryVirtualBaseInfo, Offset))
256        return false;
257    }
258  }
259
260  // Traverse all member variables.
261  unsigned FieldNo = 0;
262  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
263       E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
264    if (I->isBitField())
265      continue;
266
267    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
268    if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
269      return false;
270  }
271
272  return true;
273}
274
275void EmptySubobjectMap::UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
276                                                  CharUnits Offset,
277                                                  bool PlacingEmptyBase) {
278  if (!PlacingEmptyBase && Offset >= SizeOfLargestEmptySubobject) {
279    // We know that the only empty subobjects that can conflict with empty
280    // subobjects of non-empty bases are empty bases that can be placed at
281    // offset zero. Because of this, we only need to keep track of empty base
282    // subobjects with offsets less than the size of the largest empty
283    // subobject for our class.
284    return;
285  }
286
287  AddSubobjectAtOffset(Info->Class, Offset);
288
289  // Traverse all non-virtual bases.
290  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
291  for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
292    BaseSubobjectInfo* Base = Info->Bases[I];
293    if (Base->IsVirtual)
294      continue;
295
296    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
297    UpdateEmptyBaseSubobjects(Base, BaseOffset, PlacingEmptyBase);
298  }
299
300  if (Info->PrimaryVirtualBaseInfo) {
301    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
302
303    if (Info == PrimaryVirtualBaseInfo->Derived)
304      UpdateEmptyBaseSubobjects(PrimaryVirtualBaseInfo, Offset,
305                                PlacingEmptyBase);
306  }
307
308  // Traverse all member variables.
309  unsigned FieldNo = 0;
310  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
311       E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
312    if (I->isBitField())
313      continue;
314
315    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
316    UpdateEmptyFieldSubobjects(*I, FieldOffset);
317  }
318}
319
320bool EmptySubobjectMap::CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
321                                             CharUnits Offset) {
322  // If we know this class doesn't have any empty subobjects we don't need to
323  // bother checking.
324  if (SizeOfLargestEmptySubobject.isZero())
325    return true;
326
327  if (!CanPlaceBaseSubobjectAtOffset(Info, Offset))
328    return false;
329
330  // We are able to place the base at this offset. Make sure to update the
331  // empty base subobject map.
332  UpdateEmptyBaseSubobjects(Info, Offset, Info->Class->isEmpty());
333  return true;
334}
335
336bool
337EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
338                                                  const CXXRecordDecl *Class,
339                                                  CharUnits Offset) const {
340  // We don't have to keep looking past the maximum offset that's known to
341  // contain an empty class.
342  if (!AnyEmptySubobjectsBeyondOffset(Offset))
343    return true;
344
345  if (!CanPlaceSubobjectAtOffset(RD, Offset))
346    return false;
347
348  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
349
350  // Traverse all non-virtual bases.
351  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
352       E = RD->bases_end(); I != E; ++I) {
353    if (I->isVirtual())
354      continue;
355
356    const CXXRecordDecl *BaseDecl =
357      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
358
359    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
360    if (!CanPlaceFieldSubobjectAtOffset(BaseDecl, Class, BaseOffset))
361      return false;
362  }
363
364  if (RD == Class) {
365    // This is the most derived class, traverse virtual bases as well.
366    for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
367         E = RD->vbases_end(); I != E; ++I) {
368      const CXXRecordDecl *VBaseDecl =
369        cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
370
371      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
372      if (!CanPlaceFieldSubobjectAtOffset(VBaseDecl, Class, VBaseOffset))
373        return false;
374    }
375  }
376
377  // Traverse all member variables.
378  unsigned FieldNo = 0;
379  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
380       I != E; ++I, ++FieldNo) {
381    if (I->isBitField())
382      continue;
383
384    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
385
386    if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
387      return false;
388  }
389
390  return true;
391}
392
393bool
394EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
395                                                  CharUnits Offset) const {
396  // We don't have to keep looking past the maximum offset that's known to
397  // contain an empty class.
398  if (!AnyEmptySubobjectsBeyondOffset(Offset))
399    return true;
400
401  QualType T = FD->getType();
402  if (const RecordType *RT = T->getAs<RecordType>()) {
403    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
404    return CanPlaceFieldSubobjectAtOffset(RD, RD, Offset);
405  }
406
407  // If we have an array type we need to look at every element.
408  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
409    QualType ElemTy = Context.getBaseElementType(AT);
410    const RecordType *RT = ElemTy->getAs<RecordType>();
411    if (!RT)
412      return true;
413
414    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
415    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
416
417    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
418    CharUnits ElementOffset = Offset;
419    for (uint64_t I = 0; I != NumElements; ++I) {
420      // We don't have to keep looking past the maximum offset that's known to
421      // contain an empty class.
422      if (!AnyEmptySubobjectsBeyondOffset(ElementOffset))
423        return true;
424
425      if (!CanPlaceFieldSubobjectAtOffset(RD, RD, ElementOffset))
426        return false;
427
428      ElementOffset += Layout.getSize();
429    }
430  }
431
432  return true;
433}
434
435bool
436EmptySubobjectMap::CanPlaceFieldAtOffset(const FieldDecl *FD,
437                                         CharUnits Offset) {
438  if (!CanPlaceFieldSubobjectAtOffset(FD, Offset))
439    return false;
440
441  // We are able to place the member variable at this offset.
442  // Make sure to update the empty base subobject map.
443  UpdateEmptyFieldSubobjects(FD, Offset);
444  return true;
445}
446
447void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
448                                                   const CXXRecordDecl *Class,
449                                                   CharUnits Offset) {
450  // We know that the only empty subobjects that can conflict with empty
451  // field subobjects are subobjects of empty bases that can be placed at offset
452  // zero. Because of this, we only need to keep track of empty field
453  // subobjects with offsets less than the size of the largest empty
454  // subobject for our class.
455  if (Offset >= SizeOfLargestEmptySubobject)
456    return;
457
458  AddSubobjectAtOffset(RD, Offset);
459
460  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
461
462  // Traverse all non-virtual bases.
463  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
464       E = RD->bases_end(); I != E; ++I) {
465    if (I->isVirtual())
466      continue;
467
468    const CXXRecordDecl *BaseDecl =
469      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
470
471    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
472    UpdateEmptyFieldSubobjects(BaseDecl, Class, BaseOffset);
473  }
474
475  if (RD == Class) {
476    // This is the most derived class, traverse virtual bases as well.
477    for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
478         E = RD->vbases_end(); I != E; ++I) {
479      const CXXRecordDecl *VBaseDecl =
480      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
481
482      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
483      UpdateEmptyFieldSubobjects(VBaseDecl, Class, VBaseOffset);
484    }
485  }
486
487  // Traverse all member variables.
488  unsigned FieldNo = 0;
489  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
490       I != E; ++I, ++FieldNo) {
491    if (I->isBitField())
492      continue;
493
494    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
495
496    UpdateEmptyFieldSubobjects(*I, FieldOffset);
497  }
498}
499
500void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const FieldDecl *FD,
501                                                   CharUnits Offset) {
502  QualType T = FD->getType();
503  if (const RecordType *RT = T->getAs<RecordType>()) {
504    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
505    UpdateEmptyFieldSubobjects(RD, RD, Offset);
506    return;
507  }
508
509  // If we have an array type we need to update every element.
510  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
511    QualType ElemTy = Context.getBaseElementType(AT);
512    const RecordType *RT = ElemTy->getAs<RecordType>();
513    if (!RT)
514      return;
515
516    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
517    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
518
519    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
520    CharUnits ElementOffset = Offset;
521
522    for (uint64_t I = 0; I != NumElements; ++I) {
523      // We know that the only empty subobjects that can conflict with empty
524      // field subobjects are subobjects of empty bases that can be placed at
525      // offset zero. Because of this, we only need to keep track of empty field
526      // subobjects with offsets less than the size of the largest empty
527      // subobject for our class.
528      if (ElementOffset >= SizeOfLargestEmptySubobject)
529        return;
530
531      UpdateEmptyFieldSubobjects(RD, RD, ElementOffset);
532      ElementOffset += Layout.getSize();
533    }
534  }
535}
536
537typedef llvm::SmallPtrSet<const CXXRecordDecl*, 4> ClassSetTy;
538
539class RecordLayoutBuilder {
540protected:
541  // FIXME: Remove this and make the appropriate fields public.
542  friend class clang::ASTContext;
543
544  const ASTContext &Context;
545
546  EmptySubobjectMap *EmptySubobjects;
547
548  /// Size - The current size of the record layout.
549  uint64_t Size;
550
551  /// Alignment - The current alignment of the record layout.
552  CharUnits Alignment;
553
554  /// \brief The alignment if attribute packed is not used.
555  CharUnits UnpackedAlignment;
556
557  SmallVector<uint64_t, 16> FieldOffsets;
558
559  /// \brief Whether the external AST source has provided a layout for this
560  /// record.
561  unsigned ExternalLayout : 1;
562
563  /// \brief Whether we need to infer alignment, even when we have an
564  /// externally-provided layout.
565  unsigned InferAlignment : 1;
566
567  /// Packed - Whether the record is packed or not.
568  unsigned Packed : 1;
569
570  unsigned IsUnion : 1;
571
572  unsigned IsMac68kAlign : 1;
573
574  unsigned IsMsStruct : 1;
575
576  /// UnfilledBitsInLastUnit - If the last field laid out was a bitfield,
577  /// this contains the number of bits in the last unit that can be used for
578  /// an adjacent bitfield if necessary.  The unit in question is usually
579  /// a byte, but larger units are used if IsMsStruct.
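  /// For example, after laying out 'char a : 3' in 'struct S { char a : 3;
  /// char b : 4; };' five bits of that byte remain available and 'b' is
  /// packed into the same unit (the struct is illustrative).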
580  unsigned char UnfilledBitsInLastUnit;
581  /// LastBitfieldTypeSize - If IsMsStruct, represents the size of the type
582  /// of the previous field if it was a bitfield.
583  unsigned char LastBitfieldTypeSize;
584
585  /// MaxFieldAlignment - The maximum allowed field alignment. This is set by
586  /// #pragma pack.
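  /// For example, '#pragma pack(2)' caps every field's alignment at 2 bytes.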
587  CharUnits MaxFieldAlignment;
588
589  /// DataSize - The data size of the record being laid out.
590  uint64_t DataSize;
591
592  CharUnits NonVirtualSize;
593  CharUnits NonVirtualAlignment;
594
595  /// PrimaryBase - the primary base class (if one exists) of the class
596  /// we're laying out.
597  const CXXRecordDecl *PrimaryBase;
598
599  /// PrimaryBaseIsVirtual - Whether the primary base of the class we're laying
600  /// out is virtual.
601  bool PrimaryBaseIsVirtual;
602
603  /// HasOwnVFPtr - Whether the class provides its own vtable/vftbl
604  /// pointer, as opposed to inheriting one from a primary base class.
605  bool HasOwnVFPtr;
606
607  /// HasOwnVBPtr - Whether the class provides its own vbtbl
608  /// pointer, as opposed to inheriting one from a base class. Only for MS.
609  bool HasOwnVBPtr;
610
611  /// VBPtrOffset - Virtual base table offset. Only for MS layout.
612  CharUnits VBPtrOffset;
613
614  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
615
616  /// Bases - base classes and their offsets in the record.
617  BaseOffsetsMapTy Bases;
618
619  /// VBases - virtual base classes and their offsets in the record.
620  ASTRecordLayout::VBaseOffsetsMapTy VBases;
621
622  /// IndirectPrimaryBases - Virtual base classes, direct or indirect, that are
623  /// primary base classes for some other direct or indirect base class.
624  CXXIndirectPrimaryBaseSet IndirectPrimaryBases;
625
626  /// FirstNearlyEmptyVBase - The first nearly empty virtual base class in
627  /// inheritance graph order. Used for determining the primary base class.
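  /// (A "nearly empty" class contains a vtable pointer but no other data,
  /// apart possibly from virtual bases, e.g. 'struct A { virtual void f(); };'.)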
628  const CXXRecordDecl *FirstNearlyEmptyVBase;
629
630  /// VisitedVirtualBases - A set of all the visited virtual bases, used to
631  /// avoid visiting virtual bases more than once.
632  llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBases;
633
634  /// \brief Externally-provided size.
635  uint64_t ExternalSize;
636
637  /// \brief Externally-provided alignment.
638  uint64_t ExternalAlign;
639
640  /// \brief Externally-provided field offsets.
641  llvm::DenseMap<const FieldDecl *, uint64_t> ExternalFieldOffsets;
642
643  /// \brief Externally-provided direct, non-virtual base offsets.
644  llvm::DenseMap<const CXXRecordDecl *, CharUnits> ExternalBaseOffsets;
645
646  /// \brief Externally-provided virtual base offsets.
647  llvm::DenseMap<const CXXRecordDecl *, CharUnits> ExternalVirtualBaseOffsets;
648
649  RecordLayoutBuilder(const ASTContext &Context,
650                      EmptySubobjectMap *EmptySubobjects)
651    : Context(Context), EmptySubobjects(EmptySubobjects), Size(0),
652      Alignment(CharUnits::One()), UnpackedAlignment(CharUnits::One()),
653      ExternalLayout(false), InferAlignment(false),
654      Packed(false), IsUnion(false), IsMac68kAlign(false), IsMsStruct(false),
655      UnfilledBitsInLastUnit(0), LastBitfieldTypeSize(0),
656      MaxFieldAlignment(CharUnits::Zero()),
657      DataSize(0), NonVirtualSize(CharUnits::Zero()),
658      NonVirtualAlignment(CharUnits::One()),
659      PrimaryBase(0), PrimaryBaseIsVirtual(false),
660      HasOwnVFPtr(false),
661      HasOwnVBPtr(false),
662      VBPtrOffset(CharUnits::fromQuantity(-1)),
663      FirstNearlyEmptyVBase(0) { }
664
665  /// Reset this RecordLayoutBuilder to a fresh state, using the given
666  /// alignment as the initial alignment.  This is used for the
667  /// correct layout of vb-table pointers in MSVC.
668  void resetWithTargetAlignment(CharUnits TargetAlignment) {
669    const ASTContext &Context = this->Context;
670    EmptySubobjectMap *EmptySubobjects = this->EmptySubobjects;
671    this->~RecordLayoutBuilder();
672    new (this) RecordLayoutBuilder(Context, EmptySubobjects);
673    Alignment = UnpackedAlignment = TargetAlignment;
674  }
675
676  void Layout(const RecordDecl *D);
677  void Layout(const CXXRecordDecl *D);
678  void Layout(const ObjCInterfaceDecl *D);
679
680  void LayoutFields(const RecordDecl *D);
681  void LayoutField(const FieldDecl *D);
682  void LayoutWideBitField(uint64_t FieldSize, uint64_t TypeSize,
683                          bool FieldPacked, const FieldDecl *D);
684  void LayoutBitField(const FieldDecl *D);
685
686  TargetCXXABI getCXXABI() const {
687    return Context.getTargetInfo().getCXXABI();
688  }
689
690  bool isMicrosoftCXXABI() const {
691    return getCXXABI().isMicrosoft();
692  }
693
694  void MSLayoutVirtualBases(const CXXRecordDecl *RD);
695
696  /// BaseSubobjectInfoAllocator - Allocator for BaseSubobjectInfo objects.
697  llvm::SpecificBumpPtrAllocator<BaseSubobjectInfo> BaseSubobjectInfoAllocator;
698
699  typedef llvm::DenseMap<const CXXRecordDecl *, BaseSubobjectInfo *>
700    BaseSubobjectInfoMapTy;
701
702  /// VirtualBaseInfo - Map from all the (direct or indirect) virtual bases
703  /// of the class we're laying out to their base subobject info.
704  BaseSubobjectInfoMapTy VirtualBaseInfo;
705
706  /// NonVirtualBaseInfo - Map from all the direct non-virtual bases of the
707  /// class we're laying out to their base subobject info.
708  BaseSubobjectInfoMapTy NonVirtualBaseInfo;
709
710  /// ComputeBaseSubobjectInfo - Compute the base subobject information for the
711  /// bases of the given class.
712  void ComputeBaseSubobjectInfo(const CXXRecordDecl *RD);
713
714  /// ComputeBaseSubobjectInfo - Compute the base subobject information for a
715  /// single class and all of its base classes.
716  BaseSubobjectInfo *ComputeBaseSubobjectInfo(const CXXRecordDecl *RD,
717                                              bool IsVirtual,
718                                              BaseSubobjectInfo *Derived);
719
720  /// DeterminePrimaryBase - Determine the primary base of the given class.
721  void DeterminePrimaryBase(const CXXRecordDecl *RD);
722
723  void SelectPrimaryVBase(const CXXRecordDecl *RD);
724
725  void EnsureVTablePointerAlignment(CharUnits UnpackedBaseAlign);
726
727  /// LayoutNonVirtualBases - Determines the primary base class (if any) and
728  /// lays it out. Will then proceed to lay out all non-virtual base classes.
729  void LayoutNonVirtualBases(const CXXRecordDecl *RD);
730
731  /// LayoutNonVirtualBase - Lays out a single non-virtual base.
732  void LayoutNonVirtualBase(const BaseSubobjectInfo *Base);
733
734  void AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
735                                    CharUnits Offset);
736
737  bool needsVFTable(const CXXRecordDecl *RD) const;
738  bool hasNewVirtualFunction(const CXXRecordDecl *RD,
739                             bool IgnoreDestructor = false) const;
740  bool isPossiblePrimaryBase(const CXXRecordDecl *Base) const;
741
742  void computeVtordisps(const CXXRecordDecl *RD,
743                        ClassSetTy &VtordispVBases);
744
745  /// LayoutVirtualBases - Lays out all the virtual bases.
746  void LayoutVirtualBases(const CXXRecordDecl *RD,
747                          const CXXRecordDecl *MostDerivedClass);
748
749  /// LayoutVirtualBase - Lays out a single virtual base.
750  void LayoutVirtualBase(const BaseSubobjectInfo *Base,
751                         bool IsVtordispNeed = false);
752
753  /// LayoutBase - Will lay out a base and return the offset where it was
754  /// placed, in chars.
755  CharUnits LayoutBase(const BaseSubobjectInfo *Base);
756
757  /// InitializeLayout - Initialize record layout for the given record decl.
758  void InitializeLayout(const Decl *D);
759
760  /// FinishLayout - Finalize record layout. Adjust record size based on the
761  /// alignment.
762  void FinishLayout(const NamedDecl *D);
763
764  void UpdateAlignment(CharUnits NewAlignment, CharUnits UnpackedNewAlignment);
765  void UpdateAlignment(CharUnits NewAlignment) {
766    UpdateAlignment(NewAlignment, NewAlignment);
767  }
768
769  /// \brief Retrieve the externally-supplied field offset for the given
770  /// field.
771  ///
772  /// \param Field The field whose offset is being queried.
773  /// \param ComputedOffset The offset that we've computed for this field.
774  uint64_t updateExternalFieldOffset(const FieldDecl *Field,
775                                     uint64_t ComputedOffset);
776
777  void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset,
778                          uint64_t UnpackedOffset, unsigned UnpackedAlign,
779                          bool isPacked, const FieldDecl *D);
780
781  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID);
782
783  CharUnits getSize() const {
784    assert(Size % Context.getCharWidth() == 0);
785    return Context.toCharUnitsFromBits(Size);
786  }
787  uint64_t getSizeInBits() const { return Size; }
788
789  void setSize(CharUnits NewSize) { Size = Context.toBits(NewSize); }
790  void setSize(uint64_t NewSize) { Size = NewSize; }
791
792  CharUnits getAligment() const { return Alignment; }
793
794  CharUnits getDataSize() const {
795    assert(DataSize % Context.getCharWidth() == 0);
796    return Context.toCharUnitsFromBits(DataSize);
797  }
798  uint64_t getDataSizeInBits() const { return DataSize; }
799
800  void setDataSize(CharUnits NewSize) { DataSize = Context.toBits(NewSize); }
801  void setDataSize(uint64_t NewSize) { DataSize = NewSize; }
802
803  RecordLayoutBuilder(const RecordLayoutBuilder &) LLVM_DELETED_FUNCTION;
804  void operator=(const RecordLayoutBuilder &) LLVM_DELETED_FUNCTION;
805};
806} // end anonymous namespace
807
808void
809RecordLayoutBuilder::SelectPrimaryVBase(const CXXRecordDecl *RD) {
810  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
811         E = RD->bases_end(); I != E; ++I) {
812    assert(!I->getType()->isDependentType() &&
813           "Cannot layout class with dependent bases.");
814
815    const CXXRecordDecl *Base =
816      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
817
818    // Check if this is a nearly empty virtual base.
819    if (I->isVirtual() && Context.isNearlyEmpty(Base)) {
820      // If it's not an indirect primary base, then we've found our primary
821      // base.
822      if (!IndirectPrimaryBases.count(Base)) {
823        PrimaryBase = Base;
824        PrimaryBaseIsVirtual = true;
825        return;
826      }
827
828      // Is this the first nearly empty virtual base?
829      if (!FirstNearlyEmptyVBase)
830        FirstNearlyEmptyVBase = Base;
831    }
832
833    SelectPrimaryVBase(Base);
834    if (PrimaryBase)
835      return;
836  }
837}
838
839/// DeterminePrimaryBase - Determine the primary base of the given class.
840void RecordLayoutBuilder::DeterminePrimaryBase(const CXXRecordDecl *RD) {
841  // If the class isn't dynamic, it won't have a primary base.
842  if (!RD->isDynamicClass())
843    return;
844
845  // Compute all the primary virtual bases for all of our direct and
846  // indirect bases, and record all their primary virtual base classes.
847  RD->getIndirectPrimaryBases(IndirectPrimaryBases);
848
849  // If the record has a dynamic base class, attempt to choose a primary base
850  // class. It is the first (in direct base class order) non-virtual dynamic
851  // base class, if one exists.
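  // For example (illustrative):
  //   struct A { virtual void f(); };
  //   struct B { int x; };
  //   struct D : B, A { };
  // Here A is D's primary base: it is laid out at offset zero and D shares
  // its vtable pointer with the A subobject.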
852  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
853         e = RD->bases_end(); i != e; ++i) {
854    // Ignore virtual bases.
855    if (i->isVirtual())
856      continue;
857
858    const CXXRecordDecl *Base =
859      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
860
861    if (isPossiblePrimaryBase(Base)) {
862      // We found it.
863      PrimaryBase = Base;
864      PrimaryBaseIsVirtual = false;
865      return;
866    }
867  }
868
869  // The Microsoft ABI doesn't have primary virtual bases.
870  if (isMicrosoftCXXABI()) {
871    assert(!PrimaryBase && "Should not get here with a primary base!");
872    return;
873  }
874
875  // Under the Itanium ABI, if there is no non-virtual primary base class,
876  // try to compute the primary virtual base.  The primary virtual base is
877  // the first nearly empty virtual base that is not an indirect primary
878  // virtual base class, if one exists.
879  if (RD->getNumVBases() != 0) {
880    SelectPrimaryVBase(RD);
881    if (PrimaryBase)
882      return;
883  }
884
885  // Otherwise, it is the first indirect primary base class, if one exists.
886  if (FirstNearlyEmptyVBase) {
887    PrimaryBase = FirstNearlyEmptyVBase;
888    PrimaryBaseIsVirtual = true;
889    return;
890  }
891
892  assert(!PrimaryBase && "Should not get here with a primary base!");
893}
894
895BaseSubobjectInfo *
896RecordLayoutBuilder::ComputeBaseSubobjectInfo(const CXXRecordDecl *RD,
897                                              bool IsVirtual,
898                                              BaseSubobjectInfo *Derived) {
899  BaseSubobjectInfo *Info;
900
901  if (IsVirtual) {
902    // Check if we already have info about this virtual base.
903    BaseSubobjectInfo *&InfoSlot = VirtualBaseInfo[RD];
904    if (InfoSlot) {
905      assert(InfoSlot->Class == RD && "Wrong class for virtual base info!");
906      return InfoSlot;
907    }
908
909    // We don't; create it.
910    InfoSlot = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
911    Info = InfoSlot;
912  } else {
913    Info = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
914  }
915
916  Info->Class = RD;
917  Info->IsVirtual = IsVirtual;
918  Info->Derived = 0;
919  Info->PrimaryVirtualBaseInfo = 0;
920
921  const CXXRecordDecl *PrimaryVirtualBase = 0;
922  BaseSubobjectInfo *PrimaryVirtualBaseInfo = 0;
923
924  // Check if this base has a primary virtual base.
925  if (RD->getNumVBases()) {
926    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
927    if (Layout.isPrimaryBaseVirtual()) {
928      // This base does have a primary virtual base.
929      PrimaryVirtualBase = Layout.getPrimaryBase();
930      assert(PrimaryVirtualBase && "Didn't have a primary virtual base!");
931
932      // Now check if we have base subobject info about this primary base.
933      PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
934
935      if (PrimaryVirtualBaseInfo) {
936        if (PrimaryVirtualBaseInfo->Derived) {
937          // We did have info about this primary base, and it turns out that it
938          // has already been claimed as a primary virtual base for another
939          // base.
940          PrimaryVirtualBase = 0;
941        } else {
942          // We can claim this base as our primary base.
943          Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
944          PrimaryVirtualBaseInfo->Derived = Info;
945        }
946      }
947    }
948  }
949
950  // Now go through all direct bases.
951  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
952       E = RD->bases_end(); I != E; ++I) {
953    bool IsVirtual = I->isVirtual();
954
955    const CXXRecordDecl *BaseDecl =
956      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
957
958    Info->Bases.push_back(ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, Info));
959  }
960
961  if (PrimaryVirtualBase && !PrimaryVirtualBaseInfo) {
962    // Traversing the bases must have created the base info for our primary
963    // virtual base.
964    PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
965    assert(PrimaryVirtualBaseInfo &&
966           "Did not create a primary virtual base!");
967
968    // Claim the primary virtual base as our primary virtual base.
969    Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
970    PrimaryVirtualBaseInfo->Derived = Info;
971  }
972
973  return Info;
974}
975
976void RecordLayoutBuilder::ComputeBaseSubobjectInfo(const CXXRecordDecl *RD) {
977  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
978       E = RD->bases_end(); I != E; ++I) {
979    bool IsVirtual = I->isVirtual();
980
981    const CXXRecordDecl *BaseDecl =
982      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
983
984    // Compute the base subobject info for this base.
985    BaseSubobjectInfo *Info = ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, 0);
986
987    if (IsVirtual) {
988      // ComputeBaseSubobjectInfo has already added this base for us.
989      assert(VirtualBaseInfo.count(BaseDecl) &&
990             "Did not add virtual base!");
991    } else {
992      // Add the base info to the map of non-virtual bases.
993      assert(!NonVirtualBaseInfo.count(BaseDecl) &&
994             "Non-virtual base already exists!");
995      NonVirtualBaseInfo.insert(std::make_pair(BaseDecl, Info));
996    }
997  }
998}
999
1000void
1001RecordLayoutBuilder::EnsureVTablePointerAlignment(CharUnits UnpackedBaseAlign) {
1002  CharUnits BaseAlign = (Packed) ? CharUnits::One() : UnpackedBaseAlign;
1003
1004  // The maximum field alignment overrides base align.
1005  if (!MaxFieldAlignment.isZero()) {
1006    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
1007    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
1008  }
1009
1010  // Round up the current record size to pointer alignment.
1011  setSize(getSize().RoundUpToAlignment(BaseAlign));
1012  setDataSize(getSize());
1013
1014  // Update the alignment.
1015  UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1016}
1017
1018void
1019RecordLayoutBuilder::LayoutNonVirtualBases(const CXXRecordDecl *RD) {
1020  // First, determine the primary base class.
1021  DeterminePrimaryBase(RD);
1022
1023  // Compute base subobject info.
1024  ComputeBaseSubobjectInfo(RD);
1025
1026  // If we have a primary base class, lay it out.
1027  if (PrimaryBase) {
1028    if (PrimaryBaseIsVirtual) {
1029      // If the primary virtual base was a primary virtual base of some other
1030      // base class we'll have to steal it.
1031      BaseSubobjectInfo *PrimaryBaseInfo = VirtualBaseInfo.lookup(PrimaryBase);
1032      PrimaryBaseInfo->Derived = 0;
1033
1034      // We have a virtual primary base, insert it as an indirect primary base.
1035      IndirectPrimaryBases.insert(PrimaryBase);
1036
1037      assert(!VisitedVirtualBases.count(PrimaryBase) &&
1038             "vbase already visited!");
1039      VisitedVirtualBases.insert(PrimaryBase);
1040
1041      LayoutVirtualBase(PrimaryBaseInfo);
1042    } else {
1043      BaseSubobjectInfo *PrimaryBaseInfo =
1044        NonVirtualBaseInfo.lookup(PrimaryBase);
1045      assert(PrimaryBaseInfo &&
1046             "Did not find base info for non-virtual primary base!");
1047
1048      LayoutNonVirtualBase(PrimaryBaseInfo);
1049    }
1050
1051  // If this class needs a vtable/vf-table and didn't get one from a
1052  // primary base, add it in now.
1053  } else if (needsVFTable(RD)) {
1054    assert(DataSize == 0 && "Vtable pointer must be at offset zero!");
1055    CharUnits PtrWidth =
1056      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
1057    CharUnits PtrAlign =
1058      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
1059    EnsureVTablePointerAlignment(PtrAlign);
1060    HasOwnVFPtr = true;
1061    setSize(getSize() + PtrWidth);
1062    setDataSize(getSize());
1063  }
1064
1065  bool HasDirectVirtualBases = false;
1066  bool HasNonVirtualBaseWithVBTable = false;
1067
1068  // Now lay out the non-virtual bases.
1069  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1070         E = RD->bases_end(); I != E; ++I) {
1071
1072    // Ignore virtual bases, but remember that we saw one.
1073    if (I->isVirtual()) {
1074      HasDirectVirtualBases = true;
1075      continue;
1076    }
1077
1078    const CXXRecordDecl *BaseDecl =
1079      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
1080
1081    // Remember if this base has virtual bases itself.
1082    if (BaseDecl->getNumVBases()) {
1083      // (No need to query the base's layout here; only the flag matters.)
1084      HasNonVirtualBaseWithVBTable = true;
1085    }
1086
1087    // Skip the primary base, because we've already laid it out.  The
1088    // !PrimaryBaseIsVirtual check is required because we might have a
1089    // non-virtual base of the same type as a primary virtual base.
1090    if (BaseDecl == PrimaryBase && !PrimaryBaseIsVirtual)
1091      continue;
1092
1093    // Lay out the base.
1094    BaseSubobjectInfo *BaseInfo = NonVirtualBaseInfo.lookup(BaseDecl);
1095    assert(BaseInfo && "Did not find base info for non-virtual base!");
1096
1097    LayoutNonVirtualBase(BaseInfo);
1098  }
1099
1100  // In the MS ABI, add the vb-table pointer if we need one, which is
1101  // whenever we have a virtual base and we can't re-use a vb-table
1102  // pointer from a non-virtual base.
1103  if (isMicrosoftCXXABI() &&
1104      HasDirectVirtualBases && !HasNonVirtualBaseWithVBTable) {
1105    CharUnits PtrWidth =
1106      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
1107    CharUnits PtrAlign =
1108      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
1109
1110    // MSVC potentially over-aligns the vb-table pointer by giving it
1111    // the max alignment of all the non-virtual objects in the class.
1112    // This is completely unnecessary, but we're not here to pass
1113    // judgment.
1114    //
1115    // Note that we've only laid out the non-virtual bases, so on the
1116    // first pass Alignment won't be set correctly here, but if the
1117    // vb-table doesn't end up aligned correctly we'll come through
1118    // and redo the layout from scratch with the right alignment.
1119    //
1120    // TODO: Instead of doing this, just lay out the fields as if the
1121    // vb-table were at offset zero, then retroactively bump the field
1122    // offsets up.
1123    PtrAlign = std::max(PtrAlign, Alignment);
1124
1125    EnsureVTablePointerAlignment(PtrAlign);
1126    HasOwnVBPtr = true;
1127    VBPtrOffset = getSize();
1128    setSize(getSize() + PtrWidth);
1129    setDataSize(getSize());
1130  }
1131}
1132
1133void RecordLayoutBuilder::LayoutNonVirtualBase(const BaseSubobjectInfo *Base) {
1134  // Layout the base.
1135  CharUnits Offset = LayoutBase(Base);
1136
1137  // Add its base class offset.
1138  assert(!Bases.count(Base->Class) && "base offset already exists!");
1139  Bases.insert(std::make_pair(Base->Class, Offset));
1140
1141  AddPrimaryVirtualBaseOffsets(Base, Offset);
1142}
1143
1144void
1145RecordLayoutBuilder::AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
1146                                                  CharUnits Offset) {
1147  // This base isn't interesting, it has no virtual bases.
1148  if (!Info->Class->getNumVBases())
1149    return;
1150
1151  // First, check if we have a virtual primary base to add offsets for.
1152  if (Info->PrimaryVirtualBaseInfo) {
1153    assert(Info->PrimaryVirtualBaseInfo->IsVirtual &&
1154           "Primary virtual base is not virtual!");
1155    if (Info->PrimaryVirtualBaseInfo->Derived == Info) {
1156      // Add the offset.
1157      assert(!VBases.count(Info->PrimaryVirtualBaseInfo->Class) &&
1158             "primary vbase offset already exists!");
1159      VBases.insert(std::make_pair(Info->PrimaryVirtualBaseInfo->Class,
1160                                   ASTRecordLayout::VBaseInfo(Offset, false)));
1161
1162      // Traverse the primary virtual base.
1163      AddPrimaryVirtualBaseOffsets(Info->PrimaryVirtualBaseInfo, Offset);
1164    }
1165  }
1166
1167  // Now go through all direct non-virtual bases.
1168  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
1169  for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
1170    const BaseSubobjectInfo *Base = Info->Bases[I];
1171    if (Base->IsVirtual)
1172      continue;
1173
1174    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
1175    AddPrimaryVirtualBaseOffsets(Base, BaseOffset);
1176  }
1177}
1178
1179/// needsVFTable - Return true if this class needs a vtable or vf-table
1180/// when laid out as a base class.  These are treated the same because
1181/// they're both always laid out at offset zero.
1182///
1183/// This function assumes that the class has no primary base.
1184bool RecordLayoutBuilder::needsVFTable(const CXXRecordDecl *RD) const {
1185  assert(!PrimaryBase);
1186
1187  // In the Itanium ABI, every dynamic class needs a vtable: even if
1188  // this class has no virtual functions as a base class (i.e. it's
1189  // non-polymorphic or only has virtual functions from virtual
1190  // bases), it still needs a vtable to locate its virtual bases.
1191  if (!isMicrosoftCXXABI())
1192    return RD->isDynamicClass();
1193
1194  // In the MS ABI, we need a vfptr if the class has virtual functions
1195  // other than those declared by its virtual bases.  The AST doesn't
1196  // tell us that directly, and checking manually for virtual
1197  // functions that aren't overrides is expensive, but there are
1198  // some important shortcuts:
1199
1200  //  - Non-polymorphic classes have no virtual functions at all.
1201  if (!RD->isPolymorphic()) return false;
1202
1203  //  - Polymorphic classes with no virtual bases must either declare
1204  //    virtual functions directly or inherit them, but in the latter
1205  //    case we would have a primary base.
1206  if (RD->getNumVBases() == 0) return true;
1207
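  //  - Otherwise, the class has virtual bases; check whether it declares a
  //    virtual function that overrides nothing (illustrative example):
  //      struct A { virtual void f(); };
  //      struct B : virtual A { };                   // no new vfptr needed
  //      struct C : virtual A { virtual void g(); }; // needs its own vfptr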
1208  return hasNewVirtualFunction(RD);
1209}
1210
1211/// Does the given class inherit non-virtually from any of the classes
1212/// in the given set?
1213static bool hasNonVirtualBaseInSet(const CXXRecordDecl *RD,
1214                                   const ClassSetTy &set) {
1215  for (CXXRecordDecl::base_class_const_iterator
1216         I = RD->bases_begin(), E = RD->bases_end(); I != E; ++I) {
1217    // Ignore virtual links.
1218    if (I->isVirtual()) continue;
1219
1220    // Check whether the set contains the base.
1221    const CXXRecordDecl *base = I->getType()->getAsCXXRecordDecl();
1222    if (set.count(base))
1223      return true;
1224
1225    // Otherwise, recurse and propagate.
1226    if (hasNonVirtualBaseInSet(base, set))
1227      return true;
1228  }
1229
1230  return false;
1231}
1232
1233/// Does the given method (B::foo()) already override a method (A::foo())
1234/// such that A requires a vtordisp in B?  If so, we don't need to add a
1235/// new vtordisp for B in a yet-more-derived class C providing C::foo().
1236static bool overridesMethodRequiringVtorDisp(const ASTContext &Context,
1237                                             const CXXMethodDecl *M) {
1238  CXXMethodDecl::method_iterator
1239    I = M->begin_overridden_methods(), E = M->end_overridden_methods();
1240  if (I == E) return false;
1241
1242  const ASTRecordLayout::VBaseOffsetsMapTy &offsets =
1243    Context.getASTRecordLayout(M->getParent()).getVBaseOffsetsMap();
1244  do {
1245    const CXXMethodDecl *overridden = *I;
1246
1247    // If the overridden method's class isn't recognized as a virtual
1248    // base in the derived class, ignore it.
1249    ASTRecordLayout::VBaseOffsetsMapTy::const_iterator
1250      it = offsets.find(overridden->getParent());
1251    if (it == offsets.end()) continue;
1252
1253    // Otherwise, check if the overridden method's class needs a vtordisp.
1254    if (it->second.hasVtorDisp()) return true;
1255
1256  } while (++I != E);
1257  return false;
1258}
1259
1260/// In the Microsoft ABI, decide which of the virtual bases require a
1261/// vtordisp field.
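/// (A vtordisp is a 32-bit displacement stored just before the virtual base;
/// it lets virtual calls made during construction and destruction of the
/// derived class adjust 'this' correctly.)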
1262void RecordLayoutBuilder::computeVtordisps(const CXXRecordDecl *RD,
1263                                           ClassSetTy &vtordispVBases) {
1264  // Bail out if we have no virtual bases.
1265  assert(RD->getNumVBases());
1266
1267  // Build up the set of virtual bases that we haven't decided yet.
1268  ClassSetTy undecidedVBases;
1269  for (CXXRecordDecl::base_class_const_iterator
1270         I = RD->vbases_begin(), E = RD->vbases_end(); I != E; ++I) {
1271    const CXXRecordDecl *vbase = I->getType()->getAsCXXRecordDecl();
1272    undecidedVBases.insert(vbase);
1273  }
1274  assert(!undecidedVBases.empty());
1275
1276  // A virtual base requires a vtordisp field in a derived class if it
1277  // requires a vtordisp field in a base class.  Walk all the direct
1278  // bases and collect this information.
1279  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1280       E = RD->bases_end(); I != E; ++I) {
1281    const CXXRecordDecl *base = I->getType()->getAsCXXRecordDecl();
1282    const ASTRecordLayout &baseLayout = Context.getASTRecordLayout(base);
1283
1284    // Iterate over the set of virtual bases provided by this class.
1285    for (ASTRecordLayout::VBaseOffsetsMapTy::const_iterator
1286           VI = baseLayout.getVBaseOffsetsMap().begin(),
1287           VE = baseLayout.getVBaseOffsetsMap().end(); VI != VE; ++VI) {
1288      // If it doesn't need a vtordisp in this base, ignore it.
1289      if (!VI->second.hasVtorDisp()) continue;
1290
1291      // If we've already seen it and decided it needs a vtordisp, ignore it.
1292      if (!undecidedVBases.erase(VI->first))
1293        continue;
1294
1295      // Add it.
1296      vtordispVBases.insert(VI->first);
1297
1298      // Quit as soon as we've decided everything.
1299      if (undecidedVBases.empty())
1300        return;
1301    }
1302  }
1303
1304  // Okay, we have virtual bases that we haven't yet decided about.  A
1305  // virtual base requires a vtordisp if any of the non-destructor
1306  // virtual methods declared in this class directly override a method
1307  // provided by that virtual base.  (If so, we need to emit a thunk
1308  // for that method, to be used in the construction vftable, which
1309  // applies an additional 'vtordisp' this-adjustment.)
1310
1311  // Collect the set of bases directly overridden by any method in this class.
1312  // It's possible that some of these classes won't be virtual bases, or won't be
1313  // provided by virtual bases, or won't be virtual bases in the overridden
1314  // instance but are virtual bases elsewhere.  Only the last matters for what
1315  // we're doing, and we can ignore those:  if we don't directly override
1316  // a method provided by a virtual copy of a base class, but we do directly
1317  // override a method provided by a non-virtual copy of that base class,
1318  // then we must indirectly override the method provided by the virtual base,
1319  // and so we should already have collected it in the loop above.
1320  ClassSetTy overriddenBases;
1321  for (CXXRecordDecl::method_iterator
1322         M = RD->method_begin(), E = RD->method_end(); M != E; ++M) {
1323    // Ignore non-virtual methods and destructors.
1324    if (isa<CXXDestructorDecl>(*M) || !M->isVirtual())
1325      continue;
1326
1327    for (CXXMethodDecl::method_iterator I = M->begin_overridden_methods(),
1328          E = M->end_overridden_methods(); I != E; ++I) {
1329      const CXXMethodDecl *overriddenMethod = (*I);
1330
1331      // Ignore methods that override methods from vbases that require
1332      // vtordisps.
1333      if (overridesMethodRequiringVtorDisp(Context, overriddenMethod))
1334        continue;
1335
1336      // As an optimization, check immediately whether we're overriding
1337      // something from the undecided set.
1338      const CXXRecordDecl *overriddenBase = overriddenMethod->getParent();
1339      if (undecidedVBases.erase(overriddenBase)) {
1340        vtordispVBases.insert(overriddenBase);
1341        if (undecidedVBases.empty()) return;
1342
1343        // We can't 'continue;' here because one of our undecided
1344        // vbases might non-virtually inherit from this base.
1345        // Consider:
1346        //   struct A { virtual void foo(); };
1347        //   struct B : A {};
1348        //   struct C : virtual A, virtual B { virtual void foo(); };
1349        // We need a vtordisp for B here.
1350      }
1351
1352      // Otherwise, just collect it.
1353      overriddenBases.insert(overriddenBase);
1354    }
1355  }
1356
1357  // Walk the undecided v-bases and check whether they (non-virtually)
1358  // provide any of the overridden bases.  We don't need to consider
1359  // virtual links because the vtordisp inheres to the layout
1360  // subobject containing the base.
1361  for (ClassSetTy::const_iterator
1362         I = undecidedVBases.begin(), E = undecidedVBases.end(); I != E; ++I) {
1363    if (hasNonVirtualBaseInSet(*I, overriddenBases))
1364      vtordispVBases.insert(*I);
1365  }
1366}
1367
1368/// hasNewVirtualFunction - Does the given polymorphic class declare a
1369/// virtual function that does not override a method from any of its
1370/// base classes?
1371bool
1372RecordLayoutBuilder::hasNewVirtualFunction(const CXXRecordDecl *RD,
1373                                           bool IgnoreDestructor) const {
1374  if (!RD->getNumBases())
1375    return true;
1376
1377  for (CXXRecordDecl::method_iterator method = RD->method_begin();
1378       method != RD->method_end();
1379       ++method) {
1380    if (method->isVirtual() && !method->size_overridden_methods() &&
1381        !(IgnoreDestructor && method->getKind() == Decl::CXXDestructor)) {
1382      return true;
1383    }
1384  }
1385  return false;
1386}
1387
1388/// isPossiblePrimaryBase - Is the given base class an acceptable
1389/// primary base class?
1390bool
1391RecordLayoutBuilder::isPossiblePrimaryBase(const CXXRecordDecl *base) const {
1392  // In the Itanium ABI, a class can be a primary base class if it has
1393  // a vtable for any reason.
1394  if (!isMicrosoftCXXABI())
1395    return base->isDynamicClass();
1396
1397  // In the MS ABI, a class can only be a primary base class if it
1398  // provides a vf-table at a static offset.  That means it has to be
1399  // a non-virtual base.  The existence of a separate vb-table means
1400  // that it's possible to get virtual functions only from a virtual
1401  // base, which we have to guard against.
1402
1403  // First off, it has to have virtual functions.
1404  if (!base->isPolymorphic()) return false;
1405
1406  // If it has no virtual bases, then the vfptr must be at a static offset.
1407  if (!base->getNumVBases()) return true;
1408
1409  // Otherwise, the necessary information is cached in the layout.
1410  const ASTRecordLayout &layout = Context.getASTRecordLayout(base);
1411
1412  // If the base has its own vfptr, it can be a primary base.
1413  if (layout.hasOwnVFPtr()) return true;
1414
1415  // If the base has a primary base class, then it can be a primary base.
1416  if (layout.getPrimaryBase()) return true;
1417
1418  // Otherwise it can't.
1419  return false;
1420}
1421
1422void
1423RecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
1424                                        const CXXRecordDecl *MostDerivedClass) {
1425  const CXXRecordDecl *PrimaryBase;
1426  bool PrimaryBaseIsVirtual;
1427
1428  if (MostDerivedClass == RD) {
1429    PrimaryBase = this->PrimaryBase;
1430    PrimaryBaseIsVirtual = this->PrimaryBaseIsVirtual;
1431  } else {
1432    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
1433    PrimaryBase = Layout.getPrimaryBase();
1434    PrimaryBaseIsVirtual = Layout.isPrimaryBaseVirtual();
1435  }
1436
1437  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1438         E = RD->bases_end(); I != E; ++I) {
1439    assert(!I->getType()->isDependentType() &&
1440           "Cannot layout class with dependent bases.");
1441
1442    const CXXRecordDecl *BaseDecl =
1443      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
1444
1445    if (I->isVirtual()) {
1446      if (PrimaryBase != BaseDecl || !PrimaryBaseIsVirtual) {
1447        bool IndirectPrimaryBase = IndirectPrimaryBases.count(BaseDecl);
1448
1449        // Only lay out the virtual base if it's not an indirect primary base.
1450        if (!IndirectPrimaryBase) {
1451          // Only visit virtual bases once.
1452          if (!VisitedVirtualBases.insert(BaseDecl))
1453            continue;
1454
1455          const BaseSubobjectInfo *BaseInfo = VirtualBaseInfo.lookup(BaseDecl);
1456          assert(BaseInfo && "Did not find virtual base info!");
1457          LayoutVirtualBase(BaseInfo);
1458        }
1459      }
1460    }
1461
1462    if (!BaseDecl->getNumVBases()) {
1463      // This base isn't interesting since it doesn't have any virtual bases.
1464      continue;
1465    }
1466
1467    LayoutVirtualBases(BaseDecl, MostDerivedClass);
1468  }
1469}
1470
1471void RecordLayoutBuilder::MSLayoutVirtualBases(const CXXRecordDecl *RD) {
1472  if (!RD->getNumVBases())
1473    return;
1474
1475  ClassSetTy VtordispVBases;
1476  computeVtordisps(RD, VtordispVBases);
1477
1478  // This is substantially simplified because there are no virtual
1479  // primary bases.
1480  for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
1481       E = RD->vbases_end(); I != E; ++I) {
1482    const CXXRecordDecl *BaseDecl = I->getType()->getAsCXXRecordDecl();
1483    const BaseSubobjectInfo *BaseInfo = VirtualBaseInfo.lookup(BaseDecl);
1484    assert(BaseInfo && "Did not find virtual base info!");
1485
1486    // If this base requires a vtordisp, add enough space for an int field.
    // This is apparently always 32 bits, even on x64.
1488    bool vtordispNeeded = false;
1489    if (VtordispVBases.count(BaseDecl)) {
1490      CharUnits IntSize =
1491        CharUnits::fromQuantity(Context.getTargetInfo().getIntWidth() / 8);
1492
1493      setSize(getSize() + IntSize);
1494      setDataSize(getSize());
1495      vtordispNeeded = true;
1496    }
1497
1498    LayoutVirtualBase(BaseInfo, vtordispNeeded);
1499  }
1500}
1501
1502void RecordLayoutBuilder::LayoutVirtualBase(const BaseSubobjectInfo *Base,
1503                                            bool IsVtordispNeed) {
1504  assert(!Base->Derived && "Trying to lay out a primary virtual base!");
1505
1506  // Layout the base.
1507  CharUnits Offset = LayoutBase(Base);
1508
1509  // Add its base class offset.
1510  assert(!VBases.count(Base->Class) && "vbase offset already exists!");
1511  VBases.insert(std::make_pair(Base->Class,
1512                       ASTRecordLayout::VBaseInfo(Offset, IsVtordispNeed)));
1513
1514  if (!isMicrosoftCXXABI())
1515    AddPrimaryVirtualBaseOffsets(Base, Offset);
1516}
1517
1518CharUnits RecordLayoutBuilder::LayoutBase(const BaseSubobjectInfo *Base) {
1519  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Base->Class);
1520
1521
1522  CharUnits Offset;
1523
1524  // Query the external layout to see if it provides an offset.
1525  bool HasExternalLayout = false;
1526  if (ExternalLayout) {
1527    llvm::DenseMap<const CXXRecordDecl *, CharUnits>::iterator Known;
1528    if (Base->IsVirtual) {
1529      Known = ExternalVirtualBaseOffsets.find(Base->Class);
1530      if (Known != ExternalVirtualBaseOffsets.end()) {
1531        Offset = Known->second;
1532        HasExternalLayout = true;
1533      }
1534    } else {
1535      Known = ExternalBaseOffsets.find(Base->Class);
1536      if (Known != ExternalBaseOffsets.end()) {
1537        Offset = Known->second;
1538        HasExternalLayout = true;
1539      }
1540    }
1541  }
1542
1543  CharUnits UnpackedBaseAlign = Layout.getNonVirtualAlign();
1544  CharUnits BaseAlign = (Packed) ? CharUnits::One() : UnpackedBaseAlign;
1545
1546  // If we have an empty base class, try to place it at offset 0.
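  // Illustrative sketch (assumed example types): for 'struct Empty {};' and
  // 'struct D : Empty { int x; };', Empty can share offset 0 with 'x', so
  // sizeof(D) is just sizeof(int).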
1547  if (Base->Class->isEmpty() &&
1548      (!HasExternalLayout || Offset == CharUnits::Zero()) &&
1549      EmptySubobjects->CanPlaceBaseAtOffset(Base, CharUnits::Zero())) {
1550    setSize(std::max(getSize(), Layout.getSize()));
1551    UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1552
1553    return CharUnits::Zero();
1554  }
1555
1556  // The maximum field alignment overrides base align.
1557  if (!MaxFieldAlignment.isZero()) {
1558    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
1559    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
1560  }
1561
1562  if (!HasExternalLayout) {
1563    // Round up the current record size to the base's alignment boundary.
1564    Offset = getDataSize().RoundUpToAlignment(BaseAlign);
1565
1566    // Try to place the base.
1567    while (!EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset))
1568      Offset += BaseAlign;
1569  } else {
1570    bool Allowed = EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset);
1571    (void)Allowed;
1572    assert(Allowed && "Base subobject externally placed at overlapping offset");
1573
1574    if (InferAlignment && Offset < getDataSize().RoundUpToAlignment(BaseAlign)){
1575      // The externally-supplied base offset is before the base offset we
1576      // computed. Assume that the structure is packed.
1577      Alignment = CharUnits::One();
1578      InferAlignment = false;
1579    }
1580  }
1581
1582  if (!Base->Class->isEmpty()) {
1583    // Update the data size.
1584    setDataSize(Offset + Layout.getNonVirtualSize());
1585
1586    setSize(std::max(getSize(), getDataSize()));
1587  } else
1588    setSize(std::max(getSize(), Offset + Layout.getSize()));
1589
1590  // Remember max struct/class alignment.
1591  UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1592
1593  return Offset;
1594}
1595
1596void RecordLayoutBuilder::InitializeLayout(const Decl *D) {
1597  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
1598    IsUnion = RD->isUnion();
1599    IsMsStruct = RD->isMsStruct(Context);
1600  }
1601
1602  Packed = D->hasAttr<PackedAttr>();
1603
1604  // Honor the default struct packing maximum alignment flag.
1605  if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct) {
1606    MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
1607  }
1608
1609  // mac68k alignment supersedes maximum field alignment and attribute aligned,
1610  // and forces all structures to have 2-byte alignment. The IBM docs on it
1611  // allude to additional (more complicated) semantics, especially with regard
1612  // to bit-fields, but gcc appears not to follow that.
1613  if (D->hasAttr<AlignMac68kAttr>()) {
1614    IsMac68kAlign = true;
1615    MaxFieldAlignment = CharUnits::fromQuantity(2);
1616    Alignment = CharUnits::fromQuantity(2);
1617  } else {
1618    if (const MaxFieldAlignmentAttr *MFAA = D->getAttr<MaxFieldAlignmentAttr>())
1619      MaxFieldAlignment = Context.toCharUnitsFromBits(MFAA->getAlignment());
1620
1621    if (unsigned MaxAlign = D->getMaxAlignment())
1622      UpdateAlignment(Context.toCharUnitsFromBits(MaxAlign));
1623  }
1624
1625  // If there is an external AST source, ask it for the various offsets.
1626  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D))
1627    if (ExternalASTSource *External = Context.getExternalSource()) {
1628      ExternalLayout = External->layoutRecordType(RD,
1629                                                  ExternalSize,
1630                                                  ExternalAlign,
1631                                                  ExternalFieldOffsets,
1632                                                  ExternalBaseOffsets,
1633                                                  ExternalVirtualBaseOffsets);
1634
1635      // Update based on external alignment.
1636      if (ExternalLayout) {
1637        if (ExternalAlign > 0) {
1638          Alignment = Context.toCharUnitsFromBits(ExternalAlign);
1639        } else {
1640          // The external source didn't have alignment information; infer it.
1641          InferAlignment = true;
1642        }
1643      }
1644    }
1645}
1646
1647void RecordLayoutBuilder::Layout(const RecordDecl *D) {
1648  InitializeLayout(D);
1649  LayoutFields(D);
1650
1651  // Finally, round the size of the total struct up to the alignment of the
1652  // struct itself.
1653  FinishLayout(D);
1654}
1655
1656void RecordLayoutBuilder::Layout(const CXXRecordDecl *RD) {
1657  InitializeLayout(RD);
1658
1659  // Lay out the vtable and the non-virtual bases.
1660  LayoutNonVirtualBases(RD);
1661
1662  LayoutFields(RD);
1663
1664  NonVirtualSize = Context.toCharUnitsFromBits(
1665        llvm::RoundUpToAlignment(getSizeInBits(),
1666                                 Context.getTargetInfo().getCharAlign()));
1667  NonVirtualAlignment = Alignment;
1668
1669  if (isMicrosoftCXXABI()) {
    if (NonVirtualSize != NonVirtualSize.RoundUpToAlignment(Alignment)) {
      CharUnits AlignMember =
        NonVirtualSize.RoundUpToAlignment(Alignment) - NonVirtualSize;

      setSize(getSize() + AlignMember);
      setDataSize(getSize());

      NonVirtualSize = Context.toCharUnitsFromBits(
          llvm::RoundUpToAlignment(getSizeInBits(),
                                   Context.getTargetInfo().getCharAlign()));
    }
1681
1682    MSLayoutVirtualBases(RD);
1683  } else {
1684    // Lay out the virtual bases and add the primary virtual base offsets.
1685    LayoutVirtualBases(RD, RD);
1686  }
1687
1688  // Finally, round the size of the total struct up to the alignment
1689  // of the struct itself.
1690  FinishLayout(RD);
1691
1692#ifndef NDEBUG
1693  // Check that we have base offsets for all bases.
1694  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1695       E = RD->bases_end(); I != E; ++I) {
1696    if (I->isVirtual())
1697      continue;
1698
1699    const CXXRecordDecl *BaseDecl =
1700      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
1701
1702    assert(Bases.count(BaseDecl) && "Did not find base offset!");
1703  }
1704
1705  // And all virtual bases.
1706  for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
1707       E = RD->vbases_end(); I != E; ++I) {
1708    const CXXRecordDecl *BaseDecl =
1709      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
1710
1711    assert(VBases.count(BaseDecl) && "Did not find base offset!");
1712  }
1713#endif
1714}
1715
1716void RecordLayoutBuilder::Layout(const ObjCInterfaceDecl *D) {
1717  if (ObjCInterfaceDecl *SD = D->getSuperClass()) {
1718    const ASTRecordLayout &SL = Context.getASTObjCInterfaceLayout(SD);
1719
1720    UpdateAlignment(SL.getAlignment());
1721
1722    // We start laying out ivars not at the end of the superclass
1723    // structure, but at the next byte following the last field.
1724    setSize(SL.getDataSize());
1725    setDataSize(getSize());
1726  }
1727
1728  InitializeLayout(D);
1729  // Layout each ivar sequentially.
1730  for (const ObjCIvarDecl *IVD = D->all_declared_ivar_begin(); IVD;
1731       IVD = IVD->getNextIvar())
1732    LayoutField(IVD);
1733
1734  // Finally, round the size of the total struct up to the alignment of the
1735  // struct itself.
1736  FinishLayout(D);
1737}
1738
1739void RecordLayoutBuilder::LayoutFields(const RecordDecl *D) {
1740  // Layout each field, for now, just sequentially, respecting alignment.  In
1741  // the future, this will need to be tweakable by targets.
1742  for (RecordDecl::field_iterator Field = D->field_begin(),
1743       FieldEnd = D->field_end(); Field != FieldEnd; ++Field)
1744    LayoutField(*Field);
1745}
1746
1747void RecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize,
1748                                             uint64_t TypeSize,
1749                                             bool FieldPacked,
1750                                             const FieldDecl *D) {
1751  assert(Context.getLangOpts().CPlusPlus &&
1752         "Can only have wide bit-fields in C++!");
1753
1754  // Itanium C++ ABI 2.4:
1755  //   If sizeof(T)*8 < n, let T' be the largest integral POD type with
1756  //   sizeof(T')*8 <= n.
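  //
  // For illustration (a sketch, not quoting the ABI): for 'char c : 72;',
  // n is 72, so T' is unsigned long long (64 bits) and the bit-field is
  // allocated at the next offset aligned for unsigned long long.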
1757
1758  QualType IntegralPODTypes[] = {
1759    Context.UnsignedCharTy, Context.UnsignedShortTy, Context.UnsignedIntTy,
1760    Context.UnsignedLongTy, Context.UnsignedLongLongTy
1761  };
1762
1763  QualType Type;
1764  for (unsigned I = 0, E = llvm::array_lengthof(IntegralPODTypes);
1765       I != E; ++I) {
1766    uint64_t Size = Context.getTypeSize(IntegralPODTypes[I]);
1767
1768    if (Size > FieldSize)
1769      break;
1770
1771    Type = IntegralPODTypes[I];
1772  }
1773  assert(!Type.isNull() && "Did not find a type!");
1774
1775  CharUnits TypeAlign = Context.getTypeAlignInChars(Type);
1776
1777  // We're not going to use any of the unfilled bits in the last byte.
1778  UnfilledBitsInLastUnit = 0;
1779  LastBitfieldTypeSize = 0;
1780
1781  uint64_t FieldOffset;
1782  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
1783
1784  if (IsUnion) {
1785    setDataSize(std::max(getDataSizeInBits(), FieldSize));
1786    FieldOffset = 0;
1787  } else {
1788    // The bitfield is allocated starting at the next offset aligned
1789    // appropriately for T', with length n bits.
1790    FieldOffset = llvm::RoundUpToAlignment(getDataSizeInBits(),
1791                                           Context.toBits(TypeAlign));
1792
1793    uint64_t NewSizeInBits = FieldOffset + FieldSize;
1794
1795    setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
1796                                         Context.getTargetInfo().getCharAlign()));
1797    UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
1798  }
1799
1800  // Place this field at the current location.
1801  FieldOffsets.push_back(FieldOffset);
1802
1803  CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, FieldOffset,
1804                    Context.toBits(TypeAlign), FieldPacked, D);
1805
1806  // Update the size.
1807  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1808
1809  // Remember max struct/class alignment.
1810  UpdateAlignment(TypeAlign);
1811}
1812
1813void RecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
1814  bool FieldPacked = Packed || D->hasAttr<PackedAttr>();
1815  uint64_t FieldSize = D->getBitWidthValue(Context);
1816  std::pair<uint64_t, unsigned> FieldInfo = Context.getTypeInfo(D->getType());
1817  uint64_t TypeSize = FieldInfo.first;
1818  unsigned FieldAlign = FieldInfo.second;
1819
1820  if (IsMsStruct) {
1821    // The field alignment for integer types in ms_struct structs is
1822    // always the size.
1823    FieldAlign = TypeSize;
1824    // Ignore zero-length bitfields after non-bitfields in ms_struct structs.
1825    if (!FieldSize && !LastBitfieldTypeSize)
1826      FieldAlign = 1;
1827    // If a bitfield is followed by a bitfield of a different size, don't
1828    // pack the bits together in ms_struct structs.
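    // e.g. (illustrative): under ms_struct, in 'int a : 3; char b : 3;' the
    // declared types differ in size, so 'b' starts a new storage unit instead
    // of using the bits left over from 'a'.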
1829    if (LastBitfieldTypeSize != TypeSize) {
1830      UnfilledBitsInLastUnit = 0;
1831      LastBitfieldTypeSize = 0;
1832    }
1833  }
1834
1835  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
1836  uint64_t FieldOffset = IsUnion ? 0 : UnpaddedFieldOffset;
1837
1838  bool ZeroLengthBitfield = false;
1839  if (!Context.getTargetInfo().useBitFieldTypeAlignment() &&
1840      Context.getTargetInfo().useZeroLengthBitfieldAlignment() &&
1841      FieldSize == 0) {
1842    // The alignment of a zero-length bitfield affects the alignment
1843    // of the next member.  The alignment is the max of the zero
1844    // length bitfield's alignment and a target specific fixed value.
1845    ZeroLengthBitfield = true;
1846    unsigned ZeroLengthBitfieldBoundary =
1847      Context.getTargetInfo().getZeroLengthBitfieldBoundary();
1848    if (ZeroLengthBitfieldBoundary > FieldAlign)
1849      FieldAlign = ZeroLengthBitfieldBoundary;
1850  }
1851
1852  if (FieldSize > TypeSize) {
1853    LayoutWideBitField(FieldSize, TypeSize, FieldPacked, D);
1854    return;
1855  }
1856
  // The alignment the field would have if it were not packed. This is used to
  // check whether the packed attribute was unnecessary (-Wpacked).
1859  unsigned UnpackedFieldAlign = FieldAlign;
1860  uint64_t UnpackedFieldOffset = FieldOffset;
1861  if (!Context.getTargetInfo().useBitFieldTypeAlignment() && !ZeroLengthBitfield)
1862    UnpackedFieldAlign = 1;
1863
1864  if (FieldPacked ||
1865      (!Context.getTargetInfo().useBitFieldTypeAlignment() && !ZeroLengthBitfield))
1866    FieldAlign = 1;
1867  FieldAlign = std::max(FieldAlign, D->getMaxAlignment());
1868  UnpackedFieldAlign = std::max(UnpackedFieldAlign, D->getMaxAlignment());
1869
1870  // The maximum field alignment overrides the aligned attribute.
1871  if (!MaxFieldAlignment.isZero() && FieldSize != 0) {
1872    unsigned MaxFieldAlignmentInBits = Context.toBits(MaxFieldAlignment);
1873    FieldAlign = std::min(FieldAlign, MaxFieldAlignmentInBits);
1874    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignmentInBits);
1875  }
1876
1877  // ms_struct bitfields always have to start at a round alignment.
1878  if (IsMsStruct && !LastBitfieldTypeSize) {
1879    FieldOffset = llvm::RoundUpToAlignment(FieldOffset, FieldAlign);
1880    UnpackedFieldOffset = llvm::RoundUpToAlignment(UnpackedFieldOffset,
1881                                                   UnpackedFieldAlign);
1882  }
1883
1884  // Check if we need to add padding to give the field the correct alignment.
1885  if (FieldSize == 0 ||
1886      (MaxFieldAlignment.isZero() &&
1887       (FieldOffset & (FieldAlign-1)) + FieldSize > TypeSize))
1888    FieldOffset = llvm::RoundUpToAlignment(FieldOffset, FieldAlign);
1889
1890  if (FieldSize == 0 ||
1891      (MaxFieldAlignment.isZero() &&
1892       (UnpackedFieldOffset & (UnpackedFieldAlign-1)) + FieldSize > TypeSize))
1893    UnpackedFieldOffset = llvm::RoundUpToAlignment(UnpackedFieldOffset,
1894                                                   UnpackedFieldAlign);
1895
1896  // Padding members don't affect overall alignment, unless zero length bitfield
1897  // alignment is enabled.
1898  if (!D->getIdentifier() &&
1899      !Context.getTargetInfo().useZeroLengthBitfieldAlignment() &&
1900      !IsMsStruct)
1901    FieldAlign = UnpackedFieldAlign = 1;
1902
1903  if (ExternalLayout)
1904    FieldOffset = updateExternalFieldOffset(D, FieldOffset);
1905
1906  // Place this field at the current location.
1907  FieldOffsets.push_back(FieldOffset);
1908
1909  if (!ExternalLayout)
1910    CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, UnpackedFieldOffset,
1911                      UnpackedFieldAlign, FieldPacked, D);
1912
1913  // Update DataSize to include the last byte containing (part of) the bitfield.
1914  if (IsUnion) {
1915    // FIXME: I think FieldSize should be TypeSize here.
1916    setDataSize(std::max(getDataSizeInBits(), FieldSize));
1917  } else {
1918    if (IsMsStruct && FieldSize) {
1919      // Under ms_struct, a bitfield always takes up space equal to the size
1920      // of the type.  We can't just change the alignment computation on the
1921      // other codepath because of the way this interacts with #pragma pack:
1922      // in a packed struct, we need to allocate misaligned space in the
1923      // struct to hold the bitfield.
1924      if (!UnfilledBitsInLastUnit) {
1925        setDataSize(FieldOffset + TypeSize);
1926        UnfilledBitsInLastUnit = TypeSize - FieldSize;
1927      } else if (UnfilledBitsInLastUnit < FieldSize) {
1928        setDataSize(getDataSizeInBits() + TypeSize);
1929        UnfilledBitsInLastUnit = TypeSize - FieldSize;
1930      } else {
1931        UnfilledBitsInLastUnit -= FieldSize;
1932      }
1933      LastBitfieldTypeSize = TypeSize;
1934    } else {
1935      uint64_t NewSizeInBits = FieldOffset + FieldSize;
1936      uint64_t BitfieldAlignment = Context.getTargetInfo().getCharAlign();
1937      setDataSize(llvm::RoundUpToAlignment(NewSizeInBits, BitfieldAlignment));
1938      UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
1939      LastBitfieldTypeSize = 0;
1940    }
1941  }
1942
1943  // Update the size.
1944  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1945
1946  // Remember max struct/class alignment.
1947  UpdateAlignment(Context.toCharUnitsFromBits(FieldAlign),
1948                  Context.toCharUnitsFromBits(UnpackedFieldAlign));
1949}
1950
1951void RecordLayoutBuilder::LayoutField(const FieldDecl *D) {
1952  if (D->isBitField()) {
1953    LayoutBitField(D);
1954    return;
1955  }
1956
1957  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
1958
1959  // Reset the unfilled bits.
1960  UnfilledBitsInLastUnit = 0;
1961  LastBitfieldTypeSize = 0;
1962
1963  bool FieldPacked = Packed || D->hasAttr<PackedAttr>();
1964  CharUnits FieldOffset =
1965    IsUnion ? CharUnits::Zero() : getDataSize();
1966  CharUnits FieldSize;
1967  CharUnits FieldAlign;
1968
1969  if (D->getType()->isIncompleteArrayType()) {
1970    // This is a flexible array member; we can't directly
1971    // query getTypeInfo about these, so we figure it out here.
1972    // Flexible array members don't have any size, but they
1973    // have to be aligned appropriately for their element type.
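    // e.g. (illustrative): in 'struct S { int n; double tail[]; };' the
    // 'tail' member contributes no size here but must be aligned like its
    // element type, double.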
1974    FieldSize = CharUnits::Zero();
1975    const ArrayType* ATy = Context.getAsArrayType(D->getType());
1976    FieldAlign = Context.getTypeAlignInChars(ATy->getElementType());
1977  } else if (const ReferenceType *RT = D->getType()->getAs<ReferenceType>()) {
1978    unsigned AS = RT->getPointeeType().getAddressSpace();
1979    FieldSize =
1980      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(AS));
1981    FieldAlign =
1982      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(AS));
1983  } else {
1984    std::pair<CharUnits, CharUnits> FieldInfo =
1985      Context.getTypeInfoInChars(D->getType());
1986    FieldSize = FieldInfo.first;
1987    FieldAlign = FieldInfo.second;
1988
1989    if (IsMsStruct) {
1990      // If MS bitfield layout is required, figure out what type is being
1991      // laid out and align the field to the width of that type.
1992
1993      // Resolve all typedefs down to their base type and round up the field
1994      // alignment if necessary.
1995      QualType T = Context.getBaseElementType(D->getType());
1996      if (const BuiltinType *BTy = T->getAs<BuiltinType>()) {
1997        CharUnits TypeSize = Context.getTypeSizeInChars(BTy);
1998        if (TypeSize > FieldAlign)
1999          FieldAlign = TypeSize;
2000      }
2001    }
2002  }
2003
  // The alignment the field would have if it were not packed. This is used to
  // check whether the packed attribute was unnecessary (-Wpacked).
2006  CharUnits UnpackedFieldAlign = FieldAlign;
2007  CharUnits UnpackedFieldOffset = FieldOffset;
2008
2009  if (FieldPacked)
2010    FieldAlign = CharUnits::One();
2011  CharUnits MaxAlignmentInChars =
2012    Context.toCharUnitsFromBits(D->getMaxAlignment());
2013  FieldAlign = std::max(FieldAlign, MaxAlignmentInChars);
2014  UnpackedFieldAlign = std::max(UnpackedFieldAlign, MaxAlignmentInChars);
2015
2016  // The maximum field alignment overrides the aligned attribute.
2017  if (!MaxFieldAlignment.isZero()) {
2018    FieldAlign = std::min(FieldAlign, MaxFieldAlignment);
2019    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignment);
2020  }
2021
2022  // Round up the current record size to the field's alignment boundary.
2023  FieldOffset = FieldOffset.RoundUpToAlignment(FieldAlign);
2024  UnpackedFieldOffset =
2025    UnpackedFieldOffset.RoundUpToAlignment(UnpackedFieldAlign);
2026
2027  if (ExternalLayout) {
2028    FieldOffset = Context.toCharUnitsFromBits(
2029                    updateExternalFieldOffset(D, Context.toBits(FieldOffset)));
2030
2031    if (!IsUnion && EmptySubobjects) {
2032      // Record the fact that we're placing a field at this offset.
2033      bool Allowed = EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset);
2034      (void)Allowed;
2035      assert(Allowed && "Externally-placed field cannot be placed here");
2036    }
2037  } else {
2038    if (!IsUnion && EmptySubobjects) {
2039      // Check if we can place the field at this offset.
2040      while (!EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset)) {
2041        // We couldn't place the field at the offset. Try again at a new offset.
2042        FieldOffset += FieldAlign;
2043      }
2044    }
2045  }
2046
2047  // Place this field at the current location.
2048  FieldOffsets.push_back(Context.toBits(FieldOffset));
2049
2050  if (!ExternalLayout)
2051    CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFieldOffset,
2052                      Context.toBits(UnpackedFieldOffset),
2053                      Context.toBits(UnpackedFieldAlign), FieldPacked, D);
2054
2055  // Reserve space for this field.
2056  uint64_t FieldSizeInBits = Context.toBits(FieldSize);
2057  if (IsUnion)
2058    setDataSize(std::max(getDataSizeInBits(), FieldSizeInBits));
2059  else
2060    setDataSize(FieldOffset + FieldSize);
2061
2062  // Update the size.
2063  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
2064
2065  // Remember max struct/class alignment.
2066  UpdateAlignment(FieldAlign, UnpackedFieldAlign);
2067}
2068
2069void RecordLayoutBuilder::FinishLayout(const NamedDecl *D) {
2070  // In C++, records cannot be of size 0.
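  // Illustrative sketch (assumed example types): 'struct E {};' ends up with
  // size 1 so that distinct objects get distinct addresses, while
  // 'struct Z { int a[0]; };' (a GNU extension) is non-empty yet stays size 0.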
2071  if (Context.getLangOpts().CPlusPlus && getSizeInBits() == 0) {
2072    if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
      // For compatibility with gcc, a class (POD or non-POD) that is not
      // empty but still has size 0 (e.g. because its only fields are
      // zero-length arrays) keeps size 0.
2076      if (RD->isEmpty())
2077        setSize(CharUnits::One());
2078    }
2079    else
2080      setSize(CharUnits::One());
2081  }
2082
2083  // Finally, round the size of the record up to the alignment of the
2084  // record itself.
2085  uint64_t UnpaddedSize = getSizeInBits() - UnfilledBitsInLastUnit;
2086  uint64_t UnpackedSizeInBits =
2087  llvm::RoundUpToAlignment(getSizeInBits(),
2088                           Context.toBits(UnpackedAlignment));
2089  CharUnits UnpackedSize = Context.toCharUnitsFromBits(UnpackedSizeInBits);
2090  uint64_t RoundedSize
2091    = llvm::RoundUpToAlignment(getSizeInBits(), Context.toBits(Alignment));
2092
2093  if (ExternalLayout) {
2094    // If we're inferring alignment, and the external size is smaller than
2095    // our size after we've rounded up to alignment, conservatively set the
2096    // alignment to 1.
2097    if (InferAlignment && ExternalSize < RoundedSize) {
2098      Alignment = CharUnits::One();
2099      InferAlignment = false;
2100    }
2101    setSize(ExternalSize);
2102    return;
2103  }
2104
2105
2106  // MSVC doesn't round up to the alignment of the record with virtual bases.
2107  if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
2108    if (isMicrosoftCXXABI() && RD->getNumVBases())
2109      return;
2110  }
2111
2112  // Set the size to the final size.
2113  setSize(RoundedSize);
2114
2115  unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
2116  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
2117    // Warn if padding was introduced to the struct/class/union.
2118    if (getSizeInBits() > UnpaddedSize) {
2119      unsigned PadSize = getSizeInBits() - UnpaddedSize;
2120      bool InBits = true;
2121      if (PadSize % CharBitNum == 0) {
2122        PadSize = PadSize / CharBitNum;
2123        InBits = false;
2124      }
2125      Diag(RD->getLocation(), diag::warn_padded_struct_size)
2126          << Context.getTypeDeclType(RD)
2127          << PadSize
2128          << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1); // plural or not
2129    }
2130
2131    // Warn if we packed it unnecessarily. If the alignment is 1 byte don't
2132    // bother since there won't be alignment issues.
2133    if (Packed && UnpackedAlignment > CharUnits::One() &&
2134        getSize() == UnpackedSize)
2135      Diag(D->getLocation(), diag::warn_unnecessary_packed)
2136          << Context.getTypeDeclType(RD);
2137  }
2138}
2139
2140void RecordLayoutBuilder::UpdateAlignment(CharUnits NewAlignment,
2141                                          CharUnits UnpackedNewAlignment) {
2142  // The alignment is not modified when using 'mac68k' alignment or when
2143  // we have an externally-supplied layout that also provides overall alignment.
2144  if (IsMac68kAlign || (ExternalLayout && !InferAlignment))
2145    return;
2146
2147  if (NewAlignment > Alignment) {
    assert(llvm::isPowerOf2_32(NewAlignment.getQuantity()) &&
           "Alignment not a power of 2");
2150    Alignment = NewAlignment;
2151  }
2152
2153  if (UnpackedNewAlignment > UnpackedAlignment) {
    assert(llvm::isPowerOf2_32(UnpackedNewAlignment.getQuantity()) &&
           "Alignment not a power of 2");
2156    UnpackedAlignment = UnpackedNewAlignment;
2157  }
2158}
2159
2160uint64_t
2161RecordLayoutBuilder::updateExternalFieldOffset(const FieldDecl *Field,
2162                                               uint64_t ComputedOffset) {
2163  assert(ExternalFieldOffsets.find(Field) != ExternalFieldOffsets.end() &&
2164         "Field does not have an external offset");
2165
2166  uint64_t ExternalFieldOffset = ExternalFieldOffsets[Field];
2167
2168  if (InferAlignment && ExternalFieldOffset < ComputedOffset) {
2169    // The externally-supplied field offset is before the field offset we
2170    // computed. Assume that the structure is packed.
2171    Alignment = CharUnits::One();
2172    InferAlignment = false;
2173  }
2174
2175  // Use the externally-supplied field offset.
2176  return ExternalFieldOffset;
2177}
2178
2179/// \brief Get diagnostic %select index for tag kind for
2180/// field padding diagnostic message.
2181/// WARNING: Indexes apply to particular diagnostics only!
2182///
2183/// \returns diagnostic %select index.
2184static unsigned getPaddingDiagFromTagKind(TagTypeKind Tag) {
2185  switch (Tag) {
2186  case TTK_Struct: return 0;
2187  case TTK_Interface: return 1;
2188  case TTK_Class: return 2;
2189  default: llvm_unreachable("Invalid tag kind for field padding diagnostic!");
2190  }
2191}
2192
2193void RecordLayoutBuilder::CheckFieldPadding(uint64_t Offset,
2194                                            uint64_t UnpaddedOffset,
2195                                            uint64_t UnpackedOffset,
2196                                            unsigned UnpackedAlign,
2197                                            bool isPacked,
2198                                            const FieldDecl *D) {
  // We let ObjC ivars through without warning; ObjC interfaces are generally
  // not used for padding tricks.
2201  if (isa<ObjCIvarDecl>(D))
2202    return;
2203
2204  // Don't warn about structs created without a SourceLocation.  This can
2205  // be done by clients of the AST, such as codegen.
2206  if (D->getLocation().isInvalid())
2207    return;
2208
2209  unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
2210
2211  // Warn if padding was introduced to the struct/class.
2212  if (!IsUnion && Offset > UnpaddedOffset) {
2213    unsigned PadSize = Offset - UnpaddedOffset;
2214    bool InBits = true;
2215    if (PadSize % CharBitNum == 0) {
2216      PadSize = PadSize / CharBitNum;
2217      InBits = false;
2218    }
2219    if (D->getIdentifier())
2220      Diag(D->getLocation(), diag::warn_padded_struct_field)
2221          << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
2222          << Context.getTypeDeclType(D->getParent())
2223          << PadSize
2224          << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1) // plural or not
2225          << D->getIdentifier();
2226    else
2227      Diag(D->getLocation(), diag::warn_padded_struct_anon_field)
2228          << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
2229          << Context.getTypeDeclType(D->getParent())
2230          << PadSize
2231          << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1); // plural or not
2232  }
2233
2234  // Warn if we packed it unnecessarily. If the alignment is 1 byte don't
2235  // bother since there won't be alignment issues.
2236  if (isPacked && UnpackedAlign > CharBitNum && Offset == UnpackedOffset)
2237    Diag(D->getLocation(), diag::warn_unnecessary_packed)
2238        << D->getIdentifier();
2239}
2240
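/// computeKeyFunction - Find the class's key function, if any, following the
/// Itanium C++ ABI rules.  For illustration (a sketch with assumed example
/// types): in
///   struct S { virtual void f(); virtual void g() {} };
/// with S::f defined out of line, f is the key function, and the vtable for S
/// is emitted in the translation unit that defines S::f.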
2241static const CXXMethodDecl *computeKeyFunction(ASTContext &Context,
2242                                               const CXXRecordDecl *RD) {
2243  // If a class isn't polymorphic it doesn't have a key function.
2244  if (!RD->isPolymorphic())
2245    return 0;
2246
2247  // A class that is not externally visible doesn't have a key function. (Or
2248  // at least, there's no point to assigning a key function to such a class;
2249  // this doesn't affect the ABI.)
2250  if (!RD->isExternallyVisible())
2251    return 0;
2252
  // Template instantiations don't have key functions, see Itanium C++ ABI 5.2.6.
2254  // Same behavior as GCC.
2255  TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
2256  if (TSK == TSK_ImplicitInstantiation ||
2257      TSK == TSK_ExplicitInstantiationDefinition)
2258    return 0;
2259
2260  bool allowInlineFunctions =
2261    Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline();
2262
2263  for (CXXRecordDecl::method_iterator I = RD->method_begin(),
2264         E = RD->method_end(); I != E; ++I) {
2265    const CXXMethodDecl *MD = *I;
2266
2267    if (!MD->isVirtual())
2268      continue;
2269
2270    if (MD->isPure())
2271      continue;
2272
2273    // Ignore implicit member functions, they are always marked as inline, but
2274    // they don't have a body until they're defined.
2275    if (MD->isImplicit())
2276      continue;
2277
2278    if (MD->isInlineSpecified())
2279      continue;
2280
2281    if (MD->hasInlineBody())
2282      continue;
2283
2284    // Ignore inline deleted or defaulted functions.
2285    if (!MD->isUserProvided())
2286      continue;
2287
2288    // In certain ABIs, ignore functions with out-of-line inline definitions.
2289    if (!allowInlineFunctions) {
2290      const FunctionDecl *Def;
2291      if (MD->hasBody(Def) && Def->isInlineSpecified())
2292        continue;
2293    }
2294
2295    // We found it.
2296    return MD;
2297  }
2298
2299  return 0;
2300}
2301
2302DiagnosticBuilder
2303RecordLayoutBuilder::Diag(SourceLocation Loc, unsigned DiagID) {
2304  return Context.getDiagnostics().Report(Loc, DiagID);
2305}
2306
2307/// Does the target C++ ABI require us to skip over the tail-padding
2308/// of the given class (considering it as a base class) when allocating
2309/// objects?
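///
/// For illustration (a sketch with assumed example types and a 4-byte int):
///   struct A { int i; char c; };   // size 8, data size 5
///   struct B : A { char d; };
/// a POD A forces 'd' to offset 8, while a non-POD A (say, one with a
/// user-provided constructor) lets 'd' reuse the tail padding at offset 5
/// under the Itanium rules.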
2310static bool mustSkipTailPadding(TargetCXXABI ABI, const CXXRecordDecl *RD) {
2311  switch (ABI.getTailPaddingUseRules()) {
2312  case TargetCXXABI::AlwaysUseTailPadding:
2313    return false;
2314
2315  case TargetCXXABI::UseTailPaddingUnlessPOD03:
2316    // FIXME: To the extent that this is meant to cover the Itanium ABI
2317    // rules, we should implement the restrictions about over-sized
2318    // bitfields:
2319    //
2320    // http://mentorembedded.github.com/cxx-abi/abi.html#POD :
2321    //   In general, a type is considered a POD for the purposes of
2322    //   layout if it is a POD type (in the sense of ISO C++
2323    //   [basic.types]). However, a POD-struct or POD-union (in the
2324    //   sense of ISO C++ [class]) with a bitfield member whose
2325    //   declared width is wider than the declared type of the
2326    //   bitfield is not a POD for the purpose of layout.  Similarly,
2327    //   an array type is not a POD for the purpose of layout if the
2328    //   element type of the array is not a POD for the purpose of
2329    //   layout.
2330    //
2331    //   Where references to the ISO C++ are made in this paragraph,
2332    //   the Technical Corrigendum 1 version of the standard is
2333    //   intended.
2334    return RD->isPOD();
2335
2336  case TargetCXXABI::UseTailPaddingUnlessPOD11:
2337    // This is equivalent to RD->getTypeForDecl().isCXX11PODType(),
2338    // but with a lot of abstraction penalty stripped off.  This does
2339    // assume that these properties are set correctly even in C++98
2340    // mode; fortunately, that is true because we want to assign
2341    // consistently semantics to the type-traits intrinsics (or at
2342    // least as many of them as possible).
2343    return RD->isTrivial() && RD->isStandardLayout();
2344  }
2345
2346  llvm_unreachable("bad tail-padding use kind");
2347}
2348
2349static bool isMsLayout(const RecordDecl* D) {
2350  return (D->getASTContext().getTargetInfo().getCXXABI().isMicrosoft() ||
2351          D->getASTContext().getTargetInfo().getTriple().getOS() ==
2352          llvm::Triple::Win32) &&
2353          D->getASTContext().getTargetInfo().getPointerWidth(0) == 32;
2354  // FIXME: we intend to enable 64 bit mode once it's been verified.
2355}
2356
2357// This section contains an implementation of struct layout that is, up to the
2358// included tests, compatible with cl.exe (2012).  The layout produced is
// significantly different from that produced by the Itanium ABI.  Here we note
2360// the most important differences.
2361//
2362// * The alignment of bitfields in unions is ignored when computing the
2363//   alignment of the union.
// * The existence of a zero-width bitfield that occurs after anything other
//   than a non-zero-length bitfield is ignored.
// * The Itanium-equivalent vtable pointers are split into a vfptr (virtual
//   function pointer) and a vbptr (virtual base pointer).  They can each be
//   shared with a non-virtual base; these bases need not be the same.  vfptrs
//   always occur at offset 0.  vbptrs can occur at an arbitrary offset and are
//   placed after non-virtual bases but before fields.
2370// * Virtual bases sometimes require a 'vtordisp' field that is laid out before
2371//   the virtual base and is used in conjunction with virtual overrides during
2372//   construction and destruction.
2373// * vfptrs are allocated in a block of memory equal to the alignment of the
2374//   fields and non-virtual bases at offset 0.
// * vbptrs are allocated in a block of memory equal to the alignment of the
//   fields and non-virtual bases.  This block is at a potentially unaligned
//   offset.  If the allocation slot is unaligned and the alignment is less
//   than or equal to the pointer size, additional space is allocated so that
//   the pointer can be aligned properly.  This causes very strange effects on
//   the placement of objects after the allocated block (see the code).
2380// * vtordisps are allocated in a block of memory with size and alignment equal
2381//   to the alignment of the completed structure (before applying __declspec(
//   align())).  The vtordisp always occurs at the end of the allocation block,
//   immediately prior to the virtual base.
// * The last zero-sized non-virtual base is allocated after the vbptr (if one
//   exists) has been placed, and can be placed at the end of the struct,
//   potentially aliasing either the first member or another struct allocated
//   after this one.
// * The last zero-sized virtual base may be placed at the end of the struct
//   and can potentially alias a zero-sized type in the next struct.
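//
// Illustrative sketch (assumed example types; offsets assume 32-bit MSVC):
//   struct A { virtual void f(); int a; };
//   struct B : virtual A { int b; };
// B is laid out roughly as { vbptr, b, <virtual base A: vfptr, a> }: the vbptr
// precedes B's own fields, and the virtual base A (carrying the vfptr) is
// placed after the non-virtual portion.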
2389
2390namespace {
2391struct MicrosoftRecordLayoutBuilder {
2392  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
2393  MicrosoftRecordLayoutBuilder(const ASTContext &Context) : Context(Context) {}
2394private:
2395  MicrosoftRecordLayoutBuilder(const MicrosoftRecordLayoutBuilder &)
2396  LLVM_DELETED_FUNCTION;
2397  void operator=(const MicrosoftRecordLayoutBuilder &) LLVM_DELETED_FUNCTION;
2398public:
2399
2400  void layout(const RecordDecl *RD);
2401  void cxxLayout(const CXXRecordDecl *RD);
2402  /// \brief Initializes size and alignment and honors some flags.
2403  void initializeLayout(const RecordDecl *RD);
  /// \brief Initializes the C++ layout, computes the alignment and virtual
  /// alignment, and determines the existence of vfptrs and vbptrs.  Alignment
  /// is needed before the vfptr is laid out.
2407  void initializeCXXLayout(const CXXRecordDecl *RD);
2408  void layoutVFPtr(const CXXRecordDecl *RD);
2409  void layoutNonVirtualBases(const CXXRecordDecl *RD);
2410  void layoutNonVirtualBase(const CXXRecordDecl *RD);
2411  void layoutVBPtr(const CXXRecordDecl *RD);
2412  /// \brief Lays out the fields of the record.  Also rounds size up to
2413  /// alignment.
2414  void layoutFields(const RecordDecl *RD);
2415  void layoutField(const FieldDecl *FD);
2416  void layoutBitField(const FieldDecl *FD);
2417  /// \brief Lays out a single zero-width bit-field in the record and handles
2418  /// special cases associated with zero-width bit-fields.
2419  void layoutZeroWidthBitField(const FieldDecl *FD);
2420  void layoutVirtualBases(const CXXRecordDecl *RD);
2421  void layoutVirtualBase(const CXXRecordDecl *RD, bool HasVtordisp);
2422  /// \brief Flushes the lazy virtual base and conditionally rounds up to
2423  /// alignment.
2424  void finalizeCXXLayout(const CXXRecordDecl *RD);
2425  void honorDeclspecAlign(const RecordDecl *RD);
2426
2427  /// \brief Updates the alignment of the type.  This function doesn't take any
2428  /// properties (such as packedness) into account.  getAdjustedFieldInfo()
  /// adjusts for packedness.
2430  void updateAlignment(CharUnits NewAlignment) {
2431    Alignment = std::max(Alignment, NewAlignment);
2432  }
2433  /// \brief Gets the size and alignment taking attributes into account.
2434  std::pair<CharUnits, CharUnits> getAdjustedFieldInfo(const FieldDecl *FD);
2435  /// \brief Places a field at offset 0.
2436  void placeFieldAtZero() { FieldOffsets.push_back(0); }
2437  /// \brief Places a field at an offset in CharUnits.
2438  void placeFieldAtOffset(CharUnits FieldOffset) {
2439    FieldOffsets.push_back(Context.toBits(FieldOffset));
2440  }
2441  /// \brief Places a bitfield at a bit offset.
2442  void placeFieldAtBitOffset(uint64_t FieldOffset) {
2443    FieldOffsets.push_back(FieldOffset);
2444  }
2445  /// \brief Compute the set of virtual bases for which vtordisps are required.
2446  llvm::SmallPtrSet<const CXXRecordDecl *, 2>
2447  computeVtorDispSet(const CXXRecordDecl *RD);
2448
2449  const ASTContext &Context;
2450  /// \brief The size of the record being laid out.
2451  CharUnits Size;
2452  /// \brief The current alignment of the record layout.
2453  CharUnits Alignment;
2454  /// \brief The collection of field offsets.
2455  SmallVector<uint64_t, 16> FieldOffsets;
2456  /// \brief The maximum allowed field alignment. This is set by #pragma pack.
2457  CharUnits MaxFieldAlignment;
2458  /// \brief Alignment does not occur for virtual bases unless something
  /// forces it to by explicitly using __declspec(align()).
2460  bool AlignAfterVBases : 1;
2461  bool IsUnion : 1;
2462  /// \brief True if the last field laid out was a bitfield and was not 0
2463  /// width.
2464  bool LastFieldIsNonZeroWidthBitfield : 1;
2465  /// \brief The size of the allocation of the currently active bitfield.
2466  /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield
2467  /// is true.
2468  CharUnits CurrentBitfieldSize;
2469  /// \brief The number of remaining bits in our last bitfield allocation.
2470  /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield is
2471  /// true.
2472  unsigned RemainingBitsInField;
2473
  /// \brief The data size of the record layout.
2475  CharUnits DataSize;
  /// \brief The alignment of the non-virtual portion of the record layout.
  /// Only used for C++ layouts.
2478  CharUnits NonVirtualAlignment;
2479  /// \brief The additional alignment imposed by the virtual bases.
2480  CharUnits VirtualAlignment;
2481  /// \brief The primary base class (if one exists).
2482  const CXXRecordDecl *PrimaryBase;
2483  /// \brief The class we share our vb-pointer with.
2484  const CXXRecordDecl *SharedVBPtrBase;
2485  /// \brief True if the class has a (not necessarily its own) vftable pointer.
2486  bool HasVFPtr : 1;
2487  /// \brief True if the class has a (not necessarily its own) vbtable pointer.
2488  bool HasVBPtr : 1;
2489  /// \brief Offset to the virtual base table pointer (if one exists).
2490  CharUnits VBPtrOffset;
2491  /// \brief Base classes and their offsets in the record.
2492  BaseOffsetsMapTy Bases;
2493  /// \brief virtual base classes and their offsets in the record.
2494  ASTRecordLayout::VBaseOffsetsMapTy VBases;
2495  /// \brief The size of a pointer.
2496  CharUnits PointerSize;
2497  /// \brief The alignment of a pointer.
2498  CharUnits PointerAlignment;
2499  /// \brief Holds an empty base we haven't yet laid out.
2500  const CXXRecordDecl *LazyEmptyBase;
2501};
2502} // namespace
2503
2504std::pair<CharUnits, CharUnits>
2505MicrosoftRecordLayoutBuilder::getAdjustedFieldInfo(const FieldDecl *FD) {
2506  std::pair<CharUnits, CharUnits> FieldInfo;
2507  if (FD->getType()->isIncompleteArrayType()) {
2508    // This is a flexible array member; we can't directly
2509    // query getTypeInfo about these, so we figure it out here.
2510    // Flexible array members don't have any size, but they
2511    // have to be aligned appropriately for their element type.
2512    FieldInfo.first = CharUnits::Zero();
2513    const ArrayType *ATy = Context.getAsArrayType(FD->getType());
2514    FieldInfo.second = Context.getTypeAlignInChars(ATy->getElementType());
2515  } else if (const ReferenceType *RT = FD->getType()->getAs<ReferenceType>()) {
2516    unsigned AS = RT->getPointeeType().getAddressSpace();
2517    FieldInfo.first = Context
2518        .toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(AS));
2519    FieldInfo.second = Context
2520        .toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(AS));
2521  } else
2522    FieldInfo = Context.getTypeInfoInChars(FD->getType());
2523
  // If we're not on Win32 and using ms_struct, the field alignment will be
  // wrong for 64-bit types, so we fix that here.
2526  if (FD->getASTContext().getTargetInfo().getTriple().getOS() !=
2527      llvm::Triple::Win32) {
2528    QualType T = Context.getBaseElementType(FD->getType());
2529    if (const BuiltinType *BTy = T->getAs<BuiltinType>()) {
2530      CharUnits TypeSize = Context.getTypeSizeInChars(BTy);
2531      if (TypeSize > FieldInfo.second)
2532        FieldInfo.second = TypeSize;
2533    }
2534  }
2535
2536  // Respect packed attribute.
2537  if (FD->hasAttr<PackedAttr>())
2538    FieldInfo.second = CharUnits::One();
2539  // Respect pack pragma.
2540  else if (!MaxFieldAlignment.isZero())
2541    FieldInfo.second = std::min(FieldInfo.second, MaxFieldAlignment);
2542  // Respect alignment attributes.
2543  if (unsigned fieldAlign = FD->getMaxAlignment()) {
2544    CharUnits FieldAlign = Context.toCharUnitsFromBits(fieldAlign);
2545    AlignAfterVBases = true;
2546    FieldInfo.second = std::max(FieldInfo.second, FieldAlign);
2547  }
2548  return FieldInfo;
2549}
2550
2551void MicrosoftRecordLayoutBuilder::initializeLayout(const RecordDecl *RD) {
2552  IsUnion = RD->isUnion();
2553
2554  Size = CharUnits::Zero();
2555  Alignment = CharUnits::One();
2556  AlignAfterVBases = false;
2557
2558  // Compute the maximum field alignment.
2559  MaxFieldAlignment = CharUnits::Zero();
2560  // Honor the default struct packing maximum alignment flag.
2561  if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct)
2562    MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
2563  // Honor the packing attribute.
2564  if (const MaxFieldAlignmentAttr *MFAA = RD->getAttr<MaxFieldAlignmentAttr>())
2565    MaxFieldAlignment = Context.toCharUnitsFromBits(MFAA->getAlignment());
2566  // Packed attribute forces max field alignment to be 1.
2567  if (RD->hasAttr<PackedAttr>())
2568    MaxFieldAlignment = CharUnits::One();
2569}
2570
2571void MicrosoftRecordLayoutBuilder::layout(const RecordDecl *RD) {
2572  initializeLayout(RD);
2573  layoutFields(RD);
2574  honorDeclspecAlign(RD);
2575}
2576
2577void MicrosoftRecordLayoutBuilder::cxxLayout(const CXXRecordDecl *RD) {
2578  initializeLayout(RD);
2579  initializeCXXLayout(RD);
2580  layoutVFPtr(RD);
2581  layoutNonVirtualBases(RD);
2582  layoutVBPtr(RD);
2583  layoutFields(RD);
2584  DataSize = Size;
2585  NonVirtualAlignment = Alignment;
2586  layoutVirtualBases(RD);
2587  finalizeCXXLayout(RD);
2588  honorDeclspecAlign(RD);
2589}
2590
2591void
2592MicrosoftRecordLayoutBuilder::initializeCXXLayout(const CXXRecordDecl *RD) {
2593  // Calculate pointer size and alignment.
2594  PointerSize =
2595      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
2596  PointerAlignment = PointerSize;
2597  if (!MaxFieldAlignment.isZero())
2598    PointerAlignment = std::min(PointerAlignment, MaxFieldAlignment);
2599
2600  // Initialize information about the bases.
2601  HasVBPtr = false;
2602  HasVFPtr = false;
2603  SharedVBPtrBase = 0;
2604  PrimaryBase = 0;
2605  VirtualAlignment = CharUnits::One();
2606
2607  // If the record has a dynamic base class, attempt to choose a primary base
2608  // class. It is the first (in direct base class order) non-virtual dynamic
2609  // base class, if one exists.
2610  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
2611                                                e = RD->bases_end();
2612       i != e; ++i) {
2613    const CXXRecordDecl *BaseDecl =
2614        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
2615    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
2616    // Handle forced alignment.
2617    if (Layout.getAlignAfterVBases())
2618      AlignAfterVBases = true;
2619    // Handle virtual bases.
2620    if (i->isVirtual()) {
2621      VirtualAlignment = std::max(VirtualAlignment, Layout.getAlignment());
2622      HasVBPtr = true;
2623      continue;
2624    }
2625    // We located a primary base class!
2626    if (!PrimaryBase && Layout.hasVFPtr()) {
2627      PrimaryBase = BaseDecl;
2628      HasVFPtr = true;
2629    }
2630    // We located a base to share a VBPtr with!
2631    if (!SharedVBPtrBase && Layout.hasVBPtr()) {
2632      SharedVBPtrBase = BaseDecl;
2633      HasVBPtr = true;
2634    }
2635    updateAlignment(Layout.getAlignment());
2636  }
2637
2638  // Use LayoutFields to compute the alignment of the fields.  The layout
2639  // is discarded.  This is the simplest way to get all of the bit-field
2640  // behavior correct and is not actually very expensive.
2641  layoutFields(RD);
2642  Size = CharUnits::Zero();
2643  FieldOffsets.clear();
2644}
2645
2646void MicrosoftRecordLayoutBuilder::layoutVFPtr(const CXXRecordDecl *RD) {
2647  // If we have a primary base then our VFPtr was already laid out
2648  if (PrimaryBase)
2649    return;
2650
2651  // Look at all of our methods to determine if we need a VFPtr.  We need a
2652  // vfptr if we define a new virtual function.
2653  if (!HasVFPtr && RD->isDynamicClass())
2654    for (CXXRecordDecl::method_iterator i = RD->method_begin(),
2655                                        e = RD->method_end();
2656         !HasVFPtr && i != e; ++i)
2657      HasVFPtr = i->isVirtual() && i->size_overridden_methods() == 0;
2658  if (!HasVFPtr)
2659    return;
2660
2661  // MSVC potentially over-aligns the vf-table pointer by giving it
2662  // the max alignment of all the non-virtual data in the class.  The resulting
2663  // layout is essentially { vftbl, { nvdata } }.  This is completely
2664  // unnecessary, but we're not here to pass judgment.
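  // e.g. (illustrative, 32-bit): for 'struct A { virtual void f(); double d; };'
  // a full 8-byte slot (the alignment of 'd') is reserved for the 4-byte
  // vfptr, so 'd' lands at offset 8 and sizeof(A) is 16.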
2665  Size += Alignment;
2666  updateAlignment(PointerAlignment);
2667}
2668
2669void
2670MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(const CXXRecordDecl *RD) {
2671  LazyEmptyBase = 0;
2672
2673  // Lay out the primary base first.
2674  if (PrimaryBase)
2675    layoutNonVirtualBase(PrimaryBase);
2676
2677  // Iterate through the bases and lay out the non-virtual ones.
2678  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
2679                                                e = RD->bases_end();
2680       i != e; ++i) {
2681    if (i->isVirtual())
2682      continue;
2683    const CXXRecordDecl *BaseDecl =
2684        cast<CXXRecordDecl>(i->getType()->castAs<RecordType>()->getDecl());
2685    if (BaseDecl != PrimaryBase)
2686      layoutNonVirtualBase(BaseDecl);
2687  }
2688}
2689
2690void
2691MicrosoftRecordLayoutBuilder::layoutNonVirtualBase(const CXXRecordDecl *RD) {
2692  const ASTRecordLayout *Layout = RD ? &Context.getASTRecordLayout(RD) : 0;
2693
2694  // If we have a lazy empty base we haven't laid out yet, do that now.
2695  if (LazyEmptyBase) {
2696    const ASTRecordLayout &LazyLayout =
2697        Context.getASTRecordLayout(LazyEmptyBase);
2698    Size = Size.RoundUpToAlignment(LazyLayout.getAlignment());
2699    Bases.insert(std::make_pair(LazyEmptyBase, Size));
2700    // Empty bases only consume space when followed by another empty base.
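    // e.g. (illustrative): for 'struct E1 {}; struct E2 {};' and
    // 'struct D : E1, E2 {};', E1 is placed at offset 0 and E2 at offset 1,
    // because laying out the second empty base bumps the size past the first.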
2701    if (RD && Layout->getNonVirtualSize().isZero())
2702      Size++;
2703    LazyEmptyBase = 0;
2704  }
2705
2706  // RD is null when flushing the final lazy base.
2707  if (!RD)
2708    return;
2709
2710  if (Layout->getNonVirtualSize().isZero()) {
2711    LazyEmptyBase = RD;
2712    return;
2713  }
2714
2715  // Insert the base here.
2716  CharUnits BaseOffset = Size.RoundUpToAlignment(Layout->getAlignment());
2717  Bases.insert(std::make_pair(RD, BaseOffset));
2718  Size = BaseOffset + Layout->getDataSize();
2719  // Note: we don't update alignment here because it was accounted
  // for during initialization.
2721}
2722
2723void MicrosoftRecordLayoutBuilder::layoutVBPtr(const CXXRecordDecl *RD) {
2724  if (!HasVBPtr)
2725    VBPtrOffset = CharUnits::fromQuantity(-1);
2726  else if (SharedVBPtrBase) {
2727    const ASTRecordLayout &Layout = Context.getASTRecordLayout(SharedVBPtrBase);
2728    VBPtrOffset = Bases[SharedVBPtrBase] + Layout.getVBPtrOffset();
2729  } else {
2730    updateAlignment(PointerAlignment);
2731    VBPtrOffset = Size.RoundUpToAlignment(PointerAlignment);
2732
2733    if (Alignment == PointerAlignment && Size % PointerAlignment) {
2734      CharUnits x = Size + Alignment + Alignment;
2735      Size = VBPtrOffset + Alignment;
2736      // Handle strange padding rules.  I have no explanation for why the
2737      // virtual base is padded in such an odd way.  My guess is that they
      // always add 2 * Alignment and incorrectly round down to the appropriate
2739      // alignment.  It's important to get this case correct because it impacts
2740      // the layout of the first member of the struct.
2741
2742      RecordDecl::field_iterator FieldBegin = RD->field_begin();
2743      if (FieldBegin != RD->field_end())
2744        Size += CharUnits::fromQuantity(
2745            x % getAdjustedFieldInfo(*FieldBegin).second);
2746    } else
2747      Size += Alignment;
2748  }
2749
2750  // Flush the lazy empty base.
2751  layoutNonVirtualBase(0);
2752}
2753
2754void MicrosoftRecordLayoutBuilder::layoutFields(const RecordDecl *RD) {
2755  LastFieldIsNonZeroWidthBitfield = false;
2756  for (RecordDecl::field_iterator Field = RD->field_begin(),
2757                                  FieldEnd = RD->field_end();
2758       Field != FieldEnd; ++Field)
2759    layoutField(*Field);
2760  Size = Size.RoundUpToAlignment(Alignment);
2761}
2762
2763void MicrosoftRecordLayoutBuilder::layoutField(const FieldDecl *FD) {
2764  if (FD->isBitField()) {
2765    layoutBitField(FD);
2766    return;
2767  }
2768  LastFieldIsNonZeroWidthBitfield = false;
2769
2770  std::pair<CharUnits, CharUnits> FieldInfo = getAdjustedFieldInfo(FD);
2771  CharUnits FieldSize = FieldInfo.first;
2772  CharUnits FieldAlign = FieldInfo.second;
2773
2774  updateAlignment(FieldAlign);
2775  if (IsUnion) {
2776    placeFieldAtZero();
2777    Size = std::max(Size, FieldSize);
2778  } else {
2779    // Round up the current record size to the field's alignment boundary.
2780    CharUnits FieldOffset = Size.RoundUpToAlignment(FieldAlign);
2781    placeFieldAtOffset(FieldOffset);
2782    Size = FieldOffset + FieldSize;
2783  }
2784}
2785
2786void MicrosoftRecordLayoutBuilder::layoutBitField(const FieldDecl *FD) {
2787  unsigned Width = FD->getBitWidthValue(Context);
2788  if (Width == 0) {
2789    layoutZeroWidthBitField(FD);
2790    return;
2791  }
2792
2793  std::pair<CharUnits, CharUnits> FieldInfo = getAdjustedFieldInfo(FD);
2794  CharUnits FieldSize = FieldInfo.first;
2795  CharUnits FieldAlign = FieldInfo.second;
2796
2797  // Clamp the bitfield to a width its type can contain so that we can still
2798  // lay it out; Sema will have reported an error for the invalid width.
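  // For illustration (assumed example): 'char c : 20;' is laid out here as if
  // it had been declared 'char c : 8;'.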
2799  if (Width > Context.toBits(FieldSize))
2800    Width = Context.toBits(FieldSize);
2801
2802  // Check to see if this bitfield fits into an existing allocation.  Note:
2803  // MSVC refuses to pack bitfields whose declared types have different sizes
2804  // into the same allocation.
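  // Illustrative example (assumed, for a target with 16-bit short and 32-bit
  // int) of the rule checked below:
  //
  //   struct S {
  //     short a : 4;  // starts a 2-byte (short-sized) allocation
  //     short b : 4;  // same declared-type size: packs into the same short
  //     int   c : 4;  // different size: starts a new 4-byte-aligned allocation
  //   };
  //
  // This places 'c' at offset 4 and gives sizeof(S) == 8.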
2805  if (!IsUnion && LastFieldIsNonZeroWidthBitfield &&
2806      CurrentBitfieldSize == FieldSize && Width <= RemainingBitsInField) {
2807    placeFieldAtBitOffset(Context.toBits(Size) - RemainingBitsInField);
2808    RemainingBitsInField -= Width;
2809    return;
2810  }
2811
2812  LastFieldIsNonZeroWidthBitfield = true;
2813  CurrentBitfieldSize = FieldSize;
2814  if (IsUnion) {
2815    placeFieldAtZero();
2816    Size = std::max(Size, FieldSize);
2817    // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
2818  } else {
2819    // Allocate a new block of memory and place the bitfield in it.
2820    CharUnits FieldOffset = Size.RoundUpToAlignment(FieldAlign);
2821    placeFieldAtOffset(FieldOffset);
2822    Size = FieldOffset + FieldSize;
2823    updateAlignment(FieldAlign);
2824    RemainingBitsInField = Context.toBits(FieldSize) - Width;
2825  }
2826}
2827
2828void
2829MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField(const FieldDecl *FD) {
2830  // Zero-width bitfields are ignored unless they follow a non-zero-width
2831  // bitfield.
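  // Illustrative example (assumed):
  //
  //   struct S { char a : 3; int : 0; char b : 3; };
  //
  // The zero-width 'int' bitfield follows the non-zero-width 'a', so it closes
  // a's allocation and bumps the record to int alignment; 'b' then starts a
  // new allocation at offset 4.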
2832  std::pair<CharUnits, CharUnits> FieldInfo = getAdjustedFieldInfo(FD);
2833  CharUnits FieldSize = FieldInfo.first;
2834  CharUnits FieldAlign = FieldInfo.second;
2835
2836  if (!LastFieldIsNonZeroWidthBitfield) {
2837    placeFieldAtOffset(IsUnion ? CharUnits::Zero() : Size);
2838    // TODO: Add a Sema warning that MS ignores alignment for zero-width
2839    // bitfields that follow other zero-width bitfields or non-bitfield fields.
2840    return;
2841  }
2842
2843  LastFieldIsNonZeroWidthBitfield = false;
2844  if (IsUnion) {
2845    placeFieldAtZero();
2846    Size = std::max(Size, FieldSize);
2847  } else {
2848    // Round up the current record size to the field's alignment boundary.
2849    CharUnits FieldOffset = Size.RoundUpToAlignment(FieldAlign);
2850    placeFieldAtOffset(FieldOffset);
2851    Size = FieldOffset;
2852    updateAlignment(FieldAlign);
2853  }
2854}
2855
2856void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) {
2857  if (!HasVBPtr)
2858    return;
2859
2860  updateAlignment(VirtualAlignment);
2861
2862  // Zero-sized v-bases obey the alignment attribute, so apply it here.  The
2863  // alignment attribute is normally accounted for in FinalizeLayout.
2864  if (unsigned MaxAlign = RD->getMaxAlignment())
2865    updateAlignment(Context.toCharUnitsFromBits(MaxAlign));
2866
2867  llvm::SmallPtrSet<const CXXRecordDecl *, 2> HasVtordisp =
2868      computeVtorDispSet(RD);
2869
2870  // Iterate through the virtual bases and lay them out.
2871  for (CXXRecordDecl::base_class_const_iterator i = RD->vbases_begin(),
2872                                                e = RD->vbases_end();
2873       i != e; ++i) {
2874    const CXXRecordDecl *BaseDecl =
2875        cast<CXXRecordDecl>(i->getType()->castAs<RecordType>()->getDecl());
2876    layoutVirtualBase(BaseDecl, HasVtordisp.count(BaseDecl));
2877  }
2878}
2879
2880void MicrosoftRecordLayoutBuilder::layoutVirtualBase(const CXXRecordDecl *RD,
2881                                                     bool HasVtordisp) {
2882  if (LazyEmptyBase) {
2883    const ASTRecordLayout &LazyLayout =
2884        Context.getASTRecordLayout(LazyEmptyBase);
2885    Size = Size.RoundUpToAlignment(LazyLayout.getAlignment());
2886    VBases.insert(
2887        std::make_pair(LazyEmptyBase, ASTRecordLayout::VBaseInfo(Size, false)));
2888    // Empty bases only consume space when followed by another empty base.
2889    // The space consumed is in an Alignment-sized, Alignment-aligned block,
2890    // and the v-base is placed at its alignment offset into the chunk, unless
2891    // its alignment is less than the size of a pointer, in which case it is
2892    // placed at a pointer-width offset into the chunk.  We have no idea why.
2893    if (RD && Context.getASTRecordLayout(RD).getNonVirtualSize().isZero())
2894      Size = Size.RoundUpToAlignment(Alignment) + PointerSize;
2895    LazyEmptyBase = 0;
2896  }
2897
2898  // RD is null when flushing the final lazy virtual base.
2899  if (!RD)
2900    return;
2901
2902  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
2903  if (Layout.getNonVirtualSize().isZero() && !HasVtordisp) {
2904    LazyEmptyBase = RD;
2905    return;
2906  }
2907
2908  CharUnits BaseNVSize = Layout.getNonVirtualSize();
2909  CharUnits BaseAlign = Layout.getAlignment();
2910
2911  if (HasVtordisp)
2912    Size = Size.RoundUpToAlignment(Alignment) + PointerSize;
2913  Size = Size.RoundUpToAlignment(BaseAlign);
2914
2915  // Insert the base here.
2916  CharUnits BaseOffset = Size.RoundUpToAlignment(BaseAlign);
2917  VBases.insert(
2918      std::make_pair(RD, ASTRecordLayout::VBaseInfo(BaseOffset, HasVtordisp)));
2919  Size = BaseOffset + BaseNVSize;
2920  // Note: we don't update alignment here because it was accounted for in
2921  // InitializeLayout.
2922}
2923
2924void MicrosoftRecordLayoutBuilder::finalizeCXXLayout(const CXXRecordDecl *RD) {
2925  // Flush the lazy virtual base.
2926  layoutVirtualBase(0, false);
2927
2928  if (RD->vbases_begin() == RD->vbases_end() || AlignAfterVBases)
2929    Size = Size.RoundUpToAlignment(Alignment);
2930
2931  if (Size.isZero())
2932    Size = Alignment;
2933}
2934
2935void MicrosoftRecordLayoutBuilder::honorDeclspecAlign(const RecordDecl *RD) {
2936  if (unsigned MaxAlign = RD->getMaxAlignment()) {
2937    AlignAfterVBases = true;
2938    updateAlignment(Context.toCharUnitsFromBits(MaxAlign));
2939    Size = Size.RoundUpToAlignment(Alignment);
2940  }
2941}
2942
2943static bool
2944RequiresVtordisp(const llvm::SmallPtrSet<const CXXRecordDecl *, 2> &HasVtordisp,
2945                 const CXXRecordDecl *RD) {
2946  if (HasVtordisp.count(RD))
2947    return true;
2948  // If any of a virtual base's non-virtual bases (recursively) requires a
2949  // vtordisp, then so does this virtual base.
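  // Illustrative example (assumed):
  //
  //   struct A { virtual void f(); };
  //   struct B : A {};
  //   struct S : virtual B { S(); virtual void f(); };
  //
  // S overrides A::f, and A is a non-virtual base of the virtual base B, so B
  // still requires a vtordisp even though B itself declares no virtual
  // functions.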
2950  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
2951                                                e = RD->bases_end();
2952       i != e; ++i)
2953    if (!i->isVirtual() &&
2954        RequiresVtordisp(
2955            HasVtordisp,
2956            cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl())))
2957      return true;
2958  return false;
2959}
2960
2961llvm::SmallPtrSet<const CXXRecordDecl *, 2>
2962MicrosoftRecordLayoutBuilder::computeVtorDispSet(const CXXRecordDecl *RD) {
2963  llvm::SmallPtrSet<const CXXRecordDecl *, 2> HasVtordisp;
2964
2965  // If a virtual base required a vtordisp in any of our direct bases, it
2966  // requires one here as well, so check our direct bases' layouts.
2967  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
2968                                                e = RD->bases_end();
2969       i != e; ++i) {
2970    const CXXRecordDecl *BaseDecl =
2971        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
2972    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
2973    for (ASTRecordLayout::VBaseOffsetsMapTy::const_iterator
2974             bi = Layout.getVBaseOffsetsMap().begin(),
2975             be = Layout.getVBaseOffsetsMap().end();
2976         bi != be; ++bi)
2977      if (bi->second.hasVtorDisp())
2978        HasVtordisp.insert(bi->first);
2979  }
2980
2981  // If we define a constructor or destructor and override a function that is
2982  // defined in a virtual base's vtable, that virtual base needs a vtordisp.
2983  // Here we collect the set of classes in whose vtables the functions we
2984  // override ultimately live.  The virtual bases among them will require
2985  // vtordisps.  In addition, virtual bases that contain non-virtual bases that
2986  // define functions we override also require vtordisps; this case is checked
2987  // explicitly below.
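  // Illustrative example (assumed):
  //
  //   struct V { virtual void f(); };
  //   struct S : virtual V { S(); virtual void f(); };
  //
  // S has a user-declared constructor and overrides V::f, whose slot lives in
  // the virtual base V's vtable, so V is laid out with a vtordisp.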
2988  if (RD->hasUserDeclaredConstructor() || RD->hasUserDeclaredDestructor()) {
2989    llvm::SmallPtrSet<const CXXMethodDecl *, 8> Work;
2990    // Seed the working set with our non-destructor virtual methods.
2991    for (CXXRecordDecl::method_iterator i = RD->method_begin(),
2992                                        e = RD->method_end();
2993         i != e; ++i)
2994      if ((*i)->isVirtual() && (*i) != RD->getDestructor())
2995        Work.insert(*i);
2996    while (!Work.empty()) {
2997      const CXXMethodDecl *MD = *Work.begin();
2998      CXXMethodDecl::method_iterator i = MD->begin_overridden_methods(),
2999                                     e = MD->end_overridden_methods();
3000      if (i == e)
3001        // If a virtual method overrides nothing, it lives in its parent's vtable.
3002        HasVtordisp.insert(MD->getParent());
3003      else
3004        Work.insert(i, e);
3005      // We've finished processing this element; remove it from the working set.
3006      Work.erase(MD);
3007    }
3008  }
3009
3010  // Re-check all of our vbases for vtordisp requirements (in case their
3011  // non-virtual bases have vtordisp requirements).
3012  for (CXXRecordDecl::base_class_const_iterator i = RD->vbases_begin(),
3013                                                e = RD->vbases_end();
3014       i != e; ++i) {
3015    const CXXRecordDecl *BaseDecl =  i->getType()->getAsCXXRecordDecl();
3016    if (!HasVtordisp.count(BaseDecl) && RequiresVtordisp(HasVtordisp, BaseDecl))
3017      HasVtordisp.insert(BaseDecl);
3018  }
3019
3020  return HasVtordisp;
3021}
3022
3023/// \brief Get or compute information about the layout of the specified record
3024/// (struct/union/class), which indicates its size and field position
3025/// information.
3026const ASTRecordLayout *
3027ASTContext::BuildMicrosoftASTRecordLayout(const RecordDecl *D) const {
3028  MicrosoftRecordLayoutBuilder Builder(*this);
3029  if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
3030    Builder.cxxLayout(RD);
3031    return new (*this) ASTRecordLayout(
3032        *this, Builder.Size, Builder.Alignment,
3033        Builder.HasVFPtr && !Builder.PrimaryBase, Builder.HasVFPtr,
3034        Builder.HasVBPtr && !Builder.SharedVBPtrBase, Builder.VBPtrOffset,
3035        Builder.DataSize, Builder.FieldOffsets.data(),
3036        Builder.FieldOffsets.size(), Builder.DataSize,
3037        Builder.NonVirtualAlignment, CharUnits::Zero(), Builder.PrimaryBase,
3038        false, Builder.AlignAfterVBases, Builder.Bases, Builder.VBases);
3039  } else {
3040    Builder.layout(D);
3041    return new (*this) ASTRecordLayout(
3042        *this, Builder.Size, Builder.Alignment, Builder.Size,
3043        Builder.FieldOffsets.data(), Builder.FieldOffsets.size());
3044  }
3045}
3046
3047/// getASTRecordLayout - Get or compute information about the layout of the
3048/// specified record (struct/union/class), which indicates its size and field
3049/// position information.
3050const ASTRecordLayout &
3051ASTContext::getASTRecordLayout(const RecordDecl *D) const {
3052  // These asserts test different things.  A record has a definition
3053  // as soon as we begin to parse the definition.  That definition is
3054  // not a complete definition (which is what isCompleteDefinition() tests)
3055  // until we *finish* parsing the definition.
3056
3057  if (D->hasExternalLexicalStorage() && !D->getDefinition())
3058    getExternalSource()->CompleteType(const_cast<RecordDecl*>(D));
3059
3060  D = D->getDefinition();
3061  assert(D && "Cannot get layout of forward declarations!");
3062  assert(!D->isInvalidDecl() && "Cannot get layout of invalid decl!");
3063  assert(D->isCompleteDefinition() && "Cannot layout type before complete!");
3064
3065  // Look up this layout; if it is already laid out, return what we have.
3066  // Note that we can't save a reference to the entry because this function
3067  // is recursive.
3068  const ASTRecordLayout *Entry = ASTRecordLayouts[D];
3069  if (Entry) return *Entry;
3070
3071  const ASTRecordLayout *NewEntry = 0;
3072
3073  if (isMsLayout(D) && !D->getASTContext().getExternalSource()) {
3074    NewEntry = BuildMicrosoftASTRecordLayout(D);
3075  } else if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
3076    EmptySubobjectMap EmptySubobjects(*this, RD);
3077    RecordLayoutBuilder Builder(*this, &EmptySubobjects);
3078    Builder.Layout(RD);
3079
3080    // In certain situations, we are allowed to lay out objects in the
3081    // tail-padding of base classes.  This is ABI-dependent.
3082    // FIXME: this should be stored in the record layout.
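    // For illustration (assumed, Itanium C++ ABI rules):
    //
    //   struct B { B(); int i; char c; };  // dsize 5, size 8
    //   struct D : B { char d; };          // 'd' may reuse B's tail padding
    //
    // Because B is not POD for the purpose of layout, 'd' can be placed at
    // offset 5 and sizeof(D) remains 8; a POD base keeps its tail padding and
    // would push 'd' to offset 8.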
3083    bool skipTailPadding =
3084      mustSkipTailPadding(getTargetInfo().getCXXABI(), cast<CXXRecordDecl>(D));
3085
3086    // FIXME: This should be done in FinalizeLayout.
3087    CharUnits DataSize =
3088      skipTailPadding ? Builder.getSize() : Builder.getDataSize();
3089    CharUnits NonVirtualSize =
3090      skipTailPadding ? DataSize : Builder.NonVirtualSize;
3091    NewEntry =
3092      new (*this) ASTRecordLayout(*this, Builder.getSize(),
3093                                  Builder.Alignment,
3094                                  Builder.HasOwnVFPtr,
3095                                  RD->isDynamicClass(),
3096                                  Builder.HasOwnVBPtr,
3097                                  Builder.VBPtrOffset,
3098                                  DataSize,
3099                                  Builder.FieldOffsets.data(),
3100                                  Builder.FieldOffsets.size(),
3101                                  NonVirtualSize,
3102                                  Builder.NonVirtualAlignment,
3103                                  EmptySubobjects.SizeOfLargestEmptySubobject,
3104                                  Builder.PrimaryBase,
3105                                  Builder.PrimaryBaseIsVirtual,
3106                                  true,
3107                                  Builder.Bases, Builder.VBases);
3108  } else {
3109    RecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/0);
3110    Builder.Layout(D);
3111
3112    NewEntry =
3113      new (*this) ASTRecordLayout(*this, Builder.getSize(),
3114                                  Builder.Alignment,
3115                                  Builder.getSize(),
3116                                  Builder.FieldOffsets.data(),
3117                                  Builder.FieldOffsets.size());
3118  }
3119
3120  ASTRecordLayouts[D] = NewEntry;
3121
3122  if (getLangOpts().DumpRecordLayouts) {
3123    llvm::outs() << "\n*** Dumping AST Record Layout\n";
3124    DumpRecordLayout(D, llvm::outs(), getLangOpts().DumpRecordLayoutsSimple);
3125  }
3126
3127  return *NewEntry;
3128}
3129
3130const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) {
3131  if (!getTargetInfo().getCXXABI().hasKeyFunctions())
3132    return 0;
3133
3134  assert(RD->getDefinition() && "Cannot get key function for forward decl!");
3135  RD = cast<CXXRecordDecl>(RD->getDefinition());
3136
3137  LazyDeclPtr &Entry = KeyFunctions[RD];
3138  if (!Entry)
3139    Entry = const_cast<CXXMethodDecl*>(computeKeyFunction(*this, RD));
3140
3141  return cast_or_null<CXXMethodDecl>(Entry.get(getExternalSource()));
3142}
3143
3144void ASTContext::setNonKeyFunction(const CXXMethodDecl *Method) {
3145  assert(Method == Method->getFirstDeclaration() &&
3146         "not working with method declaration from class definition");
3147
3148  // Look up the cache entry.  Since we're working with the first
3149  // declaration, its parent must be the class definition, which is
3150  // the correct key for the KeyFunctions hash.
3151  llvm::DenseMap<const CXXRecordDecl*, LazyDeclPtr>::iterator
3152    I = KeyFunctions.find(Method->getParent());
3153
3154  // If it's not cached, there's nothing to do.
3155  if (I == KeyFunctions.end()) return;
3156
3157  // If it is cached, check whether it's the target method, and if so,
3158  // remove it from the cache.
3159  if (I->second.get(getExternalSource()) == Method) {
3160    // FIXME: remember that we did this for module / chained PCH state?
3161    KeyFunctions.erase(I);
3162  }
3163}
3164
3165static uint64_t getFieldOffset(const ASTContext &C, const FieldDecl *FD) {
3166  const ASTRecordLayout &Layout = C.getASTRecordLayout(FD->getParent());
3167  return Layout.getFieldOffset(FD->getFieldIndex());
3168}
3169
3170uint64_t ASTContext::getFieldOffset(const ValueDecl *VD) const {
3171  uint64_t OffsetInBits;
3172  if (const FieldDecl *FD = dyn_cast<FieldDecl>(VD)) {
3173    OffsetInBits = ::getFieldOffset(*this, FD);
3174  } else {
3175    const IndirectFieldDecl *IFD = cast<IndirectFieldDecl>(VD);
3176
3177    OffsetInBits = 0;
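    // An indirect field names a member of an anonymous struct or union, e.g.
    // (illustrative) 'b' in 'struct S { struct { int b; }; };'.  Its offset is
    // the sum of the offsets along its chain of containing fields.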
3178    for (IndirectFieldDecl::chain_iterator CI = IFD->chain_begin(),
3179                                           CE = IFD->chain_end();
3180         CI != CE; ++CI)
3181      OffsetInBits += ::getFieldOffset(*this, cast<FieldDecl>(*CI));
3182  }
3183
3184  return OffsetInBits;
3185}
3186
3187/// getObjCLayout - Get or compute information about the layout of the
3188/// given interface.
3189///
3190/// \param Impl - If given, also include the layout of the interface's
3191/// implementation. This may differ by including synthesized ivars.
3192const ASTRecordLayout &
3193ASTContext::getObjCLayout(const ObjCInterfaceDecl *D,
3194                          const ObjCImplementationDecl *Impl) const {
3195  // Retrieve the definition
3196  if (D->hasExternalLexicalStorage() && !D->getDefinition())
3197    getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl*>(D));
3198  D = D->getDefinition();
3199  assert(D && D->isThisDeclarationADefinition() && "Invalid interface decl!");
3200
3201  // Look up this layout; if it is already laid out, return what we have.
3202  const ObjCContainerDecl *Key =
3203    Impl ? (const ObjCContainerDecl*) Impl : (const ObjCContainerDecl*) D;
3204  if (const ASTRecordLayout *Entry = ObjCLayouts[Key])
3205    return *Entry;
3206
3207  // Add in synthesized ivar count if laying out an implementation.
3208  if (Impl) {
3209    unsigned SynthCount = CountNonClassIvars(D);
3210    // If there aren't any synthesized ivars then reuse the interface
3211    // entry. Note we can't cache this because we simply free all
3212    // entries later; however we shouldn't look up implementations
3213    // frequently.
3214    if (SynthCount == 0)
3215      return getObjCLayout(D, 0);
3216  }
3217
3218  RecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/0);
3219  Builder.Layout(D);
3220
3221  const ASTRecordLayout *NewEntry =
3222    new (*this) ASTRecordLayout(*this, Builder.getSize(),
3223                                Builder.Alignment,
3224                                Builder.getDataSize(),
3225                                Builder.FieldOffsets.data(),
3226                                Builder.FieldOffsets.size());
3227
3228  ObjCLayouts[Key] = NewEntry;
3229
3230  return *NewEntry;
3231}
3232
3233static void PrintOffset(raw_ostream &OS,
3234                        CharUnits Offset, unsigned IndentLevel) {
3235  OS << llvm::format("%4" PRId64 " | ", (int64_t)Offset.getQuantity());
3236  OS.indent(IndentLevel * 2);
3237}
3238
3239static void PrintIndentNoOffset(raw_ostream &OS, unsigned IndentLevel) {
3240  OS << "     | ";
3241  OS.indent(IndentLevel * 2);
3242}
3243
3244static void DumpCXXRecordLayout(raw_ostream &OS,
3245                                const CXXRecordDecl *RD, const ASTContext &C,
3246                                CharUnits Offset,
3247                                unsigned IndentLevel,
3248                                const char* Description,
3249                                bool IncludeVirtualBases) {
3250  const ASTRecordLayout &Layout = C.getASTRecordLayout(RD);
3251
3252  PrintOffset(OS, Offset, IndentLevel);
3253  OS << C.getTypeDeclType(const_cast<CXXRecordDecl *>(RD)).getAsString();
3254  if (Description)
3255    OS << ' ' << Description;
3256  if (RD->isEmpty())
3257    OS << " (empty)";
3258  OS << '\n';
3259
3260  IndentLevel++;
3261
3262  const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
3263  bool HasOwnVFPtr = Layout.hasOwnVFPtr();
3264  bool HasOwnVBPtr = Layout.hasOwnVBPtr();
3265
3266  // Vtable pointer.
3267  if (RD->isDynamicClass() && !PrimaryBase && !isMsLayout(RD)) {
3268    PrintOffset(OS, Offset, IndentLevel);
3269    OS << '(' << *RD << " vtable pointer)\n";
3270  } else if (HasOwnVFPtr) {
3271    PrintOffset(OS, Offset, IndentLevel);
3272    // vfptr (for Microsoft C++ ABI)
3273    OS << '(' << *RD << " vftable pointer)\n";
3274  }
3275
3276  // Dump (non-virtual) bases
3277  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
3278         E = RD->bases_end(); I != E; ++I) {
3279    assert(!I->getType()->isDependentType() &&
3280           "Cannot layout class with dependent bases.");
3281    if (I->isVirtual())
3282      continue;
3283
3284    const CXXRecordDecl *Base =
3285      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
3286
3287    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base);
3288
3289    DumpCXXRecordLayout(OS, Base, C, BaseOffset, IndentLevel,
3290                        Base == PrimaryBase ? "(primary base)" : "(base)",
3291                        /*IncludeVirtualBases=*/false);
3292  }
3293
3294  // vbptr (for Microsoft C++ ABI)
3295  if (HasOwnVBPtr) {
3296    PrintOffset(OS, Offset + Layout.getVBPtrOffset(), IndentLevel);
3297    OS << '(' << *RD << " vbtable pointer)\n";
3298  }
3299
3300  // Dump fields.
3301  uint64_t FieldNo = 0;
3302  for (CXXRecordDecl::field_iterator I = RD->field_begin(),
3303         E = RD->field_end(); I != E; ++I, ++FieldNo) {
3304    const FieldDecl &Field = **I;
3305    CharUnits FieldOffset = Offset +
3306      C.toCharUnitsFromBits(Layout.getFieldOffset(FieldNo));
3307
3308    if (const RecordType *RT = Field.getType()->getAs<RecordType>()) {
3309      if (const CXXRecordDecl *D = dyn_cast<CXXRecordDecl>(RT->getDecl())) {
3310        DumpCXXRecordLayout(OS, D, C, FieldOffset, IndentLevel,
3311                            Field.getName().data(),
3312                            /*IncludeVirtualBases=*/true);
3313        continue;
3314      }
3315    }
3316
3317    PrintOffset(OS, FieldOffset, IndentLevel);
3318    OS << Field.getType().getAsString() << ' ' << Field << '\n';
3319  }
3320
3321  if (!IncludeVirtualBases)
3322    return;
3323
3324  // Dump virtual bases.
3325  const ASTRecordLayout::VBaseOffsetsMapTy &vtordisps =
3326    Layout.getVBaseOffsetsMap();
3327  for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
3328         E = RD->vbases_end(); I != E; ++I) {
3329    assert(I->isVirtual() && "Found non-virtual class!");
3330    const CXXRecordDecl *VBase =
3331      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
3332
3333    CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBase);
3334
3335    if (vtordisps.find(VBase)->second.hasVtorDisp()) {
3336      PrintOffset(OS, VBaseOffset - CharUnits::fromQuantity(4), IndentLevel);
3337      OS << "(vtordisp for vbase " << *VBase << ")\n";
3338    }
3339
3340    DumpCXXRecordLayout(OS, VBase, C, VBaseOffset, IndentLevel,
3341                        VBase == PrimaryBase ?
3342                        "(primary virtual base)" : "(virtual base)",
3343                        /*IncludeVirtualBases=*/false);
3344  }
3345
3346  PrintIndentNoOffset(OS, IndentLevel - 1);
3347  OS << "[sizeof=" << Layout.getSize().getQuantity();
3348  if (!isMsLayout(RD))
3349    OS << ", dsize=" << Layout.getDataSize().getQuantity();
3350  OS << ", align=" << Layout.getAlignment().getQuantity() << '\n';
3351
3352  PrintIndentNoOffset(OS, IndentLevel - 1);
3353  OS << " nvsize=" << Layout.getNonVirtualSize().getQuantity();
3354  OS << ", nvalign=" << Layout.getNonVirtualAlign().getQuantity() << "]\n";
3355  OS << '\n';
3356}
3357
3358void ASTContext::DumpRecordLayout(const RecordDecl *RD,
3359                                  raw_ostream &OS,
3360                                  bool Simple) const {
3361  const ASTRecordLayout &Info = getASTRecordLayout(RD);
3362
3363  if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD))
3364    if (!Simple)
3365      return DumpCXXRecordLayout(OS, CXXRD, *this, CharUnits(), 0, 0,
3366                                 /*IncludeVirtualBases=*/true);
3367
3368  OS << "Type: " << getTypeDeclType(RD).getAsString() << "\n";
3369  if (!Simple) {
3370    OS << "Record: ";
3371    RD->dump();
3372  }
3373  OS << "\nLayout: ";
3374  OS << "<ASTRecordLayout\n";
3375  OS << "  Size:" << toBits(Info.getSize()) << "\n";
3376  if (!isMsLayout(RD))
3377    OS << "  DataSize:" << toBits(Info.getDataSize()) << "\n";
3378  OS << "  Alignment:" << toBits(Info.getAlignment()) << "\n";
3379  OS << "  FieldOffsets: [";
3380  for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i) {
3381    if (i) OS << ", ";
3382    OS << Info.getFieldOffset(i);
3383  }
3384  OS << "]>\n";
3385}
3386