Coverage Report

Created: 2017-10-03 07:32

/Users/buildslave/jenkins/sharedspace/clang-stage2-coverage-R@2/llvm/tools/clang/lib/AST/RecordLayoutBuilder.cpp
Line | Count | Source
1
//=== RecordLayoutBuilder.cpp - Helper class for building record layouts ---==//
2
//
3
//                     The LLVM Compiler Infrastructure
4
//
5
// This file is distributed under the University of Illinois Open Source
6
// License. See LICENSE.TXT for details.
7
//
8
//===----------------------------------------------------------------------===//
9
10
#include "clang/AST/RecordLayout.h"
11
#include "clang/AST/ASTContext.h"
12
#include "clang/AST/Attr.h"
13
#include "clang/AST/CXXInheritance.h"
14
#include "clang/AST/Decl.h"
15
#include "clang/AST/DeclCXX.h"
16
#include "clang/AST/DeclObjC.h"
17
#include "clang/AST/Expr.h"
18
#include "clang/Basic/TargetInfo.h"
19
#include "clang/Sema/SemaDiagnostic.h"
20
#include "llvm/ADT/SmallSet.h"
21
#include "llvm/Support/Format.h"
22
#include "llvm/Support/MathExtras.h"
23
24
using namespace clang;
25
26
namespace {
27
28
/// BaseSubobjectInfo - Represents a single base subobject in a complete class.
29
/// For a class hierarchy like
30
///
31
/// class A { };
32
/// class B : A { };
33
/// class C : A, B { };
34
///
35
/// The BaseSubobjectInfo graph for C will have three BaseSubobjectInfo
36
/// instances, one for B and two for A.
37
///
38
/// If a base is virtual, it will only have one BaseSubobjectInfo allocated.
39
struct BaseSubobjectInfo {
40
  /// Class - The class for this base info.
41
  const CXXRecordDecl *Class;
42
43
  /// IsVirtual - Whether the BaseInfo represents a virtual base or not.
44
  bool IsVirtual;
45
46
  /// Bases - Information about the base subobjects.
47
  SmallVector<BaseSubobjectInfo*, 4> Bases;
48
49
  /// PrimaryVirtualBaseInfo - Holds the base info for the primary virtual base
50
  /// of this base info (if one exists).
51
  BaseSubobjectInfo *PrimaryVirtualBaseInfo;
52
53
  // FIXME: Document.
54
  const BaseSubobjectInfo *Derived;
55
};
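As a standalone illustration of the comment above (the classes A, B, C come from that comment; the static_asserts are not part of this file), the hierarchy can be compiled on its own. Under the Itanium C++ ABI used by Clang and GCC, the two A subobjects inside C must end up at distinct offsets, which is visible in sizeof:

struct A { };
struct B : A { };
struct C : A, B { };

static_assert(sizeof(A) == 1, "an empty class still occupies one byte");
static_assert(sizeof(B) == 1, "the empty base A is placed at offset 0 inside B");
static_assert(sizeof(C) == 2, "C's direct A and the A inside B need different offsets");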
56
57
/// \brief Externally provided layout. Typically used when the AST source, such
58
/// as DWARF, lacks all the information that was available at compile time, such
59
/// as alignment attributes on fields and pragmas in effect.
60
struct ExternalLayout {
61
113k
  ExternalLayout() : Size(0), Align(0) {}
62
63
  /// \brief Overall record size in bits.
64
  uint64_t Size;
65
66
  /// \brief Overall record alignment in bits.
67
  uint64_t Align;
68
69
  /// \brief Record field offsets in bits.
70
  llvm::DenseMap<const FieldDecl *, uint64_t> FieldOffsets;
71
72
  /// \brief Direct, non-virtual base offsets.
73
  llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsets;
74
75
  /// \brief Virtual base offsets.
76
  llvm::DenseMap<const CXXRecordDecl *, CharUnits> VirtualBaseOffsets;
77
78
  /// Get the offset of the given field. The external source must provide
79
  /// entries for all fields in the record.
80
61
  uint64_t getExternalFieldOffset(const FieldDecl *FD) {
81
61
    assert(FieldOffsets.count(FD) &&
82
61
           "Field does not have an external offset");
83
61
    return FieldOffsets[FD];
84
61
  }
85
86
1
  bool getExternalNVBaseOffset(const CXXRecordDecl *RD, CharUnits &BaseOffset) {
87
1
    auto Known = BaseOffsets.find(RD);
88
1
    if (Known == BaseOffsets.end())
89
1
      return false;
90
0
    BaseOffset = Known->second;
91
0
    return true;
92
0
  }
93
94
7
  bool getExternalVBaseOffset(const CXXRecordDecl *RD, CharUnits &BaseOffset) {
95
7
    auto Known = VirtualBaseOffsets.find(RD);
96
7
    if (Known == VirtualBaseOffsets.end())
97
7
      return false;
98
0
    BaseOffset = Known->second;
99
0
    return true;
100
0
  }
101
};
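A minimal standalone sketch of the lookup pattern used by getExternalNVBaseOffset and getExternalVBaseOffset above, with std::map standing in for llvm::DenseMap and int64_t for CharUnits. The names ExternalLayoutSketch, getBaseOffset, and RecordTag are illustrative only, not taken from this file:

#include <cassert>
#include <cstdint>
#include <map>

struct ExternalLayoutSketch {
  std::map<const void *, int64_t> BaseOffsets; // keyed by a record declaration

  // Report whether the external source supplied an offset for RD.
  bool getBaseOffset(const void *RD, int64_t &BaseOffset) const {
    auto Known = BaseOffsets.find(RD);
    if (Known == BaseOffsets.end())
      return false;
    BaseOffset = Known->second;
    return true;
  }
};

int main() {
  ExternalLayoutSketch Layout;
  int RecordTag = 0;            // stands in for a CXXRecordDecl pointer
  int64_t Offset = -1;
  assert(!Layout.getBaseOffset(&RecordTag, Offset)); // nothing provided yet
  Layout.BaseOffsets[&RecordTag] = 8;
  assert(Layout.getBaseOffset(&RecordTag, Offset) && Offset == 8);
  return 0;
}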
102
103
/// EmptySubobjectMap - Keeps track of which empty subobjects exist at different
104
/// offsets while laying out a C++ class.
105
class EmptySubobjectMap {
106
  const ASTContext &Context;
107
  uint64_t CharWidth;
108
  
109
  /// Class - The class whose empty entries we're keeping track of.
110
  const CXXRecordDecl *Class;
111
112
  /// EmptyClassOffsets - A map from offsets to empty record decls.
113
  typedef llvm::TinyPtrVector<const CXXRecordDecl *> ClassVectorTy;
114
  typedef llvm::DenseMap<CharUnits, ClassVectorTy> EmptyClassOffsetsMapTy;
115
  EmptyClassOffsetsMapTy EmptyClassOffsets;
116
  
117
  /// MaxEmptyClassOffset - The highest offset known to contain an empty
118
  /// base subobject.
119
  CharUnits MaxEmptyClassOffset;
120
  
121
  /// ComputeEmptySubobjectSizes - Compute the size of the largest base or
122
  /// member subobject that is empty.
123
  void ComputeEmptySubobjectSizes();
124
  
125
  void AddSubobjectAtOffset(const CXXRecordDecl *RD, CharUnits Offset);
126
  
127
  void UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
128
                                 CharUnits Offset, bool PlacingEmptyBase);
129
  
130
  void UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD, 
131
                                  const CXXRecordDecl *Class,
132
                                  CharUnits Offset);
133
  void UpdateEmptyFieldSubobjects(const FieldDecl *FD, CharUnits Offset);
134
  
135
  /// AnyEmptySubobjectsBeyondOffset - Returns whether there are any empty
136
  /// subobjects beyond the given offset.
137
114k
  bool AnyEmptySubobjectsBeyondOffset(CharUnits Offset) const {
138
114k
    return Offset <= MaxEmptyClassOffset;
139
114k
  }
140
141
  CharUnits 
142
8.35k
  getFieldOffset(const ASTRecordLayout &Layout, unsigned FieldNo) const {
143
8.35k
    uint64_t FieldOffset = Layout.getFieldOffset(FieldNo);
144
8.35k
    assert(FieldOffset % CharWidth == 0 && 
145
8.35k
           "Field offset not at char boundary!");
146
8.35k
147
8.35k
    return Context.toCharUnitsFromBits(FieldOffset);
148
8.35k
  }
149
150
protected:
151
  bool CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
152
                                 CharUnits Offset) const;
153
154
  bool CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
155
                                     CharUnits Offset);
156
157
  bool CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD, 
158
                                      const CXXRecordDecl *Class,
159
                                      CharUnits Offset) const;
160
  bool CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
161
                                      CharUnits Offset) const;
162
163
public:
164
  /// This holds the size of the largest empty subobject (either a base
165
  /// or a member). Will be zero if the record being built doesn't contain
166
  /// any empty classes.
167
  CharUnits SizeOfLargestEmptySubobject;
168
169
  EmptySubobjectMap(const ASTContext &Context, const CXXRecordDecl *Class)
170
40.3k
  : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) {
171
40.3k
      ComputeEmptySubobjectSizes();
172
40.3k
  }
173
174
  /// CanPlaceBaseAtOffset - Return whether the given base class can be placed
175
  /// at the given offset.
176
  /// Returns false if placing the record will result in two components
177
  /// (direct or indirect) of the same type having the same offset.
178
  bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
179
                            CharUnits Offset);
180
181
  /// CanPlaceFieldAtOffset - Return whether a field can be placed at the given
182
  /// offset.
183
  bool CanPlaceFieldAtOffset(const FieldDecl *FD, CharUnits Offset);
184
};
185
186
40.3k
void EmptySubobjectMap::ComputeEmptySubobjectSizes() {
187
40.3k
  // Check the bases.
188
8.89k
  for (const CXXBaseSpecifier &Base : Class->bases()) {
189
8.89k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
190
8.89k
191
8.89k
    CharUnits EmptySize;
192
8.89k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
193
8.89k
    if (BaseDecl->isEmpty()) {
194
632
      // If the class decl is empty, get its size.
195
632
      EmptySize = Layout.getSize();
196
8.89k
    } else {
197
8.26k
      // Otherwise, we get the largest empty subobject for the decl.
198
8.26k
      EmptySize = Layout.getSizeOfLargestEmptySubobject();
199
8.26k
    }
200
8.89k
201
8.89k
    if (EmptySize > SizeOfLargestEmptySubobject)
202
679
      SizeOfLargestEmptySubobject = EmptySize;
203
8.89k
  }
204
40.3k
205
40.3k
  // Check the fields.
206
111k
  for (const FieldDecl *FD : Class->fields()) {
207
111k
    const RecordType *RT =
208
111k
        Context.getBaseElementType(FD->getType())->getAs<RecordType>();
209
111k
210
111k
    // We only care about record types.
211
111k
    if (!RT)
212
99.0k
      continue;
213
12.4k
214
12.4k
    CharUnits EmptySize;
215
12.4k
    const CXXRecordDecl *MemberDecl = RT->getAsCXXRecordDecl();
216
12.4k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(MemberDecl);
217
12.4k
    if (MemberDecl->isEmpty()) {
218
398
      // If the class decl is empty, get its size.
219
398
      EmptySize = Layout.getSize();
220
12.4k
    } else {
221
12.0k
      // Otherwise, we get the largest empty subobject for the decl.
222
12.0k
      EmptySize = Layout.getSizeOfLargestEmptySubobject();
223
12.0k
    }
224
12.4k
225
12.4k
    if (EmptySize > SizeOfLargestEmptySubobject)
226
387
      SizeOfLargestEmptySubobject = EmptySize;
227
111k
  }
228
40.3k
}
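The size computed here bounds how far conflicts between empty subobjects can reach. A standalone example of the kind of conflict being tracked (the names Empty, WithEmptyBase, TwoEmpties are made up for illustration), assuming the Itanium ABI on a typical 64-bit target:

struct Empty { };
struct WithEmptyBase : Empty { char c; };
struct TwoEmpties : Empty { Empty e; };

static_assert(sizeof(WithEmptyBase) == 1,
              "the empty base overlaps the char member at offset 0");
static_assert(sizeof(TwoEmpties) == 2,
              "the Empty member cannot share offset 0 with the Empty base");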
229
230
bool
231
EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD, 
232
4.94k
                                             CharUnits Offset) const {
233
4.94k
  // We only need to check empty bases.
234
4.94k
  if (!RD->isEmpty())
235
3.80k
    return true;
236
1.14k
237
1.14k
  EmptyClassOffsetsMapTy::const_iterator I = EmptyClassOffsets.find(Offset);
238
1.14k
  if (I == EmptyClassOffsets.end())
239
959
    return true;
240
183
241
183
  const ClassVectorTy &Classes = I->second;
242
183
  if (std::find(Classes.begin(), Classes.end(), RD) == Classes.end())
243
86
    return true;
244
97
245
97
  // There is already an empty class of the same type at this offset.
246
97
  return false;
247
97
}
248
  
249
void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD, 
250
1.60k
                                             CharUnits Offset) {
251
1.60k
  // We only care about empty bases.
252
1.60k
  if (!RD->isEmpty())
253
425
    return;
254
1.17k
255
1.17k
  // If we have empty structures inside a union, we can assign both
256
1.17k
  // the same offset. Just avoid pushing them twice in the list.
257
1.17k
  ClassVectorTy &Classes = EmptyClassOffsets[Offset];
258
1.17k
  if (std::find(Classes.begin(), Classes.end(), RD) != Classes.end())
259
1
    return;
260
1.17k
  
261
1.17k
  Classes.push_back(RD);
262
1.17k
  
263
1.17k
  // Update the empty class offset.
264
1.17k
  if (Offset > MaxEmptyClassOffset)
265
122
    MaxEmptyClassOffset = Offset;
266
1.60k
}
267
268
bool
269
EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
270
1.30k
                                                 CharUnits Offset) {
271
1.30k
  // We don't have to keep looking past the maximum offset that's known to
272
1.30k
  // contain an empty class.
273
1.30k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
274
192
    return true;
275
1.11k
276
1.11k
  
  if (!CanPlaceSubobjectAtOffset(Info->Class, Offset))
277
78
    return false;
278
1.03k
279
1.03k
  // Traverse all non-virtual bases.
280
1.03k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
281
311
  for (const BaseSubobjectInfo *Base : Info->Bases) {
282
311
    if (Base->IsVirtual)
283
89
      continue;
284
222
285
222
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
286
222
287
222
    if (!CanPlaceBaseSubobjectAtOffset(Base, BaseOffset))
288
59
      return false;
289
980
  }
290
980
291
980
  
  if (Info->PrimaryVirtualBaseInfo) {
292
20
    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
293
20
294
20
    if (Info == PrimaryVirtualBaseInfo->Derived) {
295
20
      if (!CanPlaceBaseSubobjectAtOffset(PrimaryVirtualBaseInfo, Offset))
296
4
        return false;
297
976
    }
298
20
  }
299
976
  
300
976
  // Traverse all member variables.
301
976
  unsigned FieldNo = 0;
302
976
  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(), 
303
1.10k
       E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
304
130
    if (I->isBitField())
305
1
      continue;
306
129
307
129
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
308
129
    if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
309
2
      return false;
310
130
  }
311
976
312
974
  return true;
313
1.30k
}
314
315
void EmptySubobjectMap::UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info, 
316
                                                  CharUnits Offset,
317
1.29k
                                                  bool PlacingEmptyBase) {
318
1.29k
  if (!PlacingEmptyBase && Offset >= SizeOfLargestEmptySubobject) {
319
139
    // We know that the only empty subobjects that can conflict with empty
320
139
    // subobject of non-empty bases, are empty bases that can be placed at
321
139
    // offset zero. Because of this, we only need to keep track of empty base 
322
139
    // subobjects with offsets less than the size of the largest empty
323
139
    // subobject for our class.    
324
139
    return;
325
139
  }
326
1.15k
327
1.15k
  AddSubobjectAtOffset(Info->Class, Offset);
328
1.15k
329
1.15k
  // Traverse all non-virtual bases.
330
1.15k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
331
380
  for (const BaseSubobjectInfo *Base : Info->Bases) {
332
380
    if (Base->IsVirtual)
333
85
      continue;
334
295
335
295
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
336
295
    UpdateEmptyBaseSubobjects(Base, BaseOffset, PlacingEmptyBase);
337
295
  }
338
1.15k
339
1.15k
  if (Info->PrimaryVirtualBaseInfo) {
340
16
    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
341
16
    
342
16
    if (Info == PrimaryVirtualBaseInfo->Derived)
343
16
      UpdateEmptyBaseSubobjects(PrimaryVirtualBaseInfo, Offset,
344
16
                                PlacingEmptyBase);
345
16
  }
346
1.15k
347
1.15k
  // Traverse all member variables.
348
1.15k
  unsigned FieldNo = 0;
349
1.15k
  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(), 
350
1.28k
       E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
351
127
    if (I->isBitField())
352
1
      continue;
353
126
354
126
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
355
126
    UpdateEmptyFieldSubobjects(*I, FieldOffset);
356
126
  }
357
1.29k
}
358
359
bool EmptySubobjectMap::CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
360
9.12k
                                             CharUnits Offset) {
361
9.12k
  // If we know this class doesn't have any empty subobjects we don't need to
362
9.12k
  // bother checking.
363
9.12k
  if (SizeOfLargestEmptySubobject.isZero())
364
8.05k
    return true;
365
1.06k
366
1.06k
  
  if (!CanPlaceBaseSubobjectAtOffset(Info, Offset))
367
80
    return false;
368
987
369
987
  // We are able to place the base at this offset. Make sure to update the
370
987
  // empty base subobject map.
371
987
  UpdateEmptyBaseSubobjects(Info, Offset, Info->Class->isEmpty());
372
987
  return true;
373
987
}
374
375
bool
376
EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD, 
377
                                                  const CXXRecordDecl *Class,
378
3.84k
                                                  CharUnits Offset) const {
379
3.84k
  // We don't have to keep looking past the maximum offset that's known to
380
3.84k
  // contain an empty class.
381
3.84k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
382
13
    return true;
383
3.82k
384
3.82k
  
  if (!CanPlaceSubobjectAtOffset(RD, Offset))
385
19
    return false;
386
3.81k
  
387
3.81k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
388
3.81k
389
3.81k
  // Traverse all non-virtual bases.
390
642
  for (const CXXBaseSpecifier &Base : RD->bases()) {
391
642
    if (Base.isVirtual())
392
20
      continue;
393
622
394
622
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
395
622
396
622
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
397
622
    if (!CanPlaceFieldSubobjectAtOffset(BaseDecl, Class, BaseOffset))
398
3
      return false;
399
3.80k
  }
400
3.80k
401
3.80k
  
  if (RD == Class) {
402
3.18k
    // This is the most derived class, traverse virtual bases as well.
403
20
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
404
20
      const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
405
20
406
20
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
407
20
      if (!CanPlaceFieldSubobjectAtOffset(VBaseDecl, Class, VBaseOffset))
408
1
        return false;
409
3.80k
    }
410
3.18k
  }
411
3.80k
    
412
3.80k
  // Traverse all member variables.
413
3.80k
  unsigned FieldNo = 0;
414
3.80k
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
415
11.7k
       
       I != E; ++I, ++FieldNo) {
416
7.89k
    if (I->isBitField())
417
45
      continue;
418
7.85k
419
7.85k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
420
7.85k
    
421
7.85k
    if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
422
1
      return false;
423
7.89k
  }
424
3.80k
425
3.80k
  return true;
426
3.84k
}
427
428
bool
429
EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
430
108k
                                                  CharUnits Offset) const {
431
108k
  // We don't have to keep looking past the maximum offset that's known to
432
108k
  // contain an empty class.
433
108k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
434
82.0k
    return true;
435
26.5k
  
436
26.5k
  QualType T = FD->getType();
437
26.5k
  if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
438
2.93k
    return CanPlaceFieldSubobjectAtOffset(RD, RD, Offset);
439
23.6k
440
23.6k
  // If we have an array type we need to look at every element.
441
23.6k
  
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
442
2.51k
    QualType ElemTy = Context.getBaseElementType(AT);
443
2.51k
    const RecordType *RT = ElemTy->getAs<RecordType>();
444
2.51k
    if (!RT)
445
2.24k
      return true;
446
265
447
265
    const CXXRecordDecl *RD = RT->getAsCXXRecordDecl();
448
265
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
449
265
450
265
    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
451
265
    CharUnits ElementOffset = Offset;
452
525
    for (uint64_t I = 0; I != NumElements; ++I) {
453
465
      // We don't have to keep looking past the maximum offset that's known to
454
465
      // contain an empty class.
455
465
      if (!AnyEmptySubobjectsBeyondOffset(ElementOffset))
456
201
        return true;
457
264
      
458
264
      
      if (!CanPlaceFieldSubobjectAtOffset(RD, RD, ElementOffset))
459
4
        return false;
460
260
461
260
      ElementOffset += Layout.getSize();
462
260
    }
463
2.51k
  }
464
23.6k
465
21.1k
  return true;
466
108k
}
467
468
bool
469
EmptySubobjectMap::CanPlaceFieldAtOffset(const FieldDecl *FD, 
470
100k
                                         CharUnits Offset) {
471
100k
  if (!CanPlaceFieldSubobjectAtOffset(FD, Offset))
472
17
    return false;
473
100k
  
474
100k
  // We are able to place the member variable at this offset.
475
100k
  // Make sure to update the empty base subobject map.
476
100k
  UpdateEmptyFieldSubobjects(FD, Offset);
477
100k
  return true;
478
100k
}
479
480
void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD, 
481
                                                   const CXXRecordDecl *Class,
482
11.5k
                                                   CharUnits Offset) {
483
11.5k
  // We know that the only empty subobjects that can conflict with empty
484
11.5k
  // field subobjects are subobjects of empty bases that can be placed at offset
485
11.5k
  // zero. Because of this, we only need to keep track of empty field 
486
11.5k
  // subobjects with offsets less than the size of the largest empty
487
11.5k
  // subobject for our class.
488
11.5k
  if (Offset >= SizeOfLargestEmptySubobject)
489
11.1k
    return;
490
445
491
445
  AddSubobjectAtOffset(RD, Offset);
492
445
493
445
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
494
445
495
445
  // Traverse all non-virtual bases.
496
50
  for (const CXXBaseSpecifier &Base : RD->bases()) {
497
50
    if (Base.isVirtual())
498
19
      continue;
499
31
500
31
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
501
31
502
31
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
503
31
    UpdateEmptyFieldSubobjects(BaseDecl, Class, BaseOffset);
504
31
  }
505
445
506
445
  if (RD == Class) {
507
407
    // This is the most derived class, traverse virtual bases as well.
508
19
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
509
19
      const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
510
19
511
19
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
512
19
      UpdateEmptyFieldSubobjects(VBaseDecl, Class, VBaseOffset);
513
19
    }
514
407
  }
515
445
  
516
445
  // Traverse all member variables.
517
445
  unsigned FieldNo = 0;
518
445
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
519
691
       
       I != E; ++I, ++FieldNo) {
520
246
    if (I->isBitField())
521
3
      continue;
522
243
523
243
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
524
243
525
243
    UpdateEmptyFieldSubobjects(*I, FieldOffset);
526
243
  }
527
11.5k
}
528
  
529
void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const FieldDecl *FD,
530
100k
                                                   CharUnits Offset) {
531
100k
  QualType T = FD->getType();
532
100k
  if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl()) {
533
11.5k
    UpdateEmptyFieldSubobjects(RD, RD, Offset);
534
11.5k
    return;
535
11.5k
  }
536
89.4k
537
89.4k
  // If we have an array type we need to update every element.
538
89.4k
  
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
539
7.18k
    QualType ElemTy = Context.getBaseElementType(AT);
540
7.18k
    const RecordType *RT = ElemTy->getAs<RecordType>();
541
7.18k
    if (!RT)
542
6.66k
      return;
543
527
544
527
    const CXXRecordDecl *RD = RT->getAsCXXRecordDecl();
545
527
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
546
527
    
547
527
    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
548
527
    CharUnits ElementOffset = Offset;
549
527
    
550
543
    for (uint64_t I = 0; I != NumElements; ++I) {
551
533
      // We know that the only empty subobjects that can conflict with empty
552
533
      // field subobjects are subobjects of empty bases that can be placed at 
553
533
      // offset zero. Because of this, we only need to keep track of empty field
554
533
      // subobjects with offsets less than the size of the largest empty
555
533
      // subobject for our class.
556
533
      if (ElementOffset >= SizeOfLargestEmptySubobject)
557
517
        return;
558
16
559
16
      UpdateEmptyFieldSubobjects(RD, RD, ElementOffset);
560
16
      ElementOffset += Layout.getSize();
561
16
    }
562
7.18k
  }
563
100k
}
564
565
typedef llvm::SmallPtrSet<const CXXRecordDecl*, 4> ClassSetTy;
566
567
class ItaniumRecordLayoutBuilder {
568
protected:
569
  // FIXME: Remove this and make the appropriate fields public.
570
  friend class clang::ASTContext;
571
572
  const ASTContext &Context;
573
574
  EmptySubobjectMap *EmptySubobjects;
575
576
  /// Size - The current size of the record layout.
577
  uint64_t Size;
578
579
  /// Alignment - The current alignment of the record layout.
580
  CharUnits Alignment;
581
582
  /// \brief The alignment if attribute packed is not used.
583
  CharUnits UnpackedAlignment;
584
585
  SmallVector<uint64_t, 16> FieldOffsets;
586
587
  /// \brief Whether the external AST source has provided a layout for this
588
  /// record.
589
  unsigned UseExternalLayout : 1;
590
591
  /// \brief Whether we need to infer alignment, even when we have an 
592
  /// externally-provided layout.
593
  unsigned InferAlignment : 1;
594
  
595
  /// Packed - Whether the record is packed or not.
596
  unsigned Packed : 1;
597
598
  unsigned IsUnion : 1;
599
600
  unsigned IsMac68kAlign : 1;
601
  
602
  unsigned IsMsStruct : 1;
603
604
  /// UnfilledBitsInLastUnit - If the last field laid out was a bitfield,
605
  /// this contains the number of bits in the last unit that can be used for
606
  /// an adjacent bitfield if necessary.  The unit in question is usually
607
  /// a byte, but larger units are used if IsMsStruct.
608
  unsigned char UnfilledBitsInLastUnit;
609
  /// LastBitfieldTypeSize - If IsMsStruct, represents the size of the type
610
  /// of the previous field if it was a bitfield.
611
  unsigned char LastBitfieldTypeSize;
612
613
  /// MaxFieldAlignment - The maximum allowed field alignment. This is set by
614
  /// #pragma pack.
615
  CharUnits MaxFieldAlignment;
616
617
  /// DataSize - The data size of the record being laid out.
618
  uint64_t DataSize;
619
620
  CharUnits NonVirtualSize;
621
  CharUnits NonVirtualAlignment;
622
623
  /// PrimaryBase - the primary base class (if one exists) of the class
624
  /// we're laying out.
625
  const CXXRecordDecl *PrimaryBase;
626
627
  /// PrimaryBaseIsVirtual - Whether the primary base of the class we're laying
628
  /// out is virtual.
629
  bool PrimaryBaseIsVirtual;
630
631
  /// HasOwnVFPtr - Whether the class provides its own vtable/vftbl
632
  /// pointer, as opposed to inheriting one from a primary base class.
633
  bool HasOwnVFPtr;
634
635
  /// \brief the flag of field offset changing due to packed attribute.
636
  bool HasPackedField;
637
638
  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
639
640
  /// Bases - base classes and their offsets in the record.
641
  BaseOffsetsMapTy Bases;
642
643
  // VBases - virtual base classes and their offsets in the record.
644
  ASTRecordLayout::VBaseOffsetsMapTy VBases;
645
646
  /// IndirectPrimaryBases - Virtual base classes, direct or indirect, that are
647
  /// primary base classes for some other direct or indirect base class.
648
  CXXIndirectPrimaryBaseSet IndirectPrimaryBases;
649
650
  /// FirstNearlyEmptyVBase - The first nearly empty virtual base class in
651
  /// inheritance graph order. Used for determining the primary base class.
652
  const CXXRecordDecl *FirstNearlyEmptyVBase;
653
654
  /// VisitedVirtualBases - A set of all the visited virtual bases, used to
655
  /// avoid visiting virtual bases more than once.
656
  llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBases;
657
658
  /// Valid if UseExternalLayout is true.
659
  ExternalLayout External;
660
661
  ItaniumRecordLayoutBuilder(const ASTContext &Context,
662
                             EmptySubobjectMap *EmptySubobjects)
663
      : Context(Context), EmptySubobjects(EmptySubobjects), Size(0),
664
        Alignment(CharUnits::One()), UnpackedAlignment(CharUnits::One()),
665
        UseExternalLayout(false), InferAlignment(false), Packed(false),
666
        IsUnion(false), IsMac68kAlign(false), IsMsStruct(false),
667
        UnfilledBitsInLastUnit(0), LastBitfieldTypeSize(0),
668
        MaxFieldAlignment(CharUnits::Zero()), DataSize(0),
669
        NonVirtualSize(CharUnits::Zero()),
670
        NonVirtualAlignment(CharUnits::One()), PrimaryBase(nullptr),
671
        PrimaryBaseIsVirtual(false), HasOwnVFPtr(false),
672
109k
        HasPackedField(false), FirstNearlyEmptyVBase(nullptr) {}
673
674
  void Layout(const RecordDecl *D);
675
  void Layout(const CXXRecordDecl *D);
676
  void Layout(const ObjCInterfaceDecl *D);
677
678
  void LayoutFields(const RecordDecl *D);
679
  void LayoutField(const FieldDecl *D, bool InsertExtraPadding);
680
  void LayoutWideBitField(uint64_t FieldSize, uint64_t TypeSize,
681
                          bool FieldPacked, const FieldDecl *D);
682
  void LayoutBitField(const FieldDecl *D);
683
684
0
  TargetCXXABI getCXXABI() const {
685
0
    return Context.getTargetInfo().getCXXABI();
686
0
  }
687
688
  /// BaseSubobjectInfoAllocator - Allocator for BaseSubobjectInfo objects.
689
  llvm::SpecificBumpPtrAllocator<BaseSubobjectInfo> BaseSubobjectInfoAllocator;
690
  
691
  typedef llvm::DenseMap<const CXXRecordDecl *, BaseSubobjectInfo *>
692
    BaseSubobjectInfoMapTy;
693
694
  /// VirtualBaseInfo - Map from all the (direct or indirect) virtual bases
695
  /// of the class we're laying out to their base subobject info.
696
  BaseSubobjectInfoMapTy VirtualBaseInfo;
697
  
698
  /// NonVirtualBaseInfo - Map from all the direct non-virtual bases of the
699
  /// class we're laying out to their base subobject info.
700
  BaseSubobjectInfoMapTy NonVirtualBaseInfo;
701
702
  /// ComputeBaseSubobjectInfo - Compute the base subobject information for the
703
  /// bases of the given class.
704
  void ComputeBaseSubobjectInfo(const CXXRecordDecl *RD);
705
706
  /// ComputeBaseSubobjectInfo - Compute the base subobject information for a
707
  /// single class and all of its base classes.
708
  BaseSubobjectInfo *ComputeBaseSubobjectInfo(const CXXRecordDecl *RD, 
709
                                              bool IsVirtual,
710
                                              BaseSubobjectInfo *Derived);
711
712
  /// DeterminePrimaryBase - Determine the primary base of the given class.
713
  void DeterminePrimaryBase(const CXXRecordDecl *RD);
714
715
  void SelectPrimaryVBase(const CXXRecordDecl *RD);
716
717
  void EnsureVTablePointerAlignment(CharUnits UnpackedBaseAlign);
718
719
  /// LayoutNonVirtualBases - Determines the primary base class (if any) and
720
  /// lays it out. Will then proceed to lay out all non-virtual base classes.
721
  void LayoutNonVirtualBases(const CXXRecordDecl *RD);
722
723
  /// LayoutNonVirtualBase - Lays out a single non-virtual base.
724
  void LayoutNonVirtualBase(const BaseSubobjectInfo *Base);
725
726
  void AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
727
                                    CharUnits Offset);
728
729
  /// LayoutVirtualBases - Lays out all the virtual bases.
730
  void LayoutVirtualBases(const CXXRecordDecl *RD,
731
                          const CXXRecordDecl *MostDerivedClass);
732
733
  /// LayoutVirtualBase - Lays out a single virtual base.
734
  void LayoutVirtualBase(const BaseSubobjectInfo *Base);
735
736
  /// LayoutBase - Will lay out a base and return the offset where it was
737
  /// placed, in chars.
738
  CharUnits LayoutBase(const BaseSubobjectInfo *Base);
739
740
  /// InitializeLayout - Initialize record layout for the given record decl.
741
  void InitializeLayout(const Decl *D);
742
743
  /// FinishLayout - Finalize record layout. Adjust record size based on the
744
  /// alignment.
745
  void FinishLayout(const NamedDecl *D);
746
747
  void UpdateAlignment(CharUnits NewAlignment, CharUnits UnpackedNewAlignment);
748
688
  void UpdateAlignment(CharUnits NewAlignment) {
749
688
    UpdateAlignment(NewAlignment, NewAlignment);
750
688
  }
751
752
  /// \brief Retrieve the externally-supplied field offset for the given
753
  /// field.
754
  ///
755
  /// \param Field The field whose offset is being queried.
756
  /// \param ComputedOffset The offset that we've computed for this field.
757
  uint64_t updateExternalFieldOffset(const FieldDecl *Field, 
758
                                     uint64_t ComputedOffset);
759
  
760
  void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset,
761
                          uint64_t UnpackedOffset, unsigned UnpackedAlign,
762
                          bool isPacked, const FieldDecl *D);
763
764
  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID);
765
766
216k
  CharUnits getSize() const { 
767
216k
    assert(Size % Context.getCharWidth() == 0);
768
216k
    return Context.toCharUnitsFromBits(Size); 
769
216k
  }
770
1.01M
  uint64_t getSizeInBits() const { return Size; }
771
772
17.3k
  void setSize(CharUnits NewSize) { Size = Context.toBits(NewSize); }
773
596k
  void setSize(uint64_t NewSize) { Size = NewSize; }
774
775
0
  CharUnits getAligment() const { return Alignment; }
776
777
464k
  CharUnits getDataSize() const { 
778
464k
    assert(DataSize % Context.getCharWidth() == 0);
779
464k
    return Context.toCharUnitsFromBits(DataSize); 
780
464k
  }
781
1.03M
  uint64_t getDataSizeInBits() const { return DataSize; }
782
783
439k
  void setDataSize(CharUnits NewSize) { DataSize = Context.toBits(NewSize); }
784
60.3k
  void setDataSize(uint64_t NewSize) { DataSize = NewSize; }
785
786
  ItaniumRecordLayoutBuilder(const ItaniumRecordLayoutBuilder &) = delete;
787
  void operator=(const ItaniumRecordLayoutBuilder &) = delete;
788
};
789
} // end anonymous namespace
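MaxFieldAlignment in the builder above models #pragma pack. A standalone example of its effect (the name Packed2 is made up for illustration), assuming a typical 64-bit target where int is 4 bytes with 4-byte natural alignment:

#pragma pack(push, 2)
struct Packed2 {
  char c;   // offset 0
  int i;    // alignment capped at 2, so offset 2 instead of 4
};
#pragma pack(pop)

static_assert(sizeof(Packed2) == 6, "1 byte + 1 byte padding + 4-byte int");
static_assert(alignof(Packed2) == 2, "record alignment is capped at 2 bytes");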
790
791
672
void ItaniumRecordLayoutBuilder::SelectPrimaryVBase(const CXXRecordDecl *RD) {
792
496
  for (const auto &I : RD->bases()) {
793
496
    assert(!I.getType()->isDependentType() &&
794
496
           "Cannot layout class with dependent bases.");
795
496
796
496
    const CXXRecordDecl *Base = I.getType()->getAsCXXRecordDecl();
797
496
798
496
    // Check if this is a nearly empty virtual base.
799
496
    if (I.isVirtual() && Context.isNearlyEmpty(Base)) {
800
141
      // If it's not an indirect primary base, then we've found our primary
801
141
      // base.
802
141
      if (!IndirectPrimaryBases.count(Base)) {
803
138
        PrimaryBase = Base;
804
138
        PrimaryBaseIsVirtual = true;
805
138
        return;
806
138
      }
807
3
808
3
      // Is this the first nearly empty virtual base?
809
3
      
      if (!FirstNearlyEmptyVBase)
810
3
        FirstNearlyEmptyVBase = Base;
811
141
    }
812
496
813
358
    SelectPrimaryVBase(Base);
814
358
    if (PrimaryBase)
815
7
      return;
816
527
  }
817
672
}
818
819
/// DeterminePrimaryBase - Determine the primary base of the given class.
820
40.3k
void ItaniumRecordLayoutBuilder::DeterminePrimaryBase(const CXXRecordDecl *RD) {
821
40.3k
  // If the class isn't dynamic, it won't have a primary base.
822
40.3k
  if (!RD->isDynamicClass())
823
33.0k
    return;
824
7.23k
825
7.23k
  // Compute all the primary virtual bases for all of our direct and
826
7.23k
  // indirect bases, and record all their primary virtual base classes.
827
7.23k
  RD->getIndirectPrimaryBases(IndirectPrimaryBases);
828
7.23k
829
7.23k
  // If the record has a dynamic base class, attempt to choose a primary base
830
7.23k
  // class. It is the first (in direct base class order) non-virtual dynamic
831
7.23k
  // base class, if one exists.
832
5.38k
  for (const auto &I : RD->bases()) {
833
5.38k
    // Ignore virtual bases.
834
5.38k
    if (I.isVirtual())
835
360
      continue;
836
5.02k
837
5.02k
    const CXXRecordDecl *Base = I.getType()->getAsCXXRecordDecl();
838
5.02k
839
5.02k
    if (Base->isDynamicClass()) {
840
4.89k
      // We found it.
841
4.89k
      PrimaryBase = Base;
842
4.89k
      PrimaryBaseIsVirtual = false;
843
4.89k
      return;
844
4.89k
    }
845
2.34k
  }
846
2.34k
847
2.34k
  // Under the Itanium ABI, if there is no non-virtual primary base class,
848
2.34k
  // try to compute the primary virtual base.  The primary virtual base is
849
2.34k
  // the first nearly empty virtual base that is not an indirect primary
850
2.34k
  // virtual base class, if one exists.
851
2.34k
  
  if (RD->getNumVBases() != 0) {
852
314
    SelectPrimaryVBase(RD);
853
314
    if (PrimaryBase)
854
138
      return;
855
2.20k
  }
856
2.20k
857
2.20k
  // Otherwise, it is the first indirect primary base class, if one exists.
858
2.20k
  
  if (FirstNearlyEmptyVBase) {
859
2
    PrimaryBase = FirstNearlyEmptyVBase;
860
2
    PrimaryBaseIsVirtual = true;
861
2
    return;
862
2
  }
863
2.20k
864
2.20k
  assert(!PrimaryBase && "Should not get here with a primary base!");
865
2.20k
}
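A standalone illustration of what choosing a primary base buys (the names Base and Derived are made up for illustration), assuming the Itanium ABI on a 64-bit target: the derived class reuses the primary base's vtable pointer instead of adding its own.

struct Base    { virtual ~Base() = default; };
struct Derived : Base { virtual void extra() {} };

static_assert(sizeof(Base) == sizeof(void *), "one vtable pointer");
static_assert(sizeof(Derived) == sizeof(void *),
              "Base is the primary base, so its vptr slot is reused");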
866
867
BaseSubobjectInfo *ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo(
868
14.7k
    const CXXRecordDecl *RD, bool IsVirtual, BaseSubobjectInfo *Derived) {
869
14.7k
  BaseSubobjectInfo *Info;
870
14.7k
  
871
14.7k
  if (IsVirtual) {
872
813
    // Check if we already have info about this virtual base.
873
813
    BaseSubobjectInfo *&InfoSlot = VirtualBaseInfo[RD];
874
813
    if (InfoSlot) {
875
170
      assert(InfoSlot->Class == RD && "Wrong class for virtual base info!");
876
170
      return InfoSlot;
877
170
    }
878
643
879
643
    // We don't, create it.
880
643
    InfoSlot = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
881
643
    Info = InfoSlot;
882
14.7k
  } else {
883
13.8k
    Info = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
884
13.8k
  }
885
14.7k
  
886
14.5k
  Info->Class = RD;
887
14.5k
  Info->IsVirtual = IsVirtual;
888
14.5k
  Info->Derived = nullptr;
889
14.5k
  Info->PrimaryVirtualBaseInfo = nullptr;
890
14.5k
891
14.5k
  const CXXRecordDecl *PrimaryVirtualBase = nullptr;
892
14.5k
  BaseSubobjectInfo *PrimaryVirtualBaseInfo = nullptr;
893
14.5k
894
14.5k
  // Check if this base has a primary virtual base.
895
14.5k
  if (RD->getNumVBases()) {
896
400
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
897
400
    if (Layout.isPrimaryBaseVirtual()) {
898
123
      // This base does have a primary virtual base.
899
123
      PrimaryVirtualBase = Layout.getPrimaryBase();
900
123
      assert(PrimaryVirtualBase && "Didn't have a primary virtual base!");
901
123
      
902
123
      // Now check if we have base subobject info about this primary base.
903
123
      PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
904
123
      
905
123
      if (PrimaryVirtualBaseInfo) {
906
38
        if (PrimaryVirtualBaseInfo->Derived) {
907
19
          // We did have info about this primary base, and it turns out that it
908
19
          // has already been claimed as a primary virtual base for another
909
19
          // base.
910
19
          PrimaryVirtualBase = nullptr;
911
38
        } else {
912
19
          // We can claim this base as our primary base.
913
19
          Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
914
19
          PrimaryVirtualBaseInfo->Derived = Info;
915
19
        }
916
38
      }
917
123
    }
918
400
  }
919
14.5k
920
14.5k
  // Now go through all direct bases.
921
5.81k
  for (const auto &I : RD->bases()) {
922
5.81k
    bool IsVirtual = I.isVirtual();
923
5.81k
924
5.81k
    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
925
5.81k
926
5.81k
    Info->Bases.push_back(ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, Info));
927
5.81k
  }
928
14.5k
  
929
14.5k
  if (PrimaryVirtualBase && !PrimaryVirtualBaseInfo) {
930
85
    // Traversing the bases must have created the base info for our primary
931
85
    // virtual base.
932
85
    PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
933
85
    assert(PrimaryVirtualBaseInfo &&
934
85
           "Did not create a primary virtual base!");
935
85
      
936
85
    // Claim the primary virtual base as our primary virtual base.
937
85
    Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
938
85
    PrimaryVirtualBaseInfo->Derived = Info;
939
85
  }
940
14.5k
  
941
14.5k
  return Info;
942
14.7k
}
943
944
void ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo(
945
40.3k
    const CXXRecordDecl *RD) {
946
8.89k
  for (const auto &I : RD->bases()) {
947
8.89k
    bool IsVirtual = I.isVirtual();
948
8.89k
949
8.89k
    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
950
8.89k
951
8.89k
    // Compute the base subobject info for this base.
952
8.89k
    BaseSubobjectInfo *Info = ComputeBaseSubobjectInfo(BaseDecl, IsVirtual,
953
8.89k
                                                       nullptr);
954
8.89k
955
8.89k
    if (IsVirtual) {
956
392
      // ComputeBaseInfo has already added this base for us.
957
392
      assert(VirtualBaseInfo.count(BaseDecl) &&
958
392
             "Did not add virtual base!");
959
8.89k
    } else {
960
8.50k
      // Add the base info to the map of non-virtual bases.
961
8.50k
      assert(!NonVirtualBaseInfo.count(BaseDecl) &&
962
8.50k
             "Non-virtual base already exists!");
963
8.50k
      NonVirtualBaseInfo.insert(std::make_pair(BaseDecl, Info));
964
8.50k
    }
965
8.89k
  }
966
40.3k
}
967
968
void ItaniumRecordLayoutBuilder::EnsureVTablePointerAlignment(
969
2.20k
    CharUnits UnpackedBaseAlign) {
970
2.20k
  CharUnits BaseAlign = (Packed) ? CharUnits::One() : UnpackedBaseAlign;
971
2.20k
972
2.20k
  // The maximum field alignment overrides base align.
973
2.20k
  if (!MaxFieldAlignment.isZero()) {
974
3
    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
975
3
    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
976
3
  }
977
2.20k
978
2.20k
  // Round up the current record size to pointer alignment.
979
2.20k
  setSize(getSize().alignTo(BaseAlign));
980
2.20k
  setDataSize(getSize());
981
2.20k
982
2.20k
  // Update the alignment.
983
2.20k
  UpdateAlignment(BaseAlign, UnpackedBaseAlign);
984
2.20k
}
985
986
void ItaniumRecordLayoutBuilder::LayoutNonVirtualBases(
987
40.3k
    const CXXRecordDecl *RD) {
988
40.3k
  // Then, determine the primary base class.
989
40.3k
  DeterminePrimaryBase(RD);
990
40.3k
991
40.3k
  // Compute base subobject info.
992
40.3k
  ComputeBaseSubobjectInfo(RD);
993
40.3k
  
994
40.3k
  // If we have a primary base class, lay it out.
995
40.3k
  if (PrimaryBase) {
996
5.03k
    if (PrimaryBaseIsVirtual) {
997
140
      // If the primary virtual base was a primary virtual base of some other
998
140
      // base class we'll have to steal it.
999
140
      BaseSubobjectInfo *PrimaryBaseInfo = VirtualBaseInfo.lookup(PrimaryBase);
1000
140
      PrimaryBaseInfo->Derived = nullptr;
1001
140
1002
140
      // We have a virtual primary base, insert it as an indirect primary base.
1003
140
      IndirectPrimaryBases.insert(PrimaryBase);
1004
140
1005
140
      assert(!VisitedVirtualBases.count(PrimaryBase) &&
1006
140
             "vbase already visited!");
1007
140
      VisitedVirtualBases.insert(PrimaryBase);
1008
140
1009
140
      LayoutVirtualBase(PrimaryBaseInfo);
1010
5.03k
    } else {
1011
4.89k
      BaseSubobjectInfo *PrimaryBaseInfo = 
1012
4.89k
        NonVirtualBaseInfo.lookup(PrimaryBase);
1013
4.89k
      assert(PrimaryBaseInfo && 
1014
4.89k
             "Did not find base info for non-virtual primary base!");
1015
4.89k
1016
4.89k
      LayoutNonVirtualBase(PrimaryBaseInfo);
1017
4.89k
    }
1018
5.03k
1019
5.03k
  // If this class needs a vtable/vf-table and didn't get one from a
1020
5.03k
  // primary base, add it in now.
1021
40.3k
  } else if (RD->isDynamicClass()) {
1022
2.20k
    assert(DataSize == 0 && "Vtable pointer must be at offset zero!");
1023
2.20k
    CharUnits PtrWidth = 
1024
2.20k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
1025
2.20k
    CharUnits PtrAlign = 
1026
2.20k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
1027
2.20k
    EnsureVTablePointerAlignment(PtrAlign);
1028
2.20k
    HasOwnVFPtr = true;
1029
2.20k
    setSize(getSize() + PtrWidth);
1030
2.20k
    setDataSize(getSize());
1031
2.20k
  }
1032
40.3k
1033
40.3k
  // Now lay out the non-virtual bases.
1034
8.89k
  for (const auto &I : RD->bases()) {
1035
8.89k
1036
8.89k
    // Ignore virtual bases.
1037
8.89k
    if (I.isVirtual())
1038
392
      continue;
1039
8.50k
1040
8.50k
    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
1041
8.50k
1042
8.50k
    // Skip the primary base, because we've already laid it out.  The
1043
8.50k
    // !PrimaryBaseIsVirtual check is required because we might have a
1044
8.50k
    // non-virtual base of the same type as a primary virtual base.
1045
8.50k
    if (BaseDecl == PrimaryBase && !PrimaryBaseIsVirtual)
1046
4.89k
      continue;
1047
3.61k
1048
3.61k
    // Lay out the base.
1049
3.61k
    BaseSubobjectInfo *BaseInfo = NonVirtualBaseInfo.lookup(BaseDecl);
1050
3.61k
    assert(BaseInfo && "Did not find base info for non-virtual base!");
1051
3.61k
1052
3.61k
    LayoutNonVirtualBase(BaseInfo);
1053
3.61k
  }
1054
40.3k
}
1055
1056
void ItaniumRecordLayoutBuilder::LayoutNonVirtualBase(
1057
8.50k
    const BaseSubobjectInfo *Base) {
1058
8.50k
  // Layout the base.
1059
8.50k
  CharUnits Offset = LayoutBase(Base);
1060
8.50k
1061
8.50k
  // Add its base class offset.
1062
8.50k
  assert(!Bases.count(Base->Class) && "base offset already exists!");
1063
8.50k
  Bases.insert(std::make_pair(Base->Class, Offset));
1064
8.50k
1065
8.50k
  AddPrimaryVirtualBaseOffsets(Base, Offset);
1066
8.50k
}
1067
1068
void ItaniumRecordLayoutBuilder::AddPrimaryVirtualBaseOffsets(
1069
9.34k
    const BaseSubobjectInfo *Info, CharUnits Offset) {
1070
9.34k
  // This base isn't interesting, it has no virtual bases.
1071
9.34k
  if (!Info->Class->getNumVBases())
1072
8.94k
    return;
1073
400
  
1074
400
  // First, check if we have a virtual primary base to add offsets for.
1075
400
  
  if (Info->PrimaryVirtualBaseInfo) {
1076
104
    assert(Info->PrimaryVirtualBaseInfo->IsVirtual && 
1077
104
           "Primary virtual base is not virtual!");
1078
104
    if (Info->PrimaryVirtualBaseInfo->Derived == Info) {
1079
101
      // Add the offset.
1080
101
      assert(!VBases.count(Info->PrimaryVirtualBaseInfo->Class) && 
1081
101
             "primary vbase offset already exists!");
1082
101
      VBases.insert(std::make_pair(Info->PrimaryVirtualBaseInfo->Class,
1083
101
                                   ASTRecordLayout::VBaseInfo(Offset, false)));
1084
101
1085
101
      // Traverse the primary virtual base.
1086
101
      AddPrimaryVirtualBaseOffsets(Info->PrimaryVirtualBaseInfo, Offset);
1087
101
    }
1088
104
  }
1089
400
1090
400
  // Now go through all direct non-virtual bases.
1091
400
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
1092
624
  for (const BaseSubobjectInfo *Base : Info->Bases) {
1093
624
    if (Base->IsVirtual)
1094
421
      continue;
1095
203
1096
203
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
1097
203
    AddPrimaryVirtualBaseOffsets(Base, BaseOffset);
1098
203
  }
1099
9.34k
}
1100
1101
void ItaniumRecordLayoutBuilder::LayoutVirtualBases(
1102
40.7k
    const CXXRecordDecl *RD, const CXXRecordDecl *MostDerivedClass) {
1103
40.7k
  const CXXRecordDecl *PrimaryBase;
1104
40.7k
  bool PrimaryBaseIsVirtual;
1105
40.7k
1106
40.7k
  if (MostDerivedClass == RD) {
1107
40.3k
    PrimaryBase = this->PrimaryBase;
1108
40.3k
    PrimaryBaseIsVirtual = this->PrimaryBaseIsVirtual;
1109
40.7k
  } else {
1110
404
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
1111
404
    PrimaryBase = Layout.getPrimaryBase();
1112
404
    PrimaryBaseIsVirtual = Layout.isPrimaryBaseVirtual();
1113
404
  }
1114
40.7k
1115
9.52k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
1116
9.52k
    assert(!Base.getType()->isDependentType() &&
1117
9.52k
           "Cannot layout class with dependent bases.");
1118
9.52k
1119
9.52k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
1120
9.52k
1121
9.52k
    if (Base.isVirtual()) {
1122
817
      if (PrimaryBase != BaseDecl || !PrimaryBaseIsVirtual) {
1123
566
        bool IndirectPrimaryBase = IndirectPrimaryBases.count(BaseDecl);
1124
566
1125
566
        // Only lay out the virtual base if it's not an indirect primary base.
1126
566
        if (!IndirectPrimaryBase) {
1127
535
          // Only visit virtual bases once.
1128
535
          if (!VisitedVirtualBases.insert(BaseDecl).second)
1129
133
            continue;
1130
402
1131
402
          const BaseSubobjectInfo *BaseInfo = VirtualBaseInfo.lookup(BaseDecl);
1132
402
          assert(BaseInfo && "Did not find virtual base info!");
1133
402
          LayoutVirtualBase(BaseInfo);
1134
402
        }
1135
566
      }
1136
817
    }
1137
9.52k
1138
9.38k
    
    if (!BaseDecl->getNumVBases()) {
1139
8.98k
      // This base isn't interesting since it doesn't have any virtual bases.
1140
8.98k
      continue;
1141
8.98k
    }
1142
404
1143
404
    LayoutVirtualBases(BaseDecl, MostDerivedClass);
1144
404
  }
1145
40.7k
}
1146
1147
void ItaniumRecordLayoutBuilder::LayoutVirtualBase(
1148
542
    const BaseSubobjectInfo *Base) {
1149
542
  assert(!Base->Derived && "Trying to lay out a primary virtual base!");
1150
542
  
1151
542
  // Layout the base.
1152
542
  CharUnits Offset = LayoutBase(Base);
1153
542
1154
542
  // Add its base class offset.
1155
542
  assert(!VBases.count(Base->Class) && "vbase offset already exists!");
1156
542
  VBases.insert(std::make_pair(Base->Class, 
1157
542
                       ASTRecordLayout::VBaseInfo(Offset, false)));
1158
542
1159
542
  AddPrimaryVirtualBaseOffsets(Base, Offset);
1160
542
}
1161
1162
CharUnits
1163
9.04k
ItaniumRecordLayoutBuilder::LayoutBase(const BaseSubobjectInfo *Base) {
1164
9.04k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Base->Class);
1165
9.04k
1166
9.04k
  
1167
9.04k
  CharUnits Offset;
1168
9.04k
  
1169
9.04k
  // Query the external layout to see if it provides an offset.
1170
9.04k
  bool HasExternalLayout = false;
1171
9.04k
  if (UseExternalLayout) {
1172
8
    llvm::DenseMap<const CXXRecordDecl *, CharUnits>::iterator Known;
1173
8
    if (Base->IsVirtual)
1174
1
      HasExternalLayout = External.getExternalNVBaseOffset(Base->Class, Offset);
1175
8
    else
1176
7
      HasExternalLayout = External.getExternalVBaseOffset(Base->Class, Offset);
1177
8
  }
1178
9.04k
  
1179
9.04k
  CharUnits UnpackedBaseAlign = Layout.getNonVirtualAlignment();
1180
9.04k
  CharUnits BaseAlign = (Packed) ? CharUnits::One() : UnpackedBaseAlign;
1181
9.04k
 
1182
9.04k
  // If we have an empty base class, try to place it at offset 0.
1183
9.04k
  if (Base->Class->isEmpty() &&
1184
683
      
      (!HasExternalLayout || Offset == CharUnits::Zero()) &&
1185
9.04k
      
      EmptySubobjects->CanPlaceBaseAtOffset(Base, CharUnits::Zero())) {
1186
629
    setSize(std::max(getSize(), Layout.getSize()));
1187
629
    UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1188
629
1189
629
    return CharUnits::Zero();
1190
629
  }
1191
8.41k
1192
8.41k
  // The maximum field alignment overrides base align.
1193
8.41k
  
  if (!MaxFieldAlignment.isZero()) {
1194
7
    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
1195
7
    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
1196
7
  }
1197
8.41k
1198
8.41k
  if (!HasExternalLayout) {
1199
8.41k
    // Round up the current record size to the base's alignment boundary.
1200
8.41k
    Offset = getDataSize().alignTo(BaseAlign);
1201
8.41k
1202
8.41k
    // Try to place the base.
1203
8.44k
    while (!EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset))
1204
26
      Offset += BaseAlign;
1205
0
  } else {
1206
0
    bool Allowed = EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset);
1207
0
    (void)Allowed;
1208
0
    assert(Allowed && "Base subobject externally placed at overlapping offset");
1209
0
1210
0
    if (InferAlignment && Offset < getDataSize().alignTo(BaseAlign)) {
1211
0
      // The externally-supplied base offset is before the base offset we
1212
0
      // computed. Assume that the structure is packed.
1213
0
      Alignment = CharUnits::One();
1214
0
      InferAlignment = false;
1215
0
    }
1216
0
  }
1217
8.41k
  
1218
8.41k
  if (!Base->Class->isEmpty()) {
1219
8.36k
    // Update the data size.
1220
8.36k
    setDataSize(Offset + Layout.getNonVirtualSize());
1221
8.36k
1222
8.36k
    setSize(std::max(getSize(), getDataSize()));
1223
8.36k
  } else
1224
54
    setSize(std::max(getSize(), Offset + Layout.getSize()));
1225
9.04k
1226
9.04k
  // Remember max struct/class alignment.
1227
9.04k
  UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1228
9.04k
1229
9.04k
  return Offset;
1230
9.04k
}
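Two standalone examples of the placement logic above (all class names here are made up for illustration), assuming the Itanium ABI on a typical 64-bit target with a 4-byte int: a non-empty base is placed at the current data size rounded up to its alignment, and a conflicting empty base is bumped forward by whole alignment units.

struct A { char a; };
struct B { int  b; };
struct D : A, B { };                 // B is placed at align-up(1, 4) == 4
static_assert(sizeof(D) == 8, "offset 4 for B plus its 4 bytes");

struct Pad { };
struct P1 : Pad { };
struct P2 : Pad { };
struct Both : P1, P2 { };            // P2's Pad cannot share offset 0 with P1's
static_assert(sizeof(Both) == 2, "the second empty base is bumped to offset 1");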
1231
1232
109k
void ItaniumRecordLayoutBuilder::InitializeLayout(const Decl *D) {
1233
109k
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
1234
109k
    IsUnion = RD->isUnion();
1235
109k
    IsMsStruct = RD->isMsStruct(Context);
1236
109k
  }
1237
109k
1238
109k
  Packed = D->hasAttr<PackedAttr>();  
1239
109k
1240
109k
  // Honor the default struct packing maximum alignment flag.
1241
109k
  if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct) {
1242
2
    MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
1243
2
  }
1244
109k
1245
109k
  // mac68k alignment supersedes maximum field alignment and attribute aligned,
1246
109k
  // and forces all structures to have 2-byte alignment. The IBM docs on it
1247
109k
  // allude to additional (more complicated) semantics, especially with regard
1248
109k
  // to bit-fields, but gcc appears not to follow that.
1249
109k
  if (D->hasAttr<AlignMac68kAttr>()) {
1250
12
    IsMac68kAlign = true;
1251
12
    MaxFieldAlignment = CharUnits::fromQuantity(2);
1252
12
    Alignment = CharUnits::fromQuantity(2);
1253
109k
  } else {
1254
109k
    if (const MaxFieldAlignmentAttr *MFAA = D->getAttr<MaxFieldAlignmentAttr>())
1255
1.56k
      MaxFieldAlignment = Context.toCharUnitsFromBits(MFAA->getAlignment());
1256
109k
1257
109k
    if (unsigned MaxAlign = D->getMaxAlignment())
1258
352
      UpdateAlignment(Context.toCharUnitsFromBits(MaxAlign));
1259
109k
  }
1260
109k
  
1261
109k
  // If there is an external AST source, ask it for the various offsets.
1262
109k
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D))
1263
109k
    
    if (ExternalASTSource *Source = Context.getExternalSource()) {
1264
1.75k
      UseExternalLayout = Source->layoutRecordType(
1265
1.75k
          RD, External.Size, External.Align, External.FieldOffsets,
1266
1.75k
          External.BaseOffsets, External.VirtualBaseOffsets);
1267
1.75k
1268
1.75k
      // Update based on external alignment.
1269
1.75k
      if (UseExternalLayout) {
1270
25
        if (External.Align > 0) {
1271
25
          Alignment = Context.toCharUnitsFromBits(External.Align);
1272
25
        } else {
1273
0
          // The external source didn't have alignment information; infer it.
1274
0
          InferAlignment = true;
1275
0
        }
1276
25
      }
1277
109k
    }
1278
109k
}
1279
1280
68.7k
void ItaniumRecordLayoutBuilder::Layout(const RecordDecl *D) {
1281
68.7k
  InitializeLayout(D);
1282
68.7k
  LayoutFields(D);
1283
68.7k
1284
68.7k
  // Finally, round the size of the total struct up to the alignment of the
1285
68.7k
  // struct itself.
1286
68.7k
  FinishLayout(D);
1287
68.7k
}
1288
1289
40.3k
void ItaniumRecordLayoutBuilder::Layout(const CXXRecordDecl *RD) {
1290
40.3k
  InitializeLayout(RD);
1291
40.3k
1292
40.3k
  // Lay out the vtable and the non-virtual bases.
1293
40.3k
  LayoutNonVirtualBases(RD);
1294
40.3k
1295
40.3k
  LayoutFields(RD);
1296
40.3k
1297
40.3k
  NonVirtualSize = Context.toCharUnitsFromBits(
1298
40.3k
      llvm::alignTo(getSizeInBits(), Context.getTargetInfo().getCharAlign()));
1299
40.3k
  NonVirtualAlignment = Alignment;
1300
40.3k
1301
40.3k
  // Lay out the virtual bases and add the primary virtual base offsets.
1302
40.3k
  LayoutVirtualBases(RD, RD);
1303
40.3k
1304
40.3k
  // Finally, round the size of the total struct up to the alignment
1305
40.3k
  // of the struct itself.
1306
40.3k
  FinishLayout(RD);
1307
40.3k
1308
#ifndef NDEBUG
1309
  // Check that we have base offsets for all bases.
1310
  for (const CXXBaseSpecifier &Base : RD->bases()) {
1311
    if (Base.isVirtual())
1312
      continue;
1313
1314
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
1315
1316
    assert(Bases.count(BaseDecl) && "Did not find base offset!");
1317
  }
1318
1319
  // And all virtual bases.
1320
  for (const CXXBaseSpecifier &Base : RD->vbases()) {
1321
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
1322
1323
    assert(VBases.count(BaseDecl) && "Did not find base offset!");
1324
  }
1325
#endif
1326
}
1327
1328
902
void ItaniumRecordLayoutBuilder::Layout(const ObjCInterfaceDecl *D) {
1329
902
  if (ObjCInterfaceDecl *SD = D->getSuperClass()) {
1330
323
    const ASTRecordLayout &SL = Context.getASTObjCInterfaceLayout(SD);
1331
323
1332
323
    UpdateAlignment(SL.getAlignment());
1333
323
1334
323
    // We start laying out ivars not at the end of the superclass
1335
323
    // structure, but at the next byte following the last field.
1336
323
    setSize(SL.getDataSize());
1337
323
    setDataSize(getSize());
1338
323
  }
1339
902
1340
902
  InitializeLayout(D);
1341
902
  // Layout each ivar sequentially.
1342
1.99k
  for (const ObjCIvarDecl *IVD = D->all_declared_ivar_begin(); IVD;
1343
1.09k
       IVD = IVD->getNextIvar())
1344
1.09k
    LayoutField(IVD, false);
1345
902
1346
902
  // Finally, round the size of the total struct up to the alignment of the
1347
902
  // struct itself.
1348
902
  FinishLayout(D);
1349
902
}
1350
1351
109k
void ItaniumRecordLayoutBuilder::LayoutFields(const RecordDecl *D) {
1352
109k
  // Layout each field, for now, just sequentially, respecting alignment.  In
1353
109k
  // the future, this will need to be tweakable by targets.
1354
109k
  bool InsertExtraPadding = D->mayInsertExtraPadding(/*EmitRemark=*/true);
1355
109k
  bool HasFlexibleArrayMember = D->hasFlexibleArrayMember();
1356
594k
  for (auto I = D->field_begin(), End = D->field_end(); I != End; ++I) {
1357
485k
    auto Next(I);
1358
485k
    ++Next;
1359
485k
    LayoutField(*I,
1360
42
                InsertExtraPadding && (Next != End || !HasFlexibleArrayMember));
1361
485k
  }
1362
109k
}
1363
1364
// Rounds the specified size up to a multiple of the char size.
1365
static uint64_t
1366
roundUpSizeToCharAlignment(uint64_t Size,
1367
45
                           const ASTContext &Context) {
1368
45
  uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
1369
45
  return llvm::alignTo(Size, CharAlignment);
1370
45
}
1371
1372
void ItaniumRecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize,
1373
                                                    uint64_t TypeSize,
1374
                                                    bool FieldPacked,
1375
13
                                                    const FieldDecl *D) {
1376
13
  assert(Context.getLangOpts().CPlusPlus &&
1377
13
         "Can only have wide bit-fields in C++!");
1378
13
1379
13
  // Itanium C++ ABI 2.4:
1380
13
  //   If sizeof(T)*8 < n, let T' be the largest integral POD type with
1381
13
  //   sizeof(T')*8 <= n.
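  // Illustrative example (assumption, typical LP64 target): for a
  // declaration like 'long long w : 80', n is 80, so the largest integral
  // POD type with sizeof(T')*8 <= 80 is 'unsigned long long' and the field
  // is allocated at the next 64-bit-aligned offset.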
1382
13
1383
13
  QualType IntegralPODTypes[] = {
1384
13
    Context.UnsignedCharTy, Context.UnsignedShortTy, Context.UnsignedIntTy,
1385
13
    Context.UnsignedLongTy, Context.UnsignedLongLongTy
1386
13
  };
1387
13
1388
13
  QualType Type;
1389
52
  for (const QualType &QT : IntegralPODTypes) {
1390
52
    uint64_t Size = Context.getTypeSize(QT);
1391
52
1392
52
    if (Size > FieldSize)
1393
8
      break;
1394
44
1395
44
    Type = QT;
1396
44
  }
1397
13
  assert(!Type.isNull() && "Did not find a type!");
1398
13
1399
13
  CharUnits TypeAlign = Context.getTypeAlignInChars(Type);
1400
13
1401
13
  // We're not going to use any of the unfilled bits in the last byte.
1402
13
  UnfilledBitsInLastUnit = 0;
1403
13
  LastBitfieldTypeSize = 0;
1404
13
1405
13
  uint64_t FieldOffset;
1406
13
  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
1407
13
1408
13
  if (IsUnion) {
1409
3
    uint64_t RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize,
1410
3
                                                           Context);
1411
3
    setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
1412
3
    FieldOffset = 0;
1413
13
  } else {
1414
10
    // The bitfield is allocated starting at the next offset aligned 
1415
10
    // appropriately for T', with length n bits.
1416
10
    FieldOffset = llvm::alignTo(getDataSizeInBits(), Context.toBits(TypeAlign));
1417
10
1418
10
    uint64_t NewSizeInBits = FieldOffset + FieldSize;
1419
10
1420
10
    setDataSize(
1421
10
        llvm::alignTo(NewSizeInBits, Context.getTargetInfo().getCharAlign()));
1422
10
    UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
1423
10
  }
1424
13
1425
13
  // Place this field at the current location.
1426
13
  FieldOffsets.push_back(FieldOffset);
1427
13
1428
13
  CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, FieldOffset,
1429
13
                    Context.toBits(TypeAlign), FieldPacked, D);
1430
13
1431
13
  // Update the size.
1432
13
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1433
13
1434
13
  // Remember max struct/class alignment.
1435
13
  UpdateAlignment(TypeAlign);
1436
13
}
1437
1438
11.7k
void ItaniumRecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
1439
11.4k
  bool FieldPacked = Packed || D->hasAttr<PackedAttr>();
1440
11.7k
  uint64_t FieldSize = D->getBitWidthValue(Context);
1441
11.7k
  TypeInfo FieldInfo = Context.getTypeInfo(D->getType());
1442
11.7k
  uint64_t TypeSize = FieldInfo.Width;
1443
11.7k
  unsigned FieldAlign = FieldInfo.Align;
1444
11.7k
1445
11.7k
  // UnfilledBitsInLastUnit is the difference between the end of the
1446
11.7k
  // last allocated bitfield (i.e. the first bit offset available for
1447
11.7k
  // bitfields) and the end of the current data size in bits (i.e. the
1448
11.7k
  // first bit offset available for non-bitfields).  The current data
1449
11.7k
  // size in bits is always a multiple of the char size; additionally,
1450
11.7k
  // for ms_struct records it's also a multiple of the
1451
11.7k
  // LastBitfieldTypeSize (if set).
1452
11.7k
1453
11.7k
  // The struct-layout algorithm is dictated by the platform ABI,
1454
11.7k
  // which in principle could use almost any rules it likes.  In
1455
11.7k
  // practice, UNIXy targets tend to inherit the algorithm described
1456
11.7k
  // in the System V generic ABI.  The basic bitfield layout rule in
1457
11.7k
  // System V is to place bitfields at the next available bit offset
1458
11.7k
  // where the entire bitfield would fit in an aligned storage unit of
1459
11.7k
  // the declared type; it's okay if an earlier or later non-bitfield
1460
11.7k
  // is allocated in the same storage unit.  However, some targets
1461
11.7k
  // (those that !useBitFieldTypeAlignment(), e.g. ARM APCS) don't
1462
11.7k
  // require this storage unit to be aligned, and therefore always put
1463
11.7k
  // the bitfield at the next available bit offset.
1464
11.7k
1465
11.7k
  // ms_struct basically requests a complete replacement of the
1466
11.7k
  // platform ABI's struct-layout algorithm, with the high-level goal
1467
11.7k
  // of duplicating MSVC's layout.  For non-bitfields, this follows
1468
11.7k
  // the standard algorithm.  The basic bitfield layout rule is to
1469
11.7k
  // allocate an entire unit of the bitfield's declared type
1470
11.7k
  // (e.g. 'unsigned long'), then parcel it up among successive
1471
11.7k
  // bitfields whose declared types have the same size, making a new
1472
11.7k
  // unit as soon as the last can no longer store the whole value.
1473
11.7k
  // Since it completely replaces the platform ABI's algorithm,
1474
11.7k
  // settings like !useBitFieldTypeAlignment() do not apply.
1475
11.7k
1476
11.7k
  // A zero-width bitfield forces the use of a new storage unit for
1477
11.7k
  // later bitfields.  In general, this occurs by rounding up the
1478
11.7k
  // current size of the struct as if the algorithm were about to
1479
11.7k
  // place a non-bitfield of the field's formal type.  Usually this
1480
11.7k
  // does not change the alignment of the struct itself, but it does
1481
11.7k
  // on some targets (those that useZeroLengthBitfieldAlignment(),
1482
11.7k
  // e.g. ARM).  In ms_struct layout, zero-width bitfields are
1483
11.7k
  // ignored unless they follow a non-zero-width bitfield.
1484
11.7k
1485
11.7k
  // A field alignment restriction (e.g. from #pragma pack) or
1486
11.7k
  // specification (e.g. from __attribute__((aligned))) changes the
1487
11.7k
  // formal alignment of the field.  For System V, this alters the
1488
11.7k
  // required alignment of the notional storage unit that must contain
1489
11.7k
  // the bitfield.  For ms_struct, this only affects the placement of
1490
11.7k
  // new storage units.  In both cases, the effect of #pragma pack is
1491
11.7k
  // ignored on zero-width bitfields.
1492
11.7k
1493
11.7k
  // On System V, a packed field (e.g. from #pragma pack or
1494
11.7k
  // __attribute__((packed))) always uses the next available bit
1495
11.7k
  // offset.
1496
11.7k
1497
11.7k
  // In an ms_struct struct, the alignment of a fundamental type is
1498
11.7k
  // always equal to its size.  This is necessary in order to mimic
1499
11.7k
  // the i386 alignment rules on targets which might not fully align
1500
11.7k
  // all types (e.g. Darwin PPC32, where alignof(long long) == 4).
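  // A minimal worked example of the System V rule above (an illustrative
  // assumption, not part of this file):
  //   struct S { char c; int b : 10; };
  // 'c' occupies bits [0,8) and 'b' is placed at bit offset 8, since bits
  // [8,18) fit entirely within the int-aligned 32-bit storage unit that
  // starts at offset 0; no padding is inserted and sizeof(S) == sizeof(int).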
1501
11.7k
1502
11.7k
  // First, some simple bookkeeping to perform for ms_struct structs.
1503
11.7k
  if (IsMsStruct) {
1504
286
    // The field alignment for integer types is always the size.
1505
286
    FieldAlign = TypeSize;
1506
286
1507
286
    // If the previous field was not a bitfield, or was a bitfield
1508
286
    // with a different storage unit size, we're done with that
1509
286
    // storage unit.
1510
286
    if (LastBitfieldTypeSize != TypeSize) {
1511
245
      // Also, ignore zero-length bitfields after non-bitfields.
1512
245
      if (!LastBitfieldTypeSize && !FieldSize)
1513
91
        FieldAlign = 1;
1514
245
1515
245
      UnfilledBitsInLastUnit = 0;
1516
245
      LastBitfieldTypeSize = 0;
1517
245
    }
1518
286
  }
1519
11.7k
1520
11.7k
  // If the field is wider than its declared type, it follows
1521
11.7k
  // different rules in all cases.
1522
11.7k
  if (FieldSize > TypeSize) {
1523
13
    LayoutWideBitField(FieldSize, TypeSize, FieldPacked, D);
1524
13
    return;
1525
13
  }
1526
11.7k
1527
11.7k
  // Compute the next available bit offset.
1528
11.7k
  uint64_t FieldOffset =
1529
11.7k
    IsUnion ? 0 : (getDataSizeInBits() - UnfilledBitsInLastUnit);
1530
11.7k
1531
11.7k
  // Handle targets that don't honor bitfield type alignment.
1532
11.7k
  if (!IsMsStruct && !Context.getTargetInfo().useBitFieldTypeAlignment()) {
1533
143
    // Some such targets do honor it on zero-width bitfields.
1534
143
    if (FieldSize == 0 &&
1535
143
        Context.getTargetInfo().useZeroLengthBitfieldAlignment()) {
1536
44
      // The alignment to round up to is the max of the field's natural
1537
44
      // alignment and a target-specific fixed value (sometimes zero).
1538
44
      unsigned ZeroLengthBitfieldBoundary =
1539
44
        Context.getTargetInfo().getZeroLengthBitfieldBoundary();
1540
44
      FieldAlign = std::max(FieldAlign, ZeroLengthBitfieldBoundary);
1541
44
1542
44
    // If that doesn't apply, just ignore the field alignment.
1543
143
    } else {
1544
99
      FieldAlign = 1;
1545
99
    }
1546
143
  }
1547
11.7k
1548
11.7k
  // Remember the alignment we would have used if the field were not packed.
1549
11.7k
  unsigned UnpackedFieldAlign = FieldAlign;
1550
11.7k
1551
11.7k
  // Ignore the field alignment if the field is packed unless it has zero-size.
1552
11.7k
  if (!IsMsStruct && FieldPacked && FieldSize != 0)
1553
370
    FieldAlign = 1;
1554
11.7k
1555
11.7k
  // But, if there's an 'aligned' attribute on the field, honor that.
1556
11.7k
  unsigned ExplicitFieldAlign = D->getMaxAlignment();
1557
11.7k
  if (ExplicitFieldAlign) {
1558
153
    FieldAlign = std::max(FieldAlign, ExplicitFieldAlign);
1559
153
    UnpackedFieldAlign = std::max(UnpackedFieldAlign, ExplicitFieldAlign);
1560
153
  }
1561
11.7k
1562
11.7k
  // But, if there's a #pragma pack in play, that takes precedence over
1563
11.7k
  // even the 'aligned' attribute, for non-zero-width bitfields.
1564
11.7k
  unsigned MaxFieldAlignmentInBits = Context.toBits(MaxFieldAlignment);
1565
11.7k
  if (!MaxFieldAlignment.isZero() && FieldSize) {
1566
636
    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignmentInBits);
1567
636
    if (FieldPacked)
1568
34
      FieldAlign = UnpackedFieldAlign;
1569
636
    else
1570
602
      FieldAlign = std::min(FieldAlign, MaxFieldAlignmentInBits);
1571
636
  }
1572
11.7k
1573
11.7k
  // But, ms_struct just ignores all of that in unions, even explicit
1574
11.7k
  // alignment attributes.
1575
11.7k
  if (IsMsStruct && IsUnion) {
1576
8
    FieldAlign = UnpackedFieldAlign = 1;
1577
8
  }
1578
11.7k
1579
11.7k
  // For purposes of diagnostics, we're going to simultaneously
1580
11.7k
  // compute the field offsets that we would have used if we weren't
1581
11.7k
  // adding any alignment padding or if the field weren't packed.
1582
11.7k
  uint64_t UnpaddedFieldOffset = FieldOffset;
1583
11.7k
  uint64_t UnpackedFieldOffset = FieldOffset;
1584
11.7k
1585
11.7k
  // Check if we need to add padding to fit the bitfield within an
1586
11.7k
  // allocation unit with the right size and alignment.  The rules are
1587
11.7k
  // somewhat different here for ms_struct structs.
1588
11.7k
  if (IsMsStruct) {
1589
286
    // If it's not a zero-width bitfield, and we can fit the bitfield
1590
286
    // into the active storage unit (and we haven't already decided to
1591
286
    // start a new storage unit), just do so, regardless of any other
1592
286
    // consideration.  Otherwise, round up to the right alignment.
1593
286
    if (FieldSize == 0 || FieldSize > UnfilledBitsInLastUnit) {
1594
264
      FieldOffset = llvm::alignTo(FieldOffset, FieldAlign);
1595
264
      UnpackedFieldOffset =
1596
264
          llvm::alignTo(UnpackedFieldOffset, UnpackedFieldAlign);
1597
264
      UnfilledBitsInLastUnit = 0;
1598
264
    }
1599
286
1600
11.7k
  } else {
1601
11.4k
    // #pragma pack, with any value, suppresses the insertion of padding.
1602
11.4k
    bool AllowPadding = MaxFieldAlignment.isZero();
1603
11.4k
1604
11.4k
    // Compute the real offset.
1605
11.4k
    if (FieldSize == 0 || 
1606
11.1k
        (AllowPadding &&
1607
11.4k
         (FieldOffset & (FieldAlign-1)) + FieldSize > TypeSize)) {
1608
365
      FieldOffset = llvm::alignTo(FieldOffset, FieldAlign);
1609
11.4k
    } else if (ExplicitFieldAlign &&
1610
124
               (MaxFieldAlignmentInBits == 0 ||
1611
124
                ExplicitFieldAlign <= MaxFieldAlignmentInBits) &&
1612
11.1k
               Context.getTargetInfo().useExplicitBitFieldAlignment()) {
1613
96
      // TODO: figure out what needs to be done on targets that don't honor
1614
96
      // bit-field type alignment like ARM APCS ABI.
1615
96
      FieldOffset = llvm::alignTo(FieldOffset, ExplicitFieldAlign);
1616
96
    }
1617
11.4k
1618
11.4k
    // Repeat the computation for diagnostic purposes.
1619
11.4k
    if (FieldSize == 0 ||
1620
11.1k
        (AllowPadding &&
1621
10.6k
         (UnpackedFieldOffset & (UnpackedFieldAlign-1)) + FieldSize > TypeSize))
1622
416
      UnpackedFieldOffset =
1623
416
          llvm::alignTo(UnpackedFieldOffset, UnpackedFieldAlign);
1624
11.0k
    else if (ExplicitFieldAlign &&
1625
119
             (MaxFieldAlignmentInBits == 0 ||
1626
119
              ExplicitFieldAlign <= MaxFieldAlignmentInBits) &&
1627
111
             Context.getTargetInfo().useExplicitBitFieldAlignment())
1628
92
      UnpackedFieldOffset =
1629
92
          llvm::alignTo(UnpackedFieldOffset, ExplicitFieldAlign);
1630
11.4k
  }
1631
11.7k
1632
11.7k
  // If we're using external layout, give the external layout a chance
1633
11.7k
  // to override this information.
1634
11.7k
  if (UseExternalLayout)
1635
22
    FieldOffset = updateExternalFieldOffset(D, FieldOffset);
1636
11.7k
1637
11.7k
  // Okay, place the bitfield at the calculated offset.
1638
11.7k
  FieldOffsets.push_back(FieldOffset);
1639
11.7k
1640
11.7k
  // Bookkeeping:
1641
11.7k
1642
11.7k
  // Anonymous members don't affect the overall record alignment,
1643
11.7k
  // except on targets where they do.
1644
11.7k
  if (!IsMsStruct &&
1645
11.4k
      !Context.getTargetInfo().useZeroLengthBitfieldAlignment() &&
1646
2.15k
      !D->getIdentifier())
1647
334
    FieldAlign = UnpackedFieldAlign = 1;
1648
11.7k
1649
11.7k
  // Diagnose differences in layout due to padding or packing.
1650
11.7k
  if (!UseExternalLayout)
1651
11.7k
    CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, UnpackedFieldOffset,
1652
11.7k
                      UnpackedFieldAlign, FieldPacked, D);
1653
11.7k
1654
11.7k
  // Update DataSize to include the last byte containing (part of) the bitfield.
1655
11.7k
1656
11.7k
  // For unions, this is just a max operation, as usual.
1657
11.7k
  if (IsUnion) {
1658
50
    // For ms_struct, allocate the entire storage unit --- unless this
1659
50
    // is a zero-width bitfield, in which case just use a size of 1.
1660
50
    uint64_t RoundedFieldSize;
1661
50
    if (IsMsStruct) {
1662
8
      RoundedFieldSize =
1663
8
        (FieldSize ? TypeSize : Context.getTargetInfo().getCharWidth());
1664
8
1665
8
    // Otherwise, allocate just the number of bytes required to store
1666
8
    // the bitfield.
1667
50
    } else {
1668
42
      RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize, Context);
1669
42
    }
1670
50
    setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
1671
50
1672
50
  // For non-zero-width bitfields in ms_struct structs, allocate a new
1673
50
  // storage unit if necessary.
1674
11.7k
  } else if (IsMsStruct && FieldSize) {
1675
175
    // We should have cleared UnfilledBitsInLastUnit in every case
1676
175
    // where we changed storage units.
1677
175
    if (!UnfilledBitsInLastUnit) {
1678
153
      setDataSize(FieldOffset + TypeSize);
1679
153
      UnfilledBitsInLastUnit = TypeSize;
1680
153
    }
1681
175
    UnfilledBitsInLastUnit -= FieldSize;
1682
175
    LastBitfieldTypeSize = TypeSize;
1683
175
1684
175
  // Otherwise, bump the data size up to include the bitfield,
1685
175
  // including padding up to char alignment, and then remember how
1686
175
  // many bits we didn't use.
1687
11.7k
  } else {
1688
11.5k
    uint64_t NewSizeInBits = FieldOffset + FieldSize;
1689
11.5k
    uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
1690
11.5k
    setDataSize(llvm::alignTo(NewSizeInBits, CharAlignment));
1691
11.5k
    UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
1692
11.5k
1693
11.5k
    // The only time we can get here for an ms_struct is if this is a
1694
11.5k
    // zero-width bitfield, which doesn't count as anything for the
1695
11.5k
    // purposes of unfilled bits.
1696
11.5k
    LastBitfieldTypeSize = 0;
1697
11.5k
  }
1698
11.7k
1699
11.7k
  // Update the size.
1700
11.7k
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1701
11.7k
1702
11.7k
  // Remember max struct/class alignment.
1703
11.7k
  UpdateAlignment(Context.toCharUnitsFromBits(FieldAlign), 
1704
11.7k
                  Context.toCharUnitsFromBits(UnpackedFieldAlign));
1705
11.7k
}
1706
1707
void ItaniumRecordLayoutBuilder::LayoutField(const FieldDecl *D,
1708
487k
                                             bool InsertExtraPadding) {
1709
487k
  if (D->isBitField()) {
1710
11.7k
    LayoutBitField(D);
1711
11.7k
    return;
1712
11.7k
  }
1713
475k
1714
475k
  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
1715
475k
1716
475k
  // Reset the unfilled bits.
1717
475k
  UnfilledBitsInLastUnit = 0;
1718
475k
  LastBitfieldTypeSize = 0;
1719
475k
1720
474k
  bool FieldPacked = Packed || D->hasAttr<PackedAttr>();
1721
475k
  CharUnits FieldOffset = 
1722
475k
    IsUnion ? CharUnits::Zero() : getDataSize();
1723
475k
  CharUnits FieldSize;
1724
475k
  CharUnits FieldAlign;
1725
475k
1726
475k
  if (D->getType()->isIncompleteArrayType()) {
1727
76
    // This is a flexible array member; we can't directly
1728
76
    // query getTypeInfo about these, so we figure it out here.
1729
76
    // Flexible array members don't have any size, but they
1730
76
    // have to be aligned appropriately for their element type.
1731
76
    FieldSize = CharUnits::Zero();
1732
76
    const ArrayType* ATy = Context.getAsArrayType(D->getType());
1733
76
    FieldAlign = Context.getTypeAlignInChars(ATy->getElementType());
1734
475k
  } else if (const ReferenceType *RT = D->getType()->getAs<ReferenceType>()) {
1735
915
    unsigned AS = RT->getPointeeType().getAddressSpace();
1736
915
    FieldSize = 
1737
915
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(AS));
1738
915
    FieldAlign = 
1739
915
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(AS));
1740
475k
  } else {
1741
474k
    std::pair<CharUnits, CharUnits> FieldInfo = 
1742
474k
      Context.getTypeInfoInChars(D->getType());
1743
474k
    FieldSize = FieldInfo.first;
1744
474k
    FieldAlign = FieldInfo.second;
1745
474k
1746
474k
    if (IsMsStruct) {
1747
155
      // If MS bitfield layout is required, figure out what type is being
1748
155
      // laid out and align the field to the width of that type.
1749
155
      
1750
155
      // Resolve all typedefs down to their base type and round up the field
1751
155
      // alignment if necessary.
1752
155
      QualType T = Context.getBaseElementType(D->getType());
1753
155
      if (const BuiltinType *BTy = T->getAs<BuiltinType>()) {
1754
149
        CharUnits TypeSize = Context.getTypeSizeInChars(BTy);
1755
149
        if (TypeSize > FieldAlign)
1756
8
          FieldAlign = TypeSize;
1757
149
      }
1758
155
    }
1759
475k
  }
1760
475k
1761
475k
  // The alignment the field would have if it were not packed. This is used to
1762
475k
  // check whether the 'packed' attribute was unnecessary (-Wpacked).
1763
475k
  CharUnits UnpackedFieldAlign = FieldAlign;
1764
475k
  CharUnits UnpackedFieldOffset = FieldOffset;
1765
475k
1766
475k
  if (FieldPacked)
1767
1.01k
    FieldAlign = CharUnits::One();
1768
475k
  CharUnits MaxAlignmentInChars = 
1769
475k
    Context.toCharUnitsFromBits(D->getMaxAlignment());
1770
475k
  FieldAlign = std::max(FieldAlign, MaxAlignmentInChars);
1771
475k
  UnpackedFieldAlign = std::max(UnpackedFieldAlign, MaxAlignmentInChars);
1772
475k
1773
475k
  // The maximum field alignment overrides the aligned attribute.
1774
475k
  if (!MaxFieldAlignment.isZero()) {
1775
5.49k
    FieldAlign = std::min(FieldAlign, MaxFieldAlignment);
1776
5.49k
    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignment);
1777
5.49k
  }
1778
475k
1779
475k
  // Round up the current record size to the field's alignment boundary.
1780
475k
  FieldOffset = FieldOffset.alignTo(FieldAlign);
1781
475k
  UnpackedFieldOffset = UnpackedFieldOffset.alignTo(UnpackedFieldAlign);
1782
475k
1783
475k
  if (UseExternalLayout) {
1784
39
    FieldOffset = Context.toCharUnitsFromBits(
1785
39
                    updateExternalFieldOffset(D, Context.toBits(FieldOffset)));
1786
39
    
1787
39
    if (!IsUnion && EmptySubobjects) {
1788
10
      // Record the fact that we're placing a field at this offset.
1789
10
      bool Allowed = EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset);
1790
10
      (void)Allowed;
1791
10
      assert(Allowed && "Externally-placed field cannot be placed here");      
1792
10
    }
1793
475k
  } else {
1794
475k
    if (!IsUnion && EmptySubobjects) {
1795
100k
      // Check if we can place the field at this offset.
1796
100k
      while (!EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset)) {
1797
17
        // We couldn't place the field at the offset. Try again at a new offset.
1798
17
        FieldOffset += FieldAlign;
1799
17
      }
1800
100k
    }
1801
475k
  }
1802
475k
  
1803
475k
  // Place this field at the current location.
1804
475k
  FieldOffsets.push_back(Context.toBits(FieldOffset));
1805
475k
1806
475k
  if (!UseExternalLayout)
1807
475k
    CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFieldOffset, 
1808
475k
                      Context.toBits(UnpackedFieldOffset),
1809
475k
                      Context.toBits(UnpackedFieldAlign), FieldPacked, D);
1810
475k
1811
475k
  if (InsertExtraPadding) {
1812
36
    CharUnits ASanAlignment = CharUnits::fromQuantity(8);
1813
36
    CharUnits ExtraSizeForAsan = ASanAlignment;
1814
36
    if (FieldSize % ASanAlignment)
1815
30
      ExtraSizeForAsan +=
1816
30
          ASanAlignment - CharUnits::fromQuantity(FieldSize % ASanAlignment);
1817
36
    FieldSize += ExtraSizeForAsan;
1818
36
  }
1819
475k
1820
475k
  // Reserve space for this field.
1821
475k
  uint64_t FieldSizeInBits = Context.toBits(FieldSize);
1822
475k
  if (IsUnion)
1823
48.5k
    setDataSize(std::max(getDataSizeInBits(), FieldSizeInBits));
1824
475k
  else
1825
426k
    setDataSize(FieldOffset + FieldSize);
1826
487k
1827
487k
  // Update the size.
1828
487k
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1829
487k
1830
487k
  // Remember max struct/class alignment.
1831
487k
  UpdateAlignment(FieldAlign, UnpackedFieldAlign);
1832
487k
}
1833
1834
109k
void ItaniumRecordLayoutBuilder::FinishLayout(const NamedDecl *D) {
1835
109k
  // In C++, records cannot be of size 0.
1836
109k
  if (Context.getLangOpts().CPlusPlus && getSizeInBits() == 0) {
1837
3.58k
    if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
1838
3.54k
      // For gcc compatibility, a class (POD or non-POD) that is not empty
1839
3.54k
      // but still has size 0 (for example, one whose only fields are
1840
3.54k
      // zero-length arrays) keeps size 0.
1841
3.54k
      if (RD->isEmpty())
1842
3.52k
        setSize(CharUnits::One());
1843
3.54k
    }
1844
3.58k
    else
1845
49
      setSize(CharUnits::One());
1846
3.58k
  }
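  // Illustrative example (assumption, not from this file): 'struct E {};'
  // is bumped to size 1 here, while a struct whose only field is a
  // zero-length array (a GNU extension) is left at size 0 for gcc
  // compatibility.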
1847
109k
1848
109k
  // Finally, round the size of the record up to the alignment of the
1849
109k
  // record itself.
1850
109k
  uint64_t UnpaddedSize = getSizeInBits() - UnfilledBitsInLastUnit;
1851
109k
  uint64_t UnpackedSizeInBits =
1852
109k
      llvm::alignTo(getSizeInBits(), Context.toBits(UnpackedAlignment));
1853
109k
  uint64_t RoundedSize =
1854
109k
      llvm::alignTo(getSizeInBits(), Context.toBits(Alignment));
1855
109k
1856
109k
  if (UseExternalLayout) {
1857
25
    // If we're inferring alignment, and the external size is smaller than
1858
25
    // our size after we've rounded up to alignment, conservatively set the
1859
25
    // alignment to 1.
1860
25
    if (InferAlignment && External.Size < RoundedSize) {
1861
0
      Alignment = CharUnits::One();
1862
0
      InferAlignment = false;
1863
0
    }
1864
25
    setSize(External.Size);
1865
25
    return;
1866
25
  }
1867
109k
1868
109k
  // Set the size to the final size.
1869
109k
  setSize(RoundedSize);
1870
109k
1871
109k
  unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
1872
109k
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
1873
109k
    // Warn if padding was introduced to the struct/class/union.
1874
109k
    if (getSizeInBits() > UnpaddedSize) {
1875
12.2k
      unsigned PadSize = getSizeInBits() - UnpaddedSize;
1876
12.2k
      bool InBits = true;
1877
12.2k
      if (PadSize % CharBitNum == 0) {
1878
11.3k
        PadSize = PadSize / CharBitNum;
1879
11.3k
        InBits = false;
1880
11.3k
      }
1881
12.2k
      Diag(RD->getLocation(), diag::warn_padded_struct_size)
1882
12.2k
          << Context.getTypeDeclType(RD)
1883
12.2k
          << PadSize
1884
12.2k
          << (InBits ? 1 : 0); // (byte|bit)
1885
12.2k
    }
1886
109k
1887
109k
    // Warn if we packed it unnecessarily, when the unpacked alignment is not
1888
109k
    // greater than the one after packing, the size in bits doesn't change and
1889
109k
    // the offset of each field is identical.
1890
109k
    if (Packed && UnpackedAlignment <= Alignment &&
1891
109k
        UnpackedSizeInBits == getSizeInBits() && !HasPackedField)
1892
56
      Diag(D->getLocation(), diag::warn_unnecessary_packed)
1893
56
          << Context.getTypeDeclType(RD);
1894
109k
  }
1895
109k
}
1896
1897
void ItaniumRecordLayoutBuilder::UpdateAlignment(
1898
498k
    CharUnits NewAlignment, CharUnits UnpackedNewAlignment) {
1899
498k
  // The alignment is not modified when using 'mac68k' alignment or when
1900
498k
  // we have an externally-supplied layout that also provides overall alignment.
1901
498k
  if (IsMac68kAlign || (UseExternalLayout && !InferAlignment))
1902
94
    return;
1903
498k
1904
498k
  
  if (NewAlignment > Alignment) {
1905
113k
    assert(llvm::isPowerOf2_64(NewAlignment.getQuantity()) &&
1906
113k
           "Alignment not a power of 2");
1907
113k
    Alignment = NewAlignment;
1908
113k
  }
1909
498k
1910
498k
  if (UnpackedNewAlignment > UnpackedAlignment) {
1911
113k
    assert(llvm::isPowerOf2_64(UnpackedNewAlignment.getQuantity()) &&
1912
113k
           "Alignment not a power of 2");
1913
113k
    UnpackedAlignment = UnpackedNewAlignment;
1914
113k
  }
1915
498k
}
1916
1917
uint64_t
1918
ItaniumRecordLayoutBuilder::updateExternalFieldOffset(const FieldDecl *Field,
1919
61
                                                      uint64_t ComputedOffset) {
1920
61
  uint64_t ExternalFieldOffset = External.getExternalFieldOffset(Field);
1921
61
1922
61
  if (InferAlignment && ExternalFieldOffset < ComputedOffset) {
1923
0
    // The externally-supplied field offset is before the field offset we
1924
0
    // computed. Assume that the structure is packed.
1925
0
    Alignment = CharUnits::One();
1926
0
    InferAlignment = false;
1927
0
  }
1928
61
  
1929
61
  // Use the externally-supplied field offset.
1930
61
  return ExternalFieldOffset;
1931
61
}
1932
1933
/// \brief Get diagnostic %select index for tag kind for
1934
/// field padding diagnostic message.
1935
/// WARNING: Indexes apply to particular diagnostics only!
1936
///
1937
/// \returns diagnostic %select index.
1938
26.5k
static unsigned getPaddingDiagFromTagKind(TagTypeKind Tag) {
1939
26.5k
  switch (Tag) {
1940
24.4k
  case TTK_Struct: return 0;
1941
0
  case TTK_Interface: return 1;
1942
2.04k
  case TTK_Class: return 2;
1943
0
  
  default: llvm_unreachable("Invalid tag kind for field padding diagnostic!");
1944
0
  }
1945
0
}
1946
1947
void ItaniumRecordLayoutBuilder::CheckFieldPadding(
1948
    uint64_t Offset, uint64_t UnpaddedOffset, uint64_t UnpackedOffset,
1949
486k
    unsigned UnpackedAlign, bool isPacked, const FieldDecl *D) {
1950
486k
  // We let ObjC ivars through without warning; ObjC interfaces are generally
1951
486k
  // not used for padding tricks.
1952
486k
  if (isa<ObjCIvarDecl>(D))
1953
1.09k
    return;
1954
485k
1955
485k
  // Don't warn about structs created without a SourceLocation.  This can
1956
485k
  // be done by clients of the AST, such as codegen.
1957
485k
  
  if (D->getLocation().isInvalid())
1958
9.16k
    return;
1959
476k
  
1960
476k
  unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
1961
476k
1962
476k
  // Warn if padding was introduced to the struct/class.
1963
476k
  if (!IsUnion && Offset > UnpaddedOffset) {
1964
26.5k
    unsigned PadSize = Offset - UnpaddedOffset;
1965
26.5k
    bool InBits = true;
1966
26.5k
    if (PadSize % CharBitNum == 0) {
1967
25.8k
      PadSize = PadSize / CharBitNum;
1968
25.8k
      InBits = false;
1969
25.8k
    }
1970
26.5k
    if (D->getIdentifier())
1971
26.2k
      Diag(D->getLocation(), diag::warn_padded_struct_field)
1972
26.2k
          << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
1973
26.2k
          << Context.getTypeDeclType(D->getParent())
1974
26.2k
          << PadSize
1975
26.2k
          << (InBits ? 1 : 0) // (byte|bit)
1976
26.2k
          << D->getIdentifier();
1977
26.5k
    else
1978
284
      Diag(D->getLocation(), diag::warn_padded_struct_anon_field)
1979
284
          << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
1980
284
          << Context.getTypeDeclType(D->getParent())
1981
284
          << PadSize
1982
284
          << (InBits ? 1 : 0); // (byte|bit)
1983
26.5k
 }
1984
476k
 if (isPacked && Offset != UnpackedOffset) {
1985
202
   HasPackedField = true;
1986
202
 }
1987
486k
}
1988
1989
static const CXXMethodDecl *computeKeyFunction(ASTContext &Context,
1990
24.7k
                                               const CXXRecordDecl *RD) {
1991
24.7k
  // If a class isn't polymorphic it doesn't have a key function.
1992
24.7k
  if (!RD->isPolymorphic())
1993
6.55k
    return nullptr;
1994
18.1k
1995
18.1k
  // A class that is not externally visible doesn't have a key function. (Or
1996
18.1k
  // at least, there's no point to assigning a key function to such a class;
1997
18.1k
  // this doesn't affect the ABI.)
1998
18.1k
  
  if (!RD->isExternallyVisible())
1999
233
    return nullptr;
2000
17.9k
2001
17.9k
  // Template instantiations don't have key functions per Itanium C++ ABI 5.2.6.
2002
17.9k
  // Same behavior as GCC.
2003
17.9k
  TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
2004
17.9k
  if (TSK == TSK_ImplicitInstantiation ||
2005
14.6k
      TSK == TSK_ExplicitInstantiationDeclaration ||
2006
14.6k
      TSK == TSK_ExplicitInstantiationDefinition)
2007
3.47k
    return nullptr;
2008
14.4k
2009
14.4k
  bool allowInlineFunctions =
2010
14.4k
    Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline();
2011
14.4k
2012
84.5k
  for (const CXXMethodDecl *MD : RD->methods()) {
2013
84.5k
    if (!MD->isVirtual())
2014
47.7k
      continue;
2015
36.7k
2016
36.7k
    
    if (MD->isPure())
2017
12.7k
      continue;
2018
24.0k
2019
24.0k
    // Ignore implicit member functions, they are always marked as inline, but
2020
24.0k
    // they don't have a body until they're defined.
2021
24.0k
    
    if (MD->isImplicit())
2022
3.98k
      continue;
2023
20.0k
2024
20.0k
    
    if (MD->isInlineSpecified())
2025
82
      continue;
2026
19.9k
2027
19.9k
    
    if (MD->hasInlineBody())
2028
14.7k
      continue;
2029
5.22k
2030
5.22k
    // Ignore inline deleted or defaulted functions.
2031
5.22k
    
    if (!MD->isUserProvided())
2032
8
      continue;
2033
5.21k
2034
5.21k
    // In certain ABIs, ignore functions with out-of-line inline definitions.
2035
5.21k
    
    if (!allowInlineFunctions) {
2036
4.07k
      const FunctionDecl *Def;
2037
4.07k
      if (MD->hasBody(Def) && Def->isInlineSpecified())
2038
106
        continue;
2039
5.11k
    }
2040
5.11k
2041
5.11k
    
    if (Context.getLangOpts().CUDA) {
2042
7
      // While compiler may see key method in this TU, during CUDA
2043
7
      // compilation we should ignore methods that are not accessible
2044
7
      // on this side of compilation.
2045
7
      if (Context.getLangOpts().CUDAIsDevice) {
2046
4
        // In device mode ignore methods without __device__ attribute.
2047
4
        if (!MD->hasAttr<CUDADeviceAttr>())
2048
2
          continue;
2049
3
      } else {
2050
3
        // In host mode ignore __device__-only methods.
2051
3
        if (!MD->hasAttr<CUDAHostAttr>() && MD->hasAttr<CUDADeviceAttr>())
2052
1
          continue;
2053
5.10k
      }
2054
7
    }
2055
5.10k
2056
5.10k
    // If the key function is dllimport but the class isn't, then the class has
2057
5.10k
    // no key function. The DLL that exports the key function won't export the
2058
5.10k
    // vtable in this case.
2059
5.10k
    
    if (MD->hasAttr<DLLImportAttr>() && !RD->hasAttr<DLLImportAttr>())
2060
48
      return nullptr;
2061
5.06k
2062
5.06k
    // We found it.
2063
5.06k
    return MD;
2064
5.06k
  }
2065
9.37k
2066
9.37k
  return nullptr;
2067
9.37k
}
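// Illustrative example (assumption, not from this file): for
//   struct B { virtual void f(); void g(); };
// with B::f defined out of line, f is B's key function, so B's vtable is
// emitted only in the translation unit that defines B::f.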
2068
2069
DiagnosticBuilder ItaniumRecordLayoutBuilder::Diag(SourceLocation Loc,
2070
38.8k
                                                   unsigned DiagID) {
2071
38.8k
  return Context.getDiagnostics().Report(Loc, DiagID);
2072
38.8k
}
2073
2074
/// Does the target C++ ABI require us to skip over the tail-padding
2075
/// of the given class (considering it as a base class) when allocating
2076
/// objects?
2077
40.3k
static bool mustSkipTailPadding(TargetCXXABI ABI, const CXXRecordDecl *RD) {
2078
40.3k
  switch (ABI.getTailPaddingUseRules()) {
2079
0
  case TargetCXXABI::AlwaysUseTailPadding:
2080
0
    return false;
2081
40.3k
2082
20.9k
  case TargetCXXABI::UseTailPaddingUnlessPOD03:
2083
20.9k
    // FIXME: To the extent that this is meant to cover the Itanium ABI
2084
20.9k
    // rules, we should implement the restrictions about over-sized
2085
20.9k
    // bitfields:
2086
20.9k
    //
2087
20.9k
    // http://itanium-cxx-abi.github.io/cxx-abi/abi.html#POD :
2088
20.9k
    //   In general, a type is considered a POD for the purposes of
2089
20.9k
    //   layout if it is a POD type (in the sense of ISO C++
2090
20.9k
    //   [basic.types]). However, a POD-struct or POD-union (in the
2091
20.9k
    //   sense of ISO C++ [class]) with a bitfield member whose
2092
20.9k
    //   declared width is wider than the declared type of the
2093
20.9k
    //   bitfield is not a POD for the purpose of layout.  Similarly,
2094
20.9k
    //   an array type is not a POD for the purpose of layout if the
2095
20.9k
    //   element type of the array is not a POD for the purpose of
2096
20.9k
    //   layout.
2097
20.9k
    //
2098
20.9k
    //   Where references to the ISO C++ are made in this paragraph,
2099
20.9k
    //   the Technical Corrigendum 1 version of the standard is
2100
20.9k
    //   intended.
2101
20.9k
    return RD->isPOD();
2102
40.3k
2103
19.3k
  case TargetCXXABI::UseTailPaddingUnlessPOD11:
2104
19.3k
    // This is equivalent to RD->getTypeForDecl().isCXX11PODType(),
2105
19.3k
    // but with a lot of abstraction penalty stripped off.  This does
2106
19.3k
    // assume that these properties are set correctly even in C++98
2107
19.3k
    // mode; fortunately, that is true because we want to assign
2108
19.3k
    // consistent semantics to the type-traits intrinsics (or at
2109
19.3k
    // least as many of them as possible).
2110
9.95k
    return RD->isTrivial() && RD->isStandardLayout();
2111
0
  }
2112
0
2113
0
  
  llvm_unreachable("bad tail-padding use kind");
2114
0
}
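// Illustrative example (assumption, typical LP64 target): given
//   struct A { int i; char c; };   // POD, sizeof(A) == 8 with tail padding
//   struct B : A { char d; };
// A is a POD base, so its tail padding must be skipped: 'd' is placed at
// offset 8 and sizeof(B) == 12 rather than 8.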
2115
2116
114k
static bool isMsLayout(const ASTContext &Context) {
2117
114k
  return Context.getTargetInfo().getCXXABI().isMicrosoft();
2118
114k
}
2119
2120
// This section contains an implementation of struct layout that is, up to the
2121
// included tests, compatible with cl.exe (2013).  The layout produced is
2122
// significantly different than those produced by the Itanium ABI.  Here we note
2123
// the most important differences.
2124
//
2125
// * The alignment of bitfields in unions is ignored when computing the
2126
//   alignment of the union.
2127
// * The existence of zero-width bitfield that occurs after anything other than
2128
//   a non-zero length bitfield is ignored.
2129
// * There is no explicit primary base for the purposes of layout.  All bases
2130
//   with vfptrs are laid out first, followed by all bases without vfptrs.
2131
// * The Itanium equivalent vtable pointers are split into a vfptr (virtual
2132
//   function pointer) and a vbptr (virtual base pointer).  They can each be
2133
//   shared with a non-virtual base.  These bases need not be the same.  vfptrs
2134
//   always occur at offset 0.  vbptrs can occur at an arbitrary offset and are
2135
//   placed after the lexicographically last non-virtual base.  This placement
2136
//   is always before fields but can be in the middle of the non-virtual bases
2137
//   due to the two-pass layout scheme for non-virtual-bases.
2138
// * Virtual bases sometimes require a 'vtordisp' field that is laid out before
2139
//   the virtual base and is used in conjunction with virtual overrides during
2140
//   construction and destruction.  This is always a 4 byte value and is used as
2141
//   an alternative to constructor vtables.
2142
// * vtordisps are allocated in a block of memory with size and alignment equal
2143
//   to the alignment of the completed structure (before applying __declspec(
2144
//   align())).  The vtordisp always occur at the end of the allocation block,
2145
//   immediately prior to the virtual base.
2146
// * vfptrs are injected after all bases and fields have been laid out.  In
2147
//   order to guarantee proper alignment of all fields, the vfptr injection
2148
//   pushes all bases and fields back by the alignment imposed by those bases
2149
//   and fields.  This can potentially add a significant amount of padding.
2150
//   vfptrs are always injected at offset 0.
2151
// * vbptrs are injected after all bases and fields have been laid out.  In
2152
//   order to guarantee proper alignment of all fields, the vfptr injection
2153
//   pushes all bases and fields back by the alignment imposed by those bases
2154
//   and fields.  This can potentially add a significant amount of padding.
2155
//   vbptrs are injected immediately after the last non-virtual base as
2156
//   lexicographically ordered in the code.  If this site isn't pointer aligned
2157
//   the vbptr is placed at the next properly aligned location.  Enough padding
2158
//   is added to guarantee a fit.
2159
// * The last zero sized non-virtual base can be placed at the end of the
2160
//   struct (potentially aliasing another object), or may alias with the first
2161
//   field, even if they are of the same type.
2162
// * The last zero size virtual base may be placed at the end of the struct
2163
//   potentially aliasing another object.
2164
// * The ABI attempts to avoid aliasing of zero sized bases by adding padding
2165
//   between bases or vbases with specific properties.  The criteria for
2166
//   additional padding between two bases is that the first base is zero sized
2167
//   or ends with a zero sized subobject and the second base is zero sized or
2168
//   trails with a zero sized base or field (sharing of vfptrs can reorder the
2169
//   layout of the struct so the leading base is not always the first one declared).
2170
//   This rule does take into account fields that are not records, so padding
2171
//   will occur even if the last field is, e.g. an int. The padding added for
2172
//   bases is 1 byte.  The padding added between vbases depends on the alignment
2173
//   of the object but is at least 4 bytes (in both 32 and 64 bit modes).
2174
// * There is no concept of non-virtual alignment, non-virtual alignment and
2175
//   alignment are always identical.
2176
// * There is a distinction between alignment and required alignment.
2177
//   __declspec(align) changes the required alignment of a struct.  This
2178
//   alignment is _always_ obeyed, even in the presence of #pragma pack. A
2179
//   record inherits required alignment from all of its fields and bases.
2180
// * __declspec(align) on bitfields has the effect of changing the bitfield's
2181
//   alignment instead of its required alignment.  This is the only known way
2182
//   to make the alignment of a struct bigger than 8.  Interestingly enough
2183
//   this alignment is also immune to the effects of #pragma pack and can be
2184
//   used to create structures with large alignment under #pragma pack.
2185
//   However, because it does not impact required alignment, such a structure,
2186
//   when used as a field or base, will not be aligned if #pragma pack is
2187
//   still active at the time of use.
2188
//
2189
// Known incompatibilities:
2190
// * all: #pragma pack between fields in a record
2191
// * 2010 and back: If the last field in a record is a bitfield, every object
2192
//   laid out after the record will have extra padding inserted before it.  The
2193
//   extra padding will have size equal to the size of the storage class of the
2194
//   bitfield.  0 sized bitfields don't exhibit this behavior and the extra
2195
//   padding can be avoided by adding a 0 sized bitfield after the non-zero-
2196
//   sized bitfield.
2197
// * 2012 and back: In 64-bit mode, if the alignment of a record is 16 or
2198
//   greater due to __declspec(align()) then a second layout phase occurs after
2199
//   the locations of the vf and vb pointers are known.  This layout phase
2200
//   suffers from the "last field is a bitfield" bug in 2010 and results in
2201
//   _every_ field getting padding put in front of it, potentially including the
2202
//   vfptr, leaving the vfprt at a non-zero location which results in a fault if
2203
//   anything tries to read the vftbl.  The second layout phase also treats
2204
//   bitfields as separate entities and gives them each storage rather than
2205
//   packing them.  Additionally, because this phase appears to perform a
2206
//   (possibly unstable) sort on the members before laying them out and because merged
2207
//   bitfields have the same address, the bitfields end up in whatever order
2208
//   the sort left them in, a behavior we could never hope to replicate.
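// Illustrative example of the vfptr injection described above (assumption,
// 32-bit MS ABI, not from this file): for
//   struct A { double d; virtual void f(); };
// the 4-byte vfptr is injected at offset 0 after layout, and to keep 'd'
// 8-byte aligned the injection pushes it to offset 8, so sizeof(A) == 16.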
2209
2210
namespace {
2211
struct MicrosoftRecordLayoutBuilder {
2212
  struct ElementInfo {
2213
    CharUnits Size;
2214
    CharUnits Alignment;
2215
  };
2216
  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
2217
3.52k
  MicrosoftRecordLayoutBuilder(const ASTContext &Context) : Context(Context) {}
2218
private:
2219
  MicrosoftRecordLayoutBuilder(const MicrosoftRecordLayoutBuilder &) = delete;
2220
  void operator=(const MicrosoftRecordLayoutBuilder &) = delete;
2221
public:
2222
  void layout(const RecordDecl *RD);
2223
  void cxxLayout(const CXXRecordDecl *RD);
2224
  /// \brief Initializes size and alignment and honors some flags.
2225
  void initializeLayout(const RecordDecl *RD);
2226
  /// \brief Initializes C++ layout, computing alignment and virtual alignment and
2227
  /// existence of vfptrs and vbptrs.  Alignment is needed before the vfptr is
2228
  /// laid out.
2229
  void initializeCXXLayout(const CXXRecordDecl *RD);
2230
  void layoutNonVirtualBases(const CXXRecordDecl *RD);
2231
  void layoutNonVirtualBase(const CXXRecordDecl *RD,
2232
                            const CXXRecordDecl *BaseDecl,
2233
                            const ASTRecordLayout &BaseLayout,
2234
                            const ASTRecordLayout *&PreviousBaseLayout);
2235
  void injectVFPtr(const CXXRecordDecl *RD);
2236
  void injectVBPtr(const CXXRecordDecl *RD);
2237
  /// \brief Lays out the fields of the record.  Also rounds size up to
2238
  /// alignment.
2239
  void layoutFields(const RecordDecl *RD);
2240
  void layoutField(const FieldDecl *FD);
2241
  void layoutBitField(const FieldDecl *FD);
2242
  /// \brief Lays out a single zero-width bit-field in the record and handles
2243
  /// special cases associated with zero-width bit-fields.
2244
  void layoutZeroWidthBitField(const FieldDecl *FD);
2245
  void layoutVirtualBases(const CXXRecordDecl *RD);
2246
  void finalizeLayout(const RecordDecl *RD);
2247
  /// \brief Gets the size and alignment of a base taking pragma pack and
2248
  /// __declspec(align) into account.
2249
  ElementInfo getAdjustedElementInfo(const ASTRecordLayout &Layout);
2250
  /// \brief Gets the size and alignment of a field taking pragma pack and
2251
  /// __declspec(align) into account.  It also updates RequiredAlignment as a
2252
  /// side effect because it is most convenient to do so here.
2253
  ElementInfo getAdjustedElementInfo(const FieldDecl *FD);
2254
  /// \brief Places a field at an offset in CharUnits.
2255
1.99k
  void placeFieldAtOffset(CharUnits FieldOffset) {
2256
1.99k
    FieldOffsets.push_back(Context.toBits(FieldOffset));
2257
1.99k
  }
2258
  /// \brief Places a bitfield at a bit offset.
2259
24
  void placeFieldAtBitOffset(uint64_t FieldOffset) {
2260
24
    FieldOffsets.push_back(FieldOffset);
2261
24
  }
2262
  /// \brief Compute the set of virtual bases for which vtordisps are required.
2263
  void computeVtorDispSet(
2264
      llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtorDispSet,
2265
      const CXXRecordDecl *RD) const;
2266
  const ASTContext &Context;
2267
  /// \brief The size of the record being laid out.
2268
  CharUnits Size;
2269
  /// \brief The non-virtual size of the record layout.
2270
  CharUnits NonVirtualSize;
2271
  /// \brief The data size of the record layout.
2272
  CharUnits DataSize;
2273
  /// \brief The current alignment of the record layout.
2274
  CharUnits Alignment;
2275
  /// \brief The maximum allowed field alignment. This is set by #pragma pack.
2276
  CharUnits MaxFieldAlignment;
2277
  /// \brief The alignment that this record must obey.  This is imposed by
2278
  /// __declspec(align()) on the record itself or one of its fields or bases.
2279
  CharUnits RequiredAlignment;
2280
  /// \brief The size of the allocation of the currently active bitfield.
2281
  /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield
2282
  /// is true.
2283
  CharUnits CurrentBitfieldSize;
2284
  /// \brief Offset to the virtual base table pointer (if one exists).
2285
  CharUnits VBPtrOffset;
2286
  /// \brief Minimum record size possible.
2287
  CharUnits MinEmptyStructSize;
2288
  /// \brief The size and alignment info of a pointer.
2289
  ElementInfo PointerInfo;
2290
  /// \brief The primary base class (if one exists).
2291
  const CXXRecordDecl *PrimaryBase;
2292
  /// \brief The class we share our vb-pointer with.
2293
  const CXXRecordDecl *SharedVBPtrBase;
2294
  /// \brief The collection of field offsets.
2295
  SmallVector<uint64_t, 16> FieldOffsets;
2296
  /// \brief Base classes and their offsets in the record.
2297
  BaseOffsetsMapTy Bases;
2298
  /// \brief virtual base classes and their offsets in the record.
2299
  ASTRecordLayout::VBaseOffsetsMapTy VBases;
2300
  /// \brief The number of remaining bits in our last bitfield allocation.
2301
  /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield is
2302
  /// true.
2303
  unsigned RemainingBitsInField;
2304
  bool IsUnion : 1;
2305
  /// \brief True if the last field laid out was a bitfield and was not 0
2306
  /// width.
2307
  bool LastFieldIsNonZeroWidthBitfield : 1;
2308
  /// \brief True if the class has its own vftable pointer.
2309
  bool HasOwnVFPtr : 1;
2310
  /// \brief True if the class has a vbtable pointer.
2311
  bool HasVBPtr : 1;
2312
  /// \brief True if the last sub-object within the type is zero sized or the
2313
  /// object itself is zero sized.  This *does not* count members that are not
2314
  /// records.  Only used for MS-ABI.
2315
  bool EndsWithZeroSizedObject : 1;
2316
  /// \brief True if this class is zero sized or first base is zero sized or
2317
  /// has this property.  Only used for MS-ABI.
2318
  bool LeadsWithZeroSizedBase : 1;
2319
2320
  /// \brief True if the external AST source provided a layout for this record.
2321
  bool UseExternalLayout : 1;
2322
2323
  /// \brief The layout provided by the external AST source. Only active if
2324
  /// UseExternalLayout is true.
2325
  ExternalLayout External;
2326
};
2327
} // namespace
2328
2329
MicrosoftRecordLayoutBuilder::ElementInfo
2330
MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
2331
2.53k
    const ASTRecordLayout &Layout) {
2332
2.53k
  ElementInfo Info;
2333
2.53k
  Info.Alignment = Layout.getAlignment();
2334
2.53k
  // Respect pragma pack.
2335
2.53k
  if (!MaxFieldAlignment.isZero())
2336
34
    Info.Alignment = std::min(Info.Alignment, MaxFieldAlignment);
2337
2.53k
  // Track zero-sized subobjects here where it's already available.
2338
2.53k
  EndsWithZeroSizedObject = Layout.endsWithZeroSizedObject();
2339
2.53k
  // Respect required alignment, this is necessary because we may have adjusted
2340
2.53k
  // the alignment in the case of pragma pack.  Note that the required alignment
2341
2.53k
  // doesn't actually apply to the struct alignment at this point.
2342
2.53k
  Alignment = std::max(Alignment, Info.Alignment);
2343
2.53k
  RequiredAlignment = std::max(RequiredAlignment, Layout.getRequiredAlignment());
2344
2.53k
  Info.Alignment = std::max(Info.Alignment, Layout.getRequiredAlignment());
2345
2.53k
  Info.Size = Layout.getNonVirtualSize();
2346
2.53k
  return Info;
2347
2.53k
}
2348
2349
MicrosoftRecordLayoutBuilder::ElementInfo
2350
MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
2351
1.99k
    const FieldDecl *FD) {
2352
1.99k
  // Get the alignment of the field type's natural alignment, ignore any
2353
1.99k
  // alignment attributes.
2354
1.99k
  ElementInfo Info;
2355
1.99k
  std::tie(Info.Size, Info.Alignment) =
2356
1.99k
      Context.getTypeInfoInChars(FD->getType()->getUnqualifiedDesugaredType());
2357
1.99k
  // Respect align attributes on the field.
2358
1.99k
  CharUnits FieldRequiredAlignment =
2359
1.99k
      Context.toCharUnitsFromBits(FD->getMaxAlignment());
2360
1.99k
  // Respect align attributes on the type.
2361
1.99k
  if (Context.isAlignmentRequired(FD->getType()))
2362
25
    FieldRequiredAlignment = std::max(
2363
25
        Context.getTypeAlignInChars(FD->getType()), FieldRequiredAlignment);
2364
1.99k
  // Respect attributes applied to subobjects of the field.
2365
1.99k
  if (FD->isBitField())
2366
1.99k
    // For some reason __declspec align impacts alignment rather than required
2367
1.99k
    // alignment when it is applied to bitfields.
2368
156
    Info.Alignment = std::max(Info.Alignment, FieldRequiredAlignment);
2369
1.84k
  else {
2370
1.84k
    if (auto RT =
2371
156
            FD->getType()->getBaseElementTypeUnsafe()->getAs<RecordType>()) {
2372
156
      auto const &Layout = Context.getASTRecordLayout(RT->getDecl());
2373
156
      EndsWithZeroSizedObject = Layout.endsWithZeroSizedObject();
2374
156
      FieldRequiredAlignment = std::max(FieldRequiredAlignment,
2375
156
                                        Layout.getRequiredAlignment());
2376
156
    }
2377
1.84k
    // Capture required alignment as a side-effect.
2378
1.84k
    RequiredAlignment = std::max(RequiredAlignment, FieldRequiredAlignment);
2379
1.84k
  }
2380
1.99k
  // Respect pragma pack, attribute pack and declspec align
2381
1.99k
  if (!MaxFieldAlignment.isZero())
2382
178
    Info.Alignment = std::min(Info.Alignment, MaxFieldAlignment);
2383
1.99k
  if (FD->hasAttr<PackedAttr>())
2384
7
    Info.Alignment = CharUnits::One();
2385
1.99k
  Info.Alignment = std::max(Info.Alignment, FieldRequiredAlignment);
2386
1.99k
  return Info;
2387
1.99k
}
2388
2389
201
void MicrosoftRecordLayoutBuilder::layout(const RecordDecl *RD) {
2390
201
  // For C record layout, zero-sized records always have size 4.
2391
201
  MinEmptyStructSize = CharUnits::fromQuantity(4);
2392
201
  initializeLayout(RD);
2393
201
  layoutFields(RD);
2394
201
  DataSize = Size = Size.alignTo(Alignment);
2395
201
  RequiredAlignment = std::max(
2396
201
      RequiredAlignment, Context.toCharUnitsFromBits(RD->getMaxAlignment()));
2397
201
  finalizeLayout(RD);
2398
201
}
2399
2400
3.32k
void MicrosoftRecordLayoutBuilder::cxxLayout(const CXXRecordDecl *RD) {
2401
3.32k
  // The C++ standard says that empty structs have size 1.
2402
3.32k
  MinEmptyStructSize = CharUnits::One();
2403
3.32k
  initializeLayout(RD);
2404
3.32k
  initializeCXXLayout(RD);
2405
3.32k
  layoutNonVirtualBases(RD);
2406
3.32k
  layoutFields(RD);
2407
3.32k
  injectVBPtr(RD);
2408
3.32k
  injectVFPtr(RD);
2409
3.32k
  if (HasOwnVFPtr || (HasVBPtr && !SharedVBPtrBase))
2410
1.13k
    Alignment = std::max(Alignment, PointerInfo.Alignment);
2411
3.32k
  auto RoundingAlignment = Alignment;
2412
3.32k
  if (!MaxFieldAlignment.isZero())
2413
56
    RoundingAlignment = std::min(RoundingAlignment, MaxFieldAlignment);
2414
3.32k
  NonVirtualSize = Size = Size.alignTo(RoundingAlignment);
2415
3.32k
  RequiredAlignment = std::max(
2416
3.32k
      RequiredAlignment, Context.toCharUnitsFromBits(RD->getMaxAlignment()));
2417
3.32k
  layoutVirtualBases(RD);
2418
3.32k
  finalizeLayout(RD);
2419
3.32k
}
2420
2421
3.52k
void MicrosoftRecordLayoutBuilder::initializeLayout(const RecordDecl *RD) {
2422
3.52k
  IsUnion = RD->isUnion();
2423
3.52k
  Size = CharUnits::Zero();
2424
3.52k
  Alignment = CharUnits::One();
2425
3.52k
  // In 64-bit mode we always perform an alignment step after laying out vbases.
2426
3.52k
  // In 32-bit mode we do not.  The check to see if we need to perform alignment
2427
3.52k
  // checks the RequiredAlignment field and performs alignment if it isn't 0.
2428
3.52k
  RequiredAlignment = Context.getTargetInfo().getTriple().isArch64Bit()
2429
1.36k
                          ? CharUnits::One()
2430
2.16k
                          : CharUnits::Zero();
2431
3.52k
  // Compute the maximum field alignment.
2432
3.52k
  MaxFieldAlignment = CharUnits::Zero();
2433
3.52k
  // Honor the default struct packing maximum alignment flag.
2434
3.52k
  if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct)
2435
0
      MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
2436
3.52k
  // Honor the packing attribute.  The MS-ABI ignores pragma pack if it is larger
2437
3.52k
  // than the pointer size.
2438
3.52k
  if (const MaxFieldAlignmentAttr *MFAA = RD->getAttr<MaxFieldAlignmentAttr>()){
2439
112
    unsigned PackedAlignment = MFAA->getAlignment();
2440
112
    if (PackedAlignment <= Context.getTargetInfo().getPointerWidth(0))
2441
79
      MaxFieldAlignment = Context.toCharUnitsFromBits(PackedAlignment);
2442
112
  }
2443
3.52k
  // Packed attribute forces max field alignment to be 1.
2444
3.52k
  if (RD->hasAttr<PackedAttr>())
2445
7
    MaxFieldAlignment = CharUnits::One();
2446
3.52k
2447
3.52k
  // Try to respect the external layout if present.
2448
3.52k
  UseExternalLayout = false;
2449
3.52k
  if (ExternalASTSource *Source = Context.getExternalSource())
2450
16
    UseExternalLayout = Source->layoutRecordType(
2451
16
        RD, External.Size, External.Align, External.FieldOffsets,
2452
16
        External.BaseOffsets, External.VirtualBaseOffsets);
2453
3.52k
}
2454
2455
void
2456
3.32k
MicrosoftRecordLayoutBuilder::initializeCXXLayout(const CXXRecordDecl *RD) {
2457
3.32k
  EndsWithZeroSizedObject = false;
2458
3.32k
  LeadsWithZeroSizedBase = false;
2459
3.32k
  HasOwnVFPtr = false;
2460
3.32k
  HasVBPtr = false;
2461
3.32k
  PrimaryBase = nullptr;
2462
3.32k
  SharedVBPtrBase = nullptr;
2463
3.32k
  // Calculate pointer size and alignment.  These are used for vfptr and vbptr
2464
3.32k
  // injection.
2465
3.32k
  PointerInfo.Size =
2466
3.32k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
2467
3.32k
  PointerInfo.Alignment =
2468
3.32k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
2469
3.32k
  // Respect pragma pack.
2470
3.32k
  if (!MaxFieldAlignment.isZero())
2471
56
    PointerInfo.Alignment = std::min(PointerInfo.Alignment, MaxFieldAlignment);
2472
3.32k
}
2473
2474
void
2475
3.32k
MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(const CXXRecordDecl *RD) {
2476
3.32k
  // The MS-ABI lays out all bases that contain leading vfptrs before it lays
2477
3.32k
  // out any bases that do not contain vfptrs.  We implement this as two passes
2478
3.32k
  // over the bases.  This approach guarantees that the primary base is laid out
2479
3.32k
  // first.  We use these passes to calculate some additional aggregated
2480
3.32k
  // information about the bases, such as required alignment and the presence of
2481
3.32k
  // zero sized members.
2482
3.32k
  const ASTRecordLayout *PreviousBaseLayout = nullptr;
2483
3.32k
  // Iterate through the bases and lay out the non-virtual ones.
2484
2.19k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
2485
2.19k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2486
2.19k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
2487
2.19k
    // Mark and skip virtual bases.
2488
2.19k
    if (Base.isVirtual()) {
2489
864
      HasVBPtr = true;
2490
864
      continue;
2491
864
    }
2492
1.32k
    // Check for a base to share a VBPtr with.
2493
1.32k
    if (!SharedVBPtrBase && BaseLayout.hasVBPtr()) {
2494
206
      SharedVBPtrBase = BaseDecl;
2495
206
      HasVBPtr = true;
2496
206
    }
2497
1.32k
    // Only lay out bases with extendable VFPtrs on the first pass.
2498
1.32k
    if (!BaseLayout.hasExtendableVFPtr())
2499
936
      continue;
2500
391
    // If we don't have a primary base, this one qualifies.
2501
391
    if (!PrimaryBase) {
2502
289
      PrimaryBase = BaseDecl;
2503
289
      LeadsWithZeroSizedBase = BaseLayout.leadsWithZeroSizedBase();
2504
289
    }
2505
2.19k
    // Lay out the base.
2506
2.19k
    layoutNonVirtualBase(RD, BaseDecl, BaseLayout, PreviousBaseLayout);
2507
2.19k
  }
2508
3.32k
  // Figure out if we need a fresh VFPtr for this class.
2509
3.32k
  if (!PrimaryBase && RD->isDynamicClass())
2510
1.24k
    for (CXXRecordDecl::method_iterator i = RD->method_begin(),
2511
1.24k
                                        e = RD->method_end();
2512
4.51k
         !HasOwnVFPtr && i != e; ++i)
2513
3.27k
      HasOwnVFPtr = i->isVirtual() && i->size_overridden_methods() == 0;
2514
3.32k
  // If we don't have a primary base then we have a leading object that could
2515
3.32k
  // itself lead with a zero-sized object, something we track.
2516
3.32k
  bool CheckLeadingLayout = !PrimaryBase;
2517
3.32k
  // Iterate through the bases and lay out the non-virtual ones.
2518
2.19k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
2519
2.19k
    if (Base.isVirtual())
2520
864
      continue;
2521
1.32k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2522
1.32k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
2523
1.32k
    // Only lay out bases without extendable VFPtrs on the second pass.
2524
1.32k
    if (BaseLayout.hasExtendableVFPtr()) {
2525
391
      VBPtrOffset = Bases[BaseDecl] + BaseLayout.getNonVirtualSize();
2526
391
      continue;
2527
391
    }
2528
936
    // If this is the first layout, check to see if it leads with a zero sized
2529
936
    // object.  If it does, so do we.
2530
936
    if (CheckLeadingLayout) {
2531
576
      CheckLeadingLayout = false;
2532
576
      LeadsWithZeroSizedBase = BaseLayout.leadsWithZeroSizedBase();
2533
576
    }
2534
2.19k
    // Lay out the base.
2535
2.19k
    layoutNonVirtualBase(RD, BaseDecl, BaseLayout, PreviousBaseLayout);
2536
2.19k
    VBPtrOffset = Bases[BaseDecl] + BaseLayout.getNonVirtualSize();
2537
2.19k
  }
2538
3.32k
  // Set our VBPtrOffset if we know it at this point.
2539
3.32k
  if (!HasVBPtr)
2540
2.52k
    VBPtrOffset = CharUnits::fromQuantity(-1);
2541
799
  else if (SharedVBPtrBase) {
2542
206
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(SharedVBPtrBase);
2543
206
    VBPtrOffset = Bases[SharedVBPtrBase] + Layout.getVBPtrOffset();
2544
206
  }
2545
3.32k
}
2546
2547
2.53k
static bool recordUsesEBO(const RecordDecl *RD) {
2548
2.53k
  if (!isa<CXXRecordDecl>(RD))
2549
16
    return false;
2550
2.51k
  if (RD->hasAttr<EmptyBasesAttr>())
2551
16
    return true;
2552
2.50k
  if (auto *LVA = RD->getAttr<LayoutVersionAttr>())
2553
2.50k
    // TODO: Double check with the next version of MSVC.
2554
0
    if (LVA->getVersion() <= LangOptions::MSVC2015)
2555
0
      return false;
2556
2.50k
  // TODO: Some later version of MSVC will change the default behavior of the
2557
2.50k
  // compiler to enable EBO by default.  When this happens, we will need an
2558
2.50k
  // additional isCompatibleWithMSVC check.
2559
2.50k
  return false;
2560
2.50k
}
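recordUsesEBO is what turns the padding between empty bases on or off. A hedged example of the attribute it checks for; the sizes are my expectation under the MS ABI, not values from this run.

struct E1 {}; struct E2 {};
struct Plain : E1, E2 { int x; };                              // a padding byte separates E1 and E2; sizeof expected 8
struct __declspec(empty_bases) Compressed : E1, E2 { int x; }; // both empty bases at offset 0; sizeof expected 4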
2561
2562
void MicrosoftRecordLayoutBuilder::layoutNonVirtualBase(
2563
    const CXXRecordDecl *RD,
2564
    const CXXRecordDecl *BaseDecl,
2565
    const ASTRecordLayout &BaseLayout,
2566
1.32k
    const ASTRecordLayout *&PreviousBaseLayout) {
2567
1.32k
  // Insert padding between two bases if the first one is zero-sized or ends
2568
1.32k
  // with a zero-sized subobject and the second one is zero-sized or leads
2569
1.32k
  // with a zero-sized base.
2570
1.32k
  bool MDCUsesEBO = recordUsesEBO(RD);
2571
1.32k
  if (PreviousBaseLayout && PreviousBaseLayout->endsWithZeroSizedObject() &&
2572
1.32k
      BaseLayout.leadsWithZeroSizedBase() && !MDCUsesEBO)
2573
108
    Size++;
2574
1.32k
  ElementInfo Info = getAdjustedElementInfo(BaseLayout);
2575
1.32k
  CharUnits BaseOffset;
2576
1.32k
2577
1.32k
  // Respect the external AST source base offset, if present.
2578
1.32k
  bool FoundBase = false;
2579
1.32k
  if (UseExternalLayout) {
2580
0
    FoundBase = External.getExternalNVBaseOffset(BaseDecl, BaseOffset);
2581
0
    if (FoundBase) {
2582
0
      assert(BaseOffset >= Size && "base offset already allocated");
2583
0
      Size = BaseOffset;
2584
0
    }
2585
0
  }
2586
1.32k
2587
1.32k
  if (!FoundBase) {
2588
1.32k
    if (MDCUsesEBO && BaseDecl->isEmpty() &&
2589
1.32k
        BaseLayout.getNonVirtualSize() == CharUnits::Zero()) {
2590
4
      BaseOffset = CharUnits::Zero();
2591
1.32k
    } else {
2592
1.32k
      // Otherwise, lay the base out at the end of the MDC.
2593
1.32k
      BaseOffset = Size = Size.alignTo(Info.Alignment);
2594
1.32k
    }
2595
1.32k
  }
2596
1.32k
  Bases.insert(std::make_pair(BaseDecl, BaseOffset));
2597
1.32k
  Size += BaseLayout.getNonVirtualSize();
2598
1.32k
  PreviousBaseLayout = &BaseLayout;
2599
1.32k
}
2600
2601
3.52k
void MicrosoftRecordLayoutBuilder::layoutFields(const RecordDecl *RD) {
2602
3.52k
  LastFieldIsNonZeroWidthBitfield = false;
2603
3.52k
  for (const FieldDecl *Field : RD->fields())
2604
2.01k
    layoutField(Field);
2605
3.52k
}
2606
2607
2.01k
void MicrosoftRecordLayoutBuilder::layoutField(const FieldDecl *FD) {
2608
2.01k
  if (FD->isBitField()) {
2609
176
    layoutBitField(FD);
2610
176
    return;
2611
176
  }
2612
1.84k
  LastFieldIsNonZeroWidthBitfield = false;
2613
1.84k
  ElementInfo Info = getAdjustedElementInfo(FD);
2614
1.84k
  Alignment = std::max(Alignment, Info.Alignment);
2615
1.84k
  if (IsUnion) {
2616
61
    placeFieldAtOffset(CharUnits::Zero());
2617
61
    Size = std::max(Size, Info.Size);
2618
1.84k
  } else {
2619
1.78k
    CharUnits FieldOffset;
2620
1.78k
    if (UseExternalLayout) {
2621
0
      FieldOffset =
2622
0
          Context.toCharUnitsFromBits(External.getExternalFieldOffset(FD));
2623
0
      assert(FieldOffset >= Size && "field offset already allocated");
2624
1.78k
    } else {
2625
1.78k
      FieldOffset = Size.alignTo(Info.Alignment);
2626
1.78k
    }
2627
1.78k
    placeFieldAtOffset(FieldOffset);
2628
1.78k
    Size = FieldOffset + Info.Size;
2629
1.78k
  }
2630
2.01k
}
2631
2632
176
void MicrosoftRecordLayoutBuilder::layoutBitField(const FieldDecl *FD) {
2633
176
  unsigned Width = FD->getBitWidthValue(Context);
2634
176
  if (Width == 0) {
2635
42
    layoutZeroWidthBitField(FD);
2636
42
    return;
2637
42
  }
2638
134
  ElementInfo Info = getAdjustedElementInfo(FD);
2639
134
  // Clamp the bitfield to a containable size so that it can still be laid
2640
134
  // out; Sema will issue an error for the oversized width.
2641
134
  if (Width > Context.toBits(Info.Size))
2642
0
    Width = Context.toBits(Info.Size);
2643
134
  // Check to see if this bitfield fits into an existing allocation.  Note:
2644
134
  // MSVC refuses to pack bitfields of formal types with different sizes
2645
134
  // into the same allocation.
2646
134
  if (!IsUnion && LastFieldIsNonZeroWidthBitfield &&
2647
134
      CurrentBitfieldSize == Info.Size && Width <= RemainingBitsInField) {
2648
24
    placeFieldAtBitOffset(Context.toBits(Size) - RemainingBitsInField);
2649
24
    RemainingBitsInField -= Width;
2650
24
    return;
2651
24
  }
2652
110
  LastFieldIsNonZeroWidthBitfield = true;
2653
110
  CurrentBitfieldSize = Info.Size;
2654
110
  if (IsUnion) {
2655
12
    placeFieldAtOffset(CharUnits::Zero());
2656
12
    Size = std::max(Size, Info.Size);
2657
12
    // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
2658
110
  } else {
2659
98
    // Allocate a new block of memory and place the bitfield in it.
2660
98
    CharUnits FieldOffset = Size.alignTo(Info.Alignment);
2661
98
    placeFieldAtOffset(FieldOffset);
2662
98
    Size = FieldOffset + Info.Size;
2663
98
    Alignment = std::max(Alignment, Info.Alignment);
2664
98
    RemainingBitsInField = Context.toBits(Info.Size) - Width;
2665
98
  }
2666
176
}
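A hedged illustration of the rule enforced above, that bitfields of formal types with different sizes never share an allocation unit; the example and the expected sizes are mine, assuming common 32/64-bit targets.

struct BF {
  short a : 4;   // opens a 2-byte allocation unit
  short b : 4;   // same declared type size, so it reuses the remaining bits
  int   c : 4;   // different type size, so a new 4-byte unit starts at offset 4
};
// Expected: sizeof(BF) == 8 under the MS ABI, versus 4 under the Itanium builder.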
2667
2668
void
2669
42
MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField(const FieldDecl *FD) {
2670
42
  // Zero-width bitfields are ignored unless they follow a non-zero-width
2671
42
  // bitfield.
2672
42
  if (!LastFieldIsNonZeroWidthBitfield) {
2673
20
    placeFieldAtOffset(IsUnion ? CharUnits::Zero() : Size);
2674
20
    // TODO: Add a Sema warning that MS ignores alignment for zero
2675
20
    // sized bitfields that occur after zero-size bitfields or non-bitfields.
2676
20
    return;
2677
20
  }
2678
22
  LastFieldIsNonZeroWidthBitfield = false;
2679
22
  ElementInfo Info = getAdjustedElementInfo(FD);
2680
22
  if (IsUnion) {
2681
8
    placeFieldAtOffset(CharUnits::Zero());
2682
8
    Size = std::max(Size, Info.Size);
2683
8
    // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
2684
22
  } else {
2685
14
    // Round up the current record size to the field's alignment boundary.
2686
14
    CharUnits FieldOffset = Size.alignTo(Info.Alignment);
2687
14
    placeFieldAtOffset(FieldOffset);
2688
14
    Size = FieldOffset;
2689
14
    Alignment = std::max(Alignment, Info.Alignment);
2690
14
  }
2691
42
}
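A hedged sketch of the two branches above (the struct is mine, not from the report): a zero-width bitfield only has an effect when it follows a non-zero-width bitfield, in which case it closes the current allocation unit.

struct Z {
  char a : 4;   // opens a 1-byte unit; 4 bits remain
  char   : 0;   // follows a non-zero-width bitfield, so the unit is closed here
  char b : 4;   // starts a new unit at offset 1
};
// Expected under the MS ABI: sizeof(Z) == 2; dropping the ':0' would make it 1.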
2692
2693
3.32k
void MicrosoftRecordLayoutBuilder::injectVBPtr(const CXXRecordDecl *RD) {
2694
3.32k
  if (!HasVBPtr || SharedVBPtrBase)
2695
2.73k
    return;
2696
593
  // Inject the VBPointer at the injection site.
2697
593
  CharUnits InjectionSite = VBPtrOffset;
2698
593
  // But before we do, make sure it's properly aligned.
2699
593
  VBPtrOffset = VBPtrOffset.alignTo(PointerInfo.Alignment);
2700
593
  // Shift everything after the vbptr down, unless we're using an external
2701
593
  // layout.
2702
593
  if (UseExternalLayout)
2703
0
    return;
2704
593
  // Determine where the first field should be laid out after the vbptr.
2705
593
  CharUnits FieldStart = VBPtrOffset + PointerInfo.Size;
2706
593
  // Make sure that the amount we push the fields back by is a multiple of the
2707
593
  // alignment.
2708
593
  CharUnits Offset = (FieldStart - InjectionSite)
2709
593
                         .alignTo(std::max(RequiredAlignment, Alignment));
2710
593
  Size += Offset;
2711
593
  for (uint64_t &FieldOffset : FieldOffsets)
2712
356
    FieldOffset += Context.toBits(Offset);
2713
593
  for (BaseOffsetsMapTy::value_type &Base : Bases)
2714
251
    
if (251
Base.second >= InjectionSite251
)
2715
62
      Base.second += Offset;
2716
3.32k
}
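The early return above is the vbptr-sharing case discovered by layoutNonVirtualBases. A hedged example with names of my own:

struct A { int a; };
struct B : virtual A { int b; };     // B gets its own vbptr injected by this function
struct C : B, virtual A { int c; };  // SharedVBPtrBase == B, so C reuses B's vbptr and nothing is injected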
2717
2718
3.32k
void MicrosoftRecordLayoutBuilder::injectVFPtr(const CXXRecordDecl *RD) {
2719
3.32k
  if (!HasOwnVFPtr)
2720
2.69k
    return;
2721
625
  // Make sure that the amount we push the struct back by is a multiple of the
2722
625
  // alignment.
2723
625
  CharUnits Offset =
2724
625
      PointerInfo.Size.alignTo(std::max(RequiredAlignment, Alignment));
2725
625
  // Push back the vbptr, but increase the size of the object and push back
2726
625
  // regular fields by the offset only if not using external record layout.
2727
625
  if (HasVBPtr)
2728
108
    VBPtrOffset += Offset;
2729
625
2730
625
  if (UseExternalLayout)
2731
0
    return;
2732
625
2733
625
  Size += Offset;
2734
625
2735
625
  // If we're using an external layout, the field offsets have already
2736
625
  // accounted for this adjustment.
2737
625
  for (uint64_t &FieldOffset : FieldOffsets)
2738
150
    FieldOffset += Context.toBits(Offset);
2739
625
  for (BaseOffsetsMapTy::value_type &Base : Bases)
2740
79
    Base.second += Offset;
2741
3.32k
}
2742
2743
3.32k
void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) {
2744
3.32k
  if (!HasVBPtr)
2745
2.52k
    return;
2746
799
  // Vtordisps are always 4 bytes (even in 64-bit mode)
2747
799
  CharUnits VtorDispSize = CharUnits::fromQuantity(4);
2748
799
  CharUnits VtorDispAlignment = VtorDispSize;
2749
799
  // vtordisps respect pragma pack.
2750
799
  if (!MaxFieldAlignment.isZero())
2751
16
    VtorDispAlignment = std::min(VtorDispAlignment, MaxFieldAlignment);
2752
799
  // The alignment of the vtordisp is at least the required alignment of the
2753
799
  // entire record.  This requirement may be present to support vtordisp
2754
799
  // injection.
2755
1.20k
  for (const CXXBaseSpecifier &VBase : RD->vbases()) {
2756
1.20k
    const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
2757
1.20k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
2758
1.20k
    RequiredAlignment =
2759
1.20k
        std::max(RequiredAlignment, BaseLayout.getRequiredAlignment());
2760
1.20k
  }
2761
799
  VtorDispAlignment = std::max(VtorDispAlignment, RequiredAlignment);
2762
799
  // Compute the vtordisp set.
2763
799
  llvm::SmallPtrSet<const CXXRecordDecl *, 2> HasVtorDispSet;
2764
799
  computeVtorDispSet(HasVtorDispSet, RD);
2765
799
  // Iterate through the virtual bases and lay them out.
2766
799
  const ASTRecordLayout *PreviousBaseLayout = nullptr;
2767
1.20k
  for (const CXXBaseSpecifier &VBase : RD->vbases()) {
2768
1.20k
    const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
2769
1.20k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
2770
1.20k
    bool HasVtordisp = HasVtorDispSet.count(BaseDecl) > 0;
2771
1.20k
    // Insert padding between two bases if the first one is zero-sized or ends
2772
1.20k
    // with a zero-sized subobject and the second one is zero-sized or leads
2773
1.20k
    // with a zero-sized base.  The padding between virtual bases is 4
2774
1.20k
    // bytes (in both 32- and 64-bit modes) and always involves rounding up to
2775
1.20k
    // the required alignment; we don't know why.
2776
1.20k
    if ((PreviousBaseLayout && PreviousBaseLayout->endsWithZeroSizedObject() &&
2777
1.20k
         BaseLayout.leadsWithZeroSizedBase() && !recordUsesEBO(RD)) ||
2778
1.20k
        HasVtordisp) {
2779
271
      Size = Size.alignTo(VtorDispAlignment) + VtorDispSize;
2780
271
      Alignment = std::max(VtorDispAlignment, Alignment);
2781
271
    }
2782
1.20k
    // Insert the virtual base.
2783
1.20k
    ElementInfo Info = getAdjustedElementInfo(BaseLayout);
2784
1.20k
    CharUnits BaseOffset;
2785
1.20k
2786
1.20k
    // Respect the external AST source base offset, if present.
2787
1.20k
    bool FoundBase = false;
2788
1.20k
    if (UseExternalLayout) {
2789
0
      FoundBase = External.getExternalVBaseOffset(BaseDecl, BaseOffset);
2790
0
      if (FoundBase)
2791
0
        assert(BaseOffset >= Size && "base offset already allocated");
2792
0
    }
2793
1.20k
    if (!FoundBase)
2794
1.20k
      BaseOffset = Size.alignTo(Info.Alignment);
2795
1.20k
2796
1.20k
    VBases.insert(std::make_pair(BaseDecl,
2797
1.20k
        ASTRecordLayout::VBaseInfo(BaseOffset, HasVtordisp)));
2798
1.20k
    Size = BaseOffset + BaseLayout.getNonVirtualSize();
2799
1.20k
    PreviousBaseLayout = &BaseLayout;
2800
1.20k
  }
2801
3.32k
}
2802
2803
3.52k
void MicrosoftRecordLayoutBuilder::finalizeLayout(const RecordDecl *RD) {
2804
3.52k
  // Respect required alignment.  Note that in 32-bit mode RequiredAlignment
2805
3.52k
  // may be zero, in which case the size is not updated.
2806
3.52k
  DataSize = Size;
2807
3.52k
  if (!RequiredAlignment.isZero()) {
2808
1.52k
    Alignment = std::max(Alignment, RequiredAlignment);
2809
1.52k
    auto RoundingAlignment = Alignment;
2810
1.52k
    if (!MaxFieldAlignment.isZero())
2811
65
      RoundingAlignment = std::min(RoundingAlignment, MaxFieldAlignment);
2812
1.52k
    RoundingAlignment = std::max(RoundingAlignment, RequiredAlignment);
2813
1.52k
    Size = Size.alignTo(RoundingAlignment);
2814
1.52k
  }
2815
3.52k
  if (Size.isZero()) {
2816
1.09k
    if (!recordUsesEBO(RD) || !cast<CXXRecordDecl>(RD)->isEmpty()) {
2817
1.08k
      EndsWithZeroSizedObject = true;
2818
1.08k
      LeadsWithZeroSizedBase = true;
2819
1.08k
    }
2820
1.09k
    // Zero-sized structures have size equal to their alignment if a
2821
1.09k
    // __declspec(align) came into play.
2822
1.09k
    if (RequiredAlignment >= MinEmptyStructSize)
2823
418
      Size = Alignment;
2824
1.09k
    else
2825
673
      Size = MinEmptyStructSize;
2826
1.09k
  }
2827
3.52k
2828
3.52k
  if (UseExternalLayout) {
2829
0
    Size = Context.toCharUnitsFromBits(External.Size);
2830
0
    if (External.Align)
2831
0
      Alignment = Context.toCharUnitsFromBits(External.Align);
2832
0
  }
2833
3.52k
}
2834
2835
// Recursively walks the non-virtual bases of a class and determines if any of
2836
// them are in the bases with overridden methods set.
2837
static bool
2838
RequiresVtordisp(const llvm::SmallPtrSetImpl<const CXXRecordDecl *> &
2839
                     BasesWithOverriddenMethods,
2840
651
                 const CXXRecordDecl *RD) {
2841
651
  if (BasesWithOverriddenMethods.count(RD))
2842
91
    return true;
2843
560
  // If any of a virtual base's non-virtual bases (recursively) requires a
2844
560
  // vtordisp, then so does this virtual base.
2845
560
  for (const CXXBaseSpecifier &Base : RD->bases())
2846
81
    if (!Base.isVirtual() &&
2847
46
        RequiresVtordisp(BasesWithOverriddenMethods,
2848
46
                         Base.getType()->getAsCXXRecordDecl()))
2849
16
      return true;
2850
544
  return false;
2851
544
}
2852
2853
void MicrosoftRecordLayoutBuilder::computeVtorDispSet(
2854
    llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtordispSet,
2855
799
    const CXXRecordDecl *RD) const {
2856
799
  // /vd2 or #pragma vtordisp(2): Always use vtordisps for virtual bases with
2857
799
  // vftables.
2858
799
  if (RD->getMSVtorDispMode() == MSVtorDispAttr::ForVFTable) {
2859
25
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
2860
25
      const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2861
25
      const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
2862
25
      if (Layout.hasExtendableVFPtr())
2863
23
        HasVtordispSet.insert(BaseDecl);
2864
25
    }
2865
20
    return;
2866
20
  }
2867
779
2868
779
  // If any of our bases need a vtordisp for this type, so do we.  Check our
2869
779
  // direct bases for vtordisp requirements.
2870
779
  for (const CXXBaseSpecifier &Base : RD->bases()) {
2871
1.45k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2872
1.45k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
2873
1.45k
    for (const auto &bi : Layout.getVBaseOffsetsMap())
2874
459
      if (bi.second.hasVtorDisp())
2875
43
        HasVtordispSet.insert(bi.first);
2876
1.45k
  }
2877
779
  // We don't introduce any additional vtordisps if either:
2878
779
  // * Neither a user-declared constructor nor a user-declared destructor is present.
2879
779
  // * #pragma vtordisp(0) or the /vd0 flag are in use.
2880
779
  if ((!RD->hasUserDeclaredConstructor() && !RD->hasUserDeclaredDestructor()) ||
2881
408
      RD->getMSVtorDispMode() == MSVtorDispAttr::Never)
2882
376
    return;
2883
403
  // /vd1 or #pragma vtordisp(1): Try to guess based on whether we think it's
2884
403
  // possible for a partially constructed object with virtual base overrides to
2885
403
  // escape a non-trivial constructor.
2886
779
  assert(RD->getMSVtorDispMode() == MSVtorDispAttr::ForVBaseOverride);
2887
403
  // Compute a set of base classes which define methods we override.  A virtual
2888
403
  // base in this set will require a vtordisp.  A virtual base that transitively
2889
403
  // contains one of these bases as a non-virtual base will also require a
2890
403
  // vtordisp.
2891
403
  llvm::SmallPtrSet<const CXXMethodDecl *, 8> Work;
2892
403
  llvm::SmallPtrSet<const CXXRecordDecl *, 2> BasesWithOverriddenMethods;
2893
403
  // Seed the working set with our non-destructor, non-pure virtual methods.
2894
403
  for (const CXXMethodDecl *MD : RD->methods())
2895
1.64k
    if (MD->isVirtual() && !isa<CXXDestructorDecl>(MD) && !MD->isPure())
2896
176
      Work.insert(MD);
2897
705
  while (!Work.empty()) {
2898
302
    const CXXMethodDecl *MD = *Work.begin();
2899
302
    CXXMethodDecl::method_iterator i = MD->begin_overridden_methods(),
2900
302
                                   e = MD->end_overridden_methods();
2901
302
    // If a virtual method overrides nothing, it lives in its parent's vtable.
2902
302
    if (i == e)
2903
189
      BasesWithOverriddenMethods.insert(MD->getParent());
2904
302
    else
2905
113
      Work.insert(i, e);
2906
302
    // We've finished processing this element, remove it from the working set.
2907
302
    Work.erase(MD);
2908
302
  }
2909
403
  // For each of our virtual bases, check if it is in the set of overridden
2910
403
  // bases or if it transitively contains a non-virtual base that is.
2911
637
  for (const CXXBaseSpecifier &Base : RD->vbases()) {
2912
637
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2913
637
    if (!HasVtordispSet.count(BaseDecl) &&
2914
605
        RequiresVtordisp(BasesWithOverriddenMethods, BaseDecl))
2915
91
      HasVtordispSet.insert(BaseDecl);
2916
637
  }
2917
799
}
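A hedged /vd1 (ForVBaseOverride) example in which the set computed above becomes non-empty; the classes are mine, not from this report.

struct A { virtual void f(); };
struct B : virtual A {
  B();                 // user-declared constructor, so the early return is not taken
  void f() override;   // an override of a method that lives in the virtual base
};
// A is expected to land in HasVtordispSet, so layoutVirtualBases reserves a
// 4-byte vtordisp immediately before the A subobject inside B.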
2918
2919
/// getASTRecordLayout - Get or compute information about the layout of the
2920
/// specified record (struct/union/class), which indicates its size and field
2921
/// position information.
2922
const ASTRecordLayout &
2923
1.81M
ASTContext::getASTRecordLayout(const RecordDecl *D) const {
2924
1.81M
  // These asserts test different things.  A record has a definition
2925
1.81M
  // as soon as we begin to parse the definition.  That definition is
2926
1.81M
  // not a complete definition (which is what isDefinition() tests)
2927
1.81M
  // until we *finish* parsing the definition.
2928
1.81M
2929
1.81M
  if (D->hasExternalLexicalStorage() && !D->getDefinition())
2930
0
    getExternalSource()->CompleteType(const_cast<RecordDecl*>(D));
2931
1.81M
    
2932
1.81M
  D = D->getDefinition();
2933
1.81M
  assert(D && "Cannot get layout of forward declarations!");
2934
1.81M
  assert(!D->isInvalidDecl() && "Cannot get layout of invalid decl!");
2935
1.81M
  assert(D->isCompleteDefinition() && "Cannot layout type before complete!");
2936
1.81M
2937
1.81M
  // Look up this layout, if already laid out, return what we have.
2938
1.81M
  // Note that we can't save a reference to the entry because this function
2939
1.81M
  // is recursive.
2940
1.81M
  const ASTRecordLayout *Entry = ASTRecordLayouts[D];
2941
1.81M
  if (Entry) return *Entry;
2942
112k
2943
112k
  const ASTRecordLayout *NewEntry = nullptr;
2944
112k
2945
112k
  if (isMsLayout(*this)) {
2946
3.52k
    MicrosoftRecordLayoutBuilder Builder(*this);
2947
3.52k
    if (const auto *RD = dyn_cast<CXXRecordDecl>(D)) {
2948
3.32k
      Builder.cxxLayout(RD);
2949
3.32k
      NewEntry = new (*this) ASTRecordLayout(
2950
3.32k
          *this, Builder.Size, Builder.Alignment, Builder.RequiredAlignment,
2951
2.69k
          Builder.HasOwnVFPtr, Builder.HasOwnVFPtr || Builder.PrimaryBase,
2952
3.32k
          Builder.VBPtrOffset, Builder.DataSize, Builder.FieldOffsets,
2953
3.32k
          Builder.NonVirtualSize, Builder.Alignment, CharUnits::Zero(),
2954
3.32k
          Builder.PrimaryBase, false, Builder.SharedVBPtrBase,
2955
3.32k
          Builder.EndsWithZeroSizedObject, Builder.LeadsWithZeroSizedBase,
2956
3.32k
          Builder.Bases, Builder.VBases);
2957
3.52k
    } else {
2958
201
      Builder.layout(D);
2959
201
      NewEntry = new (*this) ASTRecordLayout(
2960
201
          *this, Builder.Size, Builder.Alignment, Builder.RequiredAlignment,
2961
201
          Builder.Size, Builder.FieldOffsets);
2962
201
    }
2963
112k
  } else {
2964
109k
    if (const auto *RD = dyn_cast<CXXRecordDecl>(D)) {
2965
40.3k
      EmptySubobjectMap EmptySubobjects(*this, RD);
2966
40.3k
      ItaniumRecordLayoutBuilder Builder(*this, &EmptySubobjects);
2967
40.3k
      Builder.Layout(RD);
2968
40.3k
2969
40.3k
      // In certain situations, we are allowed to lay out objects in the
2970
40.3k
      // tail-padding of base classes.  This is ABI-dependent.
2971
40.3k
      // FIXME: this should be stored in the record layout.
2972
40.3k
      bool skipTailPadding =
2973
40.3k
          mustSkipTailPadding(getTargetInfo().getCXXABI(), RD);
2974
40.3k
2975
40.3k
      // FIXME: This should be done in FinalizeLayout.
2976
40.3k
      CharUnits DataSize =
2977
40.3k
          skipTailPadding ? Builder.getSize() : Builder.getDataSize();
2978
40.3k
      CharUnits NonVirtualSize =
2979
40.3k
          skipTailPadding ? DataSize : Builder.NonVirtualSize;
2980
40.3k
      NewEntry = new (*this) ASTRecordLayout(
2981
40.3k
          *this, Builder.getSize(), Builder.Alignment,
2982
40.3k
          /*RequiredAlignment : used by MS-ABI*/
2983
40.3k
          Builder.Alignment, Builder.HasOwnVFPtr, RD->isDynamicClass(),
2984
40.3k
          CharUnits::fromQuantity(-1), DataSize, Builder.FieldOffsets,
2985
40.3k
          NonVirtualSize, Builder.NonVirtualAlignment,
2986
40.3k
          EmptySubobjects.SizeOfLargestEmptySubobject, Builder.PrimaryBase,
2987
40.3k
          Builder.PrimaryBaseIsVirtual, nullptr, false, false, Builder.Bases,
2988
40.3k
          Builder.VBases);
2989
109k
    } else {
2990
68.7k
      ItaniumRecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/nullptr);
2991
68.7k
      Builder.Layout(D);
2992
68.7k
2993
68.7k
      NewEntry = new (*this) ASTRecordLayout(
2994
68.7k
          *this, Builder.getSize(), Builder.Alignment,
2995
68.7k
          /*RequiredAlignment : used by MS-ABI*/
2996
68.7k
          Builder.Alignment, Builder.getSize(), Builder.FieldOffsets);
2997
68.7k
    }
2998
109k
  }
2999
112k
3000
112k
  ASTRecordLayouts[D] = NewEntry;
3001
112k
3002
112k
  if (getLangOpts().DumpRecordLayouts) {
3003
995
    llvm::outs() << "\n*** Dumping AST Record Layout\n";
3004
995
    DumpRecordLayout(D, llvm::outs(), getLangOpts().DumpRecordLayoutsSimple);
3005
995
  }
3006
1.81M
3007
1.81M
  return *NewEntry;
3008
1.81M
}
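A hypothetical caller of the cached entry point above; Ctx and RD stand for an ASTContext and a complete RecordDecl and are assumptions of this sketch, not names from the file.

const ASTRecordLayout &RL = Ctx.getASTRecordLayout(RD);
CharUnits Size  = RL.getSize();                    // total object size
CharUnits Align = RL.getAlignment();
uint64_t FirstFieldBits = RL.getFieldOffset(0);    // field offsets are kept in bits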
3009
3010
38.2k
const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) {
3011
38.2k
  if (!getTargetInfo().getCXXABI().hasKeyFunctions())
3012
2.87k
    return nullptr;
3013
35.4k
3014
38.2k
  assert(RD->getDefinition() && "Cannot get key function for forward decl!");
3015
35.4k
  RD = cast<CXXRecordDecl>(RD->getDefinition());
3016
35.4k
3017
35.4k
  // Beware:
3018
35.4k
  //  1) computing the key function might trigger deserialization, which might
3019
35.4k
  //     invalidate iterators into KeyFunctions
3020
35.4k
  //  2) 'get' on the LazyDeclPtr might also trigger deserialization and
3021
35.4k
  //     invalidate the LazyDeclPtr within the map itself
3022
35.4k
  LazyDeclPtr Entry = KeyFunctions[RD];
3023
35.4k
  const Decl *Result =
3024
35.4k
      Entry ? Entry.get(getExternalSource()) : computeKeyFunction(*this, RD);
3025
35.4k
3026
35.4k
  // Store it back if it changed.
3027
35.4k
  if (Entry.isOffset() || Entry.isValid() != bool(Result))
3028
5.06k
    KeyFunctions[RD] = const_cast<Decl*>(Result);
3029
38.2k
3030
38.2k
  return cast_or_null<CXXMethodDecl>(Result);
3031
38.2k
}
3032
3033
14
void ASTContext::setNonKeyFunction(const CXXMethodDecl *Method) {
3034
14
  assert(Method == Method->getFirstDecl() &&
3035
14
         "not working with method declaration from class definition");
3036
14
3037
14
  // Look up the cache entry.  Since we're working with the first
3038
14
  // declaration, its parent must be the class definition, which is
3039
14
  // the correct key for the KeyFunctions hash.
3040
14
  const auto &Map = KeyFunctions;
3041
14
  auto I = Map.find(Method->getParent());
3042
14
3043
14
  // If it's not cached, there's nothing to do.
3044
14
  if (I == Map.end()) return;
3045
14
3046
14
  // If it is cached, check whether it's the target method, and if so,
3047
14
  // remove it from the cache. Note, the call to 'get' might invalidate
3048
14
  // the iterator and the LazyDeclPtr object within the map.
3049
14
  LazyDeclPtr Ptr = I->second;
3050
14
  if (Ptr.get(getExternalSource()) == Method) {
3051
14
    // FIXME: remember that we did this for module / chained PCH state?
3052
14
    KeyFunctions.erase(Method->getParent());
3053
14
  }
3054
14
}
3055
3056
403
static uint64_t getFieldOffset(const ASTContext &C, const FieldDecl *FD) {
3057
403
  const ASTRecordLayout &Layout = C.getASTRecordLayout(FD->getParent());
3058
403
  return Layout.getFieldOffset(FD->getFieldIndex());
3059
403
}
3060
3061
371
uint64_t ASTContext::getFieldOffset(const ValueDecl *VD) const {
3062
371
  uint64_t OffsetInBits;
3063
371
  if (const FieldDecl *FD = dyn_cast<FieldDecl>(VD)) {
3064
353
    OffsetInBits = ::getFieldOffset(*this, FD);
3065
371
  } else {
3066
18
    const IndirectFieldDecl *IFD = cast<IndirectFieldDecl>(VD);
3067
18
3068
18
    OffsetInBits = 0;
3069
18
    for (const NamedDecl *ND : IFD->chain())
3070
50
      OffsetInBits += ::getFieldOffset(*this, cast<FieldDecl>(ND));
3071
18
  }
3072
371
3073
371
  return OffsetInBits;
3074
371
}
3075
3076
uint64_t ASTContext::lookupFieldBitOffset(const ObjCInterfaceDecl *OID,
3077
                                          const ObjCImplementationDecl *ID,
3078
1.69k
                                          const ObjCIvarDecl *Ivar) const {
3079
1.69k
  const ObjCInterfaceDecl *Container = Ivar->getContainingInterface();
3080
1.69k
3081
1.69k
  // FIXME: We should eliminate the need to have ObjCImplementationDecl passed
3082
1.69k
  // in here; it should never be necessary because that should be the lexical
3083
1.69k
  // decl context for the ivar.
3084
1.69k
3085
1.69k
  // If we know we have an implementation (and the ivar is in it) then
3086
1.69k
  // look up in the implementation layout.
3087
1.69k
  const ASTRecordLayout *RL;
3088
1.69k
  if (ID && declaresSameEntity(ID->getClassInterface(), Container))
3089
1.07k
    RL = &getASTObjCImplementationLayout(ID);
3090
1.69k
  else
3091
621
    RL = &getASTObjCInterfaceLayout(Container);
3092
1.69k
3093
1.69k
  // Compute field index.
3094
1.69k
  //
3095
1.69k
  // FIXME: The index here is closely tied to how ASTContext::getObjCLayout is
3096
1.69k
  // implemented. This should be fixed to get the information from the layout
3097
1.69k
  // directly.
3098
1.69k
  unsigned Index = 0;
3099
1.69k
3100
1.69k
  for (const ObjCIvarDecl *IVD = Container->all_declared_ivar_begin();
3101
6.13k
       IVD; IVD = IVD->getNextIvar()) {
3102
6.13k
    if (Ivar == IVD)
3103
1.69k
      break;
3104
4.43k
    ++Index;
3105
4.43k
  }
3106
1.69k
  assert(Index < RL->getFieldCount() && "Ivar is not inside record layout!");
3107
1.69k
3108
1.69k
  return RL->getFieldOffset(Index);
3109
1.69k
}
3110
3111
/// getObjCLayout - Get or compute information about the layout of the
3112
/// given interface.
3113
///
3114
/// \param Impl - If given, also include the layout of the interface's
3115
/// implementation. This may differ by including synthesized ivars.
3116
const ASTRecordLayout &
3117
ASTContext::getObjCLayout(const ObjCInterfaceDecl *D,
3118
4.00k
                          const ObjCImplementationDecl *Impl) const {
3119
4.00k
  // Retrieve the definition
3120
4.00k
  if (D->hasExternalLexicalStorage() && !D->getDefinition())
3121
0
    getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl*>(D));
3122
4.00k
  D = D->getDefinition();
3123
4.00k
  assert(D && D->isThisDeclarationADefinition() && "Invalid interface decl!");
3124
4.00k
3125
4.00k
  // Look up this layout, if already laid out, return what we have.
3126
4.00k
  const ObjCContainerDecl *Key =
3127
4.00k
    Impl ? (const ObjCContainerDecl*) Impl : (const ObjCContainerDecl*) D;
3128
4.00k
  if (const ASTRecordLayout *Entry = ObjCLayouts[Key])
3129
2.02k
    return *Entry;
3130
1.98k
3131
1.98k
  // Add in synthesized ivar count if laying out an implementation.
3132
1.98k
  if (Impl) {
3133
1.22k
    unsigned SynthCount = CountNonClassIvars(D);
3134
1.22k
    // If there aren't any synthesized ivars then reuse the interface
3135
1.22k
    // entry. Note we can't cache this because we simply free all
3136
1.22k
    // entries later; however we shouldn't look up implementations
3137
1.22k
    // frequently.
3138
1.22k
    if (SynthCount == 0)
3139
1.08k
      return getObjCLayout(D, nullptr);
3140
902
  }
3141
902
3142
902
  ItaniumRecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/nullptr);
3143
902
  Builder.Layout(D);
3144
902
3145
902
  const ASTRecordLayout *NewEntry =
3146
902
    new (*this) ASTRecordLayout(*this, Builder.getSize(),
3147
902
                                Builder.Alignment,
3148
902
                                /*RequiredAlignment : used by MS-ABI*/
3149
902
                                Builder.Alignment,
3150
902
                                Builder.getDataSize(),
3151
902
                                Builder.FieldOffsets);
3152
902
3153
902
  ObjCLayouts[Key] = NewEntry;
3154
902
3155
902
  return *NewEntry;
3156
902
}
3157
3158
static void PrintOffset(raw_ostream &OS,
3159
4.56k
                        CharUnits Offset, unsigned IndentLevel) {
3160
4.56k
  OS << llvm::format("%10" PRId64 " | ", (int64_t)Offset.getQuantity());
3161
4.56k
  OS.indent(IndentLevel * 2);
3162
4.56k
}
3163
3164
static void PrintBitFieldOffset(raw_ostream &OS, CharUnits Offset,
3165
                                unsigned Begin, unsigned Width,
3166
286
                                unsigned IndentLevel) {
3167
286
  llvm::SmallString<10> Buffer;
3168
286
  {
3169
286
    llvm::raw_svector_ostream BufferOS(Buffer);
3170
286
    BufferOS << Offset.getQuantity() << ':';
3171
286
    if (Width == 0) {
3172
45
      BufferOS << '-';
3173
286
    } else {
3174
241
      BufferOS << Begin << '-' << (Begin + Width - 1);
3175
241
    }
3176
286
  }
3177
286
  
3178
286
  OS << llvm::right_justify(Buffer, 10) << " | ";
3179
286
  OS.indent(IndentLevel * 2);
3180
286
}
3181
3182
1.80k
static void PrintIndentNoOffset(raw_ostream &OS, unsigned IndentLevel) {
3183
1.80k
  OS << "           | ";
3184
1.80k
  OS.indent(IndentLevel * 2);
3185
1.80k
}
3186
3187
static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD,
3188
                             const ASTContext &C,
3189
                             CharUnits Offset,
3190
                             unsigned IndentLevel,
3191
                             const char* Description,
3192
                             bool PrintSizeInfo,
3193
2.36k
                             bool IncludeVirtualBases) {
3194
2.36k
  const ASTRecordLayout &Layout = C.getASTRecordLayout(RD);
3195
2.36k
  auto CXXRD = dyn_cast<CXXRecordDecl>(RD);
3196
2.36k
3197
2.36k
  PrintOffset(OS, Offset, IndentLevel);
3198
2.36k
  OS << C.getTypeDeclType(const_cast<RecordDecl*>(RD)).getAsString();
3199
2.36k
  if (Description)
3200
1.43k
    OS << ' ' << Description;
3201
2.36k
  if (CXXRD && CXXRD->isEmpty())
3202
739
    OS << " (empty)";
3203
2.36k
  OS << '\n';
3204
2.36k
3205
2.36k
  IndentLevel++;
3206
2.36k
3207
2.36k
  // Dump bases.
3208
2.36k
  if (CXXRD) {
3209
2.29k
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
3210
2.29k
    bool HasOwnVFPtr = Layout.hasOwnVFPtr();
3211
2.29k
    bool HasOwnVBPtr = Layout.hasOwnVBPtr();
3212
2.29k
3213
2.29k
    // Vtable pointer.
3214
2.29k
    if (CXXRD->isDynamicClass() && !PrimaryBase && !isMsLayout(C)) {
3215
0
      PrintOffset(OS, Offset, IndentLevel);
3216
0
      OS << '(' << *RD << " vtable pointer)\n";
3217
2.29k
    } else if (HasOwnVFPtr) {
3218
345
      PrintOffset(OS, Offset, IndentLevel);
3219
345
      // vfptr (for Microsoft C++ ABI)
3220
345
      OS << '(' << *RD << " vftable pointer)\n";
3221
345
    }
3222
2.29k
3223
2.29k
    // Collect nvbases.
3224
2.29k
    SmallVector<const CXXRecordDecl *, 4> Bases;
3225
1.33k
    for (const CXXBaseSpecifier &Base : CXXRD->bases()) {
3226
1.33k
      assert(!Base.getType()->isDependentType() &&
3227
1.33k
             "Cannot layout class with dependent bases.");
3228
1.33k
      if (!Base.isVirtual())
3229
711
        Bases.push_back(Base.getType()->getAsCXXRecordDecl());
3230
1.33k
    }
3231
2.29k
3232
2.29k
    // Sort nvbases by offset.
3233
2.29k
    std::stable_sort(Bases.begin(), Bases.end(),
3234
302
                     [&](const CXXRecordDecl *L, const CXXRecordDecl *R) {
3235
302
      return Layout.getBaseClassOffset(L) < Layout.getBaseClassOffset(R);
3236
302
    });
3237
2.29k
3238
2.29k
    // Dump (non-virtual) bases
3239
711
    for (const CXXRecordDecl *Base : Bases) {
3240
711
      CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base);
3241
711
      DumpRecordLayout(OS, Base, C, BaseOffset, IndentLevel,
3242
711
                       Base == PrimaryBase ? "(primary base)" : "(base)",
3243
711
                       /*PrintSizeInfo=*/false,
3244
711
                       /*IncludeVirtualBases=*/false);
3245
711
    }
3246
2.29k
3247
2.29k
    // vbptr (for Microsoft C++ ABI)
3248
2.29k
    if (HasOwnVBPtr) {
3249
406
      PrintOffset(OS, Offset + Layout.getVBPtrOffset(), IndentLevel);
3250
406
      OS << '(' << *RD << " vbtable pointer)\n";
3251
406
    }
3252
2.29k
  }
3253
2.36k
3254
2.36k
  // Dump fields.
3255
2.36k
  uint64_t FieldNo = 0;
3256
2.36k
  for (RecordDecl::field_iterator I = RD->field_begin(),
3257
4.14k
         E = RD->field_end(); I != E; ++I, ++FieldNo) {
3258
1.77k
    const FieldDecl &Field = **I;
3259
1.77k
    uint64_t LocalFieldOffsetInBits = Layout.getFieldOffset(FieldNo);
3260
1.77k
    CharUnits FieldOffset =
3261
1.77k
      Offset + C.toCharUnitsFromBits(LocalFieldOffsetInBits);
3262
1.77k
3263
1.77k
    // Recursively dump fields of record type.
3264
1.77k
    if (auto RT = Field.getType()->getAs<RecordType>()) {
3265
112
      DumpRecordLayout(OS, RT->getDecl(), C, FieldOffset, IndentLevel,
3266
112
                       Field.getName().data(),
3267
112
                       /*PrintSizeInfo=*/false,
3268
112
                       /*IncludeVirtualBases=*/true);
3269
112
      continue;
3270
112
    }
3271
1.66k
3272
1.66k
    if (Field.isBitField()) {
3273
286
      uint64_t LocalFieldByteOffsetInBits = C.toBits(FieldOffset - Offset);
3274
286
      unsigned Begin = LocalFieldOffsetInBits - LocalFieldByteOffsetInBits;
3275
286
      unsigned Width = Field.getBitWidthValue(C);
3276
286
      PrintBitFieldOffset(OS, FieldOffset, Begin, Width, IndentLevel);
3277
1.66k
    } else {
3278
1.37k
      PrintOffset(OS, FieldOffset, IndentLevel);
3279
1.37k
    }
3280
1.77k
    OS << Field.getType().getAsString() << ' ' << Field << '\n';
3281
1.77k
  }
3282
2.36k
3283
2.36k
  // Dump virtual bases.
3284
2.36k
  if (CXXRD && IncludeVirtualBases) {
3285
979
    const ASTRecordLayout::VBaseOffsetsMapTy &VtorDisps = 
3286
979
      Layout.getVBaseOffsetsMap();
3287
979
3288
607
    for (const CXXBaseSpecifier &Base : CXXRD->vbases()) {
3289
607
      assert(Base.isVirtual() && "Found non-virtual class!");
3290
607
      const CXXRecordDecl *VBase = Base.getType()->getAsCXXRecordDecl();
3291
607
3292
607
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBase);
3293
607
3294
607
      if (VtorDisps.find(VBase)->second.hasVtorDisp()) {
3295
74
        PrintOffset(OS, VBaseOffset - CharUnits::fromQuantity(4), IndentLevel);
3296
74
        OS << "(vtordisp for vbase " << *VBase << ")\n";
3297
74
      }
3298
607
3299
607
      DumpRecordLayout(OS, VBase, C, VBaseOffset, IndentLevel,
3300
607
                       VBase == Layout.getPrimaryBase() ?
3301
607
                         "(primary virtual base)" : "(virtual base)",
3302
607
                       /*PrintSizeInfo=*/false,
3303
607
                       /*IncludeVirtualBases=*/false);
3304
607
    }
3305
979
  }
3306
2.36k
3307
2.36k
  if (!PrintSizeInfo) return;
3308
934
3309
934
  PrintIndentNoOffset(OS, IndentLevel - 1);
3310
934
  OS << "[sizeof=" << Layout.getSize().getQuantity();
3311
934
  if (CXXRD && !isMsLayout(C))
3312
2
    OS << ", dsize=" << Layout.getDataSize().getQuantity();
3313
934
  OS << ", align=" << Layout.getAlignment().getQuantity();
3314
934
3315
934
  if (CXXRD) {
3316
867
    OS << ",\n";
3317
867
    PrintIndentNoOffset(OS, IndentLevel - 1);
3318
867
    OS << " nvsize=" << Layout.getNonVirtualSize().getQuantity();
3319
867
    OS << ", nvalign=" << Layout.getNonVirtualAlignment().getQuantity();
3320
867
  }
3321
2.36k
  OS << "]\n";
3322
2.36k
}
3323
3324
void ASTContext::DumpRecordLayout(const RecordDecl *RD,
3325
                                  raw_ostream &OS,
3326
995
                                  bool Simple) const {
3327
995
  if (!Simple) {
3328
934
    ::DumpRecordLayout(OS, RD, *this, CharUnits(), 0, nullptr,
3329
934
                       /*PrintSizeInfo*/true,
3330
934
                       /*IncludeVirtualBases=*/true);
3331
934
    return;
3332
934
  }
3333
61
3334
61
  // The "simple" format is designed to be parsed by the
3335
61
  // layout-override testing code.  There shouldn't be any external
3336
61
  // uses of this format --- when LLDB overrides a layout, it sets up
3337
61
  // the data structures directly --- so feel free to adjust this as
3338
61
  // you like as long as you also update the rudimentary parser for it
3339
61
  // in libFrontend.
3340
61
3341
61
  const ASTRecordLayout &Info = getASTRecordLayout(RD);
3342
61
  OS << "Type: " << getTypeDeclType(RD).getAsString() << "\n";
3343
61
  OS << "\nLayout: ";
3344
61
  OS << "<ASTRecordLayout\n";
3345
61
  OS << "  Size:" << toBits(Info.getSize()) << "\n";
3346
61
  if (!isMsLayout(*this))
3347
61
    OS << "  DataSize:" << toBits(Info.getDataSize()) << "\n";
3348
61
  OS << "  Alignment:" << toBits(Info.getAlignment()) << "\n";
3349
61
  OS << "  FieldOffsets: [";
3350
213
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i) {
3351
152
    if (i) OS << ", ";
3352
152
    OS << Info.getFieldOffset(i);
3353
152
  }
3354
995
  OS << "]>\n";
3355
995
}
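Both dump paths above are driven by getLangOpts().DumpRecordLayouts / DumpRecordLayoutsSimple; assuming a stock clang driver, they can be reached with something like:

  clang -Xclang -fdump-record-layouts        -fsyntax-only foo.cpp
  clang -Xclang -fdump-record-layouts-simple -fsyntax-only foo.cpp

where the first prints the full tree produced by ::DumpRecordLayout and the second prints the parser-friendly form implemented at the end of this file.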