Coverage Report

Created: 2020-09-19 12:23

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/AST/RecordLayoutBuilder.cpp
Line | Count | Source
1
//=== RecordLayoutBuilder.cpp - Helper class for building record layouts ---==//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
9
#include "clang/AST/ASTContext.h"
10
#include "clang/AST/ASTDiagnostic.h"
11
#include "clang/AST/Attr.h"
12
#include "clang/AST/CXXInheritance.h"
13
#include "clang/AST/Decl.h"
14
#include "clang/AST/DeclCXX.h"
15
#include "clang/AST/DeclObjC.h"
16
#include "clang/AST/Expr.h"
17
#include "clang/AST/VTableBuilder.h"
18
#include "clang/AST/RecordLayout.h"
19
#include "clang/Basic/TargetInfo.h"
20
#include "llvm/ADT/SmallSet.h"
21
#include "llvm/Support/Format.h"
22
#include "llvm/Support/MathExtras.h"
23
24
using namespace clang;
25
26
namespace {
27
28
/// BaseSubobjectInfo - Represents a single base subobject in a complete class.
29
/// For a class hierarchy like
30
///
31
/// class A { };
32
/// class B : A { };
33
/// class C : A, B { };
34
///
35
/// The BaseSubobjectInfo graph for C will have three BaseSubobjectInfo
36
/// instances, one for B and two for A.
37
///
38
/// If a base is virtual, it will only have one BaseSubobjectInfo allocated.
39
struct BaseSubobjectInfo {
40
  /// Class - The class for this base info.
41
  const CXXRecordDecl *Class;
42
43
  /// IsVirtual - Whether the BaseInfo represents a virtual base or not.
44
  bool IsVirtual;
45
46
  /// Bases - Information about the base subobjects.
47
  SmallVector<BaseSubobjectInfo*, 4> Bases;
48
49
  /// PrimaryVirtualBaseInfo - Holds the base info for the primary virtual base
50
  /// of this base info (if one exists).
51
  BaseSubobjectInfo *PrimaryVirtualBaseInfo;
52
53
  // FIXME: Document.
54
  const BaseSubobjectInfo *Derived;
55
};
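Illustrative aside (standalone sketch, not part of the file above): the A/B/C hierarchy from the comment can be observed directly. On a typical Itanium C++ ABI target the two A subobjects of C must not share an address, so the empty base B is displaced; the exact sizes printed below are an assumption about such targets, not a guarantee of the standard.

// basesubobject_sketch.cpp -- standalone, assumes an Itanium C++ ABI target.
#include <cstdio>

class A { };
class B : A { };
class C : A, B { };   // C has two A subobjects: one direct, one inside B.

int main() {
  // Commonly prints "1 1 2": B cannot sit at offset 0 of C because its A
  // subobject would collide with C's direct A subobject at offset 0.
  std::printf("%zu %zu %zu\n", sizeof(A), sizeof(B), sizeof(C));
  return 0;
}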
56
57
/// Externally provided layout. Typically used when the AST source, such
58
/// as DWARF, lacks all the information that was available at compile time, such
59
/// as alignment attributes on fields and pragmas in effect.
60
struct ExternalLayout {
61
315k
  ExternalLayout() : Size(0), Align(0) {}
62
63
  /// Overall record size in bits.
64
  uint64_t Size;
65
66
  /// Overall record alignment in bits.
67
  uint64_t Align;
68
69
  /// Record field offsets in bits.
70
  llvm::DenseMap<const FieldDecl *, uint64_t> FieldOffsets;
71
72
  /// Direct, non-virtual base offsets.
73
  llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsets;
74
75
  /// Virtual base offsets.
76
  llvm::DenseMap<const CXXRecordDecl *, CharUnits> VirtualBaseOffsets;
77
78
  /// Get the offset of the given field. The external source must provide
79
  /// entries for all fields in the record.
80
17.9k
  uint64_t getExternalFieldOffset(const FieldDecl *FD) {
81
17.9k
    assert(FieldOffsets.count(FD) &&
82
17.9k
           "Field does not have an external offset");
83
17.9k
    return FieldOffsets[FD];
84
17.9k
  }
85
86
3.48k
  bool getExternalNVBaseOffset(const CXXRecordDecl *RD, CharUnits &BaseOffset) {
87
3.48k
    auto Known = BaseOffsets.find(RD);
88
3.48k
    if (Known == BaseOffsets.end())
89
10
      return false;
90
3.47k
    BaseOffset = Known->second;
91
3.47k
    return true;
92
3.47k
  }
93
94
23
  bool getExternalVBaseOffset(const CXXRecordDecl *RD, CharUnits &BaseOffset) {
95
23
    auto Known = VirtualBaseOffsets.find(RD);
96
23
    if (Known == VirtualBaseOffsets.end())
97
11
      return false;
98
12
    BaseOffset = Known->second;
99
12
    return true;
100
12
  }
101
};
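Illustrative aside (hypothetical simplified types, not clang's): the getExternal*Offset helpers above follow a "fill the out-parameter, return false if the external source has no entry" convention. A self-contained sketch of the same pattern, with std::map standing in for llvm::DenseMap and plain integers for CharUnits:

// external_layout_sketch.cpp -- simplified analogue, not the clang API.
#include <cstdint>
#include <map>
#include <string>

struct SimpleExternalLayout {
  uint64_t SizeInBits = 0;
  uint64_t AlignInBits = 0;
  std::map<std::string, int64_t> BaseOffsetsInChars;

  // Mirrors getExternalNVBaseOffset: false means "no externally provided offset".
  bool getBaseOffset(const std::string &Base, int64_t &Offset) const {
    auto Known = BaseOffsetsInChars.find(Base);
    if (Known == BaseOffsetsInChars.end())
      return false;
    Offset = Known->second;
    return true;
  }
};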
102
103
/// EmptySubobjectMap - Keeps track of which empty subobjects exist at different
104
/// offsets while laying out a C++ class.
105
class EmptySubobjectMap {
106
  const ASTContext &Context;
107
  uint64_t CharWidth;
108
109
  /// Class - The class whose empty entries we're keeping track of.
110
  const CXXRecordDecl *Class;
111
112
  /// EmptyClassOffsets - A map from offsets to empty record decls.
113
  typedef llvm::TinyPtrVector<const CXXRecordDecl *> ClassVectorTy;
114
  typedef llvm::DenseMap<CharUnits, ClassVectorTy> EmptyClassOffsetsMapTy;
115
  EmptyClassOffsetsMapTy EmptyClassOffsets;
116
117
  /// MaxEmptyClassOffset - The highest offset known to contain an empty
118
  /// base subobject.
119
  CharUnits MaxEmptyClassOffset;
120
121
  /// ComputeEmptySubobjectSizes - Compute the size of the largest base or
122
  /// member subobject that is empty.
123
  void ComputeEmptySubobjectSizes();
124
125
  void AddSubobjectAtOffset(const CXXRecordDecl *RD, CharUnits Offset);
126
127
  void UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
128
                                 CharUnits Offset, bool PlacingEmptyBase);
129
130
  void UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
131
                                  const CXXRecordDecl *Class, CharUnits Offset,
132
                                  bool PlacingOverlappingField);
133
  void UpdateEmptyFieldSubobjects(const FieldDecl *FD, CharUnits Offset,
134
                                  bool PlacingOverlappingField);
135
136
  /// AnyEmptySubobjectsBeyondOffset - Returns whether there are any empty
137
  /// subobjects beyond the given offset.
138
991k
  bool AnyEmptySubobjectsBeyondOffset(CharUnits Offset) const {
139
991k
    return Offset <= MaxEmptyClassOffset;
140
991k
  }
141
142
  CharUnits
143
138k
  getFieldOffset(const ASTRecordLayout &Layout, unsigned FieldNo) const {
144
138k
    uint64_t FieldOffset = Layout.getFieldOffset(FieldNo);
145
138k
    assert(FieldOffset % CharWidth == 0 &&
146
138k
           "Field offset not at char boundary!");
147
138k
148
138k
    return Context.toCharUnitsFromBits(FieldOffset);
149
138k
  }
150
151
protected:
152
  bool CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
153
                                 CharUnits Offset) const;
154
155
  bool CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
156
                                     CharUnits Offset);
157
158
  bool CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
159
                                      const CXXRecordDecl *Class,
160
                                      CharUnits Offset) const;
161
  bool CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
162
                                      CharUnits Offset) const;
163
164
public:
165
  /// This holds the size of the largest empty subobject (either a base
166
  /// or a member). Will be zero if the record being built doesn't contain
167
  /// any empty classes.
168
  CharUnits SizeOfLargestEmptySubobject;
169
170
  EmptySubobjectMap(const ASTContext &Context, const CXXRecordDecl *Class)
171
186k
  : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) {
172
186k
      ComputeEmptySubobjectSizes();
173
186k
  }
174
175
  /// CanPlaceBaseAtOffset - Return whether the given base class can be placed
176
  /// at the given offset.
177
  /// Returns false if placing the record will result in two components
178
  /// (direct or indirect) of the same type having the same offset.
179
  bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
180
                            CharUnits Offset);
181
182
  /// CanPlaceFieldAtOffset - Return whether a field can be placed at the given
183
  /// offset.
184
  bool CanPlaceFieldAtOffset(const FieldDecl *FD, CharUnits Offset);
185
};
186
187
186k
void EmptySubobjectMap::ComputeEmptySubobjectSizes() {
188
  // Check the bases.
189
42.9k
  for (const CXXBaseSpecifier &Base : Class->bases()) {
190
42.9k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
191
42.9k
192
42.9k
    CharUnits EmptySize;
193
42.9k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
194
42.9k
    if (BaseDecl->isEmpty()) {
195
      // If the class decl is empty, get its size.
196
24.6k
      EmptySize = Layout.getSize();
197
18.3k
    } else {
198
      // Otherwise, we get the largest empty subobject for the decl.
199
18.3k
      EmptySize = Layout.getSizeOfLargestEmptySubobject();
200
18.3k
    }
201
42.9k
202
42.9k
    if (EmptySize > SizeOfLargestEmptySubobject)
203
26.9k
      SizeOfLargestEmptySubobject = EmptySize;
204
42.9k
  }
205
186k
206
  // Check the fields.
207
300k
  for (const FieldDecl *FD : Class->fields()) {
208
300k
    const RecordType *RT =
209
300k
        Context.getBaseElementType(FD->getType())->getAs<RecordType>();
210
300k
211
    // We only care about record types.
212
300k
    if (!RT)
213
246k
      continue;
214
54.6k
215
54.6k
    CharUnits EmptySize;
216
54.6k
    const CXXRecordDecl *MemberDecl = RT->getAsCXXRecordDecl();
217
54.6k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(MemberDecl);
218
54.6k
    if (MemberDecl->isEmpty()) {
219
      // If the class decl is empty, get its size.
220
731
      EmptySize = Layout.getSize();
221
53.9k
    } else {
222
      // Otherwise, we get the largest empty subobject for the decl.
223
53.9k
      EmptySize = Layout.getSizeOfLargestEmptySubobject();
224
53.9k
    }
225
54.6k
226
54.6k
    if (EmptySize > SizeOfLargestEmptySubobject)
227
3.84k
      SizeOfLargestEmptySubobject = EmptySize;
228
54.6k
  }
229
186k
}
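Illustrative aside: ComputeEmptySubobjectSizes reduces, over all bases and record-typed fields, the maximum of either the empty subobject's own size or its precomputed largest empty subobject. A toy version of that max-reduction over an invented layout type (names are assumptions for the sketch, not clang types):

// largest_empty_sketch.cpp -- toy analogue of the reduction, invented types.
#include <algorithm>
#include <cstdint>
#include <vector>

struct ToyLayout {
  bool IsEmpty = false;
  uint64_t Size = 0;                         // size in chars
  uint64_t SizeOfLargestEmptySubobject = 0;  // precomputed, as in ASTRecordLayout
};

uint64_t computeLargestEmptySubobject(const std::vector<ToyLayout> &Subobjects) {
  uint64_t Largest = 0;
  for (const ToyLayout &L : Subobjects) {
    // An empty subobject contributes its own size; a non-empty one contributes
    // the largest empty subobject nested inside it.
    uint64_t EmptySize = L.IsEmpty ? L.Size : L.SizeOfLargestEmptySubobject;
    Largest = std::max(Largest, EmptySize);
  }
  return Largest;
}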
230
231
bool
232
EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
233
609k
                                             CharUnits Offset) const {
234
  // We only need to check empty bases.
235
609k
  if (!RD->isEmpty())
236
570k
    return true;
237
39.0k
238
39.0k
  EmptyClassOffsetsMapTy::const_iterator I = EmptyClassOffsets.find(Offset);
239
39.0k
  if (I == EmptyClassOffsets.end())
240
37.2k
    return true;
241
1.81k
242
1.81k
  const ClassVectorTy &Classes = I->second;
243
1.81k
  if (llvm::find(Classes, RD) == Classes.end())
244
1.66k
    return true;
245
152
246
  // There is already an empty class of the same type at this offset.
247
152
  return false;
248
152
}
249
250
void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD,
251
588k
                                             CharUnits Offset) {
252
  // We only care about empty bases.
253
588k
  if (!RD->isEmpty())
254
549k
    return;
255
39.0k
256
  // If we have empty structures inside a union, we can assign both
257
  // the same offset. Just avoid pushing them twice in the list.
258
39.0k
  ClassVectorTy &Classes = EmptyClassOffsets[Offset];
259
39.0k
  if (llvm::is_contained(Classes, RD))
260
1
    return;
261
39.0k
262
39.0k
  Classes.push_back(RD);
263
39.0k
264
  // Update the empty class offset.
265
39.0k
  if (Offset > MaxEmptyClassOffset)
266
180
    MaxEmptyClassOffset = Offset;
267
39.0k
}
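Illustrative aside: the two functions above implement an offset-to-{empty classes seen there} map: a placement is rejected only if the same empty class already occupies that offset, and successful placements are recorded along with the running maximum offset. A self-contained sketch of that check-then-record pattern, with standard containers standing in for the llvm ones:

// empty_offset_map_sketch.cpp -- simplified stand-in, not clang's containers.
#include <algorithm>
#include <cstdint>
#include <map>
#include <set>
#include <string>

class ToyEmptySubobjectMap {
  std::map<int64_t, std::set<std::string>> EmptyClassOffsets;
  int64_t MaxEmptyClassOffset = 0;

public:
  bool canPlace(const std::string &EmptyClass, int64_t Offset) const {
    auto I = EmptyClassOffsets.find(Offset);
    // Fails only if the same empty class already sits at this offset.
    return I == EmptyClassOffsets.end() || !I->second.count(EmptyClass);
  }

  void add(const std::string &EmptyClass, int64_t Offset) {
    EmptyClassOffsets[Offset].insert(EmptyClass);
    MaxEmptyClassOffset = std::max(MaxEmptyClassOffset, Offset);
  }
};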
268
269
bool
270
EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
271
561k
                                                 CharUnits Offset) {
272
  // We don't have to keep looking past the maximum offset that's known to
273
  // contain an empty class.
274
561k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
275
583
    return true;
276
561k
277
561k
  if (!CanPlaceSubobjectAtOffset(Info->Class, Offset))
278
102
    return false;
279
561k
280
  // Traverse all non-virtual bases.
281
561k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
282
530k
  for (const BaseSubobjectInfo *Base : Info->Bases) {
283
530k
    if (Base->IsVirtual)
284
138
      continue;
285
530k
286
530k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
287
530k
288
530k
    if (!CanPlaceBaseSubobjectAtOffset(Base, BaseOffset))
289
77
      return false;
290
530k
  }
291
561k
292
561k
  if (Info->PrimaryVirtualBaseInfo) {
293
24
    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
294
24
295
24
    if (Info == PrimaryVirtualBaseInfo->Derived) {
296
24
      if (!CanPlaceBaseSubobjectAtOffset(PrimaryVirtualBaseInfo, Offset))
297
5
        return false;
298
561k
    }
299
24
  }
300
561k
301
  // Traverse all member variables.
302
561k
  unsigned FieldNo = 0;
303
561k
  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
304
569k
       E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
305
8.09k
    if (I->isBitField())
306
4
      continue;
307
8.09k
308
8.09k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
309
8.09k
    if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
310
2
      return false;
311
8.09k
  }
312
561k
313
561k
  return true;
314
561k
}
315
316
void EmptySubobjectMap::UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
317
                                                  CharUnits Offset,
318
561k
                                                  bool PlacingEmptyBase) {
319
561k
  if (!PlacingEmptyBase && Offset >= SizeOfLargestEmptySubobject) {
320
    // We know that the only empty subobjects that can conflict with empty
321
    // subobject of non-empty bases, are empty bases that can be placed at
322
    // offset zero. Because of this, we only need to keep track of empty base
323
    // subobjects with offsets less than the size of the largest empty
324
    // subobject for our class.
325
516
    return;
326
516
  }
327
561k
328
561k
  AddSubobjectAtOffset(Info->Class, Offset);
329
561k
330
  // Traverse all non-virtual bases.
331
561k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
332
530k
  for (const BaseSubobjectInfo *Base : Info->Bases) {
333
530k
    if (Base->IsVirtual)
334
133
      continue;
335
530k
336
530k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
337
530k
    UpdateEmptyBaseSubobjects(Base, BaseOffset, PlacingEmptyBase);
338
530k
  }
339
561k
340
561k
  if (Info->PrimaryVirtualBaseInfo) {
341
19
    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
342
19
343
19
    if (Info == PrimaryVirtualBaseInfo->Derived)
344
19
      UpdateEmptyBaseSubobjects(PrimaryVirtualBaseInfo, Offset,
345
19
                                PlacingEmptyBase);
346
19
  }
347
561k
348
  // Traverse all member variables.
349
561k
  unsigned FieldNo = 0;
350
561k
  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
351
569k
       E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
352
8.09k
    if (I->isBitField())
353
4
      continue;
354
8.09k
355
8.09k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
356
8.09k
    UpdateEmptyFieldSubobjects(*I, FieldOffset, PlacingEmptyBase);
357
8.09k
  }
358
561k
}
359
360
bool EmptySubobjectMap::CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
361
43.3k
                                             CharUnits Offset) {
362
  // If we know this class doesn't have any empty subobjects we don't need to
363
  // bother checking.
364
43.3k
  if (SizeOfLargestEmptySubobject.isZero())
365
11.7k
    return true;
366
31.5k
367
31.5k
  if (!CanPlaceBaseSubobjectAtOffset(Info, Offset))
368
104
    return false;
369
31.4k
370
  // We are able to place the base at this offset. Make sure to update the
371
  // empty base subobject map.
372
31.4k
  UpdateEmptyBaseSubobjects(Info, Offset, Info->Class->isEmpty());
373
31.4k
  return true;
374
31.4k
}
375
376
bool
377
EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
378
                                                  const CXXRecordDecl *Class,
379
49.0k
                                                  CharUnits Offset) const {
380
  // We don't have to keep looking past the maximum offset that's known to
381
  // contain an empty class.
382
49.0k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
383
728
    return true;
384
48.3k
385
48.3k
  if (!CanPlaceSubobjectAtOffset(RD, Offset))
386
50
    return false;
387
48.2k
388
48.2k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
389
48.2k
390
  // Traverse all non-virtual bases.
391
13.3k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
392
13.3k
    if (Base.isVirtual())
393
28
      continue;
394
13.2k
395
13.2k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
396
13.2k
397
13.2k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
398
13.2k
    if (!CanPlaceFieldSubobjectAtOffset(BaseDecl, Class, BaseOffset))
399
32
      return false;
400
13.2k
  }
401
48.2k
402
48.2k
  if (RD == Class) {
403
    // This is the most derived class, traverse virtual bases as well.
404
28
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
405
28
      const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
406
28
407
28
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
408
28
      if (!CanPlaceFieldSubobjectAtOffset(VBaseDecl, Class, VBaseOffset))
409
1
        return false;
410
28
    }
411
35.7k
  }
412
48.2k
413
  // Traverse all member variables.
414
48.2k
  unsigned FieldNo = 0;
415
48.2k
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
416
144k
       I != E; ++I, ++FieldNo) {
417
96.1k
    if (I->isBitField())
418
190
      continue;
419
95.9k
420
95.9k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
421
95.9k
422
95.9k
    if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
423
10
      return false;
424
95.9k
  }
425
48.2k
426
48.2k
  return true;
427
48.2k
}
428
429
bool
430
EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
431
380k
                                                  CharUnits Offset) const {
432
  // We don't have to keep looking past the maximum offset that's known to
433
  // contain an empty class.
434
380k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
435
241k
    return true;
436
138k
437
138k
  QualType T = FD->getType();
438
138k
  if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
439
35.5k
    return CanPlaceFieldSubobjectAtOffset(RD, RD, Offset);
440
102k
441
  // If we have an array type we need to look at every element.
442
102k
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
443
10.1k
    QualType ElemTy = Context.getBaseElementType(AT);
444
10.1k
    const RecordType *RT = ElemTy->getAs<RecordType>();
445
10.1k
    if (!RT)
446
8.95k
      return true;
447
1.22k
448
1.22k
    const CXXRecordDecl *RD = RT->getAsCXXRecordDecl();
449
1.22k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
450
1.22k
451
1.22k
    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
452
1.22k
    CharUnits ElementOffset = Offset;
453
1.42k
    for (uint64_t I = 0; I != NumElements; ++I) {
454
      // We don't have to keep looking past the maximum offset that's known to
455
      // contain an empty class.
456
390
      if (!AnyEmptySubobjectsBeyondOffset(ElementOffset))
457
188
        return true;
458
202
459
202
      if (!CanPlaceFieldSubobjectAtOffset(RD, RD, ElementOffset))
460
4
        return false;
461
198
462
198
      ElementOffset += Layout.getSize();
463
198
    }
464
1.22k
  }
465
102k
466
93.4k
  return true;
467
102k
}
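Illustrative aside: the constant-array branch above visits every element because each element of an array of class type is a distinct subobject that may conflict with an empty class at the same offset. A standalone example of the visible effect, assuming a typical Itanium-like ABI:

// array_of_empty_sketch.cpp -- standalone, assumes an Itanium-like ABI.
#include <cstdio>

struct Empty { };
struct Holder {
  Empty Elems[4];  // four distinct empty subobjects, at offsets 0, 1, 2, 3
  char Tail;
};

int main() {
  // Commonly prints "5": the array occupies 4 bytes so no two Empty elements
  // share an address, then Tail follows at offset 4.
  std::printf("%zu\n", sizeof(Holder));
  return 0;
}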
468
469
bool
470
EmptySubobjectMap::CanPlaceFieldAtOffset(const FieldDecl *FD,
471
275k
                                         CharUnits Offset) {
472
275k
  if (!CanPlaceFieldSubobjectAtOffset(FD, Offset))
473
48
    return false;
474
275k
475
  // We are able to place the member variable at this offset.
476
  // Make sure to update the empty field subobject map.
477
275k
  UpdateEmptyFieldSubobjects(FD, Offset, FD->hasAttr<NoUniqueAddressAttr>());
478
275k
  return true;
479
275k
}
480
481
void EmptySubobjectMap::UpdateEmptyFieldSubobjects(
482
    const CXXRecordDecl *RD, const CXXRecordDecl *Class, CharUnits Offset,
483
69.2k
    bool PlacingOverlappingField) {
484
  // We know that the only empty subobjects that can conflict with empty
485
  // field subobjects are subobjects of empty bases and potentially-overlapping
486
  // fields that can be placed at offset zero. Because of this, we only need to
487
  // keep track of empty field subobjects with offsets less than the size of
488
  // the largest empty subobject for our class.
489
  //
490
  // (Proof: we will only consider placing a subobject at offset zero or at
491
  // >= the current dsize. The only cases where the earlier subobject can be
492
  // placed beyond the end of dsize is if it's an empty base or a
493
  // potentially-overlapping field.)
494
69.2k
  if (!PlacingOverlappingField && Offset >= SizeOfLargestEmptySubobject)
495
41.9k
    return;
496
27.3k
497
27.3k
  AddSubobjectAtOffset(RD, Offset);
498
27.3k
499
27.3k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
500
27.3k
501
  // Traverse all non-virtual bases.
502
11.8k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
503
11.8k
    if (Base.isVirtual())
504
26
      continue;
505
11.7k
506
11.7k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
507
11.7k
508
11.7k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
509
11.7k
    UpdateEmptyFieldSubobjects(BaseDecl, Class, BaseOffset,
510
11.7k
                               PlacingOverlappingField);
511
11.7k
  }
512
27.3k
513
27.3k
  if (RD == Class) {
514
    // This is the most derived class, traverse virtual bases as well.
515
26
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
516
26
      const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
517
26
518
26
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
519
26
      UpdateEmptyFieldSubobjects(VBaseDecl, Class, VBaseOffset,
520
26
                                 PlacingOverlappingField);
521
26
    }
522
15.9k
  }
523
27.3k
524
  // Traverse all member variables.
525
27.3k
  unsigned FieldNo = 0;
526
27.3k
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
527
54.0k
       I != E; ++I, ++FieldNo) {
528
26.7k
    if (I->isBitField())
529
2
      continue;
530
26.7k
531
26.7k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
532
26.7k
533
26.7k
    UpdateEmptyFieldSubobjects(*I, FieldOffset, PlacingOverlappingField);
534
26.7k
  }
535
27.3k
}
536
537
void EmptySubobjectMap::UpdateEmptyFieldSubobjects(
538
310k
    const FieldDecl *FD, CharUnits Offset, bool PlacingOverlappingField) {
539
310k
  QualType T = FD->getType();
540
310k
  if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl()) {
541
57.4k
    UpdateEmptyFieldSubobjects(RD, RD, Offset, PlacingOverlappingField);
542
57.4k
    return;
543
57.4k
  }
544
253k
545
  // If we have an array type we need to update every element.
546
253k
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
547
22.6k
    QualType ElemTy = Context.getBaseElementType(AT);
548
22.6k
    const RecordType *RT = ElemTy->getAs<RecordType>();
549
22.6k
    if (!RT)
550
20.6k
      return;
551
1.99k
552
1.99k
    const CXXRecordDecl *RD = RT->getAsCXXRecordDecl();
553
1.99k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
554
1.99k
555
1.99k
    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
556
1.99k
    CharUnits ElementOffset = Offset;
557
1.99k
558
2.04k
    for (uint64_t I = 0; I != NumElements; ++I) {
559
      // We know that the only empty subobjects that can conflict with empty
560
      // field subobjects are subobjects of empty bases that can be placed at
561
      // offset zero. Because of this, we only need to keep track of empty field
562
      // subobjects with offsets less than the size of the largest empty
563
      // subobject for our class.
564
990
      if (!PlacingOverlappingField &&
565
966
          ElementOffset >= SizeOfLargestEmptySubobject)
566
943
        return;
567
47
568
47
      UpdateEmptyFieldSubobjects(RD, RD, ElementOffset,
569
47
                                 PlacingOverlappingField);
570
47
      ElementOffset += Layout.getSize();
571
47
    }
572
1.99k
  }
573
253k
}
574
575
typedef llvm::SmallPtrSet<const CXXRecordDecl*, 4> ClassSetTy;
576
577
class ItaniumRecordLayoutBuilder {
578
protected:
579
  // FIXME: Remove this and make the appropriate fields public.
580
  friend class clang::ASTContext;
581
582
  const ASTContext &Context;
583
584
  EmptySubobjectMap *EmptySubobjects;
585
586
  /// Size - The current size of the record layout.
587
  uint64_t Size;
588
589
  /// Alignment - The current alignment of the record layout.
590
  CharUnits Alignment;
591
592
  /// PreferredAlignment - The preferred alignment of the record layout.
593
  CharUnits PreferredAlignment;
594
595
  /// The alignment if attribute packed is not used.
596
  CharUnits UnpackedAlignment;
597
598
  /// \brief The maximum of the alignments of top-level members.
599
  CharUnits UnadjustedAlignment;
600
601
  SmallVector<uint64_t, 16> FieldOffsets;
602
603
  /// Whether the external AST source has provided a layout for this
604
  /// record.
605
  unsigned UseExternalLayout : 1;
606
607
  /// Whether we need to infer alignment, even when we have an
608
  /// externally-provided layout.
609
  unsigned InferAlignment : 1;
610
611
  /// Packed - Whether the record is packed or not.
612
  unsigned Packed : 1;
613
614
  unsigned IsUnion : 1;
615
616
  unsigned IsMac68kAlign : 1;
617
618
  unsigned IsMsStruct : 1;
619
620
  /// UnfilledBitsInLastUnit - If the last field laid out was a bitfield,
621
  /// this contains the number of bits in the last unit that can be used for
622
  /// an adjacent bitfield if necessary.  The unit in question is usually
623
  /// a byte, but larger units are used if IsMsStruct.
624
  unsigned char UnfilledBitsInLastUnit;
625
  /// LastBitfieldTypeSize - If IsMsStruct, represents the size of the type
626
  /// of the previous field if it was a bitfield.
627
  unsigned char LastBitfieldTypeSize;
628
629
  /// MaxFieldAlignment - The maximum allowed field alignment. This is set by
630
  /// #pragma pack.
631
  CharUnits MaxFieldAlignment;
632
633
  /// DataSize - The data size of the record being laid out.
634
  uint64_t DataSize;
635
636
  CharUnits NonVirtualSize;
637
  CharUnits NonVirtualAlignment;
638
  CharUnits PreferredNVAlignment;
639
640
  /// If we've laid out a field but not included its tail padding in Size yet,
641
  /// this is the size up to the end of that field.
642
  CharUnits PaddedFieldSize;
643
644
  /// PrimaryBase - the primary base class (if one exists) of the class
645
  /// we're laying out.
646
  const CXXRecordDecl *PrimaryBase;
647
648
  /// PrimaryBaseIsVirtual - Whether the primary base of the class we're laying
649
  /// out is virtual.
650
  bool PrimaryBaseIsVirtual;
651
652
  /// HasOwnVFPtr - Whether the class provides its own vtable/vftbl
653
  /// pointer, as opposed to inheriting one from a primary base class.
654
  bool HasOwnVFPtr;
655
656
  /// Whether any field offset changed due to the packed attribute.
657
  bool HasPackedField;
658
659
  /// HandledFirstNonOverlappingEmptyField - An auxiliary field used for AIX.
660
  /// When there are OverlappingEmptyFields existing in the aggregate, the
661
  /// flag shows if the following first non-empty or empty-but-non-overlapping
662
  /// field has been handled, if any.
663
  bool HandledFirstNonOverlappingEmptyField;
664
665
  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
666
667
  /// Bases - base classes and their offsets in the record.
668
  BaseOffsetsMapTy Bases;
669
670
  // VBases - virtual base classes and their offsets in the record.
671
  ASTRecordLayout::VBaseOffsetsMapTy VBases;
672
673
  /// IndirectPrimaryBases - Virtual base classes, direct or indirect, that are
674
  /// primary base classes for some other direct or indirect base class.
675
  CXXIndirectPrimaryBaseSet IndirectPrimaryBases;
676
677
  /// FirstNearlyEmptyVBase - The first nearly empty virtual base class in
678
  /// inheritance graph order. Used for determining the primary base class.
679
  const CXXRecordDecl *FirstNearlyEmptyVBase;
680
681
  /// VisitedVirtualBases - A set of all the visited virtual bases, used to
682
  /// avoid visiting virtual bases more than once.
683
  llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBases;
684
685
  /// Valid if UseExternalLayout is true.
686
  ExternalLayout External;
687
688
  ItaniumRecordLayoutBuilder(const ASTContext &Context,
689
                             EmptySubobjectMap *EmptySubobjects)
690
      : Context(Context), EmptySubobjects(EmptySubobjects), Size(0),
691
        Alignment(CharUnits::One()), PreferredAlignment(CharUnits::One()),
692
        UnpackedAlignment(CharUnits::One()),
693
        UnadjustedAlignment(CharUnits::One()), UseExternalLayout(false),
694
        InferAlignment(false), Packed(false), IsUnion(false),
695
        IsMac68kAlign(false), IsMsStruct(false), UnfilledBitsInLastUnit(0),
696
        LastBitfieldTypeSize(0), MaxFieldAlignment(CharUnits::Zero()),
697
        DataSize(0), NonVirtualSize(CharUnits::Zero()),
698
        NonVirtualAlignment(CharUnits::One()),
699
        PreferredNVAlignment(CharUnits::One()),
700
        PaddedFieldSize(CharUnits::Zero()), PrimaryBase(nullptr),
701
        PrimaryBaseIsVirtual(false), HasOwnVFPtr(false), HasPackedField(false),
702
        HandledFirstNonOverlappingEmptyField(false),
703
311k
        FirstNearlyEmptyVBase(nullptr) {}
704
705
  void Layout(const RecordDecl *D);
706
  void Layout(const CXXRecordDecl *D);
707
  void Layout(const ObjCInterfaceDecl *D);
708
709
  void LayoutFields(const RecordDecl *D);
710
  void LayoutField(const FieldDecl *D, bool InsertExtraPadding);
711
  void LayoutWideBitField(uint64_t FieldSize, uint64_t TypeSize,
712
                          bool FieldPacked, const FieldDecl *D);
713
  void LayoutBitField(const FieldDecl *D);
714
715
0
  TargetCXXABI getCXXABI() const {
716
0
    return Context.getTargetInfo().getCXXABI();
717
0
  }
718
719
  /// BaseSubobjectInfoAllocator - Allocator for BaseSubobjectInfo objects.
720
  llvm::SpecificBumpPtrAllocator<BaseSubobjectInfo> BaseSubobjectInfoAllocator;
721
722
  typedef llvm::DenseMap<const CXXRecordDecl *, BaseSubobjectInfo *>
723
    BaseSubobjectInfoMapTy;
724
725
  /// VirtualBaseInfo - Map from all the (direct or indirect) virtual bases
726
  /// of the class we're laying out to their base subobject info.
727
  BaseSubobjectInfoMapTy VirtualBaseInfo;
728
729
  /// NonVirtualBaseInfo - Map from all the direct non-virtual bases of the
730
  /// class we're laying out to their base subobject info.
731
  BaseSubobjectInfoMapTy NonVirtualBaseInfo;
732
733
  /// ComputeBaseSubobjectInfo - Compute the base subobject information for the
734
  /// bases of the given class.
735
  void ComputeBaseSubobjectInfo(const CXXRecordDecl *RD);
736
737
  /// ComputeBaseSubobjectInfo - Compute the base subobject information for a
738
  /// single class and all of its base classes.
739
  BaseSubobjectInfo *ComputeBaseSubobjectInfo(const CXXRecordDecl *RD,
740
                                              bool IsVirtual,
741
                                              BaseSubobjectInfo *Derived);
742
743
  /// DeterminePrimaryBase - Determine the primary base of the given class.
744
  void DeterminePrimaryBase(const CXXRecordDecl *RD);
745
746
  void SelectPrimaryVBase(const CXXRecordDecl *RD);
747
748
  void EnsureVTablePointerAlignment(CharUnits UnpackedBaseAlign);
749
750
  /// LayoutNonVirtualBases - Determines the primary base class (if any) and
751
  /// lays it out. Will then proceed to lay out all non-virtual base classes.
752
  void LayoutNonVirtualBases(const CXXRecordDecl *RD);
753
754
  /// LayoutNonVirtualBase - Lays out a single non-virtual base.
755
  void LayoutNonVirtualBase(const BaseSubobjectInfo *Base);
756
757
  void AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
758
                                    CharUnits Offset);
759
760
  /// LayoutVirtualBases - Lays out all the virtual bases.
761
  void LayoutVirtualBases(const CXXRecordDecl *RD,
762
                          const CXXRecordDecl *MostDerivedClass);
763
764
  /// LayoutVirtualBase - Lays out a single virtual base.
765
  void LayoutVirtualBase(const BaseSubobjectInfo *Base);
766
767
  /// LayoutBase - Will lay out a base and return the offset where it was
768
  /// placed, in chars.
769
  CharUnits LayoutBase(const BaseSubobjectInfo *Base);
770
771
  /// InitializeLayout - Initialize record layout for the given record decl.
772
  void InitializeLayout(const Decl *D);
773
774
  /// FinishLayout - Finalize record layout. Adjust record size based on the
775
  /// alignment.
776
  void FinishLayout(const NamedDecl *D);
777
778
  void UpdateAlignment(CharUnits NewAlignment, CharUnits UnpackedNewAlignment,
779
                       CharUnits PreferredAlignment);
780
18.3k
  void UpdateAlignment(CharUnits NewAlignment, CharUnits UnpackedNewAlignment) {
781
18.3k
    UpdateAlignment(NewAlignment, UnpackedNewAlignment, NewAlignment);
782
18.3k
  }
783
7.96k
  void UpdateAlignment(CharUnits NewAlignment) {
784
7.96k
    UpdateAlignment(NewAlignment, NewAlignment, NewAlignment);
785
7.96k
  }
786
787
  /// Retrieve the externally-supplied field offset for the given
788
  /// field.
789
  ///
790
  /// \param Field The field whose offset is being queried.
791
  /// \param ComputedOffset The offset that we've computed for this field.
792
  uint64_t updateExternalFieldOffset(const FieldDecl *Field,
793
                                     uint64_t ComputedOffset);
794
795
  void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset,
796
                          uint64_t UnpackedOffset, unsigned UnpackedAlign,
797
                          bool isPacked, const FieldDecl *D);
798
799
  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID);
800
801
590k
  CharUnits getSize() const {
802
590k
    assert(Size % Context.getCharWidth() == 0);
803
590k
    return Context.toCharUnitsFromBits(Size);
804
590k
  }
805
2.74M
  uint64_t getSizeInBits() const { return Size; }
806
807
106k
  void setSize(CharUnits NewSize) { Size = Context.toBits(NewSize); }
808
1.41M
  void setSize(uint64_t NewSize) { Size = NewSize; }
809
810
0
  CharUnits getAligment() const { return Alignment; }
811
812
872k
  CharUnits getDataSize() const {
813
872k
    assert(DataSize % Context.getCharWidth() == 0);
814
872k
    return Context.toCharUnitsFromBits(DataSize);
815
872k
  }
816
1.64M
  uint64_t getDataSizeInBits() const { return DataSize; }
817
818
753k
  void setDataSize(CharUnits NewSize) { DataSize = Context.toBits(NewSize); }
819
64.7k
  void setDataSize(uint64_t NewSize) { DataSize = NewSize; }
820
821
  ItaniumRecordLayoutBuilder(const ItaniumRecordLayoutBuilder &) = delete;
822
  void operator=(const ItaniumRecordLayoutBuilder &) = delete;
823
};
824
} // end anonymous namespace
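Illustrative aside: the builder above keeps Size and DataSize in bits and converts to CharUnits only at the boundaries (getSize/setSize/getDataSize/setDataSize). A minimal sketch of that bits-to-chars bookkeeping, assuming an 8-bit char purely for the example:

// bits_chars_sketch.cpp -- toy model of the Size/DataSize bookkeeping.
#include <cassert>
#include <cstdint>

constexpr uint64_t CharWidth = 8;  // assumption for the sketch

constexpr uint64_t toBits(uint64_t Chars) { return Chars * CharWidth; }

constexpr uint64_t toCharsFromBits(uint64_t Bits) {
  // Mirrors the assertions in getSize()/getDataSize(): the running size is
  // expected to be a whole number of chars before converting.
  return Bits / CharWidth;
}

constexpr uint64_t alignToBits(uint64_t Bits, uint64_t AlignInBits) {
  return (Bits + AlignInBits - 1) / AlignInBits * AlignInBits;
}

int main() {
  uint64_t SizeInBits = toBits(5);                 // e.g. five chars laid out
  SizeInBits = alignToBits(SizeInBits, toBits(4)); // round up to 4-char alignment
  assert(toCharsFromBits(SizeInBits) == 8);
  return 0;
}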
825
826
1.41k
void ItaniumRecordLayoutBuilder::SelectPrimaryVBase(const CXXRecordDecl *RD) {
827
982
  for (const auto &I : RD->bases()) {
828
982
    assert(!I.getType()->isDependentType() &&
829
982
           "Cannot layout class with dependent bases.");
830
982
831
982
    const CXXRecordDecl *Base = I.getType()->getAsCXXRecordDecl();
832
982
833
    // Check if this is a nearly empty virtual base.
834
982
    if (I.isVirtual() && Context.isNearlyEmpty(Base)) {
835
      // If it's not an indirect primary base, then we've found our primary
836
      // base.
837
197
      if (!IndirectPrimaryBases.count(Base)) {
838
194
        PrimaryBase = Base;
839
194
        PrimaryBaseIsVirtual = true;
840
194
        return;
841
194
      }
842
3
843
      // Is this the first nearly empty virtual base?
844
3
      if (!FirstNearlyEmptyVBase)
845
3
        FirstNearlyEmptyVBase = Base;
846
3
    }
847
982
848
788
    SelectPrimaryVBase(Base);
849
788
    if (PrimaryBase)
850
8
      return;
851
788
  }
852
1.41k
}
853
854
/// DeterminePrimaryBase - Determine the primary base of the given class.
855
186k
void ItaniumRecordLayoutBuilder::DeterminePrimaryBase(const CXXRecordDecl *RD) {
856
  // If the class isn't dynamic, it won't have a primary base.
857
186k
  if (!RD->isDynamicClass())
858
175k
    return;
859
11.6k
860
  // Compute all the primary virtual bases for all of our direct and
861
  // indirect bases, and record all their primary virtual base classes.
862
11.6k
  RD->getIndirectPrimaryBases(IndirectPrimaryBases);
863
11.6k
864
  // If the record has a dynamic base class, attempt to choose a primary base
865
  // class. It is the first (in direct base class order) non-virtual dynamic
866
  // base class, if one exists.
867
8.67k
  for (const auto &I : RD->bases()) {
868
    // Ignore virtual bases.
869
8.67k
    if (I.isVirtual())
870
697
      continue;
871
7.97k
872
7.97k
    const CXXRecordDecl *Base = I.getType()->getAsCXXRecordDecl();
873
7.97k
874
7.97k
    if (Base->isDynamicClass()) {
875
      // We found it.
876
7.81k
      PrimaryBase = Base;
877
7.81k
      PrimaryBaseIsVirtual = false;
878
7.81k
      return;
879
7.81k
    }
880
7.97k
  }
881
11.6k
882
  // Under the Itanium ABI, if there is no non-virtual primary base class,
883
  // try to compute the primary virtual base.  The primary virtual base is
884
  // the first nearly empty virtual base that is not an indirect primary
885
  // virtual base class, if one exists.
886
3.81k
  if (RD->getNumVBases() != 0) {
887
626
    SelectPrimaryVBase(RD);
888
626
    if (PrimaryBase)
889
194
      return;
890
3.61k
  }
891
3.61k
892
  // Otherwise, it is the first indirect primary base class, if one exists.
893
3.61k
  if (FirstNearlyEmptyVBase) {
894
2
    PrimaryBase = FirstNearlyEmptyVBase;
895
2
    PrimaryBaseIsVirtual = true;
896
2
    return;
897
2
  }
898
3.61k
899
3.61k
  assert(!PrimaryBase && "Should not get here with a primary base!");
900
3.61k
}
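Illustrative aside: when a dynamic class has a dynamic non-virtual base, that base becomes the primary base and the derived class reuses its vtable pointer instead of adding another one. The sizes below assume a common LP64 Itanium ABI target.

// primary_base_sketch.cpp -- standalone, assumes an LP64 Itanium ABI target.
#include <cstdio>

struct Dyn { virtual void f(); };  // dynamic: needs a vtable pointer
struct Derived : Dyn { };          // Dyn is the primary base; the vptr is shared

int main() {
  // Commonly prints "8 8": Derived does not grow beyond its primary base.
  std::printf("%zu %zu\n", sizeof(Dyn), sizeof(Derived));
  return 0;
}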
901
902
BaseSubobjectInfo *ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo(
903
580k
    const CXXRecordDecl *RD, bool IsVirtual, BaseSubobjectInfo *Derived) {
904
580k
  BaseSubobjectInfo *Info;
905
580k
906
580k
  if (IsVirtual) {
907
    // Check if we already have info about this virtual base.
908
1.38k
    BaseSubobjectInfo *&InfoSlot = VirtualBaseInfo[RD];
909
1.38k
    if (InfoSlot) {
910
230
      assert(InfoSlot->Class == RD && "Wrong class for virtual base info!");
911
230
      return InfoSlot;
912
230
    }
913
1.15k
914
    // We don't, create it.
915
1.15k
    InfoSlot = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
916
1.15k
    Info = InfoSlot;
917
579k
  } else {
918
579k
    Info = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
919
579k
  }
920
580k
921
580k
  Info->Class = RD;
922
580k
  Info->IsVirtual = IsVirtual;
923
580k
  Info->Derived = nullptr;
924
580k
  Info->PrimaryVirtualBaseInfo = nullptr;
925
580k
926
580k
  const CXXRecordDecl *PrimaryVirtualBase = nullptr;
927
580k
  BaseSubobjectInfo *PrimaryVirtualBaseInfo = nullptr;
928
580k
929
  // Check if this base has a primary virtual base.
930
580k
  if (RD->getNumVBases()) {
931
628
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
932
628
    if (Layout.isPrimaryBaseVirtual()) {
933
      // This base does have a primary virtual base.
934
170
      PrimaryVirtualBase = Layout.getPrimaryBase();
935
170
      assert(PrimaryVirtualBase && "Didn't have a primary virtual base!");
936
170
937
      // Now check if we have base subobject info about this primary base.
938
170
      PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
939
170
940
170
      if (PrimaryVirtualBaseInfo) {
941
55
        if (PrimaryVirtualBaseInfo->Derived) {
942
          // We did have info about this primary base, and it turns out that it
943
          // has already been claimed as a primary virtual base for another
944
          // base.
945
33
          PrimaryVirtualBase = nullptr;
946
22
        } else {
947
          // We can claim this base as our primary base.
948
22
          Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
949
22
          PrimaryVirtualBaseInfo->Derived = Info;
950
22
        }
951
55
      }
952
170
    }
953
628
  }
954
580k
955
  // Now go through all direct bases.
956
537k
  for (const auto &I : RD->bases()) {
957
537k
    bool IsVirtual = I.isVirtual();
958
537k
959
537k
    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
960
537k
961
537k
    Info->Bases.push_back(ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, Info));
962
537k
  }
963
580k
964
580k
  if (PrimaryVirtualBase && !PrimaryVirtualBaseInfo) {
965
    // Traversing the bases must have created the base info for our primary
966
    // virtual base.
967
115
    PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
968
115
    assert(PrimaryVirtualBaseInfo &&
969
115
           "Did not create a primary virtual base!");
970
115
971
    // Claim the primary virtual base as our primary virtual base.
972
115
    Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
973
115
    PrimaryVirtualBaseInfo->Derived = Info;
974
115
  }
975
580k
976
580k
  return Info;
977
580k
}
978
979
void ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo(
980
186k
    const CXXRecordDecl *RD) {
981
42.9k
  for (const auto &I : RD->bases()) {
982
42.9k
    bool IsVirtual = I.isVirtual();
983
42.9k
984
42.9k
    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
985
42.9k
986
    // Compute the base subobject info for this base.
987
42.9k
    BaseSubobjectInfo *Info = ComputeBaseSubobjectInfo(BaseDecl, IsVirtual,
988
42.9k
                                                       nullptr);
989
42.9k
990
42.9k
    if (IsVirtual) {
991
      // ComputeBaseInfo has already added this base for us.
992
738
      assert(VirtualBaseInfo.count(BaseDecl) &&
993
738
             "Did not add virtual base!");
994
42.2k
    } else {
995
      // Add the base info to the map of non-virtual bases.
996
42.2k
      assert(!NonVirtualBaseInfo.count(BaseDecl) &&
997
42.2k
             "Non-virtual base already exists!");
998
42.2k
      NonVirtualBaseInfo.insert(std::make_pair(BaseDecl, Info));
999
42.2k
    }
1000
42.9k
  }
1001
186k
}
1002
1003
void ItaniumRecordLayoutBuilder::EnsureVTablePointerAlignment(
1004
3.61k
    CharUnits UnpackedBaseAlign) {
1005
3.61k
  CharUnits BaseAlign = Packed ? CharUnits::One() : UnpackedBaseAlign;
1006
3.61k
1007
  // The maximum field alignment overrides base align.
1008
3.61k
  if (!MaxFieldAlignment.isZero()) {
1009
5
    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
1010
5
    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
1011
5
  }
1012
3.61k
1013
  // Round up the current record size to pointer alignment.
1014
3.61k
  setSize(getSize().alignTo(BaseAlign));
1015
3.61k
1016
  // Update the alignment.
1017
3.61k
  UpdateAlignment(BaseAlign, UnpackedBaseAlign, BaseAlign);
1018
3.61k
}
1019
1020
void ItaniumRecordLayoutBuilder::LayoutNonVirtualBases(
1021
186k
    const CXXRecordDecl *RD) {
1022
  // Then, determine the primary base class.
1023
186k
  DeterminePrimaryBase(RD);
1024
186k
1025
  // Compute base subobject info.
1026
186k
  ComputeBaseSubobjectInfo(RD);
1027
186k
1028
  // If we have a primary base class, lay it out.
1029
186k
  if (PrimaryBase) {
1030
8.00k
    if (PrimaryBaseIsVirtual) {
1031
      // If the primary virtual base was a primary virtual base of some other
1032
      // base class we'll have to steal it.
1033
196
      BaseSubobjectInfo *PrimaryBaseInfo = VirtualBaseInfo.lookup(PrimaryBase);
1034
196
      PrimaryBaseInfo->Derived = nullptr;
1035
196
1036
      // We have a virtual primary base, insert it as an indirect primary base.
1037
196
      IndirectPrimaryBases.insert(PrimaryBase);
1038
196
1039
196
      assert(!VisitedVirtualBases.count(PrimaryBase) &&
1040
196
             "vbase already visited!");
1041
196
      VisitedVirtualBases.insert(PrimaryBase);
1042
196
1043
196
      LayoutVirtualBase(PrimaryBaseInfo);
1044
7.81k
    } else {
1045
7.81k
      BaseSubobjectInfo *PrimaryBaseInfo =
1046
7.81k
        NonVirtualBaseInfo.lookup(PrimaryBase);
1047
7.81k
      assert(PrimaryBaseInfo &&
1048
7.81k
             "Did not find base info for non-virtual primary base!");
1049
7.81k
1050
7.81k
      LayoutNonVirtualBase(PrimaryBaseInfo);
1051
7.81k
    }
1052
8.00k
1053
  // If this class needs a vtable/vf-table and didn't get one from a
1054
  // primary base, add it in now.
1055
178k
  } else if (RD->isDynamicClass()) {
1056
3.61k
    assert(DataSize == 0 && "Vtable pointer must be at offset zero!");
1057
3.61k
    CharUnits PtrWidth =
1058
3.61k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
1059
3.61k
    CharUnits PtrAlign =
1060
3.61k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
1061
3.61k
    EnsureVTablePointerAlignment(PtrAlign);
1062
3.61k
    HasOwnVFPtr = true;
1063
3.61k
1064
3.61k
    assert(!IsUnion && "Unions cannot be dynamic classes.");
1065
3.61k
    HandledFirstNonOverlappingEmptyField = true;
1066
3.61k
1067
3.61k
    setSize(getSize() + PtrWidth);
1068
3.61k
    setDataSize(getSize());
1069
3.61k
  }
1070
186k
1071
  // Now lay out the non-virtual bases.
1072
42.9k
  for (const auto &I : RD->bases()) {
1073
42.9k
1074
    // Ignore virtual bases.
1075
42.9k
    if (I.isVirtual())
1076
738
      continue;
1077
42.2k
1078
42.2k
    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
1079
42.2k
1080
    // Skip the primary base, because we've already laid it out.  The
1081
    // !PrimaryBaseIsVirtual check is required because we might have a
1082
    // non-virtual base of the same type as a primary virtual base.
1083
42.2k
    if (BaseDecl == PrimaryBase && !PrimaryBaseIsVirtual)
1084
7.81k
      continue;
1085
34.3k
1086
    // Lay out the base.
1087
34.3k
    BaseSubobjectInfo *BaseInfo = NonVirtualBaseInfo.lookup(BaseDecl);
1088
34.3k
    assert(BaseInfo && "Did not find base info for non-virtual base!");
1089
34.3k
1090
34.3k
    LayoutNonVirtualBase(BaseInfo);
1091
34.3k
  }
1092
186k
}
1093
1094
void ItaniumRecordLayoutBuilder::LayoutNonVirtualBase(
1095
42.2k
    const BaseSubobjectInfo *Base) {
1096
  // Layout the base.
1097
42.2k
  CharUnits Offset = LayoutBase(Base);
1098
42.2k
1099
  // Add its base class offset.
1100
42.2k
  assert(!Bases.count(Base->Class) && "base offset already exists!");
1101
42.2k
  Bases.insert(std::make_pair(Base->Class, Offset));
1102
42.2k
1103
42.2k
  AddPrimaryVirtualBaseOffsets(Base, Offset);
1104
42.2k
}
1105
1106
void ItaniumRecordLayoutBuilder::AddPrimaryVirtualBaseOffsets(
1107
43.5k
    const BaseSubobjectInfo *Info, CharUnits Offset) {
1108
  // This base isn't interesting, it has no virtual bases.
1109
43.5k
  if (!Info->Class->getNumVBases())
1110
42.9k
    return;
1111
628
1112
  // First, check if we have a virtual primary base to add offsets for.
1113
628
  if (Info->PrimaryVirtualBaseInfo) {
1114
137
    assert(Info->PrimaryVirtualBaseInfo->IsVirtual &&
1115
137
           "Primary virtual base is not virtual!");
1116
137
    if (Info->PrimaryVirtualBaseInfo->Derived == Info) {
1117
      // Add the offset.
1118
134
      assert(!VBases.count(Info->PrimaryVirtualBaseInfo->Class) &&
1119
134
             "primary vbase offset already exists!");
1120
134
      VBases.insert(std::make_pair(Info->PrimaryVirtualBaseInfo->Class,
1121
134
                                   ASTRecordLayout::VBaseInfo(Offset, false)));
1122
134
1123
      // Traverse the primary virtual base.
1124
134
      AddPrimaryVirtualBaseOffsets(Info->PrimaryVirtualBaseInfo, Offset);
1125
134
    }
1126
137
  }
1127
628
1128
  // Now go through all direct non-virtual bases.
1129
628
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
1130
876
  for (const BaseSubobjectInfo *Base : Info->Bases) {
1131
876
    if (Base->IsVirtual)
1132
646
      continue;
1133
230
1134
230
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
1135
230
    AddPrimaryVirtualBaseOffsets(Base, BaseOffset);
1136
230
  }
1137
628
}
1138
1139
void ItaniumRecordLayoutBuilder::LayoutVirtualBases(
1140
187k
    const CXXRecordDecl *RD, const CXXRecordDecl *MostDerivedClass) {
1141
187k
  const CXXRecordDecl *PrimaryBase;
1142
187k
  bool PrimaryBaseIsVirtual;
1143
187k
1144
187k
  if (MostDerivedClass == RD) {
1145
186k
    PrimaryBase = this->PrimaryBase;
1146
186k
    PrimaryBaseIsVirtual = this->PrimaryBaseIsVirtual;
1147
632
  } else {
1148
632
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
1149
632
    PrimaryBase = Layout.getPrimaryBase();
1150
632
    PrimaryBaseIsVirtual = Layout.isPrimaryBaseVirtual();
1151
632
  }
1152
187k
1153
43.8k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
1154
43.8k
    assert(!Base.getType()->isDependentType() &&
1155
43.8k
           "Cannot layout class with dependent bases.");
1156
43.8k
1157
43.8k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
1158
43.8k
1159
43.8k
    if (Base.isVirtual()) {
1160
1.38k
      if (PrimaryBase != BaseDecl || !PrimaryBaseIsVirtual) {
1161
1.03k
        bool IndirectPrimaryBase = IndirectPrimaryBases.count(BaseDecl);
1162
1.03k
1163
        // Only lay out the virtual base if it's not an indirect primary base.
1164
1.03k
        if (!IndirectPrimaryBase) {
1165
          // Only visit virtual bases once.
1166
1.00k
          if (!VisitedVirtualBases.insert(BaseDecl).second)
1167
177
            continue;
1168
824
1169
824
          const BaseSubobjectInfo *BaseInfo = VirtualBaseInfo.lookup(BaseDecl);
1170
824
          assert(BaseInfo && "Did not find virtual base info!");
1171
824
          LayoutVirtualBase(BaseInfo);
1172
824
        }
1173
1.03k
      }
1174
1.38k
    }
1175
43.8k
1176
43.6k
    if (!BaseDecl->getNumVBases()) {
1177
      // This base isn't interesting since it doesn't have any virtual bases.
1178
43.0k
      continue;
1179
43.0k
    }
1180
632
1181
632
    LayoutVirtualBases(BaseDecl, MostDerivedClass);
1182
632
  }
1183
187k
}
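Illustrative aside: virtual bases are laid out once per most-derived object and are reachable through every inheritance path, which is why the code above tracks VisitedVirtualBases. A standalone example showing the single shared subobject:

// virtual_base_sketch.cpp -- standalone illustration of a shared virtual base.
#include <cstdio>

struct VBase { int V; };
struct Left  : virtual VBase { };
struct Right : virtual VBase { };
struct Most  : Left, Right { };

int main() {
  Most M;
  VBase *ViaLeft  = static_cast<Left *>(&M);
  VBase *ViaRight = static_cast<Right *>(&M);
  // Prints "1": both paths reach the same, single VBase subobject.
  std::printf("%d\n", ViaLeft == ViaRight);
  return 0;
}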
1184
1185
void ItaniumRecordLayoutBuilder::LayoutVirtualBase(
1186
1.02k
    const BaseSubobjectInfo *Base) {
1187
1.02k
  assert(!Base->Derived && "Trying to lay out a primary virtual base!");
1188
1.02k
1189
  // Layout the base.
1190
1.02k
  CharUnits Offset = LayoutBase(Base);
1191
1.02k
1192
  // Add its base class offset.
1193
1.02k
  assert(!VBases.count(Base->Class) && "vbase offset already exists!");
1194
1.02k
  VBases.insert(std::make_pair(Base->Class,
1195
1.02k
                       ASTRecordLayout::VBaseInfo(Offset, false)));
1196
1.02k
1197
1.02k
  AddPrimaryVirtualBaseOffsets(Base, Offset);
1198
1.02k
}
1199
1200
CharUnits
1201
43.2k
ItaniumRecordLayoutBuilder::LayoutBase(const BaseSubobjectInfo *Base) {
1202
43.2k
  assert(!IsUnion && "Unions cannot have base classes.");
1203
43.2k
1204
43.2k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Base->Class);
1205
43.2k
  CharUnits Offset;
1206
43.2k
1207
  // Query the external layout to see if it provides an offset.
1208
43.2k
  bool HasExternalLayout = false;
1209
43.2k
  if (UseExternalLayout) {
1210
3.50k
    if (Base->IsVirtual)
1211
20
      HasExternalLayout = External.getExternalVBaseOffset(Base->Class, Offset);
1212
3.48k
    else
1213
3.48k
      HasExternalLayout = External.getExternalNVBaseOffset(Base->Class, Offset);
1214
3.50k
  }
1215
43.2k
1216
86.4k
  auto getBaseOrPreferredBaseAlignFromUnpacked = [&](CharUnits UnpackedAlign) {
1217
    // Clang <= 6 incorrectly applied the 'packed' attribute to base classes.
1218
    // Per GCC's documentation, it only applies to non-static data members.
1219
86.4k
    return (Packed && ((Context.getLangOpts().getClangABICompat() <=
1220
90
                        LangOptions::ClangABI::Ver6) ||
1221
76
                       Context.getTargetInfo().getTriple().isPS4() ||
1222
72
                       Context.getTargetInfo().getTriple().isOSAIX()))
1223
34
               ? CharUnits::One()
1224
86.4k
               : UnpackedAlign;
1225
86.4k
  };
1226
43.2k
1227
43.2k
  CharUnits UnpackedBaseAlign = Layout.getNonVirtualAlignment();
1228
43.2k
  CharUnits UnpackedPreferredBaseAlign = Layout.getPreferredNVAlignment();
1229
43.2k
  CharUnits BaseAlign =
1230
43.2k
      getBaseOrPreferredBaseAlignFromUnpacked(UnpackedBaseAlign);
1231
43.2k
  CharUnits PreferredBaseAlign =
1232
43.2k
      getBaseOrPreferredBaseAlignFromUnpacked(UnpackedPreferredBaseAlign);
1233
43.2k
1234
43.2k
  const bool DefaultsToAIXPowerAlignment =
1235
43.2k
      Context.getTargetInfo().defaultsToAIXPowerAlignment();
1236
43.2k
  if (DefaultsToAIXPowerAlignment) {
1237
    // AIX `power` alignment does not apply the preferred alignment for
1238
    // non-union classes if the source of the alignment (the current base in
1239
    // this context) follows introduction of the first subobject with
1240
    // exclusively allocated space or zero-extent array.
1241
40
    if (!Base->Class->isEmpty() && !HandledFirstNonOverlappingEmptyField) {
1242
      // By handling a base class that is not empty, we're handling the
1243
      // "first (inherited) member".
1244
22
      HandledFirstNonOverlappingEmptyField = true;
1245
18
    } else {
1246
18
      UnpackedPreferredBaseAlign = UnpackedBaseAlign;
1247
18
      PreferredBaseAlign = BaseAlign;
1248
18
    }
1249
40
  }
1250
43.2k
1251
43.2k
  CharUnits UnpackedAlignTo = !DefaultsToAIXPowerAlignment
1252
43.1k
                                  ? UnpackedBaseAlign
1253
40
                                  : UnpackedPreferredBaseAlign;
1254
  // If we have an empty base class, try to place it at offset 0.
1255
43.2k
  if (Base->Class->isEmpty() &&
1256
24.7k
      (!HasExternalLayout || Offset == CharUnits::Zero()) &&
1257
24.7k
      EmptySubobjects->CanPlaceBaseAtOffset(Base, CharUnits::Zero())) {
1258
24.6k
    setSize(std::max(getSize(), Layout.getSize()));
1259
24.6k
    UpdateAlignment(BaseAlign, UnpackedAlignTo, PreferredBaseAlign);
1260
24.6k
1261
24.6k
    return CharUnits::Zero();
1262
24.6k
  }
1263
18.5k
1264
  // The maximum field alignment overrides the base align/(AIX-only) preferred
1265
  // base align.
1266
18.5k
  if (!MaxFieldAlignment.isZero()) {
1267
25
    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
1268
25
    PreferredBaseAlign = std::min(PreferredBaseAlign, MaxFieldAlignment);
1269
25
    UnpackedAlignTo = std::min(UnpackedAlignTo, MaxFieldAlignment);
1270
25
  }
1271
18.5k
1272
18.5k
  CharUnits AlignTo =
1273
18.5k
      !DefaultsToAIXPowerAlignment ? BaseAlign : PreferredBaseAlign;
1274
18.5k
  if (!HasExternalLayout) {
1275
    // Round up the current record size to the base's alignment boundary.
1276
17.0k
    Offset = getDataSize().alignTo(AlignTo);
1277
17.0k
1278
    // Try to place the base.
1279
17.1k
    while (!EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset))
1280
36
      Offset += AlignTo;
1281
1.50k
  } else {
1282
1.50k
    bool Allowed = EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset);
1283
1.50k
    (void)Allowed;
1284
1.50k
    assert(Allowed && "Base subobject externally placed at overlapping offset");
1285
1.50k
1286
1.50k
    if (InferAlignment && Offset < getDataSize().alignTo(AlignTo)) {
1287
      // The externally-supplied base offset is before the base offset we
1288
      // computed. Assume that the structure is packed.
1289
0
      Alignment = CharUnits::One();
1290
0
      InferAlignment = false;
1291
0
    }
1292
1.50k
  }
1293
18.5k
1294
18.5k
  if (!Base->Class->isEmpty()) {
1295
    // Update the data size.
1296
18.5k
    setDataSize(Offset + Layout.getNonVirtualSize());
1297
18.5k
1298
18.5k
    setSize(std::max(getSize(), getDataSize()));
1299
18.5k
  } else
1300
68
    setSize(std::max(getSize(), Offset + Layout.getSize()));
1301
18.5k
1302
  // Remember max struct/class alignment.
1303
18.5k
  UpdateAlignment(BaseAlign, UnpackedAlignTo, PreferredBaseAlign);
1304
18.5k
1305
18.5k
  return Offset;
1306
18.5k
}
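Illustrative aside: for a non-empty base, LayoutBase rounds the current data size up to the base's (or, on AIX, preferred) alignment before placing it, which is where inter-base padding comes from. The expected numbers below assume a common LP64 Itanium ABI target.

// base_alignment_sketch.cpp -- standalone, assumes an LP64 Itanium ABI target.
#include <cstdio>

struct B1 { int I; };     // size 4, align 4
struct B2 { double D; };  // size 8, align 8
struct D : B1, B2 { char C; };

int main() {
  // Commonly prints "24": B1 at offset 0, B2 rounded up to offset 8,
  // C at offset 16, then the total is padded to 8-byte alignment.
  std::printf("%zu\n", sizeof(D));
  return 0;
}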
1307
1308
311k
void ItaniumRecordLayoutBuilder::InitializeLayout(const Decl *D) {
1309
311k
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
1310
305k
    IsUnion = RD->isUnion();
1311
305k
    IsMsStruct = RD->isMsStruct(Context);
1312
305k
  }
1313
311k
1314
311k
  Packed = D->hasAttr<PackedAttr>();
1315
311k
  HandledFirstNonOverlappingEmptyField =
1316
311k
      !Context.getTargetInfo().defaultsToAIXPowerAlignment();
1317
311k
1318
  // Honor the default struct packing maximum alignment flag.
1319
311k
  if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct) {
1320
2
    MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
1321
2
  }
1322
311k
1323
  // mac68k alignment supersedes maximum field alignment and attribute aligned,
1324
  // and forces all structures to have 2-byte alignment. The IBM docs on it
1325
  // allude to additional (more complicated) semantics, especially with regard
1326
  // to bit-fields, but gcc appears not to follow that.
1327
311k
  if (D->hasAttr<AlignMac68kAttr>()) {
1328
12
    IsMac68kAlign = true;
1329
12
    MaxFieldAlignment = CharUnits::fromQuantity(2);
1330
12
    Alignment = CharUnits::fromQuantity(2);
1331
12
    PreferredAlignment = CharUnits::fromQuantity(2);
1332
311k
  } else {
1333
311k
    if (const MaxFieldAlignmentAttr *MFAA = D->getAttr<MaxFieldAlignmentAttr>())
1334
53.4k
      MaxFieldAlignment = Context.toCharUnitsFromBits(MFAA->getAlignment());
1335
311k
1336
311k
    if (unsigned MaxAlign = D->getMaxAlignment())
1337
3.84k
      UpdateAlignment(Context.toCharUnitsFromBits(MaxAlign));
1338
311k
  }
1339
311k
1340
  // If there is an external AST source, ask it for the various offsets.
1341
311k
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D))
1342
305k
    if (ExternalASTSource *Source = Context.getExternalSource()) {
1343
112k
      UseExternalLayout = Source->layoutRecordType(
1344
112k
          RD, External.Size, External.Align, External.FieldOffsets,
1345
112k
          External.BaseOffsets, External.VirtualBaseOffsets);
1346
112k
1347
      // Update based on external alignment.
1348
112k
      if (UseExternalLayout) {
1349
11.8k
        if (External.Align > 0) {
1350
7.60k
          Alignment = Context.toCharUnitsFromBits(External.Align);
1351
7.60k
          PreferredAlignment = Context.toCharUnitsFromBits(External.Align);
1352
4.24k
        } else {
1353
          // The external source didn't have alignment information; infer it.
1354
4.24k
          InferAlignment = true;
1355
4.24k
        }
1356
11.8k
      }
1357
112k
    }
1358
311k
}
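The MaxFieldAlignment bookkeeping above is what #pragma pack feeds; a hedged sketch of its effect, assuming a common target where int is 4 bytes and 4-byte aligned (the struct name is invented).

#pragma pack(push, 1)   // every field alignment below is capped at 1 byte
struct PackedPair {
  char c;               // offset 0
  int  i;               // offset 1: the usual 3 bytes of padding are suppressed
};
#pragma pack(pop)

static_assert(sizeof(PackedPair) == 5, "pack(1) caps field alignment at one byte");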
1359
1360
119k
void ItaniumRecordLayoutBuilder::Layout(const RecordDecl *D) {
1361
119k
  InitializeLayout(D);
1362
119k
  LayoutFields(D);
1363
119k
1364
  // Finally, round the size of the total struct up to the alignment of the
1365
  // struct itself.
1366
119k
  FinishLayout(D);
1367
119k
}
1368
1369
186k
void ItaniumRecordLayoutBuilder::Layout(const CXXRecordDecl *RD) {
1370
186k
  InitializeLayout(RD);
1371
186k
1372
  // Lay out the vtable and the non-virtual bases.
1373
186k
  LayoutNonVirtualBases(RD);
1374
186k
1375
186k
  LayoutFields(RD);
1376
186k
1377
186k
  NonVirtualSize = Context.toCharUnitsFromBits(
1378
186k
      llvm::alignTo(getSizeInBits(), Context.getTargetInfo().getCharAlign()));
1379
186k
  NonVirtualAlignment = Alignment;
1380
186k
  PreferredNVAlignment = PreferredAlignment;
1381
186k
1382
  // Lay out the virtual bases and add the primary virtual base offsets.
1383
186k
  LayoutVirtualBases(RD, RD);
1384
186k
1385
  // Finally, round the size of the total struct up to the alignment
1386
  // of the struct itself.
1387
186k
  FinishLayout(RD);
1388
186k
1389
186k
#ifndef NDEBUG
1390
  // Check that we have base offsets for all bases.
1391
42.9k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
1392
42.9k
    if (Base.isVirtual())
1393
738
      continue;
1394
42.2k
1395
42.2k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
1396
42.2k
1397
42.2k
    assert(Bases.count(BaseDecl) && "Did not find base offset!");
1398
42.2k
  }
1399
186k
1400
  // And all virtual bases.
1401
1.15k
  for (const CXXBaseSpecifier &Base : RD->vbases()) {
1402
1.15k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
1403
1.15k
1404
1.15k
    assert(VBases.count(BaseDecl) && "Did not find base offset!");
1405
1.15k
  }
1406
186k
#endif
1407
186k
}
1408
1409
5.29k
void ItaniumRecordLayoutBuilder::Layout(const ObjCInterfaceDecl *D) {
1410
5.29k
  if (ObjCInterfaceDecl *SD = D->getSuperClass()) {
1411
4.09k
    const ASTRecordLayout &SL = Context.getASTObjCInterfaceLayout(SD);
1412
4.09k
1413
4.09k
    UpdateAlignment(SL.getAlignment());
1414
4.09k
1415
    // We start laying out ivars not at the end of the superclass
1416
    // structure, but at the next byte following the last field.
1417
4.09k
    setDataSize(SL.getDataSize());
1418
4.09k
    setSize(getDataSize());
1419
4.09k
  }
1420
5.29k
1421
5.29k
  InitializeLayout(D);
1422
  // Layout each ivar sequentially.
1423
12.5k
  for (const ObjCIvarDecl *IVD = D->all_declared_ivar_begin(); IVD;
1424
7.22k
       IVD = IVD->getNextIvar())
1425
7.22k
    LayoutField(IVD, false);
1426
5.29k
1427
  // Finally, round the size of the total struct up to the alignment of the
1428
  // struct itself.
1429
5.29k
  FinishLayout(D);
1430
5.29k
}
1431
1432
305k
void ItaniumRecordLayoutBuilder::LayoutFields(const RecordDecl *D) {
1433
  // Layout each field, for now, just sequentially, respecting alignment.  In
1434
  // the future, this will need to be tweakable by targets.
1435
305k
  bool InsertExtraPadding = D->mayInsertExtraPadding(/*EmitRemark=*/true);
1436
305k
  bool HasFlexibleArrayMember = D->hasFlexibleArrayMember();
1437
1.09M
  for (auto I = D->field_begin(), End = D->field_end(); I != End; 
++I785k
) {
1438
785k
    auto Next(I);
1439
785k
    ++Next;
1440
785k
    LayoutField(*I,
1441
785k
                InsertExtraPadding && 
(42
Next != End42
||
!HasFlexibleArrayMember16
));
1442
785k
  }
1443
305k
}
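A hedged sketch of the flexible-array-member case the loop above special-cases (the type name is invented): with -fsanitize=address field padding, each field may receive extra tail padding, but the final field of a record that ends in a flexible array member does not.

struct Packet {
  int  length;   // may receive ASan tail padding like any other field
  char data[];   // flexible array member: last field, so no extra padding is appended to it
};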
1444
1445
// Rounds the specified size to have it a multiple of the char size.
1446
static uint64_t
1447
roundUpSizeToCharAlignment(uint64_t Size,
1448
60
                           const ASTContext &Context) {
1449
60
  uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
1450
60
  return llvm::alignTo(Size, CharAlignment);
1451
60
}
1452
1453
void ItaniumRecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize,
1454
                                                    uint64_t TypeSize,
1455
                                                    bool FieldPacked,
1456
31
                                                    const FieldDecl *D) {
1457
31
  assert(Context.getLangOpts().CPlusPlus &&
1458
31
         "Can only have wide bit-fields in C++!");
1459
31
1460
  // Itanium C++ ABI 2.4:
1461
  //   If sizeof(T)*8 < n, let T' be the largest integral POD type with
1462
  //   sizeof(T')*8 <= n.
1463
31
1464
31
  QualType IntegralPODTypes[] = {
1465
31
    Context.UnsignedCharTy, Context.UnsignedShortTy, Context.UnsignedIntTy,
1466
31
    Context.UnsignedLongTy, Context.UnsignedLongLongTy
1467
31
  };
1468
31
1469
31
  QualType Type;
1470
127
  for (const QualType &QT : IntegralPODTypes) {
1471
127
    uint64_t Size = Context.getTypeSize(QT);
1472
127
1473
127
    if (Size > FieldSize)
1474
17
      break;
1475
110
1476
110
    Type = QT;
1477
110
  }
1478
31
  assert(!Type.isNull() && "Did not find a type!");
1479
31
1480
31
  CharUnits TypeAlign = Context.getTypeAlignInChars(Type);
1481
31
1482
  // We're not going to use any of the unfilled bits in the last byte.
1483
31
  UnfilledBitsInLastUnit = 0;
1484
31
  LastBitfieldTypeSize = 0;
1485
31
1486
31
  uint64_t FieldOffset;
1487
31
  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
1488
31
1489
31
  if (IsUnion) {
1490
9
    uint64_t RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize,
1491
9
                                                           Context);
1492
9
    setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
1493
9
    FieldOffset = 0;
1494
22
  } else {
1495
    // The bitfield is allocated starting at the next offset aligned
1496
    // appropriately for T', with length n bits.
1497
22
    FieldOffset = llvm::alignTo(getDataSizeInBits(), Context.toBits(TypeAlign));
1498
22
1499
22
    uint64_t NewSizeInBits = FieldOffset + FieldSize;
1500
22
1501
22
    setDataSize(
1502
22
        llvm::alignTo(NewSizeInBits, Context.getTargetInfo().getCharAlign()));
1503
22
    UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
1504
22
  }
1505
31
1506
  // Place this field at the current location.
1507
31
  FieldOffsets.push_back(FieldOffset);
1508
31
1509
31
  CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, FieldOffset,
1510
31
                    Context.toBits(TypeAlign), FieldPacked, D);
1511
31
1512
  // Update the size.
1513
31
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1514
31
1515
  // Remember max struct/class alignment.
1516
31
  UpdateAlignment(TypeAlign);
1517
31
}
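A hedged walkthrough of the wide-bit-field rule implemented above, assuming a typical LP64 Itanium target (the struct name is invented; bit-fields wider than their declared type are valid only in C++).

struct Wide {
  char c : 48;   // wider than 'char': the extra bits are padding
};
// Following the code above: the candidate types are scanned until one exceeds
// 48 bits, leaving T' = unsigned int (32 bits), so the field is placed at an
// offset aligned for unsigned int and the record inherits that alignment.
// Expected, though not verified here: alignof(Wide) == 4 and sizeof(Wide) == 8.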
1518
1519
18.3k
void ItaniumRecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
1520
18.3k
  bool FieldPacked = Packed || 
D->hasAttr<PackedAttr>()17.9k
;
1521
18.3k
  uint64_t FieldSize = D->getBitWidthValue(Context);
1522
18.3k
  TypeInfo FieldInfo = Context.getTypeInfo(D->getType());
1523
18.3k
  uint64_t TypeSize = FieldInfo.Width;
1524
18.3k
  unsigned FieldAlign = FieldInfo.Align;
1525
18.3k
1526
  // UnfilledBitsInLastUnit is the difference between the end of the
1527
  // last allocated bitfield (i.e. the first bit offset available for
1528
  // bitfields) and the end of the current data size in bits (i.e. the
1529
  // first bit offset available for non-bitfields).  The current data
1530
  // size in bits is always a multiple of the char size; additionally,
1531
  // for ms_struct records it's also a multiple of the
1532
  // LastBitfieldTypeSize (if set).
1533
18.3k
1534
  // The struct-layout algorithm is dictated by the platform ABI,
1535
  // which in principle could use almost any rules it likes.  In
1536
  // practice, UNIXy targets tend to inherit the algorithm described
1537
  // in the System V generic ABI.  The basic bitfield layout rule in
1538
  // System V is to place bitfields at the next available bit offset
1539
  // where the entire bitfield would fit in an aligned storage unit of
1540
  // the declared type; it's okay if an earlier or later non-bitfield
1541
  // is allocated in the same storage unit.  However, some targets
1542
  // (those that !useBitFieldTypeAlignment(), e.g. ARM APCS) don't
1543
  // require this storage unit to be aligned, and therefore always put
1544
  // the bitfield at the next available bit offset.
1545
18.3k
1546
  // ms_struct basically requests a complete replacement of the
1547
  // platform ABI's struct-layout algorithm, with the high-level goal
1548
  // of duplicating MSVC's layout.  For non-bitfields, this follows
1549
  // the standard algorithm.  The basic bitfield layout rule is to
1550
  // allocate an entire unit of the bitfield's declared type
1551
  // (e.g. 'unsigned long'), then parcel it up among successive
1552
  // bitfields whose declared types have the same size, making a new
1553
  // unit as soon as the last can no longer store the whole value.
1554
  // Since it completely replaces the platform ABI's algorithm,
1555
  // settings like !useBitFieldTypeAlignment() do not apply.
1556
18.3k
1557
  // A zero-width bitfield forces the use of a new storage unit for
1558
  // later bitfields.  In general, this occurs by rounding up the
1559
  // current size of the struct as if the algorithm were about to
1560
  // place a non-bitfield of the field's formal type.  Usually this
1561
  // does not change the alignment of the struct itself, but it does
1562
  // on some targets (those that useZeroLengthBitfieldAlignment(),
1563
  // e.g. ARM).  In ms_struct layout, zero-width bitfields are
1564
  // ignored unless they follow a non-zero-width bitfield.
1565
18.3k
1566
  // A field alignment restriction (e.g. from #pragma pack) or
1567
  // specification (e.g. from __attribute__((aligned))) changes the
1568
  // formal alignment of the field.  For System V, this alters the
1569
  // required alignment of the notional storage unit that must contain
1570
  // the bitfield.  For ms_struct, this only affects the placement of
1571
  // new storage units.  In both cases, the effect of #pragma pack is
1572
  // ignored on zero-width bitfields.
1573
18.3k
1574
  // On System V, a packed field (e.g. from #pragma pack or
1575
  // __attribute__((packed))) always uses the next available bit
1576
  // offset.
1577
18.3k
1578
  // In an ms_struct struct, the alignment of a fundamental type is
1579
  // always equal to its size.  This is necessary in order to mimic
1580
  // the i386 alignment rules on targets which might not fully align
1581
  // all types (e.g. Darwin PPC32, where alignof(long long) == 4).
1582
18.3k
1583
  // First, some simple bookkeeping to perform for ms_struct structs.
1584
18.3k
  if (IsMsStruct) {
1585
    // The field alignment for integer types is always the size.
1586
307
    FieldAlign = TypeSize;
1587
307
1588
    // If the previous field was not a bitfield, or was a bitfield
1589
    // with a different storage unit size, or if this field doesn't fit into
1590
    // the current storage unit, we're done with that storage unit.
1591
307
    if (LastBitfieldTypeSize != TypeSize ||
1592
272
        
UnfilledBitsInLastUnit < FieldSize55
) {
1593
      // Also, ignore zero-length bitfields after non-bitfields.
1594
272
      if (!LastBitfieldTypeSize && 
!FieldSize173
)
1595
93
        FieldAlign = 1;
1596
272
1597
272
      UnfilledBitsInLastUnit = 0;
1598
272
      LastBitfieldTypeSize = 0;
1599
272
    }
1600
307
  }
1601
18.3k
1602
  // If the field is wider than its declared type, it follows
1603
  // different rules in all cases.
1604
18.3k
  if (FieldSize > TypeSize) {
1605
31
    LayoutWideBitField(FieldSize, TypeSize, FieldPacked, D);
1606
31
    return;
1607
31
  }
1608
18.3k
1609
  // Compute the next available bit offset.
1610
18.3k
  uint64_t FieldOffset =
1611
18.2k
    IsUnion ? 
059
: (getDataSizeInBits() - UnfilledBitsInLastUnit);
1612
18.3k
1613
  // Handle targets that don't honor bitfield type alignment.
1614
18.3k
  if (!IsMsStruct && 
!Context.getTargetInfo().useBitFieldTypeAlignment()18.0k
) {
1615
    // Some such targets do honor it on zero-width bitfields.
1616
95
    if (FieldSize == 0 &&
1617
47
        Context.getTargetInfo().useZeroLengthBitfieldAlignment()) {
1618
      // The alignment to round up to is the max of the field's natural
1619
      // alignment and a target-specific fixed value (sometimes zero).
1620
45
      unsigned ZeroLengthBitfieldBoundary =
1621
45
        Context.getTargetInfo().getZeroLengthBitfieldBoundary();
1622
45
      FieldAlign = std::max(FieldAlign, ZeroLengthBitfieldBoundary);
1623
45
1624
    // If that doesn't apply, just ignore the field alignment.
1625
50
    } else {
1626
50
      FieldAlign = 1;
1627
50
    }
1628
95
  }
1629
18.3k
1630
  // Remember the alignment we would have used if the field were not packed.
1631
18.3k
  unsigned UnpackedFieldAlign = FieldAlign;
1632
18.3k
1633
  // Ignore the field alignment if the field is packed unless it has zero-size.
1634
18.3k
  if (!IsMsStruct && 
FieldPacked18.0k
&&
FieldSize != 0505
)
1635
490
    FieldAlign = 1;
1636
18.3k
1637
  // But, if there's an 'aligned' attribute on the field, honor that.
1638
18.3k
  unsigned ExplicitFieldAlign = D->getMaxAlignment();
1639
18.3k
  if (ExplicitFieldAlign) {
1640
156
    FieldAlign = std::max(FieldAlign, ExplicitFieldAlign);
1641
156
    UnpackedFieldAlign = std::max(UnpackedFieldAlign, ExplicitFieldAlign);
1642
156
  }
1643
18.3k
1644
  // But, if there's a #pragma pack in play, that takes precedent over
1645
  // even the 'aligned' attribute, for non-zero-width bitfields.
1646
18.3k
  unsigned MaxFieldAlignmentInBits = Context.toBits(MaxFieldAlignment);
1647
18.3k
  if (!MaxFieldAlignment.isZero() && 
FieldSize4.83k
) {
1648
4.83k
    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignmentInBits);
1649
4.83k
    if (FieldPacked)
1650
34
      FieldAlign = UnpackedFieldAlign;
1651
4.79k
    else
1652
4.79k
      FieldAlign = std::min(FieldAlign, MaxFieldAlignmentInBits);
1653
4.83k
  }
1654
18.3k
1655
  // But, ms_struct just ignores all of that in unions, even explicit
1656
  // alignment attributes.
1657
18.3k
  if (IsMsStruct && 
IsUnion307
) {
1658
8
    FieldAlign = UnpackedFieldAlign = 1;
1659
8
  }
1660
18.3k
1661
  // For purposes of diagnostics, we're going to simultaneously
1662
  // compute the field offsets that we would have used if we weren't
1663
  // adding any alignment padding or if the field weren't packed.
1664
18.3k
  uint64_t UnpaddedFieldOffset = FieldOffset;
1665
18.3k
  uint64_t UnpackedFieldOffset = FieldOffset;
1666
18.3k
1667
  // Check if we need to add padding to fit the bitfield within an
1668
  // allocation unit with the right size and alignment.  The rules are
1669
  // somewhat different here for ms_struct structs.
1670
18.3k
  if (IsMsStruct) {
1671
    // If it's not a zero-width bitfield, and we can fit the bitfield
1672
    // into the active storage unit (and we haven't already decided to
1673
    // start a new storage unit), just do so, regardless of any other
1674
    // consideration.  Otherwise, round up to the right alignment.
1675
307
    if (FieldSize == 0 || 
FieldSize > UnfilledBitsInLastUnit199
) {
1676
275
      FieldOffset = llvm::alignTo(FieldOffset, FieldAlign);
1677
275
      UnpackedFieldOffset =
1678
275
          llvm::alignTo(UnpackedFieldOffset, UnpackedFieldAlign);
1679
275
      UnfilledBitsInLastUnit = 0;
1680
275
    }
1681
307
1682
18.0k
  } else {
1683
    // #pragma pack, with any value, suppresses the insertion of padding.
1684
18.0k
    bool AllowPadding = MaxFieldAlignment.isZero();
1685
18.0k
1686
    // Compute the real offset.
1687
18.0k
    if (FieldSize == 0 ||
1688
17.6k
        (AllowPadding &&
1689
12.9k
         (FieldOffset & (FieldAlign-1)) + FieldSize > TypeSize)) {
1690
474
      FieldOffset = llvm::alignTo(FieldOffset, FieldAlign);
1691
17.5k
    } else if (ExplicitFieldAlign &&
1692
127
               (MaxFieldAlignmentInBits == 0 ||
1693
24
                ExplicitFieldAlign <= MaxFieldAlignmentInBits) &&
1694
119
               Context.getTargetInfo().useExplicitBitFieldAlignment()) {
1695
      // TODO: figure out what needs to be done on targets that don't honor
1696
      // bit-field type alignment like ARM APCS ABI.
1697
99
      FieldOffset = llvm::alignTo(FieldOffset, ExplicitFieldAlign);
1698
99
    }
1699
18.0k
1700
    // Repeat the computation for diagnostic purposes.
1701
18.0k
    if (FieldSize == 0 ||
1702
17.6k
        (AllowPadding &&
1703
12.9k
         (UnpackedFieldOffset & (UnpackedFieldAlign-1)) + FieldSize > TypeSize))
1704
525
      UnpackedFieldOffset =
1705
525
          llvm::alignTo(UnpackedFieldOffset, UnpackedFieldAlign);
1706
17.5k
    else if (ExplicitFieldAlign &&
1707
122
             (MaxFieldAlignmentInBits == 0 ||
1708
24
              ExplicitFieldAlign <= MaxFieldAlignmentInBits) &&
1709
114
             Context.getTargetInfo().useExplicitBitFieldAlignment())
1710
95
      UnpackedFieldOffset =
1711
95
          llvm::alignTo(UnpackedFieldOffset, ExplicitFieldAlign);
1712
18.0k
  }
1713
18.3k
1714
  // If we're using external layout, give the external layout a chance
1715
  // to override this information.
1716
18.3k
  if (UseExternalLayout)
1717
1.64k
    FieldOffset = updateExternalFieldOffset(D, FieldOffset);
1718
18.3k
1719
  // Okay, place the bitfield at the calculated offset.
1720
18.3k
  FieldOffsets.push_back(FieldOffset);
1721
18.3k
1722
  // Bookkeeping:
1723
18.3k
1724
  // Anonymous members don't affect the overall record alignment,
1725
  // except on targets where they do.
1726
18.3k
  if (!IsMsStruct &&
1727
18.0k
      !Context.getTargetInfo().useZeroLengthBitfieldAlignment() &&
1728
17.1k
      !D->getIdentifier())
1729
1.46k
    FieldAlign = UnpackedFieldAlign = 1;
1730
18.3k
1731
  // Diagnose differences in layout due to padding or packing.
1732
18.3k
  if (!UseExternalLayout)
1733
16.7k
    CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, UnpackedFieldOffset,
1734
16.7k
                      UnpackedFieldAlign, FieldPacked, D);
1735
18.3k
1736
  // Update DataSize to include the last byte containing (part of) the bitfield.
1737
18.3k
1738
  // For unions, this is just a max operation, as usual.
1739
18.3k
  if (IsUnion) {
1740
    // For ms_struct, allocate the entire storage unit --- unless this
1741
    // is a zero-width bitfield, in which case just use a size of 1.
1742
59
    uint64_t RoundedFieldSize;
1743
59
    if (IsMsStruct) {
1744
8
      RoundedFieldSize =
1745
5
        (FieldSize ? TypeSize : 
Context.getTargetInfo().getCharWidth()3
);
1746
8
1747
    // Otherwise, allocate just the number of bytes required to store
1748
    // the bitfield.
1749
51
    } else {
1750
51
      RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize, Context);
1751
51
    }
1752
59
    setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
1753
59
1754
  // For non-zero-width bitfields in ms_struct structs, allocate a new
1755
  // storage unit if necessary.
1756
18.2k
  } else if (IsMsStruct && 
FieldSize299
) {
1757
    // We should have cleared UnfilledBitsInLastUnit in every case
1758
    // where we changed storage units.
1759
194
    if (!UnfilledBitsInLastUnit) {
1760
162
      setDataSize(FieldOffset + TypeSize);
1761
162
      UnfilledBitsInLastUnit = TypeSize;
1762
162
    }
1763
194
    UnfilledBitsInLastUnit -= FieldSize;
1764
194
    LastBitfieldTypeSize = TypeSize;
1765
194
1766
  // Otherwise, bump the data size up to include the bitfield,
1767
  // including padding up to char alignment, and then remember how
1768
  // many bits we didn't use.
1769
18.1k
  } else {
1770
18.1k
    uint64_t NewSizeInBits = FieldOffset + FieldSize;
1771
18.1k
    uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
1772
18.1k
    setDataSize(llvm::alignTo(NewSizeInBits, CharAlignment));
1773
18.1k
    UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
1774
18.1k
1775
    // The only time we can get here for an ms_struct is if this is a
1776
    // zero-width bitfield, which doesn't count as anything for the
1777
    // purposes of unfilled bits.
1778
18.1k
    LastBitfieldTypeSize = 0;
1779
18.1k
  }
1780
18.3k
1781
  // Update the size.
1782
18.3k
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1783
18.3k
1784
  // Remember max struct/class alignment.
1785
18.3k
  UnadjustedAlignment =
1786
18.3k
      std::max(UnadjustedAlignment, Context.toCharUnitsFromBits(FieldAlign));
1787
18.3k
  UpdateAlignment(Context.toCharUnitsFromBits(FieldAlign),
1788
18.3k
                  Context.toCharUnitsFromBits(UnpackedFieldAlign));
1789
18.3k
}
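To make the System V placement rule described above concrete, a hedged example assuming a typical x86-64 target with a 4-byte, 4-byte-aligned int (struct names invented).

struct Fits {
  char a;        // bits 0..7
  int  b : 24;   // bits 8..31: the whole field fits in the int storage unit at bit 0
};

struct Spills {
  char a;        // bits 0..7
  int  b : 28;   // 8 + 28 > 32, so the field starts a new int storage unit at bit 32
};

// Expected on a typical x86-64 System V target:
static_assert(sizeof(Fits) == 4, "24-bit field shares the first int unit");
static_assert(sizeof(Spills) == 8, "28-bit field needs a second int unit");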
1790
1791
void ItaniumRecordLayoutBuilder::LayoutField(const FieldDecl *D,
1792
792k
                                             bool InsertExtraPadding) {
1793
792k
  auto *FieldClass = D->getType()->getAsCXXRecordDecl();
1794
792k
  bool PotentiallyOverlapping = D->hasAttr<NoUniqueAddressAttr>() && 
FieldClass90
;
1795
792k
  bool IsOverlappingEmptyField =
1796
792k
      PotentiallyOverlapping && 
FieldClass->isEmpty()80
;
1797
792k
1798
792k
  CharUnits FieldOffset =
1799
792k
      (IsUnion || 
IsOverlappingEmptyField745k
) ?
CharUnits::Zero()46.5k
:
getDataSize()745k
;
1800
792k
1801
792k
  const bool DefaultsToAIXPowerAlignment =
1802
792k
      Context.getTargetInfo().defaultsToAIXPowerAlignment();
1803
792k
  bool FoundFirstNonOverlappingEmptyFieldForAIX = false;
1804
792k
  if (DefaultsToAIXPowerAlignment && 
!HandledFirstNonOverlappingEmptyField214
) {
1805
138
    assert(FieldOffset == CharUnits::Zero() &&
1806
138
           "The first non-overlapping empty field should have been handled.");
1807
138
1808
138
    if (!IsOverlappingEmptyField) {
1809
130
      FoundFirstNonOverlappingEmptyFieldForAIX = true;
1810
130
1811
      // We're going to handle the "first member" based on
1812
      // `FoundFirstNonOverlappingEmptyFieldForAIX` during the current
1813
      // invocation of this function; record it as handled for future
1814
      // invocations (except for unions, because the current field does not
1815
      // represent all "firsts").
1816
130
      HandledFirstNonOverlappingEmptyField = !IsUnion;
1817
130
    }
1818
138
  }
1819
792k
1820
792k
  if (D->isBitField()) {
1821
18.3k
    LayoutBitField(D);
1822
18.3k
    return;
1823
18.3k
  }
1824
773k
1825
773k
  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
1826
  // Reset the unfilled bits.
1827
773k
  UnfilledBitsInLastUnit = 0;
1828
773k
  LastBitfieldTypeSize = 0;
1829
773k
1830
773k
  bool FieldPacked = Packed || 
D->hasAttr<PackedAttr>()746k
;
1831
773k
1832
773k
  bool AlignIsRequired = false;
1833
773k
  CharUnits FieldSize;
1834
773k
  CharUnits FieldAlign;
1835
  // The amount of this class's dsize occupied by the field.
1836
  // This is equal to FieldSize unless we're permitted to pack
1837
  // into the field's tail padding.
1838
773k
  CharUnits EffectiveFieldSize;
1839
773k
1840
769k
  auto setDeclInfo = [&](bool IsIncompleteArrayType) {
1841
769k
    auto TI = Context.getTypeInfoInChars(D->getType());
1842
769k
    FieldAlign = TI.second;
1843
    // Flexible array members don't have any size, but they have to be
1844
    // aligned appropriately for their element type.
1845
769k
    EffectiveFieldSize = FieldSize =
1846
769k
        IsIncompleteArrayType ? 
CharUnits::Zero()137
: TI.first;
1847
769k
    AlignIsRequired = Context.getTypeInfo(D->getType()).AlignIsRequired;
1848
769k
  };
1849
773k
1850
773k
  if (D->getType()->isIncompleteArrayType()) {
1851
137
    setDeclInfo(true /* IsIncompleteArrayType */);
1852
773k
  } else if (const ReferenceType *RT = D->getType()->getAs<ReferenceType>()) {
1853
4.46k
    unsigned AS = Context.getTargetAddressSpace(RT->getPointeeType());
1854
4.46k
    EffectiveFieldSize = FieldSize = Context.toCharUnitsFromBits(
1855
4.46k
        Context.getTargetInfo().getPointerWidth(AS));
1856
4.46k
    FieldAlign = Context.toCharUnitsFromBits(
1857
4.46k
        Context.getTargetInfo().getPointerAlign(AS));
1858
769k
  } else {
1859
769k
    setDeclInfo(false /* IsIncompleteArrayType */);
1860
769k
1861
    // A potentially-overlapping field occupies its dsize or nvsize, whichever
1862
    // is larger.
1863
769k
    if (PotentiallyOverlapping) {
1864
80
      const ASTRecordLayout &Layout = Context.getASTRecordLayout(FieldClass);
1865
80
      EffectiveFieldSize =
1866
80
          std::max(Layout.getNonVirtualSize(), Layout.getDataSize());
1867
80
    }
1868
769k
1869
769k
    if (IsMsStruct) {
1870
      // If MS bitfield layout is required, figure out what type is being
1871
      // laid out and align the field to the width of that type.
1872
173
1873
      // Resolve all typedefs down to their base type and round up the field
1874
      // alignment if necessary.
1875
173
      QualType T = Context.getBaseElementType(D->getType());
1876
173
      if (const BuiltinType *BTy = T->getAs<BuiltinType>()) {
1877
166
        CharUnits TypeSize = Context.getTypeSizeInChars(BTy);
1878
166
1879
166
        if (!llvm::isPowerOf2_64(TypeSize.getQuantity())) {
1880
4
          assert(
1881
4
              !Context.getTargetInfo().getTriple().isWindowsMSVCEnvironment() &&
1882
4
              "Non PowerOf2 size in MSVC mode");
1883
          // Base types with sizes that aren't a power of two don't work
1884
          // with the layout rules for MS structs. This isn't an issue in
1885
          // MSVC itself since there are no such base data types there.
1886
          // On e.g. x86_32 mingw and linux, long double is 12 bytes though.
1887
          // Any structs involving that data type obviously can't be ABI
1888
          // compatible with MSVC regardless of how it is laid out.
1889
4
1890
          // Since ms_struct can be mass enabled (via a pragma or via the
1891
          // -mms-bitfields command line parameter), this can trigger for
1892
          // structs that don't actually need MSVC compatibility, so we
1893
          // need to be able to sidestep the ms_struct layout for these types.
1894
4
1895
          // Since the combination of -mms-bitfields together with structs
1896
          // like max_align_t (which contains a long double) for mingw is
1897
          // quite common (and GCC handles it silently), just handle it
1898
          // silently there. For other targets that have ms_struct enabled
1899
          // (most probably via a pragma or attribute), trigger a diagnostic
1900
          // that defaults to an error.
1901
4
          if (!Context.getTargetInfo().getTriple().isWindowsGNUEnvironment())
1902
2
            Diag(D->getLocation(), diag::warn_npot_ms_struct);
1903
4
        }
1904
166
        if (TypeSize > FieldAlign &&
1905
13
            llvm::isPowerOf2_64(TypeSize.getQuantity()))
1906
9
          FieldAlign = TypeSize;
1907
166
      }
1908
173
    }
1909
769k
  }
1910
773k
1911
  // The AIX `power` alignment rules apply the natural alignment of the
1912
  // "first member" if it is of a floating-point data type (or is an aggregate
1913
  // whose recursively "first" member or element is such a type). The alignment
1914
  // associated with these types for subsequent members use an alignment value
1915
  // where the floating-point data type is considered to have 4-byte alignment.
1916
  //
1917
  // For the purposes of the foregoing: vtable pointers, non-empty base classes,
1918
  // and zero-width bit-fields count as prior members; members of empty class
1919
  // types marked `no_unique_address` are not considered to be prior members.
1920
773k
  CharUnits PreferredAlign = FieldAlign;
1921
773k
  if (DefaultsToAIXPowerAlignment && 
!AlignIsRequired210
&&
1922
206
      FoundFirstNonOverlappingEmptyFieldForAIX) {
1923
102
    auto performBuiltinTypeAlignmentUpgrade = [&](const BuiltinType *BTy) {
1924
102
      if (BTy->getKind() == BuiltinType::Double ||
1925
66
          BTy->getKind() == BuiltinType::LongDouble) {
1926
38
        assert(PreferredAlign == CharUnits::fromQuantity(4) &&
1927
38
               "No need to upgrade the alignment value.");
1928
38
        PreferredAlign = CharUnits::fromQuantity(8);
1929
38
      }
1930
102
    };
1931
122
1932
122
    const Type *Ty = D->getType()->getBaseElementTypeUnsafe();
1933
122
    if (const ComplexType *CTy = Ty->getAs<ComplexType>()) {
1934
2
      performBuiltinTypeAlignmentUpgrade(CTy->getElementType()->castAs<BuiltinType>());
1935
120
    } else if (const BuiltinType *BTy = Ty->getAs<BuiltinType>()) {
1936
100
      performBuiltinTypeAlignmentUpgrade(BTy);
1937
20
    } else if (const RecordType *RT = Ty->getAs<RecordType>()) {
1938
20
      const RecordDecl *RD = RT->getDecl();
1939
20
      assert(RD && "Expected non-null RecordDecl.");
1940
20
      const ASTRecordLayout &FieldRecord = Context.getASTRecordLayout(RD);
1941
20
      PreferredAlign = FieldRecord.getPreferredAlignment();
1942
20
    }
1943
122
  }
1944
773k
1945
  // The alignment if the field is not packed. This is to check if the attribute
1946
  // was unnecessary (-Wpacked).
1947
773k
  CharUnits UnpackedFieldAlign =
1948
773k
      !DefaultsToAIXPowerAlignment ? FieldAlign : 
PreferredAlign210
;
1949
773k
  CharUnits UnpackedFieldOffset = FieldOffset;
1950
773k
1951
773k
  if (FieldPacked) {
1952
28.2k
    FieldAlign = CharUnits::One();
1953
28.2k
    PreferredAlign = CharUnits::One();
1954
28.2k
  }
1955
773k
  CharUnits MaxAlignmentInChars =
1956
773k
      Context.toCharUnitsFromBits(D->getMaxAlignment());
1957
773k
  FieldAlign = std::max(FieldAlign, MaxAlignmentInChars);
1958
773k
  PreferredAlign = std::max(PreferredAlign, MaxAlignmentInChars);
1959
773k
  UnpackedFieldAlign = std::max(UnpackedFieldAlign, MaxAlignmentInChars);
1960
773k
1961
  // The maximum field alignment overrides the aligned attribute.
1962
773k
  if (!MaxFieldAlignment.isZero()) {
1963
212k
    FieldAlign = std::min(FieldAlign, MaxFieldAlignment);
1964
212k
    PreferredAlign = std::min(PreferredAlign, MaxFieldAlignment);
1965
212k
    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignment);
1966
212k
  }
1967
773k
1968
773k
  CharUnits AlignTo =
1969
773k
      !DefaultsToAIXPowerAlignment ? FieldAlign : 
PreferredAlign210
;
1970
  // Round up the current record size to the field's alignment boundary.
1971
773k
  FieldOffset = FieldOffset.alignTo(AlignTo);
1972
773k
  UnpackedFieldOffset = UnpackedFieldOffset.alignTo(UnpackedFieldAlign);
1973
773k
1974
773k
  if (UseExternalLayout) {
1975
16.2k
    FieldOffset = Context.toCharUnitsFromBits(
1976
16.2k
        updateExternalFieldOffset(D, Context.toBits(FieldOffset)));
1977
16.2k
1978
16.2k
    if (!IsUnion && 
EmptySubobjects13.5k
) {
1979
      // Record the fact that we're placing a field at this offset.
1980
13.5k
      bool Allowed = EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset);
1981
13.5k
      (void)Allowed;
1982
13.5k
      assert(Allowed && "Externally-placed field cannot be placed here");
1983
13.5k
    }
1984
757k
  } else {
1985
757k
    if (!IsUnion && 
EmptySubobjects713k
) {
1986
      // Check if we can place the field at this offset.
1987
262k
      while (!EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset)) {
1988
        // We couldn't place the field at the offset. Try again at a new offset.
1989
        // We try offset 0 (for an empty field) and then dsize(C) onwards.
1990
48
        if (FieldOffset == CharUnits::Zero() &&
1991
41
            getDataSize() != CharUnits::Zero())
1992
8
          FieldOffset = getDataSize().alignTo(AlignTo);
1993
40
        else
1994
40
          FieldOffset += AlignTo;
1995
48
      }
1996
262k
    }
1997
757k
  }
1998
773k
1999
  // Place this field at the current location.
2000
773k
  FieldOffsets.push_back(Context.toBits(FieldOffset));
2001
773k
2002
773k
  if (!UseExternalLayout)
2003
757k
    CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFieldOffset,
2004
757k
                      Context.toBits(UnpackedFieldOffset),
2005
757k
                      Context.toBits(UnpackedFieldAlign), FieldPacked, D);
2006
773k
2007
773k
  if (InsertExtraPadding) {
2008
36
    CharUnits ASanAlignment = CharUnits::fromQuantity(8);
2009
36
    CharUnits ExtraSizeForAsan = ASanAlignment;
2010
36
    if (FieldSize % ASanAlignment)
2011
30
      ExtraSizeForAsan +=
2012
30
          ASanAlignment - CharUnits::fromQuantity(FieldSize % ASanAlignment);
2013
36
    EffectiveFieldSize = FieldSize = FieldSize + ExtraSizeForAsan;
2014
36
  }
2015
773k
2016
  // Reserve space for this field.
2017
773k
  if (!IsOverlappingEmptyField) {
2018
773k
    uint64_t EffectiveFieldSizeInBits = Context.toBits(EffectiveFieldSize);
2019
773k
    if (IsUnion)
2020
46.4k
      setDataSize(std::max(getDataSizeInBits(), EffectiveFieldSizeInBits));
2021
727k
    else
2022
727k
      setDataSize(FieldOffset + EffectiveFieldSize);
2023
773k
2024
773k
    PaddedFieldSize = std::max(PaddedFieldSize, FieldOffset + FieldSize);
2025
773k
    setSize(std::max(getSizeInBits(), getDataSizeInBits()));
2026
60
  } else {
2027
60
    setSize(std::max(getSizeInBits(),
2028
60
                     (uint64_t)Context.toBits(FieldOffset + FieldSize)));
2029
60
  }
2030
773k
2031
  // Remember max struct/class ABI-specified alignment.
2032
773k
  UnadjustedAlignment = std::max(UnadjustedAlignment, FieldAlign);
2033
773k
  UpdateAlignment(FieldAlign, UnpackedFieldAlign, PreferredAlign);
2034
773k
}
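A short hedged illustration of the potentially-overlapping (IsOverlappingEmptyField) path above, assuming C++20 and an Itanium-ABI target (type names invented).

struct Allocator {};   // empty class

struct Vec {
  [[no_unique_address]] Allocator alloc;   // empty and potentially overlapping:
                                           // placed at offset 0, reserves no storage
  int *data;
};

static_assert(sizeof(Vec) == sizeof(int *),
              "empty no_unique_address member expected to overlap");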
2035
2036
311k
void ItaniumRecordLayoutBuilder::FinishLayout(const NamedDecl *D) {
2037
  // In C++, records cannot be of size 0.
2038
311k
  if (Context.getLangOpts().CPlusPlus && 
getSizeInBits() == 0210k
) {
2039
52.7k
    if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
2040
      // Compatibility with gcc requires a class (POD or non-POD) that is
2041
      // not empty but has size 0 (e.g. because its only fields are
2042
      // zero-length arrays) to remain of size 0.
2043
52.6k
      if (RD->isEmpty())
2044
51.6k
        setSize(CharUnits::One());
2045
52.6k
    }
2046
60
    else
2047
60
      setSize(CharUnits::One());
2048
52.7k
  }
2049
311k
2050
  // If we have any remaining field tail padding, include that in the overall
2051
  // size.
2052
311k
  setSize(std::max(getSizeInBits(), (uint64_t)Context.toBits(PaddedFieldSize)));
2053
311k
2054
  // Finally, round the size of the record up to the alignment of the
2055
  // record itself.
2056
311k
  uint64_t UnpaddedSize = getSizeInBits() - UnfilledBitsInLastUnit;
2057
311k
  uint64_t UnpackedSizeInBits =
2058
311k
      llvm::alignTo(getSizeInBits(), Context.toBits(UnpackedAlignment));
2059
311k
2060
311k
  uint64_t RoundedSize = llvm::alignTo(
2061
311k
      getSizeInBits(),
2062
311k
      Context.toBits(!Context.getTargetInfo().defaultsToAIXPowerAlignment()
2063
311k
                         ? Alignment
2064
182
                         : PreferredAlignment));
2065
311k
2066
311k
  if (UseExternalLayout) {
2067
    // If we're inferring alignment, and the external size is smaller than
2068
    // our size after we've rounded up to alignment, conservatively set the
2069
    // alignment to 1.
2070
11.8k
    if (InferAlignment && 
External.Size < RoundedSize4.24k
) {
2071
15
      Alignment = CharUnits::One();
2072
15
      PreferredAlignment = CharUnits::One();
2073
15
      InferAlignment = false;
2074
15
    }
2075
11.8k
    setSize(External.Size);
2076
11.8k
    return;
2077
11.8k
  }
2078
299k
2079
  // Set the size to the final size.
2080
299k
  setSize(RoundedSize);
2081
299k
2082
299k
  unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
2083
299k
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
2084
    // Warn if padding was introduced to the struct/class/union.
2085
294k
    if (getSizeInBits() > UnpaddedSize) {
2086
12.7k
      unsigned PadSize = getSizeInBits() - UnpaddedSize;
2087
12.7k
      bool InBits = true;
2088
12.7k
      if (PadSize % CharBitNum == 0) {
2089
12.1k
        PadSize = PadSize / CharBitNum;
2090
12.1k
        InBits = false;
2091
12.1k
      }
2092
12.7k
      Diag(RD->getLocation(), diag::warn_padded_struct_size)
2093
12.7k
          << Context.getTypeDeclType(RD)
2094
12.7k
          << PadSize
2095
12.1k
          << (InBits ? 
1593
: 0); // (byte|bit)
2096
12.7k
    }
2097
294k
2098
    // Warn if we packed it unnecessarily: the unpacked alignment is not
2099
    // greater than the packed alignment, the size in bits doesn't change, and
2100
    // the offset of each field is identical.
2101
294k
    if (Packed && 
UnpackedAlignment <= Alignment7.44k
&&
2102
1.13k
        UnpackedSizeInBits == getSizeInBits() && !HasPackedField)
2103
1.11k
      Diag(D->getLocation(), diag::warn_unnecessary_packed)
2104
1.11k
          << Context.getTypeDeclType(RD);
2105
294k
  }
2106
299k
}
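The final round-up above is the source of ordinary tail padding; a hedged example for common targets where double is 8 bytes with 8-byte alignment (the struct name is invented).

struct Sample {
  double d;   // 8 bytes, 8-byte alignment
  char   c;   // offset 8; unpadded size is 9 bytes
};
// FinishLayout rounds 9 up to the record alignment, adding 7 bytes of tail
// padding (which -Wpadded reports via warn_padded_struct_size above).
static_assert(sizeof(Sample) == 16, "size rounded up to alignof(double)");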
2107
2108
void ItaniumRecordLayoutBuilder::UpdateAlignment(
2109
    CharUnits NewAlignment, CharUnits UnpackedNewAlignment,
2110
847k
    CharUnits PreferredNewAlignment) {
2111
  // The alignment is not modified when using 'mac68k' alignment or when
2112
  // we have an externally-supplied layout that also provides overall alignment.
2113
847k
  if (IsMac68kAlign || 
(846k
UseExternalLayout846k
&&
!InferAlignment21.7k
))
2114
14.0k
    return;
2115
833k
2116
833k
  if (NewAlignment > Alignment) {
2117
235k
    assert(llvm::isPowerOf2_64(NewAlignment.getQuantity()) &&
2118
235k
           "Alignment not a power of 2");
2119
235k
    Alignment = NewAlignment;
2120
235k
  }
2121
833k
2122
833k
  if (UnpackedNewAlignment > UnpackedAlignment) {
2123
243k
    assert(llvm::isPowerOf2_64(UnpackedNewAlignment.getQuantity()) &&
2124
243k
           "Alignment not a power of 2");
2125
243k
    UnpackedAlignment = UnpackedNewAlignment;
2126
243k
  }
2127
833k
2128
833k
  if (PreferredNewAlignment > PreferredAlignment) {
2129
235k
    assert(llvm::isPowerOf2_64(PreferredNewAlignment.getQuantity()) &&
2130
235k
           "Alignment not a power of 2");
2131
235k
    PreferredAlignment = PreferredNewAlignment;
2132
235k
  }
2133
833k
}
2134
2135
uint64_t
2136
ItaniumRecordLayoutBuilder::updateExternalFieldOffset(const FieldDecl *Field,
2137
17.8k
                                                      uint64_t ComputedOffset) {
2138
17.8k
  uint64_t ExternalFieldOffset = External.getExternalFieldOffset(Field);
2139
17.8k
2140
17.8k
  if (InferAlignment && 
ExternalFieldOffset < ComputedOffset5.83k
) {
2141
    // The externally-supplied field offset is before the field offset we
2142
    // computed. Assume that the structure is packed.
2143
6
    Alignment = CharUnits::One();
2144
6
    PreferredAlignment = CharUnits::One();
2145
6
    InferAlignment = false;
2146
6
  }
2147
17.8k
2148
  // Use the externally-supplied field offset.
2149
17.8k
  return ExternalFieldOffset;
2150
17.8k
}
2151
2152
/// Get diagnostic %select index for tag kind for
2153
/// field padding diagnostic message.
2154
/// WARNING: Indexes apply to particular diagnostics only!
2155
///
2156
/// \returns diagnostic %select index.
2157
15.2k
static unsigned getPaddingDiagFromTagKind(TagTypeKind Tag) {
2158
15.2k
  switch (Tag) {
2159
14.6k
  case TTK_Struct: return 0;
2160
0
  case TTK_Interface: return 1;
2161
635
  case TTK_Class: return 2;
2162
0
  default: llvm_unreachable("Invalid tag kind for field padding diagnostic!");
2163
15.2k
  }
2164
15.2k
}
2165
2166
void ItaniumRecordLayoutBuilder::CheckFieldPadding(
2167
    uint64_t Offset, uint64_t UnpaddedOffset, uint64_t UnpackedOffset,
2168
774k
    unsigned UnpackedAlign, bool isPacked, const FieldDecl *D) {
2169
  // We let ObjC ivars pass without warning; ObjC interfaces are generally not used
2170
  // for padding tricks.
2171
774k
  if (isa<ObjCIvarDecl>(D))
2172
7.22k
    return;
2173
767k
2174
  // Don't warn about structs created without a SourceLocation.  This can
2175
  // be done by clients of the AST, such as codegen.
2176
767k
  if (D->getLocation().isInvalid())
2177
77.0k
    return;
2178
690k
2179
690k
  unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
2180
690k
2181
  // Warn if padding was introduced to the struct/class.
2182
690k
  if (!IsUnion && 
Offset > UnpaddedOffset646k
) {
2183
15.2k
    unsigned PadSize = Offset - UnpaddedOffset;
2184
15.2k
    bool InBits = true;
2185
15.2k
    if (PadSize % CharBitNum == 0) {
2186
14.8k
      PadSize = PadSize / CharBitNum;
2187
14.8k
      InBits = false;
2188
14.8k
    }
2189
15.2k
    if (D->getIdentifier())
2190
15.0k
      Diag(D->getLocation(), diag::warn_padded_struct_field)
2191
15.0k
          << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
2192
15.0k
          << Context.getTypeDeclType(D->getParent())
2193
15.0k
          << PadSize
2194
14.6k
          << (InBits ? 
1376
: 0) // (byte|bit)
2195
15.0k
          << D->getIdentifier();
2196
208
    else
2197
208
      Diag(D->getLocation(), diag::warn_padded_struct_anon_field)
2198
208
          << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
2199
208
          << Context.getTypeDeclType(D->getParent())
2200
208
          << PadSize
2201
121
          << (InBits ? 
187
: 0); // (byte|bit)
2202
15.2k
 }
2203
690k
 if (isPacked && 
Offset != UnpackedOffset17.9k
) {
2204
2.36k
   HasPackedField = true;
2205
2.36k
 }
2206
690k
}
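For reference, a hedged example of the inter-field padding this routine diagnoses, assuming a common target where int needs 4-byte alignment (the struct name is invented).

struct Logged {
  char tag;    // offset 0
  int  value;  // offset 4: three bytes of padding precede it
};
// Building with -Wpadded is expected to trigger warn_padded_struct_field here,
// roughly: "padding struct 'Logged' with 3 bytes to align 'value'".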
2207
2208
static const CXXMethodDecl *computeKeyFunction(ASTContext &Context,
2209
134k
                                               const CXXRecordDecl *RD) {
2210
  // If a class isn't polymorphic it doesn't have a key function.
2211
134k
  if (!RD->isPolymorphic())
2212
111k
    return nullptr;
2213
22.8k
2214
  // A class that is not externally visible doesn't have a key function. (Or
2215
  // at least, there's no point to assigning a key function to such a class;
2216
  // this doesn't affect the ABI.)
2217
22.8k
  if (!RD->isExternallyVisible())
2218
851
    return nullptr;
2219
22.0k
2220
  // Template instantiations don't have key functions per Itanium C++ ABI 5.2.6.
2221
  // Same behavior as GCC.
2222
22.0k
  TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
2223
22.0k
  if (TSK == TSK_ImplicitInstantiation ||
2224
19.1k
      TSK == TSK_ExplicitInstantiationDeclaration ||
2225
17.6k
      TSK == TSK_ExplicitInstantiationDefinition)
2226
4.60k
    return nullptr;
2227
17.4k
2228
17.4k
  bool allowInlineFunctions =
2229
17.4k
    Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline();
2230
17.4k
2231
76.8k
  for (const CXXMethodDecl *MD : RD->methods()) {
2232
76.8k
    if (!MD->isVirtual())
2233
57.2k
      continue;
2234
19.6k
2235
19.6k
    if (MD->isPure())
2236
681
      continue;
2237
18.9k
2238
    // Ignore implicit member functions, they are always marked as inline, but
2239
    // they don't have a body until they're defined.
2240
18.9k
    if (MD->isImplicit())
2241
1.33k
      continue;
2242
17.6k
2243
17.6k
    if (MD->isInlineSpecified() || 
MD->isConstexpr()17.5k
)
2244
117
      continue;
2245
17.5k
2246
17.5k
    if (MD->hasInlineBody())
2247
5.11k
      continue;
2248
12.3k
2249
    // Ignore inline deleted or defaulted functions.
2250
12.3k
    if (!MD->isUserProvided())
2251
57
      continue;
2252
12.3k
2253
    // In certain ABIs, ignore functions with out-of-line inline definitions.
2254
12.3k
    if (!allowInlineFunctions) {
2255
229
      const FunctionDecl *Def;
2256
229
      if (MD->hasBody(Def) && 
Def->isInlineSpecified()165
)
2257
88
        continue;
2258
12.2k
    }
2259
12.2k
2260
12.2k
    if (Context.getLangOpts().CUDA) {
2261
      // While compiler may see key method in this TU, during CUDA
2262
      // compilation we should ignore methods that are not accessible
2263
      // on this side of compilation.
2264
7
      if (Context.getLangOpts().CUDAIsDevice) {
2265
        // In device mode ignore methods without __device__ attribute.
2266
4
        if (!MD->hasAttr<CUDADeviceAttr>())
2267
2
          continue;
2268
3
      } else {
2269
        // In host mode ignore __device__-only methods.
2270
3
        if (!MD->hasAttr<CUDAHostAttr>() && MD->hasAttr<CUDADeviceAttr>())
2271
1
          continue;
2272
12.2k
      }
2273
7
    }
2274
12.2k
2275
    // If the key function is dllimport but the class isn't, then the class has
2276
    // no key function. The DLL that exports the key function won't export the
2277
    // vtable in this case.
2278
12.2k
    if (MD->hasAttr<DLLImportAttr>() && 
!RD->hasAttr<DLLImportAttr>()61
)
2279
54
      return nullptr;
2280
12.1k
2281
    // We found it.
2282
12.1k
    return MD;
2283
12.1k
  }
2284
17.4k
2285
5.19k
  return nullptr;
2286
17.4k
}
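A small hedged example of what the loop above selects (the class name is invented): the key function is the first user-provided, non-pure, non-inline virtual member, and the vtable is emitted in the translation unit that defines it.

struct Widget {
  virtual void draw();          // no inline body: expected to be the key function
  virtual void resize() {}      // inline body -> skipped by the loop above
  virtual void accept() = 0;    // pure virtual -> skipped
};
// The vtable (and RTTI) for Widget is expected to be emitted only in the
// translation unit that defines Widget::draw().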
2287
2288
DiagnosticBuilder ItaniumRecordLayoutBuilder::Diag(SourceLocation Loc,
2289
29.1k
                                                   unsigned DiagID) {
2290
29.1k
  return Context.getDiagnostics().Report(Loc, DiagID);
2291
29.1k
}
2292
2293
/// Does the target C++ ABI require us to skip over the tail-padding
2294
/// of the given class (considering it as a base class) when allocating
2295
/// objects?
2296
186k
static bool mustSkipTailPadding(TargetCXXABI ABI, const CXXRecordDecl *RD) {
2297
186k
  switch (ABI.getTailPaddingUseRules()) {
2298
0
  case TargetCXXABI::AlwaysUseTailPadding:
2299
0
    return false;
2300
0
2301
186k
  case TargetCXXABI::UseTailPaddingUnlessPOD03:
2302
    // FIXME: To the extent that this is meant to cover the Itanium ABI
2303
    // rules, we should implement the restrictions about over-sized
2304
    // bitfields:
2305
    //
2306
    // http://itanium-cxx-abi.github.io/cxx-abi/abi.html#POD :
2307
    //   In general, a type is considered a POD for the purposes of
2308
    //   layout if it is a POD type (in the sense of ISO C++
2309
    //   [basic.types]). However, a POD-struct or POD-union (in the
2310
    //   sense of ISO C++ [class]) with a bitfield member whose
2311
    //   declared width is wider than the declared type of the
2312
    //   bitfield is not a POD for the purpose of layout.  Similarly,
2313
    //   an array type is not a POD for the purpose of layout if the
2314
    //   element type of the array is not a POD for the purpose of
2315
    //   layout.
2316
    //
2317
    //   Where references to the ISO C++ are made in this paragraph,
2318
    //   the Technical Corrigendum 1 version of the standard is
2319
    //   intended.
2320
186k
    return RD->isPOD();
2321
0
2322
276
  case TargetCXXABI::UseTailPaddingUnlessPOD11:
2323
    // This is equivalent to RD->getTypeForDecl().isCXX11PODType(),
2324
    // but with a lot of abstraction penalty stripped off.  This does
2325
    // assume that these properties are set correctly even in C++98
2326
    // mode; fortunately, that is true because we want to assign
2327
    // consistently semantics to the type-traits intrinsics (or at
2328
    // least as many of them as possible).
2329
276
    return RD->isTrivial() && 
RD->isCXX11StandardLayout()61
;
2330
0
  }
2331
0
2332
0
  llvm_unreachable("bad tail-padding use kind");
2333
0
}
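A hedged illustration of the UseTailPaddingUnlessPOD03 rule above on Itanium-ABI targets (type names invented): a POD base keeps its tail padding, while the tail padding of a non-POD base can be reused by a derived class.

struct PodBase {                 // POD: its tail padding must be skipped
  double d;
  char   c;
};
struct PodDerived : PodBase { char e; };

struct NonPodBase {              // the user-declared copy constructor makes it non-POD
  double d;
  char   c;
  NonPodBase(const NonPodBase &);
};
struct NonPodDerived : NonPodBase { char e; };

// Expected on common Itanium-ABI targets with an 8-byte double:
static_assert(sizeof(PodDerived) == 24, "e stays out of PodBase's tail padding");
static_assert(sizeof(NonPodDerived) == 16, "e reuses NonPodBase's tail padding");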
2334
2335
312k
static bool isMsLayout(const ASTContext &Context) {
2336
312k
  return Context.getTargetInfo().getCXXABI().isMicrosoft();
2337
312k
}
2338
2339
// This section contains an implementation of struct layout that is, up to the
2340
// included tests, compatible with cl.exe (2013).  The layout produced is
2341
// significantly different than those produced by the Itanium ABI.  Here we note
2342
// the most important differences.
2343
//
2344
// * The alignment of bitfields in unions is ignored when computing the
2345
//   alignment of the union.
2346
// * A zero-width bitfield that occurs after anything other than
2347
//   a non-zero length bitfield is ignored.
2348
// * There is no explicit primary base for the purposes of layout.  All bases
2349
//   with vfptrs are laid out first, followed by all bases without vfptrs.
2350
// * The Itanium equivalent vtable pointers are split into a vfptr (virtual
2351
//   function pointer) and a vbptr (virtual base pointer).  They can each be
2352
//   shared with a non-virtual base. These bases need not be the same.  vfptrs
2353
//   always occur at offset 0.  vbptrs can occur at an arbitrary offset and are
2354
//   placed after the lexicographically last non-virtual base.  This placement
2355
//   is always before fields but can be in the middle of the non-virtual bases
2356
//   due to the two-pass layout scheme for non-virtual-bases.
2357
// * Virtual bases sometimes require a 'vtordisp' field that is laid out before
2358
//   the virtual base and is used in conjunction with virtual overrides during
2359
//   construction and destruction.  This is always a 4 byte value and is used as
2360
//   an alternative to constructor vtables.
2361
// * vtordisps are allocated in a block of memory with size and alignment equal
2362
//   to the alignment of the completed structure (before applying __declspec(
2363
//   align())).  The vtordisp always occur at the end of the allocation block,
2364
//   immediately prior to the virtual base.
2365
// * vfptrs are injected after all bases and fields have been laid out.  In
2366
//   order to guarantee proper alignment of all fields, the vfptr injection
2367
//   pushes all bases and fields back by the alignment imposed by those bases
2368
//   and fields.  This can potentially add a significant amount of padding.
2369
//   vfptrs are always injected at offset 0.
2370
// * vbptrs are injected after all bases and fields have been laid out.  In
2371
//   order to guarantee proper alignment of all fields, the vbptr injection
2372
//   pushes all bases and fields back by the alignment imposed by those bases
2373
//   and fields.  This can potentially add a significant amount of padding.
2374
//   vbptrs are injected immediately after the last non-virtual base as
2375
//   lexicographically ordered in the code.  If this site isn't pointer aligned
2376
//   the vbptr is placed at the next properly aligned location.  Enough padding
2377
//   is added to guarantee a fit.
2378
// * The last zero sized non-virtual base can be placed at the end of the
2379
//   struct (potentially aliasing another object), or may alias with the first
2380
//   field, even if they are of the same type.
2381
// * The last zero size virtual base may be placed at the end of the struct
2382
//   potentially aliasing another object.
2383
// * The ABI attempts to avoid aliasing of zero sized bases by adding padding
2384
//   between bases or vbases with specific properties.  The criteria for
2385
//   additional padding between two bases is that the first base is zero sized
2386
//   or ends with a zero sized subobject and the second base is zero sized or
2387
//   trails with a zero sized base or field (sharing of vfptrs can reorder the
2388
//   layout of the bases so the leading base is not always the first one declared).
2389
//   This rule does take into account fields that are not records, so padding
2390
//   will occur even if the last field is, e.g. an int. The padding added for
2391
//   bases is 1 byte.  The padding added between vbases depends on the alignment
2392
//   of the object but is at least 4 bytes (in both 32 and 64 bit modes).
2393
// * There is no concept of non-virtual alignment, non-virtual alignment and
2394
//   alignment are always identical.
2395
// * There is a distinction between alignment and required alignment.
2396
//   __declspec(align) changes the required alignment of a struct.  This
2397
//   alignment is _always_ obeyed, even in the presence of #pragma pack. A
2398
//   record inherits required alignment from all of its fields and bases.
2399
// * __declspec(align) on bitfields has the effect of changing the bitfield's
2400
//   alignment instead of its required alignment.  This is the only known way
2401
//   to make the alignment of a struct bigger than 8.  Interestingly enough
2402
//   this alignment is also immune to the effects of #pragma pack and can be
2403
//   used to create structures with large alignment under #pragma pack.
2404
//   However, because it does not impact required alignment, such a structure,
2405
//   when used as a field or base, will not be aligned if #pragma pack is
2406
//   still active at the time of use.
2407
//
2408
// Known incompatibilities:
2409
// * all: #pragma pack between fields in a record
2410
// * 2010 and back: If the last field in a record is a bitfield, every object
2411
//   laid out after the record will have extra padding inserted before it.  The
2412
//   extra padding will have size equal to the size of the storage class of the
2413
//   bitfield.  0 sized bitfields don't exhibit this behavior and the extra
2414
//   padding can be avoided by adding a 0 sized bitfield after the non-zero-
2415
//   sized bitfield.
2416
// * 2012 and back: In 64-bit mode, if the alignment of a record is 16 or
2417
//   greater due to __declspec(align()) then a second layout phase occurs after
2418
//   The locations of the vf and vb pointers are known.  This layout phase
2419
//   suffers from the "last field is a bitfield" bug in 2010 and results in
2420
//   _every_ field getting padding put in front of it, potentially including the
2421
//   vfptr, leaving the vfprt at a non-zero location which results in a fault if
2422
//   anything tries to read the vftbl.  The second layout phase also treats
2423
//   bitfields as separate entities and gives them each storage rather than
2424
//   packing them.  Additionally, because this phase appears to perform an
2425
//   (unstable) sort on the members before laying them out and because merged
2426
//   bitfields have the same address, the bitfields end up in whatever order
2427
//   the sort left them in, a behavior we could never hope to replicate.
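Regarding the first known incompatibility listed above ("#pragma pack between fields in a record"), this is the kind of pattern meant; a hedged sketch only, with no claim about the resulting layout (the type name is invented).

struct Mixed {
  int a;
#pragma pack(push, 1)   // pack state changes in the middle of the member list;
  int b;                // per the note above, cl.exe and clang may disagree here
#pragma pack(pop)
  int c;
};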
2428
2429
namespace {
2430
struct MicrosoftRecordLayoutBuilder {
2431
  struct ElementInfo {
2432
    CharUnits Size;
2433
    CharUnits Alignment;
2434
  };
2435
  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
2436
4.69k
  MicrosoftRecordLayoutBuilder(const ASTContext &Context) : Context(Context) {}
2437
private:
2438
  MicrosoftRecordLayoutBuilder(const MicrosoftRecordLayoutBuilder &) = delete;
2439
  void operator=(const MicrosoftRecordLayoutBuilder &) = delete;
2440
public:
2441
  void layout(const RecordDecl *RD);
2442
  void cxxLayout(const CXXRecordDecl *RD);
2443
  /// Initializes size and alignment and honors some flags.
2444
  void initializeLayout(const RecordDecl *RD);
2445
  /// Initialized C++ layout, compute alignment and virtual alignment and
2446
  /// existence of vfptrs and vbptrs.  Alignment is needed before the vfptr is
2447
  /// laid out.
2448
  void initializeCXXLayout(const CXXRecordDecl *RD);
2449
  void layoutNonVirtualBases(const CXXRecordDecl *RD);
2450
  void layoutNonVirtualBase(const CXXRecordDecl *RD,
2451
                            const CXXRecordDecl *BaseDecl,
2452
                            const ASTRecordLayout &BaseLayout,
2453
                            const ASTRecordLayout *&PreviousBaseLayout);
2454
  void injectVFPtr(const CXXRecordDecl *RD);
2455
  void injectVBPtr(const CXXRecordDecl *RD);
2456
  /// Lays out the fields of the record.  Also rounds size up to
2457
  /// alignment.
2458
  void layoutFields(const RecordDecl *RD);
2459
  void layoutField(const FieldDecl *FD);
2460
  void layoutBitField(const FieldDecl *FD);
2461
  /// Lays out a single zero-width bit-field in the record and handles
2462
  /// special cases associated with zero-width bit-fields.
2463
  void layoutZeroWidthBitField(const FieldDecl *FD);
2464
  void layoutVirtualBases(const CXXRecordDecl *RD);
2465
  void finalizeLayout(const RecordDecl *RD);
2466
  /// Gets the size and alignment of a base taking pragma pack and
2467
  /// __declspec(align) into account.
2468
  ElementInfo getAdjustedElementInfo(const ASTRecordLayout &Layout);
2469
  /// Gets the size and alignment of a field taking pragma pack and
2470
  /// __declspec(align) into account.  It also updates RequiredAlignment as a
2471
  /// side effect because it is most convenient to do so here.
2472
  ElementInfo getAdjustedElementInfo(const FieldDecl *FD);
2473
  /// Places a field at an offset in CharUnits.
2474
2.54k
  void placeFieldAtOffset(CharUnits FieldOffset) {
2475
2.54k
    FieldOffsets.push_back(Context.toBits(FieldOffset));
2476
2.54k
  }
2477
  /// Places a bitfield at a bit offset.
2478
46
  void placeFieldAtBitOffset(uint64_t FieldOffset) {
2479
46
    FieldOffsets.push_back(FieldOffset);
2480
46
  }
2481
  /// Compute the set of virtual bases for which vtordisps are required.
2482
  void computeVtorDispSet(
2483
      llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtorDispSet,
2484
      const CXXRecordDecl *RD) const;
2485
  const ASTContext &Context;
2486
  /// The size of the record being laid out.
2487
  CharUnits Size;
2488
  /// The non-virtual size of the record layout.
2489
  CharUnits NonVirtualSize;
2490
  /// The data size of the record layout.
2491
  CharUnits DataSize;
2492
  /// The current alignment of the record layout.
2493
  CharUnits Alignment;
2494
  /// The maximum allowed field alignment. This is set by #pragma pack.
2495
  CharUnits MaxFieldAlignment;
2496
  /// The alignment that this record must obey.  This is imposed by
2497
  /// __declspec(align()) on the record itself or one of its fields or bases.
2498
  CharUnits RequiredAlignment;
2499
  /// The size of the allocation of the currently active bitfield.
2500
  /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield
2501
  /// is true.
2502
  CharUnits CurrentBitfieldSize;
2503
  /// Offset to the virtual base table pointer (if one exists).
2504
  CharUnits VBPtrOffset;
2505
  /// Minimum record size possible.
2506
  CharUnits MinEmptyStructSize;
2507
  /// The size and alignment info of a pointer.
2508
  ElementInfo PointerInfo;
2509
  /// The primary base class (if one exists).
2510
  const CXXRecordDecl *PrimaryBase;
2511
  /// The class we share our vb-pointer with.
2512
  const CXXRecordDecl *SharedVBPtrBase;
2513
  /// The collection of field offsets.
2514
  SmallVector<uint64_t, 16> FieldOffsets;
2515
  /// Base classes and their offsets in the record.
2516
  BaseOffsetsMapTy Bases;
2517
  /// virtual base classes and their offsets in the record.
2518
  ASTRecordLayout::VBaseOffsetsMapTy VBases;
2519
  /// The number of remaining bits in our last bitfield allocation.
2520
  /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield is
2521
  /// true.
2522
  unsigned RemainingBitsInField;
2523
  bool IsUnion : 1;
2524
  /// True if the last field laid out was a bitfield and was not 0
2525
  /// width.
2526
  bool LastFieldIsNonZeroWidthBitfield : 1;
2527
  /// True if the class has its own vftable pointer.
2528
  bool HasOwnVFPtr : 1;
2529
  /// True if the class has a vbtable pointer.
2530
  bool HasVBPtr : 1;
2531
  /// True if the last sub-object within the type is zero sized or the
2532
  /// object itself is zero sized.  This *does not* count members that are not
2533
  /// records.  Only used for MS-ABI.
2534
  bool EndsWithZeroSizedObject : 1;
2535
  /// True if this class is zero sized, or its first base is zero sized or
2536
  /// has this property.  Only used for MS-ABI.
2537
  bool LeadsWithZeroSizedBase : 1;
2538
2539
  /// True if the external AST source provided a layout for this record.
2540
  bool UseExternalLayout : 1;
2541
2542
  /// The layout provided by the external AST source. Only active if
2543
  /// UseExternalLayout is true.
2544
  ExternalLayout External;
2545
};
2546
} // namespace
2547
2548
MicrosoftRecordLayoutBuilder::ElementInfo
2549
MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
2550
2.81k
    const ASTRecordLayout &Layout) {
2551
2.81k
  ElementInfo Info;
2552
2.81k
  Info.Alignment = Layout.getAlignment();
2553
  // Respect pragma pack.
2554
2.81k
  if (!MaxFieldAlignment.isZero())
2555
34
    Info.Alignment = std::min(Info.Alignment, MaxFieldAlignment);
2556
  // Track zero-sized subobjects here where it's already available.
2557
2.81k
  EndsWithZeroSizedObject = Layout.endsWithZeroSizedObject();
2558
  // Respect required alignment; this is necessary because we may have adjusted
2559
  // the alignment in the case of pragma pack.  Note that the required alignment
2560
  // doesn't actually apply to the struct alignment at this point.
2561
2.81k
  Alignment = std::max(Alignment, Info.Alignment);
2562
2.81k
  RequiredAlignment = std::max(RequiredAlignment, Layout.getRequiredAlignment());
2563
2.81k
  Info.Alignment = std::max(Info.Alignment, Layout.getRequiredAlignment());
2564
2.81k
  Info.Size = Layout.getNonVirtualSize();
2565
2.81k
  return Info;
2566
2.81k
}
2567
2568
MicrosoftRecordLayoutBuilder::ElementInfo
2569
MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
2570
2.56k
    const FieldDecl *FD) {
2571
  // Get the field type's natural alignment, ignoring any
2572
  // alignment attributes.
2573
2.56k
  ElementInfo Info;
2574
2.56k
  std::tie(Info.Size, Info.Alignment) =
2575
2.56k
      Context.getTypeInfoInChars(FD->getType()->getUnqualifiedDesugaredType());
2576
  // Respect align attributes on the field.
2577
2.56k
  CharUnits FieldRequiredAlignment =
2578
2.56k
      Context.toCharUnitsFromBits(FD->getMaxAlignment());
2579
  // Respect align attributes on the type.
2580
2.56k
  if (Context.isAlignmentRequired(FD->getType()))
2581
72
    FieldRequiredAlignment = std::max(
2582
72
        Context.getTypeAlignInChars(FD->getType()), FieldRequiredAlignment);
2583
  // Respect attributes applied to subobjects of the field.
2584
2.56k
  if (FD->isBitField())
2585
    // For some reason __declspec align impacts alignment rather than required
2586
    // alignment when it is applied to bitfields.
2587
189
    Info.Alignment = std::max(Info.Alignment, FieldRequiredAlignment);
2588
2.37k
  else {
2589
2.37k
    if (auto RT =
2590
181
            FD->getType()->getBaseElementTypeUnsafe()->getAs<RecordType>()) {
2591
181
      auto const &Layout = Context.getASTRecordLayout(RT->getDecl());
2592
181
      EndsWithZeroSizedObject = Layout.endsWithZeroSizedObject();
2593
181
      FieldRequiredAlignment = std::max(FieldRequiredAlignment,
2594
181
                                        Layout.getRequiredAlignment());
2595
181
    }
2596
    // Capture required alignment as a side-effect.
2597
2.37k
    RequiredAlignment = std::max(RequiredAlignment, FieldRequiredAlignment);
2598
2.37k
  }
2599
  // Respect pragma pack, attribute pack and declspec align
2600
2.56k
  if (!MaxFieldAlignment.isZero())
2601
257
    Info.Alignment = std::min(Info.Alignment, MaxFieldAlignment);
2602
2.56k
  if (FD->hasAttr<PackedAttr>())
2603
7
    Info.Alignment = CharUnits::One();
2604
2.56k
  Info.Alignment = std::max(Info.Alignment, FieldRequiredAlignment);
2605
2.56k
  return Info;
2606
2.56k
}
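
A hedged example of the interaction this function models: a field's __declspec(align) (its required alignment) wins over #pragma pack, while the natural alignment is clamped by it. Not from this file; assumes an MSVC-compatible compiler:

#include <cstddef>
#include <cstdio>

#pragma pack(push, 2)
struct S {
  char c;
  __declspec(align(8)) int i;  // required alignment overrides #pragma pack(2)
};
#pragma pack(pop)

int main() {
  // Expected under the MS ABI: 'i' is placed at offset 8, not at offset 2.
  std::printf("offsetof(S, i)=%zu sizeof(S)=%zu\n", offsetof(S, i), sizeof(S));
  return 0;
}
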
2607
2608
322
void MicrosoftRecordLayoutBuilder::layout(const RecordDecl *RD) {
2609
  // For C record layout, zero-sized records always have size 4.
2610
322
  MinEmptyStructSize = CharUnits::fromQuantity(4);
2611
322
  initializeLayout(RD);
2612
322
  layoutFields(RD);
2613
322
  DataSize = Size = Size.alignTo(Alignment);
2614
322
  RequiredAlignment = std::max(
2615
322
      RequiredAlignment, Context.toCharUnitsFromBits(RD->getMaxAlignment()));
2616
322
  finalizeLayout(RD);
2617
322
}
2618
2619
4.37k
void MicrosoftRecordLayoutBuilder::cxxLayout(const CXXRecordDecl *RD) {
2620
  // The C++ standard says that empty structs have size 1.
2621
4.37k
  MinEmptyStructSize = CharUnits::One();
2622
4.37k
  initializeLayout(RD);
2623
4.37k
  initializeCXXLayout(RD);
2624
4.37k
  layoutNonVirtualBases(RD);
2625
4.37k
  layoutFields(RD);
2626
4.37k
  injectVBPtr(RD);
2627
4.37k
  injectVFPtr(RD);
2628
4.37k
  if (HasOwnVFPtr || 
(3.60k
HasVBPtr3.60k
&&
!SharedVBPtrBase740
))
2629
1.31k
    Alignment = std::max(Alignment, PointerInfo.Alignment);
2630
4.37k
  auto RoundingAlignment = Alignment;
2631
4.37k
  if (!MaxFieldAlignment.isZero())
2632
56
    RoundingAlignment = std::min(RoundingAlignment, MaxFieldAlignment);
2633
4.37k
  if (!UseExternalLayout)
2634
4.36k
    Size = Size.alignTo(RoundingAlignment);
2635
4.37k
  NonVirtualSize = Size;
2636
4.37k
  RequiredAlignment = std::max(
2637
4.37k
      RequiredAlignment, Context.toCharUnitsFromBits(RD->getMaxAlignment()));
2638
4.37k
  layoutVirtualBases(RD);
2639
4.37k
  finalizeLayout(RD);
2640
4.37k
}
2641
2642
4.69k
void MicrosoftRecordLayoutBuilder::initializeLayout(const RecordDecl *RD) {
2643
4.69k
  IsUnion = RD->isUnion();
2644
4.69k
  Size = CharUnits::Zero();
2645
4.69k
  Alignment = CharUnits::One();
2646
  // In 64-bit mode we always perform an alignment step after laying out vbases.
2647
  // In 32-bit mode we do not.  The code that decides whether to perform this
2648
  // alignment step checks the RequiredAlignment field and aligns only if it isn't 0.
2649
4.69k
  RequiredAlignment = Context.getTargetInfo().getTriple().isArch64Bit()
2650
2.17k
                          ? CharUnits::One()
2651
2.52k
                          : CharUnits::Zero();
2652
  // Compute the maximum field alignment.
2653
4.69k
  MaxFieldAlignment = CharUnits::Zero();
2654
  // Honor the default struct packing maximum alignment flag.
2655
4.69k
  if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct)
2656
0
      MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
2657
  // Honor the packing attribute.  The MS-ABI ignores pragma pack if it's larger
2658
  // than the pointer size.
2659
4.69k
  if (const MaxFieldAlignmentAttr *MFAA = RD->getAttr<MaxFieldAlignmentAttr>()){
2660
125
    unsigned PackedAlignment = MFAA->getAlignment();
2661
125
    if (PackedAlignment <= Context.getTargetInfo().getPointerWidth(0))
2662
92
      MaxFieldAlignment = Context.toCharUnitsFromBits(PackedAlignment);
2663
125
  }
2664
  // Packed attribute forces max field alignment to be 1.
2665
4.69k
  if (RD->hasAttr<PackedAttr>())
2666
61
    MaxFieldAlignment = CharUnits::One();
2667
4.69k
2668
  // Try to respect the external layout if present.
2669
4.69k
  UseExternalLayout = false;
2670
4.69k
  if (ExternalASTSource *Source = Context.getExternalSource())
2671
121
    UseExternalLayout = Source->layoutRecordType(
2672
121
        RD, External.Size, External.Align, External.FieldOffsets,
2673
121
        External.BaseOffsets, External.VirtualBaseOffsets);
2674
4.69k
}
2675
2676
void
2677
4.37k
MicrosoftRecordLayoutBuilder::initializeCXXLayout(const CXXRecordDecl *RD) {
2678
4.37k
  EndsWithZeroSizedObject = false;
2679
4.37k
  LeadsWithZeroSizedBase = false;
2680
4.37k
  HasOwnVFPtr = false;
2681
4.37k
  HasVBPtr = false;
2682
4.37k
  PrimaryBase = nullptr;
2683
4.37k
  SharedVBPtrBase = nullptr;
2684
  // Calculate pointer size and alignment.  These are used for vfptr and vbptr
2685
  // injection.
2686
4.37k
  PointerInfo.Size =
2687
4.37k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
2688
4.37k
  PointerInfo.Alignment =
2689
4.37k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
2690
  // Respect pragma pack.
2691
4.37k
  if (!MaxFieldAlignment.isZero())
2692
56
    PointerInfo.Alignment = std::min(PointerInfo.Alignment, MaxFieldAlignment);
2693
4.37k
}
2694
2695
void
2696
4.37k
MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(const CXXRecordDecl *RD) {
2697
  // The MS-ABI lays out all bases that contain leading vfptrs before it lays
2698
  // out any bases that do not contain vfptrs.  We implement this as two passes
2699
  // over the bases.  This approach guarantees that the primary base is laid out
2700
  // first.  We use these passes to calculate some additional aggregated
2701
  // information about the bases, such as required alignment and the presence of
2702
  // zero sized members.
2703
4.37k
  const ASTRecordLayout *PreviousBaseLayout = nullptr;
2704
4.37k
  bool HasPolymorphicBaseClass = false;
2705
  // Iterate through the bases and lay out the non-virtual ones.
2706
2.45k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
2707
2.45k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2708
2.45k
    HasPolymorphicBaseClass |= BaseDecl->isPolymorphic();
2709
2.45k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
2710
    // Mark and skip virtual bases.
2711
2.45k
    if (Base.isVirtual()) {
2712
907
      HasVBPtr = true;
2713
907
      continue;
2714
907
    }
2715
    // Check for a base to share a VBPtr with.
2716
1.55k
    if (!SharedVBPtrBase && 
BaseLayout.hasVBPtr()1.42k
) {
2717
225
      SharedVBPtrBase = BaseDecl;
2718
225
      HasVBPtr = true;
2719
225
    }
2720
    // Only lay out bases with extendable VFPtrs on the first pass.
2721
1.55k
    if (!BaseLayout.hasExtendableVFPtr())
2722
1.04k
      continue;
2723
    // If we don't have a primary base, this one qualifies.
2724
506
    if (!PrimaryBase) {
2725
367
      PrimaryBase = BaseDecl;
2726
367
      LeadsWithZeroSizedBase = BaseLayout.leadsWithZeroSizedBase();
2727
367
    }
2728
    // Lay out the base.
2729
506
    layoutNonVirtualBase(RD, BaseDecl, BaseLayout, PreviousBaseLayout);
2730
506
  }
2731
  // Figure out if we need a fresh VFPtr for this class.
2732
4.37k
  if (RD->isPolymorphic()) {
2733
1.41k
    if (!HasPolymorphicBaseClass)
2734
      // This class introduces polymorphism, so we need a vftable to store the
2735
      // RTTI information.
2736
694
      HasOwnVFPtr = true;
2737
722
    else if (!PrimaryBase) {
2738
      // We have a polymorphic base class but can't extend its vftable. Add a
2739
      // new vfptr if we would use any vftable slots.
2740
1.62k
      for (CXXMethodDecl *M : RD->methods()) {
2741
1.62k
        if (MicrosoftVTableContext::hasVtableSlot(M) &&
2742
317
            M->size_overridden_methods() == 0) {
2743
82
          HasOwnVFPtr = true;
2744
82
          break;
2745
82
        }
2746
1.62k
      }
2747
355
    }
2748
1.41k
  }
2749
  // If we don't have a primary base then we have a leading object that could
2750
  // itself lead with a zero-sized object, something we track.
2751
4.37k
  bool CheckLeadingLayout = !PrimaryBase;
2752
  // Iterate through the bases and lay out the non-virtual ones.
2753
2.45k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
2754
2.45k
    if (Base.isVirtual())
2755
907
      continue;
2756
1.55k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2757
1.55k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
2758
    // Only lay out bases without extendable VFPtrs on the second pass.
2759
1.55k
    if (BaseLayout.hasExtendableVFPtr()) {
2760
506
      VBPtrOffset = Bases[BaseDecl] + BaseLayout.getNonVirtualSize();
2761
506
      continue;
2762
506
    }
2763
    // If this is the first layout, check to see if it leads with a zero sized
2764
    // object.  If it does, so do we.
2765
1.04k
    if (CheckLeadingLayout) {
2766
664
      CheckLeadingLayout = false;
2767
664
      LeadsWithZeroSizedBase = BaseLayout.leadsWithZeroSizedBase();
2768
664
    }
2769
    // Lay out the base.
2770
1.04k
    layoutNonVirtualBase(RD, BaseDecl, BaseLayout, PreviousBaseLayout);
2771
1.04k
    VBPtrOffset = Bases[BaseDecl] + BaseLayout.getNonVirtualSize();
2772
1.04k
  }
2773
  // Set our VBPtrOffset if we know it at this point.
2774
4.37k
  if (!HasVBPtr)
2775
3.52k
    VBPtrOffset = CharUnits::fromQuantity(-1);
2776
850
  else if (SharedVBPtrBase) {
2777
225
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(SharedVBPtrBase);
2778
225
    VBPtrOffset = Bases[SharedVBPtrBase] + Layout.getVBPtrOffset();
2779
225
  }
2780
4.37k
}
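
To illustrate the two-pass ordering implemented above (bases with leading vfptrs are laid out before those without), here is a sketch that is not from this file and assumes an MSVC-compatible compiler:

#include <cstdio>

struct HasVfptr { virtual void f() {} int a; };
struct Plain    { int b; };

// Even though Plain is declared first, the MS ABI lays out HasVfptr first so
// that its vfptr can land at offset 0 of Derived; Plain is placed after it.
struct Derived : Plain, HasVfptr { int c; };

int main() {
  Derived d;
  // The Plain subobject is expected to start after HasVfptr's vfptr and 'a'.
  std::printf("Plain subobject offset: %td\n",
              reinterpret_cast<char *>(static_cast<Plain *>(&d)) -
              reinterpret_cast<char *>(&d));
  return 0;
}
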
2781
2782
3.26k
static bool recordUsesEBO(const RecordDecl *RD) {
2783
3.26k
  if (!isa<CXXRecordDecl>(RD))
2784
20
    return false;
2785
3.24k
  if (RD->hasAttr<EmptyBasesAttr>())
2786
16
    return true;
2787
3.23k
  if (auto *LVA = RD->getAttr<LayoutVersionAttr>())
2788
    // TODO: Double check with the next version of MSVC.
2789
0
    if (LVA->getVersion() <= LangOptions::MSVC2015)
2790
0
      return false;
2791
  // TODO: Some later version of MSVC will change the default behavior of the
2792
  // compiler to enable EBO by default.  When this happens, we will need an
2793
  // additional isCompatibleWithMSVC check.
2794
3.23k
  return false;
2795
3.23k
}
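
An illustrative use of the attribute checked above (not part of this file; MSVC-compatible compiler assumed): __declspec(empty_bases) opts a class into empty-base optimization under the MS ABI.

#include <cstdio>

struct Empty1 {};
struct Empty2 {};

struct WithoutEBO : Empty1, Empty2 { int x; };
struct __declspec(empty_bases) WithEBO : Empty1, Empty2 { int x; };

int main() {
  // With empty_bases the zero-sized bases are expected to overlap at offset 0,
  // so sizeof(WithEBO) == sizeof(int); WithoutEBO carries extra padding.
  std::printf("%zu %zu\n", sizeof(WithoutEBO), sizeof(WithEBO));
  return 0;
}
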
2796
2797
void MicrosoftRecordLayoutBuilder::layoutNonVirtualBase(
2798
    const CXXRecordDecl *RD,
2799
    const CXXRecordDecl *BaseDecl,
2800
    const ASTRecordLayout &BaseLayout,
2801
1.55k
    const ASTRecordLayout *&PreviousBaseLayout) {
2802
  // Insert padding between two bases if the first (left) one is zero sized or
2803
  // contains a zero sized subobject and the second (right) one is zero sized or
2804
  // leads with a zero sized base.
2805
1.55k
  bool MDCUsesEBO = recordUsesEBO(RD);
2806
1.55k
  if (PreviousBaseLayout && 
PreviousBaseLayout->endsWithZeroSizedObject()519
&&
2807
144
      BaseLayout.leadsWithZeroSizedBase() && 
!MDCUsesEBO111
)
2808
109
    Size++;
2809
1.55k
  ElementInfo Info = getAdjustedElementInfo(BaseLayout);
2810
1.55k
  CharUnits BaseOffset;
2811
1.55k
2812
  // Respect the external AST source base offset, if present.
2813
1.55k
  bool FoundBase = false;
2814
1.55k
  if (UseExternalLayout) {
2815
2
    FoundBase = External.getExternalNVBaseOffset(BaseDecl, BaseOffset);
2816
2
    if (FoundBase) {
2817
0
      assert(BaseOffset >= Size && "base offset already allocated");
2818
0
      Size = BaseOffset;
2819
0
    }
2820
2
  }
2821
1.55k
2822
1.55k
  if (!FoundBase) {
2823
1.55k
    if (MDCUsesEBO && 
BaseDecl->isEmpty()12
) {
2824
4
      assert(BaseLayout.getNonVirtualSize() == CharUnits::Zero());
2825
4
      BaseOffset = CharUnits::Zero();
2826
1.54k
    } else {
2827
      // Otherwise, lay the base out at the end of the MDC.
2828
1.54k
      BaseOffset = Size = Size.alignTo(Info.Alignment);
2829
1.54k
    }
2830
1.55k
  }
2831
1.55k
  Bases.insert(std::make_pair(BaseDecl, BaseOffset));
2832
1.55k
  Size += BaseLayout.getNonVirtualSize();
2833
1.55k
  PreviousBaseLayout = &BaseLayout;
2834
1.55k
}
2835
2836
4.69k
void MicrosoftRecordLayoutBuilder::layoutFields(const RecordDecl *RD) {
2837
4.69k
  LastFieldIsNonZeroWidthBitfield = false;
2838
4.69k
  for (const FieldDecl *Field : RD->fields())
2839
2.58k
    layoutField(Field);
2840
4.69k
}
2841
2842
2.58k
void MicrosoftRecordLayoutBuilder::layoutField(const FieldDecl *FD) {
2843
2.58k
  if (FD->isBitField()) {
2844
209
    layoutBitField(FD);
2845
209
    return;
2846
209
  }
2847
2.37k
  LastFieldIsNonZeroWidthBitfield = false;
2848
2.37k
  ElementInfo Info = getAdjustedElementInfo(FD);
2849
2.37k
  Alignment = std::max(Alignment, Info.Alignment);
2850
2.37k
  CharUnits FieldOffset;
2851
2.37k
  if (UseExternalLayout)
2852
6
    FieldOffset =
2853
6
        Context.toCharUnitsFromBits(External.getExternalFieldOffset(FD));
2854
2.37k
  else if (IsUnion)
2855
71
    FieldOffset = CharUnits::Zero();
2856
2.30k
  else
2857
2.30k
    FieldOffset = Size.alignTo(Info.Alignment);
2858
2.37k
  placeFieldAtOffset(FieldOffset);
2859
2.37k
  Size = std::max(Size, FieldOffset + Info.Size);
2860
2.37k
}
2861
2862
209
void MicrosoftRecordLayoutBuilder::layoutBitField(const FieldDecl *FD) {
2863
209
  unsigned Width = FD->getBitWidthValue(Context);
2864
209
  if (Width == 0) {
2865
42
    layoutZeroWidthBitField(FD);
2866
42
    return;
2867
42
  }
2868
167
  ElementInfo Info = getAdjustedElementInfo(FD);
2869
  // Clamp the bitfield to a containable size for the sake of being able
2870
  // to lay it out.  Sema will issue an error.
2871
167
  if (Width > Context.toBits(Info.Size))
2872
0
    Width = Context.toBits(Info.Size);
2873
  // Check to see if this bitfield fits into an existing allocation.  Note:
2874
  // MSVC refuses to pack bitfields of formal types with different sizes
2875
  // into the same allocation.
2876
167
  if (!UseExternalLayout && 
!IsUnion162
&&
LastFieldIsNonZeroWidthBitfield150
&&
2877
74
      CurrentBitfieldSize == Info.Size && 
Width <= RemainingBitsInField65
) {
2878
41
    placeFieldAtBitOffset(Context.toBits(Size) - RemainingBitsInField);
2879
41
    RemainingBitsInField -= Width;
2880
41
    return;
2881
41
  }
2882
126
  LastFieldIsNonZeroWidthBitfield = true;
2883
126
  CurrentBitfieldSize = Info.Size;
2884
126
  if (UseExternalLayout) {
2885
5
    auto FieldBitOffset = External.getExternalFieldOffset(FD);
2886
5
    placeFieldAtBitOffset(FieldBitOffset);
2887
5
    auto NewSize = Context.toCharUnitsFromBits(
2888
5
        llvm::alignDown(FieldBitOffset, Context.toBits(Info.Alignment)) +
2889
5
        Context.toBits(Info.Size));
2890
5
    Size = std::max(Size, NewSize);
2891
5
    Alignment = std::max(Alignment, Info.Alignment);
2892
121
  } else if (IsUnion) {
2893
12
    placeFieldAtOffset(CharUnits::Zero());
2894
12
    Size = std::max(Size, Info.Size);
2895
    // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
2896
109
  } else {
2897
    // Allocate a new block of memory and place the bitfield in it.
2898
109
    CharUnits FieldOffset = Size.alignTo(Info.Alignment);
2899
109
    placeFieldAtOffset(FieldOffset);
2900
109
    Size = FieldOffset + Info.Size;
2901
109
    Alignment = std::max(Alignment, Info.Alignment);
2902
109
    RemainingBitsInField = Context.toBits(Info.Size) - Width;
2903
109
  }
2904
126
}
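
A hedged illustration of the rule noted above that MSVC will not merge bitfields whose declared types have different storage sizes into one allocation (sketch, not from this file; MSVC-compatible compiler assumed):

#include <cstdio>

struct Merged   { int   a : 4; int b : 4; };  // same storage type: one unit
struct Separate { short a : 4; int b : 4; };  // different sizes: two units

int main() {
  // Expected under the MS ABI: sizeof(Merged) == 4, while Separate uses a
  // 2-byte unit for 'a' plus a 4-byte unit for 'b', giving sizeof == 8.
  std::printf("%zu %zu\n", sizeof(Merged), sizeof(Separate));
  return 0;
}
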
2905
2906
void
2907
42
MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField(const FieldDecl *FD) {
2908
  // Zero-width bitfields are ignored unless they follow a non-zero-width
2909
  // bitfield.
2910
42
  if (!LastFieldIsNonZeroWidthBitfield) {
2911
16
    placeFieldAtOffset(IsUnion ? 
CharUnits::Zero()4
: Size);
2912
    // TODO: Add a Sema warning that MS ignores alignment for zero
2913
    // sized bitfields that occur after zero-size bitfields or non-bitfields.
2914
20
    return;
2915
20
  }
2916
22
  LastFieldIsNonZeroWidthBitfield = false;
2917
22
  ElementInfo Info = getAdjustedElementInfo(FD);
2918
22
  if (IsUnion) {
2919
8
    placeFieldAtOffset(CharUnits::Zero());
2920
8
    Size = std::max(Size, Info.Size);
2921
    // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
2922
14
  } else {
2923
    // Round up the current record size to the field's alignment boundary.
2924
14
    CharUnits FieldOffset = Size.alignTo(Info.Alignment);
2925
14
    placeFieldAtOffset(FieldOffset);
2926
14
    Size = FieldOffset;
2927
14
    Alignment = std::max(Alignment, Info.Alignment);
2928
14
  }
2929
22
}
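
An illustrative case for the zero-width handling above (not from this file; MSVC-compatible compiler assumed): a zero-width bitfield only matters when it follows a non-zero-width one, where it closes the current allocation.

#include <cstdio>

struct Split {
  int a : 3;
  int   : 0;  // follows a non-zero-width bitfield: closes the current unit
  int b : 3;  // expected to start a fresh int-sized allocation
};

int main() {
  // Expected under the MS ABI: two int-sized units, so sizeof(Split) == 8.
  std::printf("%zu\n", sizeof(Split));
  return 0;
}
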
2930
2931
4.37k
void MicrosoftRecordLayoutBuilder::injectVBPtr(const CXXRecordDecl *RD) {
2932
4.37k
  if (!HasVBPtr || 
SharedVBPtrBase850
)
2933
3.75k
    return;
2934
  // Inject the VBPointer at the injection site.
2935
625
  CharUnits InjectionSite = VBPtrOffset;
2936
  // But before we do, make sure it's properly aligned.
2937
625
  VBPtrOffset = VBPtrOffset.alignTo(PointerInfo.Alignment);
2938
  // Determine where the first field should be laid out after the vbptr.
2939
625
  CharUnits FieldStart = VBPtrOffset + PointerInfo.Size;
2940
  // Shift everything after the vbptr down, unless we're using an external
2941
  // layout.
2942
625
  if (UseExternalLayout) {
2943
    // It is possible that there were no fields or bases located after vbptr,
2944
    // so the size was not adjusted before.
2945
2
    if (Size < FieldStart)
2946
1
      Size = FieldStart;
2947
2
    return;
2948
2
  }
2949
  // Make sure that the amount we push the fields back by is a multiple of the
2950
  // alignment.
2951
623
  CharUnits Offset = (FieldStart - InjectionSite)
2952
623
                         .alignTo(std::max(RequiredAlignment, Alignment));
2953
623
  Size += Offset;
2954
623
  for (uint64_t &FieldOffset : FieldOffsets)
2955
357
    FieldOffset += Context.toBits(Offset);
2956
623
  for (BaseOffsetsMapTy::value_type &Base : Bases)
2957
257
    if (Base.second >= InjectionSite)
2958
64
      Base.second += Offset;
2959
623
}
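
A minimal sketch of the vbptr injection performed above (not from this file; MSVC-compatible compiler assumed): a class with a virtual base that cannot share a vbptr gets one injected ahead of its own fields, and the virtual base itself is laid out after the non-virtual part.

#include <cstdio>

struct VBase   { int v; };
struct Derived : virtual VBase { int d; };

int main() {
  Derived obj;
  // Expected MS ABI layout: [vbptr][d][VBase subobject]; the VBase part is
  // therefore placed after Derived's non-virtual size.
  std::printf("sizeof(Derived)=%zu  VBase offset=%td\n", sizeof(Derived),
              reinterpret_cast<char *>(static_cast<VBase *>(&obj)) -
              reinterpret_cast<char *>(&obj));
  return 0;
}
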
2960
2961
4.37k
void MicrosoftRecordLayoutBuilder::injectVFPtr(const CXXRecordDecl *RD) {
2962
4.37k
  if (!HasOwnVFPtr)
2963
3.60k
    return;
2964
  // Make sure that the amount we push the struct back by is a multiple of the
2965
  // alignment.
2966
776
  CharUnits Offset =
2967
776
      PointerInfo.Size.alignTo(std::max(RequiredAlignment, Alignment));
2968
  // Push back the vbptr, but increase the size of the object and push back
2969
  // regular fields by the offset only if not using external record layout.
2970
776
  if (HasVBPtr)
2971
110
    VBPtrOffset += Offset;
2972
776
2973
776
  if (UseExternalLayout) {
2974
    // The class may have no bases or fields, but still have a vfptr
2975
    // (e.g. it's an interface class). The size was not correctly set before
2976
    // in this case.
2977
2
    if (FieldOffsets.empty() && 
Bases.empty()1
)
2978
1
      Size += Offset;
2979
2
    return;
2980
2
  }
2981
774
2982
774
  Size += Offset;
2983
774
2984
  // If we're using an external layout, the field offsets have already
2985
  // accounted for this adjustment.
2986
774
  for (uint64_t &FieldOffset : FieldOffsets)
2987
159
    FieldOffset += Context.toBits(Offset);
2988
774
  for (BaseOffsetsMapTy::value_type &Base : Bases)
2989
83
    Base.second += Offset;
2990
774
}
2991
2992
4.37k
void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) {
2993
4.37k
  if (!HasVBPtr)
2994
3.52k
    return;
2995
  // Vtordisps are always 4 bytes (even in 64-bit mode)
2996
850
  CharUnits VtorDispSize = CharUnits::fromQuantity(4);
2997
850
  CharUnits VtorDispAlignment = VtorDispSize;
2998
  // vtordisps respect pragma pack.
2999
850
  if (!MaxFieldAlignment.isZero())
3000
16
    VtorDispAlignment = std::min(VtorDispAlignment, MaxFieldAlignment);
3001
  // The alignment of the vtordisp is at least the required alignment of the
3002
  // entire record.  This requirement may be present to support vtordisp
3003
  // injection.
3004
1.26k
  for (const CXXBaseSpecifier &VBase : RD->vbases()) {
3005
1.26k
    const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
3006
1.26k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
3007
1.26k
    RequiredAlignment =
3008
1.26k
        std::max(RequiredAlignment, BaseLayout.getRequiredAlignment());
3009
1.26k
  }
3010
850
  VtorDispAlignment = std::max(VtorDispAlignment, RequiredAlignment);
3011
  // Compute the vtordisp set.
3012
850
  llvm::SmallPtrSet<const CXXRecordDecl *, 2> HasVtorDispSet;
3013
850
  computeVtorDispSet(HasVtorDispSet, RD);
3014
  // Iterate through the virtual bases and lay them out.
3015
850
  const ASTRecordLayout *PreviousBaseLayout = nullptr;
3016
1.26k
  for (const CXXBaseSpecifier &VBase : RD->vbases()) {
3017
1.26k
    const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
3018
1.26k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
3019
1.26k
    bool HasVtordisp = HasVtorDispSet.count(BaseDecl) > 0;
3020
    // Insert padding between two bases if the first (left) one is zero sized or
3021
    // contains a zero sized subobject and the second (right) one is zero sized
3022
    // or leads with a zero sized base.  The padding between virtual bases is 4
3023
    // bytes (in both 32- and 64-bit modes) and always involves rounding up to
3024
    // the required alignment; we don't know why.
3025
1.26k
    if ((PreviousBaseLayout && 
PreviousBaseLayout->endsWithZeroSizedObject()413
&&
3026
165
         BaseLayout.leadsWithZeroSizedBase() && 
!recordUsesEBO(RD)116
) ||
3027
1.14k
        HasVtordisp) {
3028
274
      Size = Size.alignTo(VtorDispAlignment) + VtorDispSize;
3029
274
      Alignment = std::max(VtorDispAlignment, Alignment);
3030
274
    }
3031
    // Insert the virtual base.
3032
1.26k
    ElementInfo Info = getAdjustedElementInfo(BaseLayout);
3033
1.26k
    CharUnits BaseOffset;
3034
1.26k
3035
    // Respect the external AST source base offset, if present.
3036
1.26k
    if (UseExternalLayout) {
3037
3
      if (!External.getExternalVBaseOffset(BaseDecl, BaseOffset))
3038
3
        BaseOffset = Size;
3039
3
    } else
3040
1.26k
      BaseOffset = Size.alignTo(Info.Alignment);
3041
1.26k
3042
1.26k
    assert(BaseOffset >= Size && "base offset already allocated");
3043
1.26k
3044
1.26k
    VBases.insert(std::make_pair(BaseDecl,
3045
1.26k
        ASTRecordLayout::VBaseInfo(BaseOffset, HasVtordisp)));
3046
1.26k
    Size = BaseOffset + BaseLayout.getNonVirtualSize();
3047
1.26k
    PreviousBaseLayout = &BaseLayout;
3048
1.26k
  }
3049
850
}
3050
3051
4.69k
void MicrosoftRecordLayoutBuilder::finalizeLayout(const RecordDecl *RD) {
3052
  // Respect required alignment.  Note that in 32-bit mode the required alignment
3053
  // may be 0 and cause the size not to be updated.
3054
4.69k
  DataSize = Size;
3055
4.69k
  if (!RequiredAlignment.isZero()) {
3056
2.35k
    Alignment = std::max(Alignment, RequiredAlignment);
3057
2.35k
    auto RoundingAlignment = Alignment;
3058
2.35k
    if (!MaxFieldAlignment.isZero())
3059
132
      RoundingAlignment = std::min(RoundingAlignment, MaxFieldAlignment);
3060
2.35k
    RoundingAlignment = std::max(RoundingAlignment, RequiredAlignment);
3061
2.35k
    Size = Size.alignTo(RoundingAlignment);
3062
2.35k
  }
3063
4.69k
  if (Size.isZero()) {
3064
1.60k
    if (!recordUsesEBO(RD) || 
!cast<CXXRecordDecl>(RD)->isEmpty()4
) {
3065
1.59k
      EndsWithZeroSizedObject = true;
3066
1.59k
      LeadsWithZeroSizedBase = true;
3067
1.59k
    }
3068
    // Zero-sized structures have size equal to their alignment if a
3069
    // __declspec(align) came into play.
3070
1.60k
    if (RequiredAlignment >= MinEmptyStructSize)
3071
750
      Size = Alignment;
3072
850
    else
3073
850
      Size = MinEmptyStructSize;
3074
1.60k
  }
3075
4.69k
3076
4.69k
  if (UseExternalLayout) {
3077
8
    Size = Context.toCharUnitsFromBits(External.Size);
3078
8
    if (External.Align)
3079
4
      Alignment = Context.toCharUnitsFromBits(External.Align);
3080
8
  }
3081
4.69k
}
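
A hedged example of the zero-size rule above (not from this file; MSVC-compatible compiler assumed): an otherwise empty struct whose __declspec(align) meets the minimum empty-struct size takes its alignment as its size.

#include <cstdio>

struct Empty {};                               // expected sizeof == 1 in C++
struct __declspec(align(16)) AlignedEmpty {};  // expected sizeof == 16

int main() {
  std::printf("%zu %zu\n", sizeof(Empty), sizeof(AlignedEmpty));
  return 0;
}
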
3082
3083
// Recursively walks the non-virtual bases of a class and determines if any of
3084
// them are in the set of bases with overridden methods.
3085
static bool
3086
RequiresVtordisp(const llvm::SmallPtrSetImpl<const CXXRecordDecl *> &
3087
                     BasesWithOverriddenMethods,
3088
669
                 const CXXRecordDecl *RD) {
3089
669
  if (BasesWithOverriddenMethods.count(RD))
3090
94
    return true;
3091
  // If any of a virtual base's non-virtual bases (recursively) requires a
3092
  // vtordisp, then so does this virtual base.
3093
575
  for (const CXXBaseSpecifier &Base : RD->bases())
3094
87
    if (!Base.isVirtual() &&
3095
52
        RequiresVtordisp(BasesWithOverriddenMethods,
3096
52
                         Base.getType()->getAsCXXRecordDecl()))
3097
19
      return true;
3098
556
  return false;
3099
575
}
3100
3101
void MicrosoftRecordLayoutBuilder::computeVtorDispSet(
3102
    llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtordispSet,
3103
850
    const CXXRecordDecl *RD) const {
3104
  // /vd2 or #pragma vtordisp(2): Always use vtordisps for virtual bases with
3105
  // vftables.
3106
850
  if (RD->getMSVtorDispMode() == MSVtorDispMode::ForVFTable) {
3107
25
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
3108
25
      const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
3109
25
      const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
3110
25
      if (Layout.hasExtendableVFPtr())
3111
23
        HasVtordispSet.insert(BaseDecl);
3112
25
    }
3113
20
    return;
3114
20
  }
3115
830
3116
  // If any of our bases need a vtordisp for this type, so do we.  Check our
3117
  // direct bases for vtordisp requirements.
3118
1.52k
  
for (const CXXBaseSpecifier &Base : RD->bases())830
{
3119
1.52k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
3120
1.52k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
3121
1.52k
    for (const auto &bi : Layout.getVBaseOffsetsMap())
3122
478
      if (bi.second.hasVtorDisp())
3123
43
        HasVtordispSet.insert(bi.first);
3124
1.52k
  }
3125
  // We don't introduce any additional vtordisps if either:
3126
  // * No user-declared constructor or destructor is present.
3127
  // * #pragma vtordisp(0) or the /vd0 flag is in use.
3128
830
  if ((!RD->hasUserDeclaredConstructor() && 
!RD->hasUserDeclaredDestructor()473
) ||
3129
420
      RD->getMSVtorDispMode() == MSVtorDispMode::Never)
3130
415
    return;
3131
  // /vd1 or #pragma vtordisp(1): Try to guess based on whether we think it's
3132
  // possible for a partially constructed object with virtual base overrides to
3133
  // escape a non-trivial constructor.
3134
415
  assert(RD->getMSVtorDispMode() == MSVtorDispMode::ForVBaseOverride);
3135
  // Compute a set of base classes which define methods we override.  A virtual
3136
  // base in this set will require a vtordisp.  A virtual base that transitively
3137
  // contains one of these bases as a non-virtual base will also require a
3138
  // vtordisp.
3139
415
  llvm::SmallPtrSet<const CXXMethodDecl *, 8> Work;
3140
415
  llvm::SmallPtrSet<const CXXRecordDecl *, 2> BasesWithOverriddenMethods;
3141
  // Seed the working set with our non-destructor, non-pure virtual methods.
3142
415
  for (const CXXMethodDecl *MD : RD->methods())
3143
2.08k
    if (MicrosoftVTableContext::hasVtableSlot(MD) &&
3144
258
        !isa<CXXDestructorDecl>(MD) && 
!MD->isPure()181
)
3145
179
      Work.insert(MD);
3146
723
  while (!Work.empty()) {
3147
308
    const CXXMethodDecl *MD = *Work.begin();
3148
308
    auto MethodRange = MD->overridden_methods();
3149
    // If a virtual method has no overridden methods, it lives in its parent's vtable.
3150
308
    if (MethodRange.begin() == MethodRange.end())
3151
192
      BasesWithOverriddenMethods.insert(MD->getParent());
3152
116
    else
3153
116
      Work.insert(MethodRange.begin(), MethodRange.end());
3154
    // We've finished processing this element, remove it from the working set.
3155
308
    Work.erase(MD);
3156
308
  }
3157
  // For each of our virtual bases, check if it is in the set of overridden
3158
  // bases or if it transitively contains a non-virtual base that is.
3159
649
  for (const CXXBaseSpecifier &Base : RD->vbases()) {
3160
649
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
3161
649
    if (!HasVtordispSet.count(BaseDecl) &&
3162
617
        RequiresVtordisp(BasesWithOverriddenMethods, BaseDecl))
3163
94
      HasVtordispSet.insert(BaseDecl);
3164
649
  }
3165
415
}
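
A hedged sketch of when the default /vd1 logic above introduces a vtordisp (not from this file; MSVC-compatible compiler assumed): the class has a user-declared constructor and overrides a virtual function introduced by its virtual base.

#include <cstdio>

struct V {
  virtual void f() {}
};

struct D : virtual V {
  D() {}                // user-declared constructor keeps vtordisps in play
  void f() override {}  // overrides a method whose vftable lives in V
};

int main() {
  // Under #pragma vtordisp(1) (the default modeled above), the V subobject in
  // D is expected to be preceded by a 4-byte vtordisp, which shows up in the
  // object size.
  std::printf("sizeof(D)=%zu\n", sizeof(D));
  return 0;
}
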
3166
3167
/// getASTRecordLayout - Get or compute information about the layout of the
3168
/// specified record (struct/union/class), which indicates its size and field
3169
/// position information.
3170
const ASTRecordLayout &
3171
3.49M
ASTContext::getASTRecordLayout(const RecordDecl *D) const {
3172
  // These asserts test different things.  A record has a definition
3173
  // as soon as we begin to parse the definition.  That definition is
3174
  // not a complete definition (which is what isCompleteDefinition() tests)
3175
  // until we *finish* parsing the definition.
3176
3.49M
3177
3.49M
  if (D->hasExternalLexicalStorage() && 
!D->getDefinition()79.8k
)
3178
0
    getExternalSource()->CompleteType(const_cast<RecordDecl*>(D));
3179
3.49M
3180
3.49M
  D = D->getDefinition();
3181
3.49M
  assert(D && "Cannot get layout of forward declarations!");
3182
3.49M
  assert(!D->isInvalidDecl() && "Cannot get layout of invalid decl!");
3183
3.49M
  assert(D->isCompleteDefinition() && "Cannot layout type before complete!");
3184
3.49M
3185
  // Look up this layout; if it is already laid out, return what we have.
3186
  // Note that we can't save a reference to the entry because this function
3187
  // is recursive.
3188
3.49M
  const ASTRecordLayout *Entry = ASTRecordLayouts[D];
3189
3.49M
  if (Entry) 
return *Entry3.18M
;
3190
310k
3191
310k
  const ASTRecordLayout *NewEntry = nullptr;
3192
310k
3193
310k
  if (isMsLayout(*this)) {
3194
4.69k
    MicrosoftRecordLayoutBuilder Builder(*this);
3195
4.69k
    if (const auto *RD = dyn_cast<CXXRecordDecl>(D)) {
3196
4.37k
      Builder.cxxLayout(RD);
3197
4.37k
      NewEntry = new (*this) ASTRecordLayout(
3198
4.37k
          *this, Builder.Size, Builder.Alignment, Builder.Alignment,
3199
4.37k
          Builder.Alignment, Builder.RequiredAlignment, Builder.HasOwnVFPtr,
3200
4.37k
          Builder.HasOwnVFPtr || 
Builder.PrimaryBase3.60k
, Builder.VBPtrOffset,
3201
4.37k
          Builder.DataSize, Builder.FieldOffsets, Builder.NonVirtualSize,
3202
4.37k
          Builder.Alignment, Builder.Alignment, CharUnits::Zero(),
3203
4.37k
          Builder.PrimaryBase, false, Builder.SharedVBPtrBase,
3204
4.37k
          Builder.EndsWithZeroSizedObject, Builder.LeadsWithZeroSizedBase,
3205
4.37k
          Builder.Bases, Builder.VBases);
3206
322
    } else {
3207
322
      Builder.layout(D);
3208
322
      NewEntry = new (*this) ASTRecordLayout(
3209
322
          *this, Builder.Size, Builder.Alignment, Builder.Alignment,
3210
322
          Builder.Alignment, Builder.RequiredAlignment, Builder.Size,
3211
322
          Builder.FieldOffsets);
3212
322
    }
3213
305k
  } else {
3214
305k
    if (const auto *RD = dyn_cast<CXXRecordDecl>(D)) {
3215
186k
      EmptySubobjectMap EmptySubobjects(*this, RD);
3216
186k
      ItaniumRecordLayoutBuilder Builder(*this, &EmptySubobjects);
3217
186k
      Builder.Layout(RD);
3218
186k
3219
      // In certain situations, we are allowed to lay out objects in the
3220
      // tail-padding of base classes.  This is ABI-dependent.
3221
      // FIXME: this should be stored in the record layout.
3222
186k
      bool skipTailPadding =
3223
186k
          mustSkipTailPadding(getTargetInfo().getCXXABI(), RD);
3224
186k
3225
      // FIXME: This should be done in FinalizeLayout.
3226
186k
      CharUnits DataSize =
3227
105k
          skipTailPadding ? Builder.getSize() : 
Builder.getDataSize()81.1k
;
3228
186k
      CharUnits NonVirtualSize =
3229
105k
          skipTailPadding ? DataSize : 
Builder.NonVirtualSize81.1k
;
3230
186k
      NewEntry = new (*this) ASTRecordLayout(
3231
186k
          *this, Builder.getSize(), Builder.Alignment,
3232
186k
          Builder.PreferredAlignment, Builder.UnadjustedAlignment,
3233
          /*RequiredAlignment : used by MS-ABI)*/
3234
186k
          Builder.Alignment, Builder.HasOwnVFPtr, RD->isDynamicClass(),
3235
186k
          CharUnits::fromQuantity(-1), DataSize, Builder.FieldOffsets,
3236
186k
          NonVirtualSize, Builder.NonVirtualAlignment,
3237
186k
          Builder.PreferredNVAlignment,
3238
186k
          EmptySubobjects.SizeOfLargestEmptySubobject, Builder.PrimaryBase,
3239
186k
          Builder.PrimaryBaseIsVirtual, nullptr, false, false, Builder.Bases,
3240
186k
          Builder.VBases);
3241
119k
    } else {
3242
119k
      ItaniumRecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/nullptr);
3243
119k
      Builder.Layout(D);
3244
119k
3245
119k
      NewEntry = new (*this) ASTRecordLayout(
3246
119k
          *this, Builder.getSize(), Builder.Alignment,
3247
119k
          Builder.PreferredAlignment, Builder.UnadjustedAlignment,
3248
          /*RequiredAlignment : used by MS-ABI*/
3249
119k
          Builder.Alignment, Builder.getSize(), Builder.FieldOffsets);
3250
119k
    }
3251
305k
  }
3252
310k
3253
310k
  ASTRecordLayouts[D] = NewEntry;
3254
310k
3255
310k
  if (getLangOpts().DumpRecordLayouts) {
3256
1.19k
    llvm::outs() << "\n*** Dumping AST Record Layout\n";
3257
1.19k
    DumpRecordLayout(D, llvm::outs(), getLangOpts().DumpRecordLayoutsSimple);
3258
1.19k
  }
3259
310k
3260
310k
  return *NewEntry;
3261
310k
}
3262
3263
157k
const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) {
3264
157k
  if (!getTargetInfo().getCXXABI().hasKeyFunctions())
3265
3.37k
    return nullptr;
3266
153k
3267
153k
  assert(RD->getDefinition() && "Cannot get key function for forward decl!");
3268
153k
  RD = RD->getDefinition();
3269
153k
3270
  // Beware:
3271
  //  1) computing the key function might trigger deserialization, which might
3272
  //     invalidate iterators into KeyFunctions
3273
  //  2) 'get' on the LazyDeclPtr might also trigger deserialization and
3274
  //     invalidate the LazyDeclPtr within the map itself
3275
153k
  LazyDeclPtr Entry = KeyFunctions[RD];
3276
153k
  const Decl *Result =
3277
134k
      Entry ? 
Entry.get(getExternalSource())19.0k
: computeKeyFunction(*this, RD);
3278
153k
3279
  // Store it back if it changed.
3280
153k
  if (Entry.isOffset() || Entry.isValid() != bool(Result))
3281
12.1k
    KeyFunctions[RD] = const_cast<Decl*>(Result);
3282
153k
3283
153k
  return cast_or_null<CXXMethodDecl>(Result);
3284
153k
}
3285
3286
14
void ASTContext::setNonKeyFunction(const CXXMethodDecl *Method) {
3287
14
  assert(Method == Method->getFirstDecl() &&
3288
14
         "not working with method declaration from class definition");
3289
14
3290
  // Look up the cache entry.  Since we're working with the first
3291
  // declaration, its parent must be the class definition, which is
3292
  // the correct key for the KeyFunctions hash.
3293
14
  const auto &Map = KeyFunctions;
3294
14
  auto I = Map.find(Method->getParent());
3295
14
3296
  // If it's not cached, there's nothing to do.
3297
14
  if (I == Map.end()) 
return0
;
3298
14
3299
  // If it is cached, check whether it's the target method, and if so,
3300
  // remove it from the cache. Note, the call to 'get' might invalidate
3301
  // the iterator and the LazyDeclPtr object within the map.
3302
14
  LazyDeclPtr Ptr = I->second;
3303
14
  if (Ptr.get(getExternalSource()) == Method) {
3304
    // FIXME: remember that we did this for module / chained PCH state?
3305
14
    KeyFunctions.erase(Method->getParent());
3306
14
  }
3307
14
}
3308
3309
563
static uint64_t getFieldOffset(const ASTContext &C, const FieldDecl *FD) {
3310
563
  const ASTRecordLayout &Layout = C.getASTRecordLayout(FD->getParent());
3311
563
  return Layout.getFieldOffset(FD->getFieldIndex());
3312
563
}
3313
3314
523
uint64_t ASTContext::getFieldOffset(const ValueDecl *VD) const {
3315
523
  uint64_t OffsetInBits;
3316
523
  if (const FieldDecl *FD = dyn_cast<FieldDecl>(VD)) {
3317
497
    OffsetInBits = ::getFieldOffset(*this, FD);
3318
26
  } else {
3319
26
    const IndirectFieldDecl *IFD = cast<IndirectFieldDecl>(VD);
3320
26
3321
26
    OffsetInBits = 0;
3322
26
    for (const NamedDecl *ND : IFD->chain())
3323
66
      OffsetInBits += ::getFieldOffset(*this, cast<FieldDecl>(ND));
3324
26
  }
3325
523
3326
523
  return OffsetInBits;
3327
523
}
3328
3329
uint64_t ASTContext::lookupFieldBitOffset(const ObjCInterfaceDecl *OID,
3330
                                          const ObjCImplementationDecl *ID,
3331
3.39k
                                          const ObjCIvarDecl *Ivar) const {
3332
3.39k
  const ObjCInterfaceDecl *Container = Ivar->getContainingInterface();
3333
3.39k
3334
  // FIXME: We should eliminate the need to have ObjCImplementationDecl passed
3335
  // in here; it should never be necessary because that should be the lexical
3336
  // decl context for the ivar.
3337
3.39k
3338
  // If we know we have an implementation (and the ivar is in it) then
3340
  // look it up in the implementation layout.
3340
3.39k
  const ASTRecordLayout *RL;
3341
3.39k
  if (ID && 
declaresSameEntity(ID->getClassInterface(), Container)2.51k
)
3342
2.46k
    RL = &getASTObjCImplementationLayout(ID);
3343
932
  else
3344
932
    RL = &getASTObjCInterfaceLayout(Container);
3345
3.39k
3346
  // Compute field index.
3347
  //
3348
  // FIXME: The index here is closely tied to how ASTContext::getObjCLayout is
3349
  // implemented. This should be fixed to get the information from the layout
3350
  // directly.
3351
3.39k
  unsigned Index = 0;
3352
3.39k
3353
3.39k
  for (const ObjCIvarDecl *IVD = Container->all_declared_ivar_begin();
3354
8.88k
       IVD; 
IVD = IVD->getNextIvar()5.48k
) {
3355
8.88k
    if (Ivar == IVD)
3356
3.39k
      break;
3357
5.48k
    ++Index;
3358
5.48k
  }
3359
3.39k
  assert(Index < RL->getFieldCount() && "Ivar is not inside record layout!");
3360
3.39k
3361
3.39k
  return RL->getFieldOffset(Index);
3362
3.39k
}
3363
3364
/// getObjCLayout - Get or compute information about the layout of the
3365
/// given interface.
3366
///
3367
/// \param Impl - If given, also include the layout of the interface's
3368
/// implementation. This may differ by including synthesized ivars.
3369
const ASTRecordLayout &
3370
ASTContext::getObjCLayout(const ObjCInterfaceDecl *D,
3371
19.4k
                          const ObjCImplementationDecl *Impl) const {
3372
  // Retrieve the definition
3373
19.4k
  if (D->hasExternalLexicalStorage() && 
!D->getDefinition()104
)
3374
0
    getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl*>(D));
3375
19.4k
  D = D->getDefinition();
3376
19.4k
  assert(D && !D->isInvalidDecl() && D->isThisDeclarationADefinition() &&
3377
19.4k
         "Invalid interface decl!");
3378
19.4k
3379
  // Look up this layout, if already laid out, return what we have.
3380
19.4k
  const ObjCContainerDecl *Key =
3381
15.9k
    Impl ? 
(const ObjCContainerDecl*) Impl3.54k
: (const ObjCContainerDecl*) D;
3382
19.4k
  if (const ASTRecordLayout *Entry = ObjCLayouts[Key])
3383
11.8k
    return *Entry;
3384
7.62k
3385
  // Add in synthesized ivar count if laying out an implementation.
3386
7.62k
  if (Impl) {
3387
2.56k
    unsigned SynthCount = CountNonClassIvars(D);
3388
    // If there aren't any synthesized ivars then reuse the interface
3389
    // entry. Note we can't cache this because we simply free all
3390
    // entries later; however we shouldn't look up implementations
3391
    // frequently.
3392
2.56k
    if (SynthCount == 0)
3393
2.32k
      return getObjCLayout(D, nullptr);
3394
5.29k
  }
3395
5.29k
3396
5.29k
  ItaniumRecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/nullptr);
3397
5.29k
  Builder.Layout(D);
3398
5.29k
3399
5.29k
  const ASTRecordLayout *NewEntry = new (*this) ASTRecordLayout(
3400
5.29k
      *this, Builder.getSize(), Builder.Alignment, Builder.PreferredAlignment,
3401
5.29k
      Builder.UnadjustedAlignment,
3402
      /*RequiredAlignment : used by MS-ABI*/
3403
5.29k
      Builder.Alignment, Builder.getDataSize(), Builder.FieldOffsets);
3404
5.29k
3405
5.29k
  ObjCLayouts[Key] = NewEntry;
3406
5.29k
3407
5.29k
  return *NewEntry;
3408
5.29k
}
3409
3410
static void PrintOffset(raw_ostream &OS,
3411
5.19k
                        CharUnits Offset, unsigned IndentLevel) {
3412
5.19k
  OS << llvm::format("%10" PRId64 " | ", (int64_t)Offset.getQuantity());
3413
5.19k
  OS.indent(IndentLevel * 2);
3414
5.19k
}
3415
3416
static void PrintBitFieldOffset(raw_ostream &OS, CharUnits Offset,
3417
                                unsigned Begin, unsigned Width,
3418
297
                                unsigned IndentLevel) {
3419
297
  llvm::SmallString<10> Buffer;
3420
297
  {
3421
297
    llvm::raw_svector_ostream BufferOS(Buffer);
3422
297
    BufferOS << Offset.getQuantity() << ':';
3423
297
    if (Width == 0) {
3424
52
      BufferOS << '-';
3425
245
    } else {
3426
245
      BufferOS << Begin << '-' << (Begin + Width - 1);
3427
245
    }
3428
297
  }
3429
297
3430
297
  OS << llvm::right_justify(Buffer, 10) << " | ";
3431
297
  OS.indent(IndentLevel * 2);
3432
297
}
3433
3434
2.13k
static void PrintIndentNoOffset(raw_ostream &OS, unsigned IndentLevel) {
3435
2.13k
  OS << "           | ";
3436
2.13k
  OS.indent(IndentLevel * 2);
3437
2.13k
}
3438
3439
static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD,
3440
                             const ASTContext &C,
3441
                             CharUnits Offset,
3442
                             unsigned IndentLevel,
3443
                             const char* Description,
3444
                             bool PrintSizeInfo,
3445
2.68k
                             bool IncludeVirtualBases) {
3446
2.68k
  const ASTRecordLayout &Layout = C.getASTRecordLayout(RD);
3447
2.68k
  auto CXXRD = dyn_cast<CXXRecordDecl>(RD);
3448
2.68k
3449
2.68k
  PrintOffset(OS, Offset, IndentLevel);
3450
2.68k
  OS << C.getTypeDeclType(const_cast<RecordDecl*>(RD)).getAsString();
3451
2.68k
  if (Description)
3452
1.57k
    OS << ' ' << Description;
3453
2.68k
  if (CXXRD && 
CXXRD->isEmpty()2.59k
)
3454
815
    OS << " (empty)";
3455
2.68k
  OS << '\n';
3456
2.68k
3457
2.68k
  IndentLevel++;
3458
2.68k
3459
  // Dump bases.
3460
2.68k
  if (CXXRD) {
3461
2.59k
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
3462
2.59k
    bool HasOwnVFPtr = Layout.hasOwnVFPtr();
3463
2.59k
    bool HasOwnVBPtr = Layout.hasOwnVBPtr();
3464
2.59k
3465
    // Vtable pointer.
3466
2.59k
    if (CXXRD->isDynamicClass() && 
!PrimaryBase793
&&
!isMsLayout(C)736
) {
3467
17
      PrintOffset(OS, Offset, IndentLevel);
3468
17
      OS << '(' << *RD << " vtable pointer)\n";
3469
2.57k
    } else if (HasOwnVFPtr) {
3470
345
      PrintOffset(OS, Offset, IndentLevel);
3471
      // vfptr (for Microsoft C++ ABI)
3472
345
      OS << '(' << *RD << " vftable pointer)\n";
3473
345
    }
3474
2.59k
3475
    // Collect nvbases.
3476
2.59k
    SmallVector<const CXXRecordDecl *, 4> Bases;
3477
1.40k
    for (const CXXBaseSpecifier &Base : CXXRD->bases()) {
3478
1.40k
      assert(!Base.getType()->isDependentType() &&
3479
1.40k
             "Cannot layout class with dependent bases.");
3480
1.40k
      if (!Base.isVirtual())
3481
767
        Bases.push_back(Base.getType()->getAsCXXRecordDecl());
3482
1.40k
    }
3483
2.59k
3484
    // Sort nvbases by offset.
3485
2.59k
    llvm::stable_sort(
3486
310
        Bases, [&](const CXXRecordDecl *L, const CXXRecordDecl *R) {
3487
310
          return Layout.getBaseClassOffset(L) < Layout.getBaseClassOffset(R);
3488
310
        });
3489
2.59k
3490
    // Dump (non-virtual) bases
3491
767
    for (const CXXRecordDecl *Base : Bases) {
3492
767
      CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base);
3493
767
      DumpRecordLayout(OS, Base, C, BaseOffset, IndentLevel,
3494
710
                       Base == PrimaryBase ? 
"(primary base)"57
: "(base)",
3495
767
                       /*PrintSizeInfo=*/false,
3496
767
                       /*IncludeVirtualBases=*/false);
3497
767
    }
3498
2.59k
3499
    // vbptr (for Microsoft C++ ABI)
3500
2.59k
    if (HasOwnVBPtr) {
3501
406
      PrintOffset(OS, Offset + Layout.getVBPtrOffset(), IndentLevel);
3502
406
      OS << '(' << *RD << " vbtable pointer)\n";
3503
406
    }
3504
2.59k
  }
3505
2.68k
3506
  // Dump fields.
3507
2.68k
  uint64_t FieldNo = 0;
3508
2.68k
  for (RecordDecl::field_iterator I = RD->field_begin(),
3509
4.83k
         E = RD->field_end(); I != E; 
++I, ++FieldNo2.15k
) {
3510
2.15k
    const FieldDecl &Field = **I;
3511
2.15k
    uint64_t LocalFieldOffsetInBits = Layout.getFieldOffset(FieldNo);
3512
2.15k
    CharUnits FieldOffset =
3513
2.15k
      Offset + C.toCharUnitsFromBits(LocalFieldOffsetInBits);
3514
2.15k
3515
    // Recursively dump fields of record type.
3516
2.15k
    if (auto RT = Field.getType()->getAs<RecordType>()) {
3517
185
      DumpRecordLayout(OS, RT->getDecl(), C, FieldOffset, IndentLevel,
3518
185
                       Field.getName().data(),
3519
185
                       /*PrintSizeInfo=*/false,
3520
185
                       /*IncludeVirtualBases=*/true);
3521
185
      continue;
3522
185
    }
3523
1.96k
3524
1.96k
    if (Field.isBitField()) {
3525
297
      uint64_t LocalFieldByteOffsetInBits = C.toBits(FieldOffset - Offset);
3526
297
      unsigned Begin = LocalFieldOffsetInBits - LocalFieldByteOffsetInBits;
3527
297
      unsigned Width = Field.getBitWidthValue(C);
3528
297
      PrintBitFieldOffset(OS, FieldOffset, Begin, Width, IndentLevel);
3529
1.66k
    } else {
3530
1.66k
      PrintOffset(OS, FieldOffset, IndentLevel);
3531
1.66k
    }
3532
1.96k
    OS << Field.getType().getAsString() << ' ' << Field << '\n';
3533
1.96k
  }
3534
2.68k
3535
  // Dump virtual bases.
3536
2.68k
  if (CXXRD && 
IncludeVirtualBases2.59k
) {
3537
1.20k
    const ASTRecordLayout::VBaseOffsetsMapTy &VtorDisps =
3538
1.20k
      Layout.getVBaseOffsetsMap();
3539
1.20k
3540
620
    for (const CXXBaseSpecifier &Base : CXXRD->vbases()) {
3541
620
      assert(Base.isVirtual() && "Found non-virtual class!");
3542
620
      const CXXRecordDecl *VBase = Base.getType()->getAsCXXRecordDecl();
3543
620
3544
620
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBase);
3545
620
3546
620
      if (VtorDisps.find(VBase)->second.hasVtorDisp()) {
3547
74
        PrintOffset(OS, VBaseOffset - CharUnits::fromQuantity(4), IndentLevel);
3548
74
        OS << "(vtordisp for vbase " << *VBase << ")\n";
3549
74
      }
3550
620
3551
620
      DumpRecordLayout(OS, VBase, C, VBaseOffset, IndentLevel,
3552
620
                       VBase == Layout.getPrimaryBase() ?
3553
620
                         
"(primary virtual base)"0
: "(virtual base)",
3554
620
                       /*PrintSizeInfo=*/false,
3555
620
                       /*IncludeVirtualBases=*/false);
3556
620
    }
3557
1.20k
  }
3558
2.68k
3559
2.68k
  if (!PrintSizeInfo) 
return1.57k
;
3560
1.11k
3561
1.11k
  PrintIndentNoOffset(OS, IndentLevel - 1);
3562
1.11k
  OS << "[sizeof=" << Layout.getSize().getQuantity();
3563
1.11k
  if (CXXRD && 
!isMsLayout(C)1.01k
)
3564
154
    OS << ", dsize=" << Layout.getDataSize().getQuantity();
3565
1.11k
  OS << ", align=" << Layout.getAlignment().getQuantity();
3566
1.11k
  if (C.getTargetInfo().defaultsToAIXPowerAlignment())
3567
112
    OS << ", preferredalign=" << Layout.getPreferredAlignment().getQuantity();
3568
1.11k
3569
1.11k
  if (CXXRD) {
3570
1.01k
    OS << ",\n";
3571
1.01k
    PrintIndentNoOffset(OS, IndentLevel - 1);
3572
1.01k
    OS << " nvsize=" << Layout.getNonVirtualSize().getQuantity();
3573
1.01k
    OS << ", nvalign=" << Layout.getNonVirtualAlignment().getQuantity();
3574
1.01k
    if (C.getTargetInfo().defaultsToAIXPowerAlignment())
3575
112
      OS << ", preferrednvalign="
3576
112
         << Layout.getPreferredNVAlignment().getQuantity();
3577
1.01k
  }
3578
1.11k
  OS << "]\n";
3579
1.11k
}
3580
3581
void ASTContext::DumpRecordLayout(const RecordDecl *RD, raw_ostream &OS,
3582
1.19k
                                  bool Simple) const {
3583
1.19k
  if (!Simple) {
3584
1.11k
    ::DumpRecordLayout(OS, RD, *this, CharUnits(), 0, nullptr,
3585
1.11k
                       /*PrintSizeInfo*/ true,
3586
1.11k
                       /*IncludeVirtualBases=*/true);
3587
1.11k
    return;
3588
1.11k
  }
3589
83
3590
  // The "simple" format is designed to be parsed by the
3591
  // layout-override testing code.  There shouldn't be any external
3592
  // uses of this format --- when LLDB overrides a layout, it sets up
3593
  // the data structures directly --- so feel free to adjust this as
3594
  // you like as long as you also update the rudimentary parser for it
3595
  // in libFrontend.
3596
83
3597
83
  const ASTRecordLayout &Info = getASTRecordLayout(RD);
3598
83
  OS << "Type: " << getTypeDeclType(RD).getAsString() << "\n";
3599
83
  OS << "\nLayout: ";
3600
83
  OS << "<ASTRecordLayout\n";
3601
83
  OS << "  Size:" << toBits(Info.getSize()) << "\n";
3602
83
  if (!isMsLayout(*this))
3603
72
    OS << "  DataSize:" << toBits(Info.getDataSize()) << "\n";
3604
83
  OS << "  Alignment:" << toBits(Info.getAlignment()) << "\n";
3605
83
  if (Target->defaultsToAIXPowerAlignment())
3606
0
    OS << "  PreferredAlignment:" << toBits(Info.getPreferredAlignment())
3607
0
       << "\n";
3608
83
  OS << "  FieldOffsets: [";
3609
270
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; 
++i187
) {
3610
187
    if (i)
3611
111
      OS << ", ";
3612
187
    OS << Info.getFieldOffset(i);
3613
187
  }
3614
83
  OS << "]>\n";
3615
83
}
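
For reference, a hypothetical example of the "simple" output produced by the branch above for a plain struct S { int a; int b; }; on a typical target with 32-bit int and Itanium layout (values are illustrative, derived from the printing code rather than captured from a real run):

Type: struct S

Layout: <ASTRecordLayout
  Size:64
  DataSize:64
  Alignment:32
  FieldOffsets: [0, 32]>
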