Coverage Report

Created: 2020-02-25 14:32

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/AST/RecordLayoutBuilder.cpp
Line
Count
Source (jump to first uncovered line)
1
//=== RecordLayoutBuilder.cpp - Helper class for building record layouts ---==//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
9
#include "clang/AST/RecordLayout.h"
10
#include "clang/AST/ASTContext.h"
11
#include "clang/AST/ASTDiagnostic.h"
12
#include "clang/AST/Attr.h"
13
#include "clang/AST/CXXInheritance.h"
14
#include "clang/AST/Decl.h"
15
#include "clang/AST/DeclCXX.h"
16
#include "clang/AST/DeclObjC.h"
17
#include "clang/AST/Expr.h"
18
#include "clang/Basic/TargetInfo.h"
19
#include "llvm/ADT/SmallSet.h"
20
#include "llvm/Support/Format.h"
21
#include "llvm/Support/MathExtras.h"
22
23
using namespace clang;
24
25
namespace {
26
27
/// BaseSubobjectInfo - Represents a single base subobject in a complete class.
28
/// For a class hierarchy like
29
///
30
/// class A { };
31
/// class B : A { };
32
/// class C : A, B { };
33
///
34
/// The BaseSubobjectInfo graph for C will have three BaseSubobjectInfo
35
/// instances, one for B and two for A.
36
///
37
/// If a base is virtual, it will only have one BaseSubobjectInfo allocated.
38
struct BaseSubobjectInfo {
39
  /// Class - The class for this base info.
40
  const CXXRecordDecl *Class;
41
42
  /// IsVirtual - Whether the BaseInfo represents a virtual base or not.
43
  bool IsVirtual;
44
45
  /// Bases - Information about the base subobjects.
46
  SmallVector<BaseSubobjectInfo*, 4> Bases;
47
48
  /// PrimaryVirtualBaseInfo - Holds the base info for the primary virtual base
49
  /// of this base info (if one exists).
50
  BaseSubobjectInfo *PrimaryVirtualBaseInfo;
51
52
  // FIXME: Document.
53
  const BaseSubobjectInfo *Derived;
54
};
55
56
/// Externally provided layout. Typically used when the AST source, such
57
/// as DWARF, lacks all the information that was available at compile time, such
58
/// as alignment attributes on fields and pragmas in effect.
59
struct ExternalLayout {
60
338k
  ExternalLayout() : Size(0), Align(0) {}
61
62
  /// Overall record size in bits.
63
  uint64_t Size;
64
65
  /// Overall record alignment in bits.
66
  uint64_t Align;
67
68
  /// Record field offsets in bits.
69
  llvm::DenseMap<const FieldDecl *, uint64_t> FieldOffsets;
70
71
  /// Direct, non-virtual base offsets.
72
  llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsets;
73
74
  /// Virtual base offsets.
75
  llvm::DenseMap<const CXXRecordDecl *, CharUnits> VirtualBaseOffsets;
76
77
  /// Get the offset of the given field. The external source must provide
78
  /// entries for all fields in the record.
79
17.5k
  uint64_t getExternalFieldOffset(const FieldDecl *FD) {
80
17.5k
    assert(FieldOffsets.count(FD) &&
81
17.5k
           "Field does not have an external offset");
82
17.5k
    return FieldOffsets[FD];
83
17.5k
  }
84
85
22
  bool getExternalNVBaseOffset(const CXXRecordDecl *RD, CharUnits &BaseOffset) {
86
22
    auto Known = BaseOffsets.find(RD);
87
22
    if (Known == BaseOffsets.end())
88
22
      return false;
89
0
    BaseOffset = Known->second;
90
0
    return true;
91
0
  }
92
93
3.31k
  bool getExternalVBaseOffset(const CXXRecordDecl *RD, CharUnits &BaseOffset) {
94
3.31k
    auto Known = VirtualBaseOffsets.find(RD);
95
3.31k
    if (Known == VirtualBaseOffsets.end())
96
3.31k
      return false;
97
0
    BaseOffset = Known->second;
98
0
    return true;
99
0
  }
100
};
101
102
/// EmptySubobjectMap - Keeps track of which empty subobjects exist at different
103
/// offsets while laying out a C++ class.
104
class EmptySubobjectMap {
105
  const ASTContext &Context;
106
  uint64_t CharWidth;
107
108
  /// Class - The class whose empty entries we're keeping track of.
109
  const CXXRecordDecl *Class;
110
111
  /// EmptyClassOffsets - A map from offsets to empty record decls.
112
  typedef llvm::TinyPtrVector<const CXXRecordDecl *> ClassVectorTy;
113
  typedef llvm::DenseMap<CharUnits, ClassVectorTy> EmptyClassOffsetsMapTy;
114
  EmptyClassOffsetsMapTy EmptyClassOffsets;
115
116
  /// MaxEmptyClassOffset - The highest offset known to contain an empty
117
  /// base subobject.
118
  CharUnits MaxEmptyClassOffset;
119
120
  /// ComputeEmptySubobjectSizes - Compute the size of the largest base or
121
  /// member subobject that is empty.
122
  void ComputeEmptySubobjectSizes();
123
124
  void AddSubobjectAtOffset(const CXXRecordDecl *RD, CharUnits Offset);
125
126
  void UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
127
                                 CharUnits Offset, bool PlacingEmptyBase);
128
129
  void UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
130
                                  const CXXRecordDecl *Class, CharUnits Offset,
131
                                  bool PlacingOverlappingField);
132
  void UpdateEmptyFieldSubobjects(const FieldDecl *FD, CharUnits Offset,
133
                                  bool PlacingOverlappingField);
134
135
  /// AnyEmptySubobjectsBeyondOffset - Returns whether there are any empty
136
  /// subobjects beyond the given offset.
137
1.07M
  bool AnyEmptySubobjectsBeyondOffset(CharUnits Offset) const {
138
1.07M
    return Offset <= MaxEmptyClassOffset;
139
1.07M
  }
140
141
  CharUnits
142
160k
  getFieldOffset(const ASTRecordLayout &Layout, unsigned FieldNo) const {
143
160k
    uint64_t FieldOffset = Layout.getFieldOffset(FieldNo);
144
160k
    assert(FieldOffset % CharWidth == 0 &&
145
160k
           "Field offset not at char boundary!");
146
160k
147
160k
    return Context.toCharUnitsFromBits(FieldOffset);
148
160k
  }
149
150
protected:
151
  bool CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
152
                                 CharUnits Offset) const;
153
154
  bool CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
155
                                     CharUnits Offset);
156
157
  bool CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
158
                                      const CXXRecordDecl *Class,
159
                                      CharUnits Offset) const;
160
  bool CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
161
                                      CharUnits Offset) const;
162
163
public:
164
  /// This holds the size of the largest empty subobject (either a base
165
  /// or a member). Will be zero if the record being built doesn't contain
166
  /// any empty classes.
167
  CharUnits SizeOfLargestEmptySubobject;
168
169
  EmptySubobjectMap(const ASTContext &Context, const CXXRecordDecl *Class)
170
221k
  : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) {
171
221k
      ComputeEmptySubobjectSizes();
172
221k
  }
173
174
  /// CanPlaceBaseAtOffset - Return whether the given base class can be placed
175
  /// at the given offset.
176
  /// Returns false if placing the record will result in two components
177
  /// (direct or indirect) of the same type having the same offset.
178
  bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
179
                            CharUnits Offset);
180
181
  /// CanPlaceFieldAtOffset - Return whether a field can be placed at the given
182
  /// offset.
183
  bool CanPlaceFieldAtOffset(const FieldDecl *FD, CharUnits Offset);
184
};
185
186
221k
void EmptySubobjectMap::ComputeEmptySubobjectSizes() {
187
221k
  // Check the bases.
188
221k
  for (const CXXBaseSpecifier &Base : Class->bases()) {
189
54.1k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
190
54.1k
191
54.1k
    CharUnits EmptySize;
192
54.1k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
193
54.1k
    if (BaseDecl->isEmpty()) {
194
35.4k
      // If the class decl is empty, get its size.
195
35.4k
      EmptySize = Layout.getSize();
196
35.4k
    } else {
197
18.6k
      // Otherwise, we get the largest empty subobject for the decl.
198
18.6k
      EmptySize = Layout.getSizeOfLargestEmptySubobject();
199
18.6k
    }
200
54.1k
201
54.1k
    if (EmptySize > SizeOfLargestEmptySubobject)
202
38.0k
      SizeOfLargestEmptySubobject = EmptySize;
203
54.1k
  }
204
221k
205
221k
  // Check the fields.
206
352k
  for (const FieldDecl *FD : Class->fields()) {
207
352k
    const RecordType *RT =
208
352k
        Context.getBaseElementType(FD->getType())->getAs<RecordType>();
209
352k
210
352k
    // We only care about record types.
211
352k
    if (!RT)
212
283k
      continue;
213
69.1k
214
69.1k
    CharUnits EmptySize;
215
69.1k
    const CXXRecordDecl *MemberDecl = RT->getAsCXXRecordDecl();
216
69.1k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(MemberDecl);
217
69.1k
    if (MemberDecl->isEmpty()) {
218
653
      // If the class decl is empty, get its size.
219
653
      EmptySize = Layout.getSize();
220
68.5k
    } else {
221
68.5k
      // Otherwise, we get the largest empty subobject for the decl.
222
68.5k
      EmptySize = Layout.getSizeOfLargestEmptySubobject();
223
68.5k
    }
224
69.1k
225
69.1k
    if (EmptySize > SizeOfLargestEmptySubobject)
226
3.71k
      SizeOfLargestEmptySubobject = EmptySize;
227
69.1k
  }
228
221k
}
229
230
bool
231
EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
232
627k
                                             CharUnits Offset) const {
233
627k
  // We only need to check empty bases.
234
627k
  if (!RD->isEmpty())
235
575k
    return true;
236
52.6k
237
52.6k
  EmptyClassOffsetsMapTy::const_iterator I = EmptyClassOffsets.find(Offset);
238
52.6k
  if (I == EmptyClassOffsets.end())
239
50.8k
    return true;
240
1.80k
241
1.80k
  const ClassVectorTy &Classes = I->second;
242
1.80k
  if (llvm::find(Classes, RD) == Classes.end())
243
1.65k
    return true;
244
148
245
148
  // There is already an empty class of the same type at this offset.
246
148
  return false;
247
148
}
248
249
void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD,
250
603k
                                             CharUnits Offset) {
251
603k
  // We only care about empty bases.
252
603k
  if (!RD->isEmpty())
253
550k
    return;
254
52.6k
255
52.6k
  // If we have empty structures inside a union, we can assign both
256
52.6k
  // the same offset. Just avoid pushing them twice in the list.
257
52.6k
  ClassVectorTy &Classes = EmptyClassOffsets[Offset];
258
52.6k
  if (llvm::is_contained(Classes, RD))
259
1
    return;
260
52.6k
261
52.6k
  Classes.push_back(RD);
262
52.6k
263
52.6k
  // Update the empty class offset.
264
52.6k
  if (Offset > MaxEmptyClassOffset)
265
158
    MaxEmptyClassOffset = Offset;
266
52.6k
}
267
268
bool
269
EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
270
576k
                                                 CharUnits Offset) {
271
576k
  // We don't have to keep looking past the maximum offset that's known to
272
576k
  // contain an empty class.
273
576k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
274
623
    return true;
275
576k
276
576k
  if (!CanPlaceSubobjectAtOffset(Info->Class, Offset))
277
102
    return false;
278
576k
279
576k
  // Traverse all non-virtual bases.
280
576k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
281
576k
  for (const BaseSubobjectInfo *Base : Info->Bases) {
282
534k
    if (Base->IsVirtual)
283
138
      continue;
284
534k
285
534k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
286
534k
287
534k
    if (!CanPlaceBaseSubobjectAtOffset(Base, BaseOffset))
288
77
      return false;
289
534k
  }
290
576k
291
576k
  
if (576k
Info->PrimaryVirtualBaseInfo576k
) {
292
24
    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
293
24
294
24
    if (Info == PrimaryVirtualBaseInfo->Derived) {
295
24
      if (!CanPlaceBaseSubobjectAtOffset(PrimaryVirtualBaseInfo, Offset))
296
5
        return false;
297
576k
    }
298
24
  }
299
576k
300
576k
  // Traverse all member variables.
301
576k
  unsigned FieldNo = 0;
302
576k
  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
303
584k
       E = Info->Class->field_end(); I != E; 
++I, ++FieldNo8.55k
) {
304
8.55k
    if (I->isBitField())
305
4
      continue;
306
8.55k
307
8.55k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
308
8.55k
    if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
309
2
      return false;
310
8.55k
  }
311
576k
312
576k
  
return true576k
;
313
576k
}
314
315
void EmptySubobjectMap::UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
316
                                                  CharUnits Offset,
317
576k
                                                  bool PlacingEmptyBase) {
318
576k
  if (!PlacingEmptyBase && 
Offset >= SizeOfLargestEmptySubobject532k
) {
319
556
    // We know that the only empty subobjects that can conflict with empty
320
556
    // subobject of non-empty bases, are empty bases that can be placed at
321
556
    // offset zero. Because of this, we only need to keep track of empty base
322
556
    // subobjects with offsets less than the size of the largest empty
323
556
    // subobject for our class.
324
556
    return;
325
556
  }
326
576k
327
576k
  AddSubobjectAtOffset(Info->Class, Offset);
328
576k
329
576k
  // Traverse all non-virtual bases.
330
576k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
331
576k
  for (const BaseSubobjectInfo *Base : Info->Bases) {
332
534k
    if (Base->IsVirtual)
333
133
      continue;
334
534k
335
534k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
336
534k
    UpdateEmptyBaseSubobjects(Base, BaseOffset, PlacingEmptyBase);
337
534k
  }
338
576k
339
576k
  if (Info->PrimaryVirtualBaseInfo) {
340
19
    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
341
19
342
19
    if (Info == PrimaryVirtualBaseInfo->Derived)
343
19
      UpdateEmptyBaseSubobjects(PrimaryVirtualBaseInfo, Offset,
344
19
                                PlacingEmptyBase);
345
19
  }
346
576k
347
576k
  // Traverse all member variables.
348
576k
  unsigned FieldNo = 0;
349
576k
  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
350
584k
       E = Info->Class->field_end(); I != E; 
++I, ++FieldNo8.55k
) {
351
8.55k
    if (I->isBitField())
352
4
      continue;
353
8.54k
354
8.54k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
355
8.54k
    UpdateEmptyFieldSubobjects(*I, FieldOffset, PlacingEmptyBase);
356
8.54k
  }
357
576k
}
358
359
bool EmptySubobjectMap::CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
360
54.5k
                                             CharUnits Offset) {
361
54.5k
  // If we know this class doesn't have any empty subobjects we don't need to
362
54.5k
  // bother checking.
363
54.5k
  if (SizeOfLargestEmptySubobject.isZero())
364
11.7k
    return true;
365
42.7k
366
42.7k
  if (!CanPlaceBaseSubobjectAtOffset(Info, Offset))
367
104
    return false;
368
42.6k
369
42.6k
  // We are able to place the base at this offset. Make sure to update the
370
42.6k
  // empty base subobject map.
371
42.6k
  UpdateEmptyBaseSubobjects(Info, Offset, Info->Class->isEmpty());
372
42.6k
  return true;
373
42.6k
}
374
375
bool
376
EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
377
                                                  const CXXRecordDecl *Class,
378
52.2k
                                                  CharUnits Offset) const {
379
52.2k
  // We don't have to keep looking past the maximum offset that's known to
380
52.2k
  // contain an empty class.
381
52.2k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
382
808
    return true;
383
51.4k
384
51.4k
  if (!CanPlaceSubobjectAtOffset(RD, Offset))
385
46
    return false;
386
51.3k
387
51.3k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
388
51.3k
389
51.3k
  // Traverse all non-virtual bases.
390
51.3k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
391
13.1k
    if (Base.isVirtual())
392
28
      continue;
393
13.1k
394
13.1k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
395
13.1k
396
13.1k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
397
13.1k
    if (!CanPlaceFieldSubobjectAtOffset(BaseDecl, Class, BaseOffset))
398
30
      return false;
399
13.1k
  }
400
51.3k
401
51.3k
  
if (51.3k
RD == Class51.3k
) {
402
38.9k
    // This is the most derived class, traverse virtual bases as well.
403
38.9k
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
404
28
      const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
405
28
406
28
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
407
28
      if (!CanPlaceFieldSubobjectAtOffset(VBaseDecl, Class, VBaseOffset))
408
1
        return false;
409
28
    }
410
38.9k
  }
411
51.3k
412
51.3k
  // Traverse all member variables.
413
51.3k
  unsigned FieldNo = 0;
414
51.3k
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
415
167k
       I != E; 
++I, ++FieldNo116k
) {
416
116k
    if (I->isBitField())
417
190
      continue;
418
116k
419
116k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
420
116k
421
116k
    if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
422
10
      return false;
423
116k
  }
424
51.3k
425
51.3k
  
return true51.3k
;
426
51.3k
}
427
428
bool
429
EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
430
445k
                                                  CharUnits Offset) const {
431
445k
  // We don't have to keep looking past the maximum offset that's known to
432
445k
  // contain an empty class.
433
445k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
434
296k
    return true;
435
149k
436
149k
  QualType T = FD->getType();
437
149k
  if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
438
38.8k
    return CanPlaceFieldSubobjectAtOffset(RD, RD, Offset);
439
110k
440
110k
  // If we have an array type we need to look at every element.
441
110k
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
442
9.38k
    QualType ElemTy = Context.getBaseElementType(AT);
443
9.38k
    const RecordType *RT = ElemTy->getAs<RecordType>();
444
9.38k
    if (!RT)
445
8.17k
      return true;
446
1.21k
447
1.21k
    const CXXRecordDecl *RD = RT->getAsCXXRecordDecl();
448
1.21k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
449
1.21k
450
1.21k
    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
451
1.21k
    CharUnits ElementOffset = Offset;
452
1.39k
    for (uint64_t I = 0; I != NumElements; 
++I185
) {
453
364
      // We don't have to keep looking past the maximum offset that's known to
454
364
      // contain an empty class.
455
364
      if (!AnyEmptySubobjectsBeyondOffset(ElementOffset))
456
175
        return true;
457
189
458
189
      if (!CanPlaceFieldSubobjectAtOffset(RD, RD, ElementOffset))
459
4
        return false;
460
185
461
185
      ElementOffset += Layout.getSize();
462
185
    }
463
1.21k
  }
464
110k
465
110k
  
return true102k
;
466
110k
}
467
468
bool
469
EmptySubobjectMap::CanPlaceFieldAtOffset(const FieldDecl *FD,
470
320k
                                         CharUnits Offset) {
471
320k
  if (!CanPlaceFieldSubobjectAtOffset(FD, Offset))
472
44
    return false;
473
320k
474
320k
  // We are able to place the member variable at this offset.
475
320k
  // Make sure to update the empty field subobject map.
476
320k
  UpdateEmptyFieldSubobjects(FD, Offset, FD->hasAttr<NoUniqueAddressAttr>());
477
320k
  return true;
478
320k
}
479
480
void EmptySubobjectMap::UpdateEmptyFieldSubobjects(
481
    const CXXRecordDecl *RD, const CXXRecordDecl *Class, CharUnits Offset,
482
79.2k
    bool PlacingOverlappingField) {
483
79.2k
  // We know that the only empty subobjects that can conflict with empty
484
79.2k
  // field subobjects are subobjects of empty bases and potentially-overlapping
485
79.2k
  // fields that can be placed at offset zero. Because of this, we only need to
486
79.2k
  // keep track of empty field subobjects with offsets less than the size of
487
79.2k
  // the largest empty subobject for our class.
488
79.2k
  //
489
79.2k
  // (Proof: we will only consider placing a subobject at offset zero or at
490
79.2k
  // >= the current dsize. The only cases where the earlier subobject can be
491
79.2k
  // placed beyond the end of dsize is if it's an empty base or a
492
79.2k
  // potentially-overlapping field.)
493
79.2k
  if (!PlacingOverlappingField && 
Offset >= SizeOfLargestEmptySubobject79.1k
)
494
52.1k
    return;
495
27.1k
496
27.1k
  AddSubobjectAtOffset(RD, Offset);
497
27.1k
498
27.1k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
499
27.1k
500
27.1k
  // Traverse all non-virtual bases.
501
27.1k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
502
11.5k
    if (Base.isVirtual())
503
26
      continue;
504
11.5k
505
11.5k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
506
11.5k
507
11.5k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
508
11.5k
    UpdateEmptyFieldSubobjects(BaseDecl, Class, BaseOffset,
509
11.5k
                               PlacingOverlappingField);
510
11.5k
  }
511
27.1k
512
27.1k
  if (RD == Class) {
513
15.9k
    // This is the most derived class, traverse virtual bases as well.
514
15.9k
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
515
26
      const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
516
26
517
26
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
518
26
      UpdateEmptyFieldSubobjects(VBaseDecl, Class, VBaseOffset,
519
26
                                 PlacingOverlappingField);
520
26
    }
521
15.9k
  }
522
27.1k
523
27.1k
  // Traverse all member variables.
524
27.1k
  unsigned FieldNo = 0;
525
27.1k
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
526
54.2k
       I != E; 
++I, ++FieldNo27.0k
) {
527
27.0k
    if (I->isBitField())
528
2
      continue;
529
27.0k
530
27.0k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
531
27.0k
532
27.0k
    UpdateEmptyFieldSubobjects(*I, FieldOffset, PlacingOverlappingField);
533
27.0k
  }
534
27.1k
}
535
536
void EmptySubobjectMap::UpdateEmptyFieldSubobjects(
537
356k
    const FieldDecl *FD, CharUnits Offset, bool PlacingOverlappingField) {
538
356k
  QualType T = FD->getType();
539
356k
  if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl()) {
540
67.6k
    UpdateEmptyFieldSubobjects(RD, RD, Offset, PlacingOverlappingField);
541
67.6k
    return;
542
67.6k
  }
543
288k
544
288k
  // If we have an array type we need to update every element.
545
288k
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
546
23.0k
    QualType ElemTy = Context.getBaseElementType(AT);
547
23.0k
    const RecordType *RT = ElemTy->getAs<RecordType>();
548
23.0k
    if (!RT)
549
20.9k
      return;
550
2.15k
551
2.15k
    const CXXRecordDecl *RD = RT->getAsCXXRecordDecl();
552
2.15k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
553
2.15k
554
2.15k
    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
555
2.15k
    CharUnits ElementOffset = Offset;
556
2.15k
557
2.17k
    for (uint64_t I = 0; I != NumElements; 
++I19
) {
558
1.13k
      // We know that the only empty subobjects that can conflict with empty
559
1.13k
      // field subobjects are subobjects of empty bases that can be placed at
560
1.13k
      // offset zero. Because of this, we only need to keep track of empty field
561
1.13k
      // subobjects with offsets less than the size of the largest empty
562
1.13k
      // subobject for our class.
563
1.13k
      if (!PlacingOverlappingField &&
564
1.13k
          ElementOffset >= SizeOfLargestEmptySubobject)
565
1.11k
        return;
566
19
567
19
      UpdateEmptyFieldSubobjects(RD, RD, ElementOffset,
568
19
                                 PlacingOverlappingField);
569
19
      ElementOffset += Layout.getSize();
570
19
    }
571
2.15k
  }
572
288k
}
573
574
typedef llvm::SmallPtrSet<const CXXRecordDecl*, 4> ClassSetTy;
575
576
class ItaniumRecordLayoutBuilder {
577
protected:
578
  // FIXME: Remove this and make the appropriate fields public.
579
  friend class clang::ASTContext;
580
581
  const ASTContext &Context;
582
583
  EmptySubobjectMap *EmptySubobjects;
584
585
  /// Size - The current size of the record layout.
586
  uint64_t Size;
587
588
  /// Alignment - The current alignment of the record layout.
589
  CharUnits Alignment;
590
591
  /// The alignment if attribute packed is not used.
592
  CharUnits UnpackedAlignment;
593
594
  /// \brief The maximum of the alignments of top-level members.
595
  CharUnits UnadjustedAlignment;
596
597
  SmallVector<uint64_t, 16> FieldOffsets;
598
599
  /// Whether the external AST source has provided a layout for this
600
  /// record.
601
  unsigned UseExternalLayout : 1;
602
603
  /// Whether we need to infer alignment, even when we have an
604
  /// externally-provided layout.
605
  unsigned InferAlignment : 1;
606
607
  /// Packed - Whether the record is packed or not.
608
  unsigned Packed : 1;
609
610
  unsigned IsUnion : 1;
611
612
  unsigned IsMac68kAlign : 1;
613
614
  unsigned IsMsStruct : 1;
615
616
  /// UnfilledBitsInLastUnit - If the last field laid out was a bitfield,
617
  /// this contains the number of bits in the last unit that can be used for
618
  /// an adjacent bitfield if necessary.  The unit in question is usually
619
  /// a byte, but larger units are used if IsMsStruct.
620
  unsigned char UnfilledBitsInLastUnit;
621
  /// LastBitfieldTypeSize - If IsMsStruct, represents the size of the type
622
  /// of the previous field if it was a bitfield.
623
  unsigned char LastBitfieldTypeSize;
624
625
  /// MaxFieldAlignment - The maximum allowed field alignment. This is set by
626
  /// #pragma pack.
627
  CharUnits MaxFieldAlignment;
628
629
  /// DataSize - The data size of the record being laid out.
630
  uint64_t DataSize;
631
632
  CharUnits NonVirtualSize;
633
  CharUnits NonVirtualAlignment;
634
635
  /// If we've laid out a field but not included its tail padding in Size yet,
636
  /// this is the size up to the end of that field.
637
  CharUnits PaddedFieldSize;
638
639
  /// PrimaryBase - the primary base class (if one exists) of the class
640
  /// we're laying out.
641
  const CXXRecordDecl *PrimaryBase;
642
643
  /// PrimaryBaseIsVirtual - Whether the primary base of the class we're laying
644
  /// out is virtual.
645
  bool PrimaryBaseIsVirtual;
646
647
  /// HasOwnVFPtr - Whether the class provides its own vtable/vftbl
648
  /// pointer, as opposed to inheriting one from a primary base class.
649
  bool HasOwnVFPtr;
650
651
  /// the flag of field offset changing due to packed attribute.
652
  bool HasPackedField;
653
654
  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
655
656
  /// Bases - base classes and their offsets in the record.
657
  BaseOffsetsMapTy Bases;
658
659
  // VBases - virtual base classes and their offsets in the record.
660
  ASTRecordLayout::VBaseOffsetsMapTy VBases;
661
662
  /// IndirectPrimaryBases - Virtual base classes, direct or indirect, that are
663
  /// primary base classes for some other direct or indirect base class.
664
  CXXIndirectPrimaryBaseSet IndirectPrimaryBases;
665
666
  /// FirstNearlyEmptyVBase - The first nearly empty virtual base class in
667
  /// inheritance graph order. Used for determining the primary base class.
668
  const CXXRecordDecl *FirstNearlyEmptyVBase;
669
670
  /// VisitedVirtualBases - A set of all the visited virtual bases, used to
671
  /// avoid visiting virtual bases more than once.
672
  llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBases;
673
674
  /// Valid if UseExternalLayout is true.
675
  ExternalLayout External;
676
677
  ItaniumRecordLayoutBuilder(const ASTContext &Context,
678
                             EmptySubobjectMap *EmptySubobjects)
679
      : Context(Context), EmptySubobjects(EmptySubobjects), Size(0),
680
        Alignment(CharUnits::One()), UnpackedAlignment(CharUnits::One()),
681
        UnadjustedAlignment(CharUnits::One()),
682
        UseExternalLayout(false), InferAlignment(false), Packed(false),
683
        IsUnion(false), IsMac68kAlign(false), IsMsStruct(false),
684
        UnfilledBitsInLastUnit(0), LastBitfieldTypeSize(0),
685
        MaxFieldAlignment(CharUnits::Zero()), DataSize(0),
686
        NonVirtualSize(CharUnits::Zero()),
687
        NonVirtualAlignment(CharUnits::One()),
688
        PaddedFieldSize(CharUnits::Zero()), PrimaryBase(nullptr),
689
        PrimaryBaseIsVirtual(false), HasOwnVFPtr(false),
690
333k
        HasPackedField(false), FirstNearlyEmptyVBase(nullptr) {}
691
692
  void Layout(const RecordDecl *D);
693
  void Layout(const CXXRecordDecl *D);
694
  void Layout(const ObjCInterfaceDecl *D);
695
696
  void LayoutFields(const RecordDecl *D);
697
  void LayoutField(const FieldDecl *D, bool InsertExtraPadding);
698
  void LayoutWideBitField(uint64_t FieldSize, uint64_t TypeSize,
699
                          bool FieldPacked, const FieldDecl *D);
700
  void LayoutBitField(const FieldDecl *D);
701
702
0
  TargetCXXABI getCXXABI() const {
703
0
    return Context.getTargetInfo().getCXXABI();
704
0
  }
705
706
  /// BaseSubobjectInfoAllocator - Allocator for BaseSubobjectInfo objects.
707
  llvm::SpecificBumpPtrAllocator<BaseSubobjectInfo> BaseSubobjectInfoAllocator;
708
709
  typedef llvm::DenseMap<const CXXRecordDecl *, BaseSubobjectInfo *>
710
    BaseSubobjectInfoMapTy;
711
712
  /// VirtualBaseInfo - Map from all the (direct or indirect) virtual bases
713
  /// of the class we're laying out to their base subobject info.
714
  BaseSubobjectInfoMapTy VirtualBaseInfo;
715
716
  /// NonVirtualBaseInfo - Map from all the direct non-virtual bases of the
717
  /// class we're laying out to their base subobject info.
718
  BaseSubobjectInfoMapTy NonVirtualBaseInfo;
719
720
  /// ComputeBaseSubobjectInfo - Compute the base subobject information for the
721
  /// bases of the given class.
722
  void ComputeBaseSubobjectInfo(const CXXRecordDecl *RD);
723
724
  /// ComputeBaseSubobjectInfo - Compute the base subobject information for a
725
  /// single class and all of its base classes.
726
  BaseSubobjectInfo *ComputeBaseSubobjectInfo(const CXXRecordDecl *RD,
727
                                              bool IsVirtual,
728
                                              BaseSubobjectInfo *Derived);
729
730
  /// DeterminePrimaryBase - Determine the primary base of the given class.
731
  void DeterminePrimaryBase(const CXXRecordDecl *RD);
732
733
  void SelectPrimaryVBase(const CXXRecordDecl *RD);
734
735
  void EnsureVTablePointerAlignment(CharUnits UnpackedBaseAlign);
736
737
  /// LayoutNonVirtualBases - Determines the primary base class (if any) and
738
///   lays it out. Will then proceed to lay out all non-virtual base classes.
739
  void LayoutNonVirtualBases(const CXXRecordDecl *RD);
740
741
  /// LayoutNonVirtualBase - Lays out a single non-virtual base.
742
  void LayoutNonVirtualBase(const BaseSubobjectInfo *Base);
743
744
  void AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
745
                                    CharUnits Offset);
746
747
  /// LayoutVirtualBases - Lays out all the virtual bases.
748
  void LayoutVirtualBases(const CXXRecordDecl *RD,
749
                          const CXXRecordDecl *MostDerivedClass);
750
751
  /// LayoutVirtualBase - Lays out a single virtual base.
752
  void LayoutVirtualBase(const BaseSubobjectInfo *Base);
753
754
  /// LayoutBase - Will lay out a base and return the offset where it was
755
  /// placed, in chars.
756
  CharUnits LayoutBase(const BaseSubobjectInfo *Base);
757
758
  /// InitializeLayout - Initialize record layout for the given record decl.
759
  void InitializeLayout(const Decl *D);
760
761
  /// FinishLayout - Finalize record layout. Adjust record size based on the
762
  /// alignment.
763
  void FinishLayout(const NamedDecl *D);
764
765
  void UpdateAlignment(CharUnits NewAlignment, CharUnits UnpackedNewAlignment);
766
8.05k
  void UpdateAlignment(CharUnits NewAlignment) {
767
8.05k
    UpdateAlignment(NewAlignment, NewAlignment);
768
8.05k
  }
769
770
  /// Retrieve the externally-supplied field offset for the given
771
  /// field.
772
  ///
773
  /// \param Field The field whose offset is being queried.
774
  /// \param ComputedOffset The offset that we've computed for this field.
775
  uint64_t updateExternalFieldOffset(const FieldDecl *Field,
776
                                     uint64_t ComputedOffset);
777
778
  void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset,
779
                          uint64_t UnpackedOffset, unsigned UnpackedAlign,
780
                          bool isPacked, const FieldDecl *D);
781
782
  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID);
783
784
636k
  CharUnits getSize() const {
785
636k
    assert(Size % Context.getCharWidth() == 0);
786
636k
    return Context.toCharUnitsFromBits(Size);
787
636k
  }
788
2.92M
  uint64_t getSizeInBits() const { return Size; }
789
790
131k
  void setSize(CharUnits NewSize) { Size = Context.toBits(NewSize); }
791
1.46M
  void setSize(uint64_t NewSize) { Size = NewSize; }
792
793
0
  CharUnits getAligment() const { return Alignment; }
794
795
860k
  CharUnits getDataSize() const {
796
860k
    assert(DataSize % Context.getCharWidth() == 0);
797
860k
    return Context.toCharUnitsFromBits(DataSize);
798
860k
  }
799
1.65M
  uint64_t getDataSizeInBits() const { return DataSize; }
800
801
749k
  void setDataSize(CharUnits NewSize) { DataSize = Context.toBits(NewSize); }
802
70.4k
  void setDataSize(uint64_t NewSize) { DataSize = NewSize; }
803
804
  ItaniumRecordLayoutBuilder(const ItaniumRecordLayoutBuilder &) = delete;
805
  void operator=(const ItaniumRecordLayoutBuilder &) = delete;
806
};
807
} // end anonymous namespace
808
809
1.38k
void ItaniumRecordLayoutBuilder::SelectPrimaryVBase(const CXXRecordDecl *RD) {
810
1.38k
  for (const auto &I : RD->bases()) {
811
960
    assert(!I.getType()->isDependentType() &&
812
960
           "Cannot layout class with dependent bases.");
813
960
814
960
    const CXXRecordDecl *Base = I.getType()->getAsCXXRecordDecl();
815
960
816
960
    // Check if this is a nearly empty virtual base.
817
960
    if (I.isVirtual() && 
Context.isNearlyEmpty(Base)659
) {
818
197
      // If it's not an indirect primary base, then we've found our primary
819
197
      // base.
820
197
      if (!IndirectPrimaryBases.count(Base)) {
821
194
        PrimaryBase = Base;
822
194
        PrimaryBaseIsVirtual = true;
823
194
        return;
824
194
      }
825
3
826
3
      // Is this the first nearly empty virtual base?
827
3
      if (!FirstNearlyEmptyVBase)
828
3
        FirstNearlyEmptyVBase = Base;
829
3
    }
830
960
831
960
    SelectPrimaryVBase(Base);
832
766
    if (PrimaryBase)
833
8
      return;
834
766
  }
835
1.38k
}
836
837
/// DeterminePrimaryBase - Determine the primary base of the given class.
/// Preference order: first non-virtual dynamic direct base, then the first
/// eligible nearly-empty virtual base (via SelectPrimaryVBase), then the
/// FirstNearlyEmptyVBase fallback recorded during that search.
void ItaniumRecordLayoutBuilder::DeterminePrimaryBase(const CXXRecordDecl *RD) {
  // If the class isn't dynamic, it won't have a primary base.
  if (!RD->isDynamicClass())
    return;

  // Compute all the primary virtual bases for all of our direct and
  // indirect bases, and record all their primary virtual base classes.
  RD->getIndirectPrimaryBases(IndirectPrimaryBases);

  // If the record has a dynamic base class, attempt to choose a primary base
  // class. It is the first (in direct base class order) non-virtual dynamic
  // base class, if one exists.
  for (const auto &I : RD->bases()) {
    // Ignore virtual bases.
    if (I.isVirtual())
      continue;

    const CXXRecordDecl *Base = I.getType()->getAsCXXRecordDecl();

    if (Base->isDynamicClass()) {
      // We found it.
      PrimaryBase = Base;
      PrimaryBaseIsVirtual = false;
      return;
    }
  }

  // Under the Itanium ABI, if there is no non-virtual primary base class,
  // try to compute the primary virtual base.  The primary virtual base is
  // the first nearly empty virtual base that is not an indirect primary
  // virtual base class, if one exists.
  if (RD->getNumVBases() != 0) {
    SelectPrimaryVBase(RD);
    if (PrimaryBase)
      return;
  }

  // Otherwise, it is the first indirect primary base class, if one exists.
  // (FirstNearlyEmptyVBase was recorded by SelectPrimaryVBase above.)
  if (FirstNearlyEmptyVBase) {
    PrimaryBase = FirstNearlyEmptyVBase;
    PrimaryBaseIsVirtual = true;
    return;
  }

  assert(!PrimaryBase && "Should not get here with a primary base!");
}
884
885
/// Build the BaseSubobjectInfo node for base class \p RD (a base of
/// \p Derived, or a direct base when Derived is null). Virtual bases are
/// created at most once and cached in VirtualBaseInfo; non-virtual bases get
/// a fresh node each time. Also links this node to its primary virtual base,
/// claiming it unless another base already did.
BaseSubobjectInfo *ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo(
    const CXXRecordDecl *RD, bool IsVirtual, BaseSubobjectInfo *Derived) {
  BaseSubobjectInfo *Info;

  if (IsVirtual) {
    // Check if we already have info about this virtual base.
    BaseSubobjectInfo *&InfoSlot = VirtualBaseInfo[RD];
    if (InfoSlot) {
      assert(InfoSlot->Class == RD && "Wrong class for virtual base info!");
      return InfoSlot;
    }

    // We don't, create it.
    InfoSlot = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
    Info = InfoSlot;
  } else {
    Info = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
  }

  Info->Class = RD;
  Info->IsVirtual = IsVirtual;
  Info->Derived = nullptr;
  Info->PrimaryVirtualBaseInfo = nullptr;

  const CXXRecordDecl *PrimaryVirtualBase = nullptr;
  BaseSubobjectInfo *PrimaryVirtualBaseInfo = nullptr;

  // Check if this base has a primary virtual base.
  if (RD->getNumVBases()) {
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
    if (Layout.isPrimaryBaseVirtual()) {
      // This base does have a primary virtual base.
      PrimaryVirtualBase = Layout.getPrimaryBase();
      assert(PrimaryVirtualBase && "Didn't have a primary virtual base!");

      // Now check if we have base subobject info about this primary base.
      PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);

      if (PrimaryVirtualBaseInfo) {
        if (PrimaryVirtualBaseInfo->Derived) {
          // We did have info about this primary base, and it turns out that it
          // has already been claimed as a primary virtual base for another
          // base.
          PrimaryVirtualBase = nullptr;
        } else {
          // We can claim this base as our primary base.
          Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
          PrimaryVirtualBaseInfo->Derived = Info;
        }
      }
    }
  }

  // Now go through all direct bases.
  for (const auto &I : RD->bases()) {
    bool IsVirtual = I.isVirtual();

    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();

    Info->Bases.push_back(ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, Info));
  }

  // If the primary virtual base had no info before the recursion above, the
  // recursion must have created it; claim it now.
  if (PrimaryVirtualBase && !PrimaryVirtualBaseInfo) {
    // Traversing the bases must have created the base info for our primary
    // virtual base.
    PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
    assert(PrimaryVirtualBaseInfo &&
           "Did not create a primary virtual base!");

    // Claim the primary virtual base as our primary virtual base.
    Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
    PrimaryVirtualBaseInfo->Derived = Info;
  }

  return Info;
}
961
962
void ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo(
963
221k
    const CXXRecordDecl *RD) {
964
221k
  for (const auto &I : RD->bases()) {
965
54.1k
    bool IsVirtual = I.isVirtual();
966
54.1k
967
54.1k
    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
968
54.1k
969
54.1k
    // Compute the base subobject info for this base.
970
54.1k
    BaseSubobjectInfo *Info = ComputeBaseSubobjectInfo(BaseDecl, IsVirtual,
971
54.1k
                                                       nullptr);
972
54.1k
973
54.1k
    if (IsVirtual) {
974
726
      // ComputeBaseInfo has already added this base for us.
975
726
      assert(VirtualBaseInfo.count(BaseDecl) &&
976
726
             "Did not add virtual base!");
977
53.4k
    } else {
978
53.4k
      // Add the base info to the map of non-virtual bases.
979
53.4k
      assert(!NonVirtualBaseInfo.count(BaseDecl) &&
980
53.4k
             "Non-virtual base already exists!");
981
53.4k
      NonVirtualBaseInfo.insert(std::make_pair(BaseDecl, Info));
982
53.4k
    }
983
54.1k
  }
984
221k
}
985
986
void ItaniumRecordLayoutBuilder::EnsureVTablePointerAlignment(
987
3.32k
    CharUnits UnpackedBaseAlign) {
988
3.32k
  CharUnits BaseAlign = Packed ? 
CharUnits::One()2
:
UnpackedBaseAlign3.32k
;
989
3.32k
990
3.32k
  // The maximum field alignment overrides base align.
991
3.32k
  if (!MaxFieldAlignment.isZero()) {
992
3
    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
993
3
    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
994
3
  }
995
3.32k
996
3.32k
  // Round up the current record size to pointer alignment.
997
3.32k
  setSize(getSize().alignTo(BaseAlign));
998
3.32k
999
3.32k
  // Update the alignment.
1000
3.32k
  UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1001
3.32k
}
1002
1003
/// Lay out the primary base (or the vtable pointer if no primary base
/// provides one) followed by all remaining direct non-virtual bases of
/// \p RD, in declaration order.
void ItaniumRecordLayoutBuilder::LayoutNonVirtualBases(
    const CXXRecordDecl *RD) {
  // Then, determine the primary base class.
  DeterminePrimaryBase(RD);

  // Compute base subobject info.
  ComputeBaseSubobjectInfo(RD);

  // If we have a primary base class, lay it out.
  if (PrimaryBase) {
    if (PrimaryBaseIsVirtual) {
      // If the primary virtual base was a primary virtual base of some other
      // base class we'll have to steal it.
      BaseSubobjectInfo *PrimaryBaseInfo = VirtualBaseInfo.lookup(PrimaryBase);
      PrimaryBaseInfo->Derived = nullptr;

      // We have a virtual primary base, insert it as an indirect primary base.
      IndirectPrimaryBases.insert(PrimaryBase);

      assert(!VisitedVirtualBases.count(PrimaryBase) &&
             "vbase already visited!");
      VisitedVirtualBases.insert(PrimaryBase);

      LayoutVirtualBase(PrimaryBaseInfo);
    } else {
      BaseSubobjectInfo *PrimaryBaseInfo =
        NonVirtualBaseInfo.lookup(PrimaryBase);
      assert(PrimaryBaseInfo &&
             "Did not find base info for non-virtual primary base!");

      LayoutNonVirtualBase(PrimaryBaseInfo);
    }

  // If this class needs a vtable/vf-table and didn't get one from a
  // primary base, add it in now.
  } else if (RD->isDynamicClass()) {
    assert(DataSize == 0 && "Vtable pointer must be at offset zero!");
    CharUnits PtrWidth =
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
    CharUnits PtrAlign =
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
    EnsureVTablePointerAlignment(PtrAlign);
    HasOwnVFPtr = true;
    setSize(getSize() + PtrWidth);
    setDataSize(getSize());
  }

  // Now lay out the non-virtual bases.
  for (const auto &I : RD->bases()) {

    // Ignore virtual bases.
    if (I.isVirtual())
      continue;

    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();

    // Skip the primary base, because we've already laid it out.  The
    // !PrimaryBaseIsVirtual check is required because we might have a
    // non-virtual base of the same type as a primary virtual base.
    if (BaseDecl == PrimaryBase && !PrimaryBaseIsVirtual)
      continue;

    // Lay out the base.
    BaseSubobjectInfo *BaseInfo = NonVirtualBaseInfo.lookup(BaseDecl);
    assert(BaseInfo && "Did not find base info for non-virtual base!");

    LayoutNonVirtualBase(BaseInfo);
  }
}
1072
1073
void ItaniumRecordLayoutBuilder::LayoutNonVirtualBase(
1074
53.4k
    const BaseSubobjectInfo *Base) {
1075
53.4k
  // Layout the base.
1076
53.4k
  CharUnits Offset = LayoutBase(Base);
1077
53.4k
1078
53.4k
  // Add its base class offset.
1079
53.4k
  assert(!Bases.count(Base->Class) && "base offset already exists!");
1080
53.4k
  Bases.insert(std::make_pair(Base->Class, Offset));
1081
53.4k
1082
53.4k
  AddPrimaryVirtualBaseOffsets(Base, Offset);
1083
53.4k
}
1084
1085
/// Record VBase offsets for the chain of primary virtual bases reachable
/// from \p Info, which has been placed at \p Offset in the complete object.
void ItaniumRecordLayoutBuilder::AddPrimaryVirtualBaseOffsets(
    const BaseSubobjectInfo *Info, CharUnits Offset) {
  // This base isn't interesting, it has no virtual bases.
  if (!Info->Class->getNumVBases())
    return;

  // First, check if we have a virtual primary base to add offsets for.
  if (Info->PrimaryVirtualBaseInfo) {
    assert(Info->PrimaryVirtualBaseInfo->IsVirtual &&
           "Primary virtual base is not virtual!");
    // Only the base that claimed this primary virtual base records it.
    if (Info->PrimaryVirtualBaseInfo->Derived == Info) {
      // Add the offset.
      assert(!VBases.count(Info->PrimaryVirtualBaseInfo->Class) &&
             "primary vbase offset already exists!");
      VBases.insert(std::make_pair(Info->PrimaryVirtualBaseInfo->Class,
                                   ASTRecordLayout::VBaseInfo(Offset, false)));

      // Traverse the primary virtual base.
      AddPrimaryVirtualBaseOffsets(Info->PrimaryVirtualBaseInfo, Offset);
    }
  }

  // Now go through all direct non-virtual bases.
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
  for (const BaseSubobjectInfo *Base : Info->Bases) {
    if (Base->IsVirtual)
      continue;

    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
    AddPrimaryVirtualBaseOffsets(Base, BaseOffset);
  }
}
1117
1118
/// Lay out all virtual bases of \p RD, recursing through its base classes,
/// skipping any that were already placed as a primary base or already
/// visited. \p MostDerivedClass distinguishes the top-level call (which uses
/// the builder's own primary-base state) from recursive calls (which consult
/// the base's cached layout).
void ItaniumRecordLayoutBuilder::LayoutVirtualBases(
    const CXXRecordDecl *RD, const CXXRecordDecl *MostDerivedClass) {
  const CXXRecordDecl *PrimaryBase;
  bool PrimaryBaseIsVirtual;

  if (MostDerivedClass == RD) {
    PrimaryBase = this->PrimaryBase;
    PrimaryBaseIsVirtual = this->PrimaryBaseIsVirtual;
  } else {
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
    PrimaryBase = Layout.getPrimaryBase();
    PrimaryBaseIsVirtual = Layout.isPrimaryBaseVirtual();
  }

  for (const CXXBaseSpecifier &Base : RD->bases()) {
    assert(!Base.getType()->isDependentType() &&
           "Cannot layout class with dependent bases.");

    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();

    if (Base.isVirtual()) {
      // Skip this virtual base if it is RD's own primary virtual base.
      if (PrimaryBase != BaseDecl || !PrimaryBaseIsVirtual) {
        bool IndirectPrimaryBase = IndirectPrimaryBases.count(BaseDecl);

        // Only lay out the virtual base if it's not an indirect primary base.
        if (!IndirectPrimaryBase) {
          // Only visit virtual bases once.
          if (!VisitedVirtualBases.insert(BaseDecl).second)
            continue;

          const BaseSubobjectInfo *BaseInfo = VirtualBaseInfo.lookup(BaseDecl);
          assert(BaseInfo && "Did not find virtual base info!");
          LayoutVirtualBase(BaseInfo);
        }
      }
    }

    if (!BaseDecl->getNumVBases()) {
      // This base isn't interesting since it doesn't have any virtual bases.
      continue;
    }

    LayoutVirtualBases(BaseDecl, MostDerivedClass);
  }
}
1163
1164
void ItaniumRecordLayoutBuilder::LayoutVirtualBase(
1165
1.03k
    const BaseSubobjectInfo *Base) {
1166
1.03k
  assert(!Base->Derived && "Trying to lay out a primary virtual base!");
1167
1.03k
1168
1.03k
  // Layout the base.
1169
1.03k
  CharUnits Offset = LayoutBase(Base);
1170
1.03k
1171
1.03k
  // Add its base class offset.
1172
1.03k
  assert(!VBases.count(Base->Class) && "vbase offset already exists!");
1173
1.03k
  VBases.insert(std::make_pair(Base->Class,
1174
1.03k
                       ASTRecordLayout::VBaseInfo(Offset, false)));
1175
1.03k
1176
1.03k
  AddPrimaryVirtualBaseOffsets(Base, Offset);
1177
1.03k
}
1178
1179
/// Place one base subobject (virtual or non-virtual) and return its offset.
/// Empty bases are placed at offset 0 when possible; otherwise the base is
/// placed at the first conflict-free, suitably aligned offset at or after
/// the current data size (or at the externally supplied offset).
CharUnits
ItaniumRecordLayoutBuilder::LayoutBase(const BaseSubobjectInfo *Base) {
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Base->Class);

  CharUnits Offset;

  // Query the external layout to see if it provides an offset.
  bool HasExternalLayout = false;
  if (UseExternalLayout) {
    // Fixed: the virtual/non-virtual queries were previously reversed
    // (virtual bases asked for the non-virtual offset and vice versa).
    if (Base->IsVirtual)
      HasExternalLayout = External.getExternalVBaseOffset(Base->Class, Offset);
    else
      HasExternalLayout = External.getExternalNVBaseOffset(Base->Class, Offset);
  }

  // Clang <= 6 incorrectly applied the 'packed' attribute to base classes.
  // Per GCC's documentation, it only applies to non-static data members.
  CharUnits UnpackedBaseAlign = Layout.getNonVirtualAlignment();
  CharUnits BaseAlign =
      (Packed && ((Context.getLangOpts().getClangABICompat() <=
                   LangOptions::ClangABI::Ver6) ||
                  Context.getTargetInfo().getTriple().isPS4()))
          ? CharUnits::One()
          : UnpackedBaseAlign;

  // If we have an empty base class, try to place it at offset 0.
  if (Base->Class->isEmpty() &&
      (!HasExternalLayout || Offset == CharUnits::Zero()) &&
      EmptySubobjects->CanPlaceBaseAtOffset(Base, CharUnits::Zero())) {
    // The empty base contributes no data; only size and alignment.
    setSize(std::max(getSize(), Layout.getSize()));
    UpdateAlignment(BaseAlign, UnpackedBaseAlign);

    return CharUnits::Zero();
  }

  // The maximum field alignment overrides base align.
  if (!MaxFieldAlignment.isZero()) {
    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
  }

  if (!HasExternalLayout) {
    // Round up the current record size to the base's alignment boundary.
    Offset = getDataSize().alignTo(BaseAlign);

    // Try to place the base, bumping past conflicting empty subobjects.
    while (!EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset))
      Offset += BaseAlign;
  } else {
    bool Allowed = EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset);
    (void)Allowed;
    assert(Allowed && "Base subobject externally placed at overlapping offset");

    if (InferAlignment && Offset < getDataSize().alignTo(BaseAlign)) {
      // The externally-supplied base offset is before the base offset we
      // computed. Assume that the structure is packed.
      Alignment = CharUnits::One();
      InferAlignment = false;
    }
  }

  if (!Base->Class->isEmpty()) {
    // Update the data size.
    setDataSize(Offset + Layout.getNonVirtualSize());

    setSize(std::max(getSize(), getDataSize()));
  } else
    // An empty base placed at a nonzero offset still extends the size.
    setSize(std::max(getSize(), Offset + Layout.getSize()));

  // Remember max struct/class alignment.
  UpdateAlignment(BaseAlign, UnpackedBaseAlign);

  return Offset;
}
1255
1256
333k
/// Initialize per-record state (union/ms_struct flags, packing, maximum
/// field alignment, and any externally supplied layout) before any bases or
/// fields are laid out.
void ItaniumRecordLayoutBuilder::InitializeLayout(const Decl *D) {
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
    IsUnion = RD->isUnion();
    IsMsStruct = RD->isMsStruct(Context);
  }

  Packed = D->hasAttr<PackedAttr>();

  // Honor the default struct packing maximum alignment flag.
  if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct) {
    MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
  }

  // mac68k alignment supersedes maximum field alignment and attribute aligned,
  // and forces all structures to have 2-byte alignment. The IBM docs on it
  // allude to additional (more complicated) semantics, especially with regard
  // to bit-fields, but gcc appears not to follow that.
  if (D->hasAttr<AlignMac68kAttr>()) {
    IsMac68kAlign = true;
    MaxFieldAlignment = CharUnits::fromQuantity(2);
    Alignment = CharUnits::fromQuantity(2);
  } else {
    if (const MaxFieldAlignmentAttr *MFAA = D->getAttr<MaxFieldAlignmentAttr>())
      MaxFieldAlignment = Context.toCharUnitsFromBits(MFAA->getAlignment());

    if (unsigned MaxAlign = D->getMaxAlignment())
      UpdateAlignment(Context.toCharUnitsFromBits(MaxAlign));
  }

  // If there is an external AST source, ask it for the various offsets.
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D))
    if (ExternalASTSource *Source = Context.getExternalSource()) {
      UseExternalLayout = Source->layoutRecordType(
          RD, External.Size, External.Align, External.FieldOffsets,
          External.BaseOffsets, External.VirtualBaseOffsets);

      // Update based on external alignment.
      if (UseExternalLayout) {
        if (External.Align > 0) {
          Alignment = Context.toCharUnitsFromBits(External.Align);
        } else {
          // The external source didn't have alignment information; infer it.
          InferAlignment = true;
        }
      }
    }
}
1303
1304
107k
/// Lay out a plain (non-C++-class) record: initialize state, lay out the
/// fields, then apply final size/alignment rounding.
void ItaniumRecordLayoutBuilder::Layout(const RecordDecl *D) {
  InitializeLayout(D);
  LayoutFields(D);

  // Finally, round the size of the total struct up to the alignment of the
  // struct itself.
  FinishLayout(D);
}
1312
1313
221k
/// Lay out a C++ class: vtable pointer and non-virtual bases first, then
/// fields, then virtual bases, then final rounding. Captures the
/// non-virtual size/alignment between the field and virtual-base phases.
void ItaniumRecordLayoutBuilder::Layout(const CXXRecordDecl *RD) {
  InitializeLayout(RD);

  // Lay out the vtable and the non-virtual bases.
  LayoutNonVirtualBases(RD);

  LayoutFields(RD);

  // The non-virtual size is the size before any virtual bases are placed,
  // rounded up to a whole number of chars.
  NonVirtualSize = Context.toCharUnitsFromBits(
      llvm::alignTo(getSizeInBits(), Context.getTargetInfo().getCharAlign()));
  NonVirtualAlignment = Alignment;

  // Lay out the virtual bases and add the primary virtual base offsets.
  LayoutVirtualBases(RD, RD);

  // Finally, round the size of the total struct up to the alignment
  // of the struct itself.
  FinishLayout(RD);

#ifndef NDEBUG
  // Check that we have base offsets for all bases.
  for (const CXXBaseSpecifier &Base : RD->bases()) {
    if (Base.isVirtual())
      continue;

    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();

    assert(Bases.count(BaseDecl) && "Did not find base offset!");
  }

  // And all virtual bases.
  for (const CXXBaseSpecifier &Base : RD->vbases()) {
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();

    assert(VBases.count(BaseDecl) && "Did not find base offset!");
  }
#endif
}
1351
1352
5.24k
/// Lay out an Objective-C interface: inherit size/alignment from the
/// superclass layout (if any), then append each declared ivar in order.
void ItaniumRecordLayoutBuilder::Layout(const ObjCInterfaceDecl *D) {
  if (ObjCInterfaceDecl *SD = D->getSuperClass()) {
    const ASTRecordLayout &SL = Context.getASTObjCInterfaceLayout(SD);

    UpdateAlignment(SL.getAlignment());

    // We start laying out ivars not at the end of the superclass
    // structure, but at the next byte following the last field.
    setDataSize(SL.getDataSize());
    setSize(getDataSize());
  }

  InitializeLayout(D);
  // Layout each ivar sequentially.
  for (const ObjCIvarDecl *IVD = D->all_declared_ivar_begin(); IVD;
       IVD = IVD->getNextIvar())
    LayoutField(IVD, false);

  // Finally, round the size of the total struct up to the alignment of the
  // struct itself.
  FinishLayout(D);
}
1374
1375
328k
void ItaniumRecordLayoutBuilder::LayoutFields(const RecordDecl *D) {
1376
328k
  // Layout each field, for now, just sequentially, respecting alignment.  In
1377
328k
  // the future, this will need to be tweakable by targets.
1378
328k
  bool InsertExtraPadding = D->mayInsertExtraPadding(/*EmitRemark=*/true);
1379
328k
  bool HasFlexibleArrayMember = D->hasFlexibleArrayMember();
1380
1.11M
  for (auto I = D->field_begin(), End = D->field_end(); I != End; 
++I783k
) {
1381
783k
    auto Next(I);
1382
783k
    ++Next;
1383
783k
    LayoutField(*I,
1384
783k
                InsertExtraPadding && 
(42
Next != End42
||
!HasFlexibleArrayMember16
));
1385
783k
  }
1386
328k
}
1387
1388
// Rounds the specified size to have it a multiple of the char size.
1389
static uint64_t
1390
roundUpSizeToCharAlignment(uint64_t Size,
1391
50
                           const ASTContext &Context) {
1392
50
  uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
1393
50
  return llvm::alignTo(Size, CharAlignment);
1394
50
}
1395
1396
/// Lay out a bit-field whose declared width exceeds the size of its type
/// (only possible in C++). Per Itanium ABI 2.4, the field is positioned as
/// if it had the largest unsigned integral type no wider than the field.
/// NOTE(review): the TypeSize parameter is not used in this body — confirm
/// whether callers still need to pass it.
void ItaniumRecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize,
                                                    uint64_t TypeSize,
                                                    bool FieldPacked,
                                                    const FieldDecl *D) {
  assert(Context.getLangOpts().CPlusPlus &&
         "Can only have wide bit-fields in C++!");

  // Itanium C++ ABI 2.4:
  //   If sizeof(T)*8 < n, let T' be the largest integral POD type with
  //   sizeof(T')*8 <= n.

  QualType IntegralPODTypes[] = {
    Context.UnsignedCharTy, Context.UnsignedShortTy, Context.UnsignedIntTy,
    Context.UnsignedLongTy, Context.UnsignedLongLongTy
  };

  // Scan the candidates (ordered by size) and keep the widest one that
  // still fits in FieldSize bits.
  QualType Type;
  for (const QualType &QT : IntegralPODTypes) {
    uint64_t Size = Context.getTypeSize(QT);

    if (Size > FieldSize)
      break;

    Type = QT;
  }
  assert(!Type.isNull() && "Did not find a type!");

  CharUnits TypeAlign = Context.getTypeAlignInChars(Type);

  // We're not going to use any of the unfilled bits in the last byte.
  UnfilledBitsInLastUnit = 0;
  LastBitfieldTypeSize = 0;

  uint64_t FieldOffset;
  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;

  if (IsUnion) {
    // In a union every field starts at offset 0; only the size grows.
    uint64_t RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize,
                                                           Context);
    setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
    FieldOffset = 0;
  } else {
    // The bitfield is allocated starting at the next offset aligned
    // appropriately for T', with length n bits.
    FieldOffset = llvm::alignTo(getDataSizeInBits(), Context.toBits(TypeAlign));

    uint64_t NewSizeInBits = FieldOffset + FieldSize;

    setDataSize(
        llvm::alignTo(NewSizeInBits, Context.getTargetInfo().getCharAlign()));
    UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
  }

  // Place this field at the current location.
  FieldOffsets.push_back(FieldOffset);

  CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, FieldOffset,
                    Context.toBits(TypeAlign), FieldPacked, D);

  // Update the size.
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));

  // Remember max struct/class alignment.
  UpdateAlignment(TypeAlign);
}
1461
1462
18.1k
void ItaniumRecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
1463
18.1k
  bool FieldPacked = Packed || 
D->hasAttr<PackedAttr>()17.6k
;
1464
18.1k
  uint64_t FieldSize = D->getBitWidthValue(Context);
1465
18.1k
  TypeInfo FieldInfo = Context.getTypeInfo(D->getType());
1466
18.1k
  uint64_t TypeSize = FieldInfo.Width;
1467
18.1k
  unsigned FieldAlign = FieldInfo.Align;
1468
18.1k
1469
18.1k
  // UnfilledBitsInLastUnit is the difference between the end of the
1470
18.1k
  // last allocated bitfield (i.e. the first bit offset available for
1471
18.1k
  // bitfields) and the end of the current data size in bits (i.e. the
1472
18.1k
  // first bit offset available for non-bitfields).  The current data
1473
18.1k
  // size in bits is always a multiple of the char size; additionally,
1474
18.1k
  // for ms_struct records it's also a multiple of the
1475
18.1k
  // LastBitfieldTypeSize (if set).
1476
18.1k
1477
18.1k
  // The struct-layout algorithm is dictated by the platform ABI,
1478
18.1k
  // which in principle could use almost any rules it likes.  In
1479
18.1k
  // practice, UNIXy targets tend to inherit the algorithm described
1480
18.1k
  // in the System V generic ABI.  The basic bitfield layout rule in
1481
18.1k
  // System V is to place bitfields at the next available bit offset
1482
18.1k
  // where the entire bitfield would fit in an aligned storage unit of
1483
18.1k
  // the declared type; it's okay if an earlier or later non-bitfield
1484
18.1k
  // is allocated in the same storage unit.  However, some targets
1485
18.1k
  // (those that !useBitFieldTypeAlignment(), e.g. ARM APCS) don't
1486
18.1k
  // require this storage unit to be aligned, and therefore always put
1487
18.1k
  // the bitfield at the next available bit offset.
1488
18.1k
1489
18.1k
  // ms_struct basically requests a complete replacement of the
1490
18.1k
  // platform ABI's struct-layout algorithm, with the high-level goal
1491
18.1k
  // of duplicating MSVC's layout.  For non-bitfields, this follows
1492
18.1k
  // the standard algorithm.  The basic bitfield layout rule is to
1493
18.1k
  // allocate an entire unit of the bitfield's declared type
1494
18.1k
  // (e.g. 'unsigned long'), then parcel it up among successive
1495
18.1k
  // bitfields whose declared types have the same size, making a new
1496
18.1k
  // unit as soon as the last can no longer store the whole value.
1497
18.1k
  // Since it completely replaces the platform ABI's algorithm,
1498
18.1k
  // settings like !useBitFieldTypeAlignment() do not apply.
1499
18.1k
1500
18.1k
  // A zero-width bitfield forces the use of a new storage unit for
1501
18.1k
  // later bitfields.  In general, this occurs by rounding up the
1502
18.1k
  // current size of the struct as if the algorithm were about to
1503
18.1k
  // place a non-bitfield of the field's formal type.  Usually this
1504
18.1k
  // does not change the alignment of the struct itself, but it does
1505
18.1k
  // on some targets (those that useZeroLengthBitfieldAlignment(),
1506
18.1k
  // e.g. ARM).  In ms_struct layout, zero-width bitfields are
1507
18.1k
  // ignored unless they follow a non-zero-width bitfield.
1508
18.1k
1509
18.1k
  // A field alignment restriction (e.g. from #pragma pack) or
1510
18.1k
  // specification (e.g. from __attribute__((aligned))) changes the
1511
18.1k
  // formal alignment of the field.  For System V, this alters the
1512
18.1k
  // required alignment of the notional storage unit that must contain
1513
18.1k
  // the bitfield.  For ms_struct, this only affects the placement of
1514
18.1k
  // new storage units.  In both cases, the effect of #pragma pack is
1515
18.1k
  // ignored on zero-width bitfields.
1516
18.1k
1517
18.1k
  // On System V, a packed field (e.g. from #pragma pack or
1518
18.1k
  // __attribute__((packed))) always uses the next available bit
1519
18.1k
  // offset.
1520
18.1k
1521
18.1k
  // In an ms_struct struct, the alignment of a fundamental type is
1522
18.1k
  // always equal to its size.  This is necessary in order to mimic
1523
18.1k
  // the i386 alignment rules on targets which might not fully align
1524
18.1k
  // all types (e.g. Darwin PPC32, where alignof(long long) == 4).
1525
18.1k
1526
18.1k
  // First, some simple bookkeeping to perform for ms_struct structs.
1527
18.1k
  if (IsMsStruct) {
1528
305
    // The field alignment for integer types is always the size.
1529
305
    FieldAlign = TypeSize;
1530
305
1531
305
    // If the previous field was not a bitfield, or was a bitfield
1532
305
    // with a different storage unit size, or if this field doesn't fit into
1533
305
    // the current storage unit, we're done with that storage unit.
1534
305
    if (LastBitfieldTypeSize != TypeSize ||
1535
305
        
UnfilledBitsInLastUnit < FieldSize55
) {
1536
270
      // Also, ignore zero-length bitfields after non-bitfields.
1537
270
      if (!LastBitfieldTypeSize && 
!FieldSize171
)
1538
93
        FieldAlign = 1;
1539
270
1540
270
      UnfilledBitsInLastUnit = 0;
1541
270
      LastBitfieldTypeSize = 0;
1542
270
    }
1543
305
  }
1544
18.1k
1545
18.1k
  // If the field is wider than its declared type, it follows
1546
18.1k
  // different rules in all cases.
1547
18.1k
  if (FieldSize > TypeSize) {
1548
19
    LayoutWideBitField(FieldSize, TypeSize, FieldPacked, D);
1549
19
    return;
1550
19
  }
1551
18.0k
1552
18.0k
  // Compute the next available bit offset.
1553
18.0k
  uint64_t FieldOffset =
1554
18.0k
    IsUnion ? 
055
:
(getDataSizeInBits() - UnfilledBitsInLastUnit)18.0k
;
1555
18.0k
1556
18.0k
  // Handle targets that don't honor bitfield type alignment.
1557
18.0k
  if (!IsMsStruct && 
!Context.getTargetInfo().useBitFieldTypeAlignment()17.7k
) {
1558
95
    // Some such targets do honor it on zero-width bitfields.
1559
95
    if (FieldSize == 0 &&
1560
95
        
Context.getTargetInfo().useZeroLengthBitfieldAlignment()47
) {
1561
45
      // The alignment to round up to is the max of the field's natural
1562
45
      // alignment and a target-specific fixed value (sometimes zero).
1563
45
      unsigned ZeroLengthBitfieldBoundary =
1564
45
        Context.getTargetInfo().getZeroLengthBitfieldBoundary();
1565
45
      FieldAlign = std::max(FieldAlign, ZeroLengthBitfieldBoundary);
1566
45
1567
45
    // If that doesn't apply, just ignore the field alignment.
1568
50
    } else {
1569
50
      FieldAlign = 1;
1570
50
    }
1571
95
  }
1572
18.0k
1573
18.0k
  // Remember the alignment we would have used if the field were not packed.
1574
18.0k
  unsigned UnpackedFieldAlign = FieldAlign;
1575
18.0k
1576
18.0k
  // Ignore the field alignment if the field is packed unless it has zero-size.
1577
18.0k
  if (!IsMsStruct && 
FieldPacked17.7k
&&
FieldSize != 0505
)
1578
490
    FieldAlign = 1;
1579
18.0k
1580
18.0k
  // But, if there's an 'aligned' attribute on the field, honor that.
1581
18.0k
  unsigned ExplicitFieldAlign = D->getMaxAlignment();
1582
18.0k
  if (ExplicitFieldAlign) {
1583
156
    FieldAlign = std::max(FieldAlign, ExplicitFieldAlign);
1584
156
    UnpackedFieldAlign = std::max(UnpackedFieldAlign, ExplicitFieldAlign);
1585
156
  }
1586
18.0k
1587
18.0k
  // But, if there's a #pragma pack in play, that takes precedent over
1588
18.0k
  // even the 'aligned' attribute, for non-zero-width bitfields.
1589
18.0k
  unsigned MaxFieldAlignmentInBits = Context.toBits(MaxFieldAlignment);
1590
18.0k
  if (!MaxFieldAlignment.isZero() && 
FieldSize2.53k
) {
1591
2.52k
    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignmentInBits);
1592
2.52k
    if (FieldPacked)
1593
34
      FieldAlign = UnpackedFieldAlign;
1594
2.49k
    else
1595
2.49k
      FieldAlign = std::min(FieldAlign, MaxFieldAlignmentInBits);
1596
2.52k
  }
1597
18.0k
1598
18.0k
  // But, ms_struct just ignores all of that in unions, even explicit
1599
18.0k
  // alignment attributes.
1600
18.0k
  if (IsMsStruct && 
IsUnion305
) {
1601
8
    FieldAlign = UnpackedFieldAlign = 1;
1602
8
  }
1603
18.0k
1604
18.0k
  // For purposes of diagnostics, we're going to simultaneously
1605
18.0k
  // compute the field offsets that we would have used if we weren't
1606
18.0k
  // adding any alignment padding or if the field weren't packed.
1607
18.0k
  uint64_t UnpaddedFieldOffset = FieldOffset;
1608
18.0k
  uint64_t UnpackedFieldOffset = FieldOffset;
1609
18.0k
1610
18.0k
  // Check if we need to add padding to fit the bitfield within an
1611
18.0k
  // allocation unit with the right size and alignment.  The rules are
1612
18.0k
  // somewhat different here for ms_struct structs.
1613
18.0k
  if (IsMsStruct) {
1614
305
    // If it's not a zero-width bitfield, and we can fit the bitfield
1615
305
    // into the active storage unit (and we haven't already decided to
1616
305
    // start a new storage unit), just do so, regardless of any other
1617
305
    // other consideration.  Otherwise, round up to the right alignment.
1618
305
    if (FieldSize == 0 || 
FieldSize > UnfilledBitsInLastUnit197
) {
1619
273
      FieldOffset = llvm::alignTo(FieldOffset, FieldAlign);
1620
273
      UnpackedFieldOffset =
1621
273
          llvm::alignTo(UnpackedFieldOffset, UnpackedFieldAlign);
1622
273
      UnfilledBitsInLastUnit = 0;
1623
273
    }
1624
305
1625
17.7k
  } else {
1626
17.7k
    // #pragma pack, with any value, suppresses the insertion of padding.
1627
17.7k
    bool AllowPadding = MaxFieldAlignment.isZero();
1628
17.7k
1629
17.7k
    // Compute the real offset.
1630
17.7k
    if (FieldSize == 0 ||
1631
17.7k
        
(17.4k
AllowPadding17.4k
&&
1632
17.4k
         
(FieldOffset & (FieldAlign-1)) + FieldSize > TypeSize14.9k
)) {
1633
470
      FieldOffset = llvm::alignTo(FieldOffset, FieldAlign);
1634
17.3k
    } else if (ExplicitFieldAlign &&
1635
17.3k
               
(127
MaxFieldAlignmentInBits == 0127
||
1636
127
                
ExplicitFieldAlign <= MaxFieldAlignmentInBits24
) &&
1637
17.3k
               
Context.getTargetInfo().useExplicitBitFieldAlignment()119
) {
1638
99
      // TODO: figure it out what needs to be done on targets that don't honor
1639
99
      // bit-field type alignment like ARM APCS ABI.
1640
99
      FieldOffset = llvm::alignTo(FieldOffset, ExplicitFieldAlign);
1641
99
    }
1642
17.7k
1643
17.7k
    // Repeat the computation for diagnostic purposes.
1644
17.7k
    if (FieldSize == 0 ||
1645
17.7k
        
(17.4k
AllowPadding17.4k
&&
1646
17.4k
         
(UnpackedFieldOffset & (UnpackedFieldAlign-1)) + FieldSize > TypeSize14.9k
))
1647
521
      UnpackedFieldOffset =
1648
521
          llvm::alignTo(UnpackedFieldOffset, UnpackedFieldAlign);
1649
17.2k
    else if (ExplicitFieldAlign &&
1650
17.2k
             
(122
MaxFieldAlignmentInBits == 0122
||
1651
122
              
ExplicitFieldAlign <= MaxFieldAlignmentInBits24
) &&
1652
17.2k
             
Context.getTargetInfo().useExplicitBitFieldAlignment()114
)
1653
95
      UnpackedFieldOffset =
1654
95
          llvm::alignTo(UnpackedFieldOffset, ExplicitFieldAlign);
1655
17.7k
  }
1656
18.0k
1657
18.0k
  // If we're using external layout, give the external layout a chance
1658
18.0k
  // to override this information.
1659
18.0k
  if (UseExternalLayout)
1660
1.62k
    FieldOffset = updateExternalFieldOffset(D, FieldOffset);
1661
18.0k
1662
18.0k
  // Okay, place the bitfield at the calculated offset.
1663
18.0k
  FieldOffsets.push_back(FieldOffset);
1664
18.0k
1665
18.0k
  // Bookkeeping:
1666
18.0k
1667
18.0k
  // Anonymous members don't affect the overall record alignment,
1668
18.0k
  // except on targets where they do.
1669
18.0k
  if (!IsMsStruct &&
1670
18.0k
      
!Context.getTargetInfo().useZeroLengthBitfieldAlignment()17.7k
&&
1671
18.0k
      
!D->getIdentifier()17.1k
)
1672
1.50k
    FieldAlign = UnpackedFieldAlign = 1;
1673
18.0k
1674
18.0k
  // Diagnose differences in layout due to padding or packing.
1675
18.0k
  if (!UseExternalLayout)
1676
16.4k
    CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, UnpackedFieldOffset,
1677
16.4k
                      UnpackedFieldAlign, FieldPacked, D);
1678
18.0k
1679
18.0k
  // Update DataSize to include the last byte containing (part of) the bitfield.
1680
18.0k
1681
18.0k
  // For unions, this is just a max operation, as usual.
1682
18.0k
  if (IsUnion) {
1683
55
    // For ms_struct, allocate the entire storage unit --- unless this
1684
55
    // is a zero-width bitfield, in which case just use a size of 1.
1685
55
    uint64_t RoundedFieldSize;
1686
55
    if (IsMsStruct) {
1687
8
      RoundedFieldSize =
1688
8
        (FieldSize ? 
TypeSize5
:
Context.getTargetInfo().getCharWidth()3
);
1689
8
1690
8
    // Otherwise, allocate just the number of bytes required to store
1691
8
    // the bitfield.
1692
47
    } else {
1693
47
      RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize, Context);
1694
47
    }
1695
55
    setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
1696
55
1697
55
  // For non-zero-width bitfields in ms_struct structs, allocate a new
1698
55
  // storage unit if necessary.
1699
18.0k
  } else if (IsMsStruct && 
FieldSize297
) {
1700
192
    // We should have cleared UnfilledBitsInLastUnit in every case
1701
192
    // where we changed storage units.
1702
192
    if (!UnfilledBitsInLastUnit) {
1703
160
      setDataSize(FieldOffset + TypeSize);
1704
160
      UnfilledBitsInLastUnit = TypeSize;
1705
160
    }
1706
192
    UnfilledBitsInLastUnit -= FieldSize;
1707
192
    LastBitfieldTypeSize = TypeSize;
1708
192
1709
192
  // Otherwise, bump the data size up to include the bitfield,
1710
192
  // including padding up to char alignment, and then remember how
1711
192
  // bits we didn't use.
1712
17.8k
  } else {
1713
17.8k
    uint64_t NewSizeInBits = FieldOffset + FieldSize;
1714
17.8k
    uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
1715
17.8k
    setDataSize(llvm::alignTo(NewSizeInBits, CharAlignment));
1716
17.8k
    UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
1717
17.8k
1718
17.8k
    // The only time we can get here for an ms_struct is if this is a
1719
17.8k
    // zero-width bitfield, which doesn't count as anything for the
1720
17.8k
    // purposes of unfilled bits.
1721
17.8k
    LastBitfieldTypeSize = 0;
1722
17.8k
  }
1723
18.0k
1724
18.0k
  // Update the size.
1725
18.0k
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1726
18.0k
1727
18.0k
  // Remember max struct/class alignment.
1728
18.0k
  UnadjustedAlignment =
1729
18.0k
      std::max(UnadjustedAlignment, Context.toCharUnitsFromBits(FieldAlign));
1730
18.0k
  UpdateAlignment(Context.toCharUnitsFromBits(FieldAlign),
1731
18.0k
                  Context.toCharUnitsFromBits(UnpackedFieldAlign));
1732
18.0k
}
1733
1734
void ItaniumRecordLayoutBuilder::LayoutField(const FieldDecl *D,
1735
793k
                                             bool InsertExtraPadding) {
1736
793k
  if (D->isBitField()) {
1737
18.1k
    LayoutBitField(D);
1738
18.1k
    return;
1739
18.1k
  }
1740
775k
1741
775k
  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
1742
775k
1743
775k
  // Reset the unfilled bits.
1744
775k
  UnfilledBitsInLastUnit = 0;
1745
775k
  LastBitfieldTypeSize = 0;
1746
775k
1747
775k
  auto *FieldClass = D->getType()->getAsCXXRecordDecl();
1748
775k
  bool PotentiallyOverlapping = D->hasAttr<NoUniqueAddressAttr>() && 
FieldClass58
;
1749
775k
  bool IsOverlappingEmptyField = PotentiallyOverlapping && 
FieldClass->isEmpty()58
;
1750
775k
  bool FieldPacked = Packed || 
D->hasAttr<PackedAttr>()750k
;
1751
775k
1752
775k
  CharUnits FieldOffset = (IsUnion || 
IsOverlappingEmptyField722k
)
1753
775k
                              ? 
CharUnits::Zero()52.3k
1754
775k
                              : 
getDataSize()722k
;
1755
775k
  CharUnits FieldSize;
1756
775k
  CharUnits FieldAlign;
1757
775k
  // The amount of this class's dsize occupied by the field.
1758
775k
  // This is equal to FieldSize unless we're permitted to pack
1759
775k
  // into the field's tail padding.
1760
775k
  CharUnits EffectiveFieldSize;
1761
775k
1762
775k
  if (D->getType()->isIncompleteArrayType()) {
1763
130
    // This is a flexible array member; we can't directly
1764
130
    // query getTypeInfo about these, so we figure it out here.
1765
130
    // Flexible array members don't have any size, but they
1766
130
    // have to be aligned appropriately for their element type.
1767
130
    EffectiveFieldSize = FieldSize = CharUnits::Zero();
1768
130
    const ArrayType* ATy = Context.getAsArrayType(D->getType());
1769
130
    FieldAlign = Context.getTypeAlignInChars(ATy->getElementType());
1770
775k
  } else if (const ReferenceType *RT = D->getType()->getAs<ReferenceType>()) {
1771
4.47k
    unsigned AS = Context.getTargetAddressSpace(RT->getPointeeType());
1772
4.47k
    EffectiveFieldSize = FieldSize =
1773
4.47k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(AS));
1774
4.47k
    FieldAlign =
1775
4.47k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(AS));
1776
770k
  } else {
1777
770k
    std::pair<CharUnits, CharUnits> FieldInfo =
1778
770k
      Context.getTypeInfoInChars(D->getType());
1779
770k
    EffectiveFieldSize = FieldSize = FieldInfo.first;
1780
770k
    FieldAlign = FieldInfo.second;
1781
770k
1782
770k
    // A potentially-overlapping field occupies its dsize or nvsize, whichever
1783
770k
    // is larger.
1784
770k
    if (PotentiallyOverlapping) {
1785
58
      const ASTRecordLayout &Layout = Context.getASTRecordLayout(FieldClass);
1786
58
      EffectiveFieldSize =
1787
58
          std::max(Layout.getNonVirtualSize(), Layout.getDataSize());
1788
58
    }
1789
770k
1790
770k
    if (IsMsStruct) {
1791
170
      // If MS bitfield layout is required, figure out what type is being
1792
170
      // laid out and align the field to the width of that type.
1793
170
1794
170
      // Resolve all typedefs down to their base type and round up the field
1795
170
      // alignment if necessary.
1796
170
      QualType T = Context.getBaseElementType(D->getType());
1797
170
      if (const BuiltinType *BTy = T->getAs<BuiltinType>()) {
1798
163
        CharUnits TypeSize = Context.getTypeSizeInChars(BTy);
1799
163
1800
163
        if (!llvm::isPowerOf2_64(TypeSize.getQuantity())) {
1801
4
          assert(
1802
4
              !Context.getTargetInfo().getTriple().isWindowsMSVCEnvironment() &&
1803
4
              "Non PowerOf2 size in MSVC mode");
1804
4
          // Base types with sizes that aren't a power of two don't work
1805
4
          // with the layout rules for MS structs. This isn't an issue in
1806
4
          // MSVC itself since there are no such base data types there.
1807
4
          // On e.g. x86_32 mingw and linux, long double is 12 bytes though.
1808
4
          // Any structs involving that data type obviously can't be ABI
1809
4
          // compatible with MSVC regardless of how it is laid out.
1810
4
1811
4
          // Since ms_struct can be mass enabled (via a pragma or via the
1812
4
          // -mms-bitfields command line parameter), this can trigger for
1813
4
          // structs that don't actually need MSVC compatibility, so we
1814
4
          // need to be able to sidestep the ms_struct layout for these types.
1815
4
1816
4
          // Since the combination of -mms-bitfields together with structs
1817
4
          // like max_align_t (which contains a long double) for mingw is
1818
4
          // quite comon (and GCC handles it silently), just handle it
1819
4
          // silently there. For other targets that have ms_struct enabled
1820
4
          // (most probably via a pragma or attribute), trigger a diagnostic
1821
4
          // that defaults to an error.
1822
4
          if (!Context.getTargetInfo().getTriple().isWindowsGNUEnvironment())
1823
2
            Diag(D->getLocation(), diag::warn_npot_ms_struct);
1824
4
        }
1825
163
        if (TypeSize > FieldAlign &&
1826
163
            
llvm::isPowerOf2_64(TypeSize.getQuantity())12
)
1827
8
          FieldAlign = TypeSize;
1828
163
      }
1829
170
    }
1830
770k
  }
1831
775k
1832
775k
  // The align if the field is not packed. This is to check if the attribute
1833
775k
  // was unnecessary (-Wpacked).
1834
775k
  CharUnits UnpackedFieldAlign = FieldAlign;
1835
775k
  CharUnits UnpackedFieldOffset = FieldOffset;
1836
775k
1837
775k
  if (FieldPacked)
1838
25.6k
    FieldAlign = CharUnits::One();
1839
775k
  CharUnits MaxAlignmentInChars =
1840
775k
    Context.toCharUnitsFromBits(D->getMaxAlignment());
1841
775k
  FieldAlign = std::max(FieldAlign, MaxAlignmentInChars);
1842
775k
  UnpackedFieldAlign = std::max(UnpackedFieldAlign, MaxAlignmentInChars);
1843
775k
1844
775k
  // The maximum field alignment overrides the aligned attribute.
1845
775k
  if (!MaxFieldAlignment.isZero()) {
1846
201k
    FieldAlign = std::min(FieldAlign, MaxFieldAlignment);
1847
201k
    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignment);
1848
201k
  }
1849
775k
1850
775k
  // Round up the current record size to the field's alignment boundary.
1851
775k
  FieldOffset = FieldOffset.alignTo(FieldAlign);
1852
775k
  UnpackedFieldOffset = UnpackedFieldOffset.alignTo(UnpackedFieldAlign);
1853
775k
1854
775k
  if (UseExternalLayout) {
1855
15.9k
    FieldOffset = Context.toCharUnitsFromBits(
1856
15.9k
                    updateExternalFieldOffset(D, Context.toBits(FieldOffset)));
1857
15.9k
1858
15.9k
    if (!IsUnion && 
EmptySubobjects13.2k
) {
1859
13.2k
      // Record the fact that we're placing a field at this offset.
1860
13.2k
      bool Allowed = EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset);
1861
13.2k
      (void)Allowed;
1862
13.2k
      assert(Allowed && "Externally-placed field cannot be placed here");
1863
13.2k
    }
1864
759k
  } else {
1865
759k
    if (!IsUnion && 
EmptySubobjects709k
) {
1866
307k
      // Check if we can place the field at this offset.
1867
307k
      while (!EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset)) {
1868
44
        // We couldn't place the field at the offset. Try again at a new offset.
1869
44
        // We try offset 0 (for an empty field) and then dsize(C) onwards.
1870
44
        if (FieldOffset == CharUnits::Zero() &&
1871
44
            
getDataSize() != CharUnits::Zero()37
)
1872
8
          FieldOffset = getDataSize().alignTo(FieldAlign);
1873
36
        else
1874
36
          FieldOffset += FieldAlign;
1875
44
      }
1876
307k
    }
1877
759k
  }
1878
775k
1879
775k
  // Place this field at the current location.
1880
775k
  FieldOffsets.push_back(Context.toBits(FieldOffset));
1881
775k
1882
775k
  if (!UseExternalLayout)
1883
759k
    CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFieldOffset,
1884
759k
                      Context.toBits(UnpackedFieldOffset),
1885
759k
                      Context.toBits(UnpackedFieldAlign), FieldPacked, D);
1886
775k
1887
775k
  if (InsertExtraPadding) {
1888
36
    CharUnits ASanAlignment = CharUnits::fromQuantity(8);
1889
36
    CharUnits ExtraSizeForAsan = ASanAlignment;
1890
36
    if (FieldSize % ASanAlignment)
1891
30
      ExtraSizeForAsan +=
1892
30
          ASanAlignment - CharUnits::fromQuantity(FieldSize % ASanAlignment);
1893
36
    EffectiveFieldSize = FieldSize = FieldSize + ExtraSizeForAsan;
1894
36
  }
1895
775k
1896
775k
  // Reserve space for this field.
1897
775k
  if (!IsOverlappingEmptyField) {
1898
775k
    uint64_t EffectiveFieldSizeInBits = Context.toBits(EffectiveFieldSize);
1899
775k
    if (IsUnion)
1900
52.3k
      setDataSize(std::max(getDataSizeInBits(), EffectiveFieldSizeInBits));
1901
722k
    else
1902
722k
      setDataSize(FieldOffset + EffectiveFieldSize);
1903
775k
1904
775k
    PaddedFieldSize = std::max(PaddedFieldSize, FieldOffset + FieldSize);
1905
775k
    setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1906
775k
  } else {
1907
44
    setSize(std::max(getSizeInBits(),
1908
44
                     (uint64_t)Context.toBits(FieldOffset + FieldSize)));
1909
44
  }
1910
775k
1911
775k
  // Remember max struct/class alignment.
1912
775k
  UnadjustedAlignment = std::max(UnadjustedAlignment, FieldAlign);
1913
775k
  UpdateAlignment(FieldAlign, UnpackedFieldAlign);
1914
775k
}
1915
1916
333k
void ItaniumRecordLayoutBuilder::FinishLayout(const NamedDecl *D) {
1917
333k
  // In C++, records cannot be of size 0.
1918
333k
  if (Context.getLangOpts().CPlusPlus && 
getSizeInBits() == 0243k
) {
1919
66.9k
    if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
1920
66.8k
      // Compatibility with gcc requires a class (pod or non-pod)
1921
66.8k
      // which is not empty but of size 0; such as having fields of
1922
66.8k
      // array of zero-length, remains of Size 0
1923
66.8k
      if (RD->isEmpty())
1924
65.8k
        setSize(CharUnits::One());
1925
66.8k
    }
1926
59
    else
1927
59
      setSize(CharUnits::One());
1928
66.9k
  }
1929
333k
1930
333k
  // If we have any remaining field tail padding, include that in the overall
1931
333k
  // size.
1932
333k
  setSize(std::max(getSizeInBits(), (uint64_t)Context.toBits(PaddedFieldSize)));
1933
333k
1934
333k
  // Finally, round the size of the record up to the alignment of the
1935
333k
  // record itself.
1936
333k
  uint64_t UnpaddedSize = getSizeInBits() - UnfilledBitsInLastUnit;
1937
333k
  uint64_t UnpackedSizeInBits =
1938
333k
      llvm::alignTo(getSizeInBits(), Context.toBits(UnpackedAlignment));
1939
333k
  uint64_t RoundedSize =
1940
333k
      llvm::alignTo(getSizeInBits(), Context.toBits(Alignment));
1941
333k
1942
333k
  if (UseExternalLayout) {
1943
11.3k
    // If we're inferring alignment, and the external size is smaller than
1944
11.3k
    // our size after we've rounded up to alignment, conservatively set the
1945
11.3k
    // alignment to 1.
1946
11.3k
    if (InferAlignment && 
External.Size < RoundedSize4.00k
) {
1947
9
      Alignment = CharUnits::One();
1948
9
      InferAlignment = false;
1949
9
    }
1950
11.3k
    setSize(External.Size);
1951
11.3k
    return;
1952
11.3k
  }
1953
322k
1954
322k
  // Set the size to the final size.
1955
322k
  setSize(RoundedSize);
1956
322k
1957
322k
  unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
1958
322k
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
1959
317k
    // Warn if padding was introduced to the struct/class/union.
1960
317k
    if (getSizeInBits() > UnpaddedSize) {
1961
12.6k
      unsigned PadSize = getSizeInBits() - UnpaddedSize;
1962
12.6k
      bool InBits = true;
1963
12.6k
      if (PadSize % CharBitNum == 0) {
1964
12.0k
        PadSize = PadSize / CharBitNum;
1965
12.0k
        InBits = false;
1966
12.0k
      }
1967
12.6k
      Diag(RD->getLocation(), diag::warn_padded_struct_size)
1968
12.6k
          << Context.getTypeDeclType(RD)
1969
12.6k
          << PadSize
1970
12.6k
          << (InBits ? 
1560
:
012.0k
); // (byte|bit)
1971
12.6k
    }
1972
317k
1973
317k
    // Warn if we packed it unnecessarily, when the unpacked alignment is not
1974
317k
    // greater than the one after packing, the size in bits doesn't change and
1975
317k
    // the offset of each field is identical.
1976
317k
    if (Packed && 
UnpackedAlignment <= Alignment6.80k
&&
1977
317k
        
UnpackedSizeInBits == getSizeInBits()1.08k
&&
!HasPackedField1.08k
)
1978
1.07k
      Diag(D->getLocation(), diag::warn_unnecessary_packed)
1979
1.07k
          << Context.getTypeDeclType(RD);
1980
317k
  }
1981
322k
}
1982
1983
void ItaniumRecordLayoutBuilder::UpdateAlignment(
1984
859k
    CharUnits NewAlignment, CharUnits UnpackedNewAlignment) {
1985
859k
  // The alignment is not modified when using 'mac68k' alignment or when
1986
859k
  // we have an externally-supplied layout that also provides overall alignment.
1987
859k
  if (IsMac68kAlign || 
(859k
UseExternalLayout859k
&&
!InferAlignment21.0k
))
1988
13.7k
    return;
1989
845k
1990
845k
  if (NewAlignment > Alignment) {
1991
237k
    assert(llvm::isPowerOf2_64(NewAlignment.getQuantity()) &&
1992
237k
           "Alignment not a power of 2");
1993
237k
    Alignment = NewAlignment;
1994
237k
  }
1995
845k
1996
845k
  if (UnpackedNewAlignment > UnpackedAlignment) {
1997
244k
    assert(llvm::isPowerOf2_64(UnpackedNewAlignment.getQuantity()) &&
1998
244k
           "Alignment not a power of 2");
1999
244k
    UnpackedAlignment = UnpackedNewAlignment;
2000
244k
  }
2001
845k
}
2002
2003
uint64_t
2004
ItaniumRecordLayoutBuilder::updateExternalFieldOffset(const FieldDecl *Field,
2005
17.5k
                                                      uint64_t ComputedOffset) {
2006
17.5k
  uint64_t ExternalFieldOffset = External.getExternalFieldOffset(Field);
2007
17.5k
2008
17.5k
  if (InferAlignment && 
ExternalFieldOffset < ComputedOffset5.59k
) {
2009
6
    // The externally-supplied field offset is before the field offset we
2010
6
    // computed. Assume that the structure is packed.
2011
6
    Alignment = CharUnits::One();
2012
6
    InferAlignment = false;
2013
6
  }
2014
17.5k
2015
17.5k
  // Use the externally-supplied field offset.
2016
17.5k
  return ExternalFieldOffset;
2017
17.5k
}
2018
2019
/// Get diagnostic %select index for tag kind for
2020
/// field padding diagnostic message.
2021
/// WARNING: Indexes apply to particular diagnostics only!
2022
///
2023
/// \returns diagnostic %select index.
2024
15.6k
static unsigned getPaddingDiagFromTagKind(TagTypeKind Tag) {
2025
15.6k
  switch (Tag) {
2026
15.0k
  case TTK_Struct: return 0;
2027
0
  case TTK_Interface: return 1;
2028
676
  case TTK_Class: return 2;
2029
0
  default: llvm_unreachable("Invalid tag kind for field padding diagnostic!");
2030
15.6k
  }
2031
15.6k
}
2032
2033
void ItaniumRecordLayoutBuilder::CheckFieldPadding(
2034
    uint64_t Offset, uint64_t UnpaddedOffset, uint64_t UnpackedOffset,
2035
775k
    unsigned UnpackedAlign, bool isPacked, const FieldDecl *D) {
2036
775k
  // We let objc ivars without warning, objc interfaces generally are not used
2037
775k
  // for padding tricks.
2038
775k
  if (isa<ObjCIvarDecl>(D))
2039
9.66k
    return;
2040
766k
2041
766k
  // Don't warn about structs created without a SourceLocation.  This can
2042
766k
  // be done by clients of the AST, such as codegen.
2043
766k
  if (D->getLocation().isInvalid())
2044
90.8k
    return;
2045
675k
2046
675k
  unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
2047
675k
2048
675k
  // Warn if padding was introduced to the struct/class.
2049
675k
  if (!IsUnion && 
Offset > UnpaddedOffset626k
) {
2050
15.6k
    unsigned PadSize = Offset - UnpaddedOffset;
2051
15.6k
    bool InBits = true;
2052
15.6k
    if (PadSize % CharBitNum == 0) {
2053
15.2k
      PadSize = PadSize / CharBitNum;
2054
15.2k
      InBits = false;
2055
15.2k
    }
2056
15.6k
    if (D->getIdentifier())
2057
15.4k
      Diag(D->getLocation(), diag::warn_padded_struct_field)
2058
15.4k
          << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
2059
15.4k
          << Context.getTypeDeclType(D->getParent())
2060
15.4k
          << PadSize
2061
15.4k
          << (InBits ? 
1365
:
015.1k
) // (byte|bit)
2062
15.4k
          << D->getIdentifier();
2063
208
    else
2064
208
      Diag(D->getLocation(), diag::warn_padded_struct_anon_field)
2065
208
          << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
2066
208
          << Context.getTypeDeclType(D->getParent())
2067
208
          << PadSize
2068
208
          << (InBits ? 
182
:
0126
); // (byte|bit)
2069
15.6k
 }
2070
675k
 if (isPacked && 
Offset != UnpackedOffset18.3k
) {
2071
2.54k
   HasPackedField = true;
2072
2.54k
 }
2073
675k
}
2074
2075
/// Compute the key function of a class, i.e. the first non-pure, non-inline,
/// user-provided virtual member function (per Itanium C++ ABI 5.2.3), or
/// nullptr if the class has none.  The key function determines which
/// translation unit emits the vtable.
static const CXXMethodDecl *computeKeyFunction(ASTContext &Context,
                                               const CXXRecordDecl *RD) {
  // If a class isn't polymorphic it doesn't have a key function.
  if (!RD->isPolymorphic())
    return nullptr;

  // A class that is not externally visible doesn't have a key function. (Or
  // at least, there's no point to assigning a key function to such a class;
  // this doesn't affect the ABI.)
  if (!RD->isExternallyVisible())
    return nullptr;

  // Template instantiations don't have key functions per Itanium C++ ABI 5.2.6.
  // Same behavior as GCC.
  TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
  if (TSK == TSK_ImplicitInstantiation ||
      TSK == TSK_ExplicitInstantiationDeclaration ||
      TSK == TSK_ExplicitInstantiationDefinition)
    return nullptr;

  // Some ABIs (notably the Microsoft-family ones) allow the key function to
  // be an inline function; the Itanium family does not.
  bool allowInlineFunctions =
    Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline();

  // Scan methods in declaration order; the first qualifying virtual function
  // is the key function.
  for (const CXXMethodDecl *MD : RD->methods()) {
    if (!MD->isVirtual())
      continue;

    // Pure virtual functions need no definition and cannot anchor the vtable.
    if (MD->isPure())
      continue;

    // Ignore implicit member functions, they are always marked as inline, but
    // they don't have a body until they're defined.
    if (MD->isImplicit())
      continue;

    if (MD->isInlineSpecified())
      continue;

    if (MD->hasInlineBody())
      continue;

    // Ignore inline deleted or defaulted functions.
    if (!MD->isUserProvided())
      continue;

    // In certain ABIs, ignore functions with out-of-line inline definitions.
    if (!allowInlineFunctions) {
      const FunctionDecl *Def;
      if (MD->hasBody(Def) && Def->isInlineSpecified())
        continue;
    }

    if (Context.getLangOpts().CUDA) {
      // While compiler may see key method in this TU, during CUDA
      // compilation we should ignore methods that are not accessible
      // on this side of compilation.
      if (Context.getLangOpts().CUDAIsDevice) {
        // In device mode ignore methods without __device__ attribute.
        if (!MD->hasAttr<CUDADeviceAttr>())
          continue;
      } else {
        // In host mode ignore __device__-only methods.
        if (!MD->hasAttr<CUDAHostAttr>() && MD->hasAttr<CUDADeviceAttr>())
          continue;
      }
    }

    // If the key function is dllimport but the class isn't, then the class has
    // no key function. The DLL that exports the key function won't export the
    // vtable in this case.
    if (MD->hasAttr<DLLImportAttr>() && !RD->hasAttr<DLLImportAttr>())
      return nullptr;

    // We found it.
    return MD;
  }

  return nullptr;
}
2154
2155
/// Report a diagnostic at \p Loc through the ASTContext's diagnostics engine.
DiagnosticBuilder ItaniumRecordLayoutBuilder::Diag(SourceLocation Loc,
                                                   unsigned DiagID) {
  DiagnosticsEngine &Diags = Context.getDiagnostics();
  return Diags.Report(Loc, DiagID);
}
2159
2160
/// Does the target C++ ABI require us to skip over the tail-padding
/// of the given class (considering it as a base class) when allocating
/// objects?
///
/// The answer depends on the ABI's tail-padding rule: either always reuse
/// tail padding, or reuse it unless the class is a POD in the C++03 or
/// C++11 sense respectively.
static bool mustSkipTailPadding(TargetCXXABI ABI, const CXXRecordDecl *RD) {
  switch (ABI.getTailPaddingUseRules()) {
  case TargetCXXABI::AlwaysUseTailPadding:
    return false;

  case TargetCXXABI::UseTailPaddingUnlessPOD03:
    // FIXME: To the extent that this is meant to cover the Itanium ABI
    // rules, we should implement the restrictions about over-sized
    // bitfields:
    //
    // http://itanium-cxx-abi.github.io/cxx-abi/abi.html#POD :
    //   In general, a type is considered a POD for the purposes of
    //   layout if it is a POD type (in the sense of ISO C++
    //   [basic.types]). However, a POD-struct or POD-union (in the
    //   sense of ISO C++ [class]) with a bitfield member whose
    //   declared width is wider than the declared type of the
    //   bitfield is not a POD for the purpose of layout.  Similarly,
    //   an array type is not a POD for the purpose of layout if the
    //   element type of the array is not a POD for the purpose of
    //   layout.
    //
    //   Where references to the ISO C++ are made in this paragraph,
    //   the Technical Corrigendum 1 version of the standard is
    //   intended.
    return RD->isPOD();

  case TargetCXXABI::UseTailPaddingUnlessPOD11:
    // This is equivalent to RD->getTypeForDecl().isCXX11PODType(),
    // but with a lot of abstraction penalty stripped off.  This does
    // assume that these properties are set correctly even in C++98
    // mode; fortunately, that is true because we want to assign
    // consistently semantics to the type-traits intrinsics (or at
    // least as many of them as possible).
    return RD->isTrivial() && RD->isCXX11StandardLayout();
  }

  llvm_unreachable("bad tail-padding use kind");
}
2201
2202
334k
static bool isMsLayout(const ASTContext &Context) {
2203
334k
  return Context.getTargetInfo().getCXXABI().isMicrosoft();
2204
334k
}
2205
2206
// This section contains an implementation of struct layout that is, up to the
2207
// included tests, compatible with cl.exe (2013).  The layout produced is
2208
// significantly different than those produced by the Itanium ABI.  Here we note
2209
// the most important differences.
2210
//
2211
// * The alignment of bitfields in unions is ignored when computing the
2212
//   alignment of the union.
2213
// * The existence of zero-width bitfield that occurs after anything other than
2214
//   a non-zero length bitfield is ignored.
2215
// * There is no explicit primary base for the purposes of layout.  All bases
2216
//   with vfptrs are laid out first, followed by all bases without vfptrs.
2217
// * The Itanium equivalent vtable pointers are split into a vfptr (virtual
2218
//   function pointer) and a vbptr (virtual base pointer).  They can each be
2219
//   shared with a non-virtual base. These bases need not be the same.  vfptrs
2220
//   always occur at offset 0.  vbptrs can occur at an arbitrary offset and are
2221
//   placed after the lexicographically last non-virtual base.  This placement
2222
//   is always before fields but can be in the middle of the non-virtual bases
2223
//   due to the two-pass layout scheme for non-virtual-bases.
2224
// * Virtual bases sometimes require a 'vtordisp' field that is laid out before
2225
//   the virtual base and is used in conjunction with virtual overrides during
2226
//   construction and destruction.  This is always a 4 byte value and is used as
2227
//   an alternative to constructor vtables.
2228
// * vtordisps are allocated in a block of memory with size and alignment equal
2229
//   to the alignment of the completed structure (before applying __declspec(
2230
//   align())).  The vtordisp always occur at the end of the allocation block,
2231
//   immediately prior to the virtual base.
2232
// * vfptrs are injected after all bases and fields have been laid out.  In
2233
//   order to guarantee proper alignment of all fields, the vfptr injection
2234
//   pushes all bases and fields back by the alignment imposed by those bases
2235
//   and fields.  This can potentially add a significant amount of padding.
2236
//   vfptrs are always injected at offset 0.
2237
// * vbptrs are injected after all bases and fields have been laid out.  In
2238
//   order to guarantee proper alignment of all fields, the vfptr injection
2239
//   pushes all bases and fields back by the alignment imposed by those bases
2240
//   and fields.  This can potentially add a significant amount of padding.
2241
//   vbptrs are injected immediately after the last non-virtual base as
2242
//   lexicographically ordered in the code.  If this site isn't pointer aligned
2243
//   the vbptr is placed at the next properly aligned location.  Enough padding
2244
//   is added to guarantee a fit.
2245
// * The last zero sized non-virtual base can be placed at the end of the
2246
//   struct (potentially aliasing another object), or may alias with the first
2247
//   field, even if they are of the same type.
2248
// * The last zero size virtual base may be placed at the end of the struct
2249
//   potentially aliasing another object.
2250
// * The ABI attempts to avoid aliasing of zero sized bases by adding padding
2251
//   between bases or vbases with specific properties.  The criteria for
2252
//   additional padding between two bases is that the first base is zero sized
2253
//   or ends with a zero sized subobject and the second base is zero sized or
2254
//   trails with a zero sized base or field (sharing of vfptrs can reorder the
2255
//   layout of the bases, so the leading base is not always the first one
//   declared).
2256
//   This rule does take into account fields that are not records, so padding
2257
//   will occur even if the last field is, e.g. an int. The padding added for
2258
//   bases is 1 byte.  The padding added between vbases depends on the alignment
2259
//   of the object but is at least 4 bytes (in both 32 and 64 bit modes).
2260
// * There is no concept of non-virtual alignment, non-virtual alignment and
2261
//   alignment are always identical.
2262
// * There is a distinction between alignment and required alignment.
2263
//   __declspec(align) changes the required alignment of a struct.  This
2264
//   alignment is _always_ obeyed, even in the presence of #pragma pack. A
2265
//   record inherits required alignment from all of its fields and bases.
2266
// * __declspec(align) on bitfields has the effect of changing the bitfield's
2267
//   alignment instead of its required alignment.  This is the only known way
2268
//   to make the alignment of a struct bigger than 8.  Interestingly enough
2269
//   this alignment is also immune to the effects of #pragma pack and can be
2270
//   used to create structures with large alignment under #pragma pack.
2271
//   However, because it does not impact required alignment, such a structure,
2272
//   when used as a field or base, will not be aligned if #pragma pack is
2273
//   still active at the time of use.
2274
//
2275
// Known incompatibilities:
2276
// * all: #pragma pack between fields in a record
2277
// * 2010 and back: If the last field in a record is a bitfield, every object
2278
//   laid out after the record will have extra padding inserted before it.  The
2279
//   extra padding will have size equal to the size of the storage class of the
2280
//   bitfield.  0 sized bitfields don't exhibit this behavior and the extra
2281
//   padding can be avoided by adding a 0 sized bitfield after the non-zero-
2282
//   sized bitfield.
2283
// * 2012 and back: In 64-bit mode, if the alignment of a record is 16 or
2284
//   greater due to __declspec(align()) then a second layout phase occurs after
2285
//   The locations of the vf and vb pointers are known.  This layout phase
2286
//   suffers from the "last field is a bitfield" bug in 2010 and results in
2287
//   _every_ field getting padding put in front of it, potentially including the
2288
//   vfptr, leaving the vfprt at a non-zero location which results in a fault if
2289
//   anything tries to read the vftbl.  The second layout phase also treats
2290
//   bitfields as separate entities and gives them each storage rather than
2291
//   packing them.  Additionally, because this phase appears to perform a
2292
//   (an unstable) sort on the members before laying them out and because merged
2293
//   bitfields have the same address, the bitfields end up in whatever order
2294
//   the sort left them in, a behavior we could never hope to replicate.
2295
2296
namespace {
/// MicrosoftRecordLayoutBuilder - Builds record layouts that are, up to the
/// included tests, compatible with cl.exe (see the long comment above for the
/// ways in which this differs from the Itanium layout).  Drive it via
/// layout() for C records or cxxLayout() for C++ records.
struct MicrosoftRecordLayoutBuilder {
  /// Size/alignment pair for a base or field after pragma pack and
  /// __declspec(align) have been taken into account.
  struct ElementInfo {
    CharUnits Size;
    CharUnits Alignment;
  };
  /// Maps a non-virtual base class to its offset in the record.
  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
  MicrosoftRecordLayoutBuilder(const ASTContext &Context) : Context(Context) {}
private:
  // The builder is stateful and must not be copied.
  MicrosoftRecordLayoutBuilder(const MicrosoftRecordLayoutBuilder &) = delete;
  void operator=(const MicrosoftRecordLayoutBuilder &) = delete;
public:
  /// Entry point for laying out a plain C record.
  void layout(const RecordDecl *RD);
  /// Entry point for laying out a C++ record.
  void cxxLayout(const CXXRecordDecl *RD);
  /// Initializes size and alignment and honors some flags.
  void initializeLayout(const RecordDecl *RD);
  /// Initializes C++ layout, computes alignment and virtual alignment and
  /// existence of vfptrs and vbptrs.  Alignment is needed before the vfptr is
  /// laid out.
  void initializeCXXLayout(const CXXRecordDecl *RD);
  void layoutNonVirtualBases(const CXXRecordDecl *RD);
  void layoutNonVirtualBase(const CXXRecordDecl *RD,
                            const CXXRecordDecl *BaseDecl,
                            const ASTRecordLayout &BaseLayout,
                            const ASTRecordLayout *&PreviousBaseLayout);
  void injectVFPtr(const CXXRecordDecl *RD);
  void injectVBPtr(const CXXRecordDecl *RD);
  /// Lays out the fields of the record.  Also rounds size up to
  /// alignment.
  void layoutFields(const RecordDecl *RD);
  void layoutField(const FieldDecl *FD);
  void layoutBitField(const FieldDecl *FD);
  /// Lays out a single zero-width bit-field in the record and handles
  /// special cases associated with zero-width bit-fields.
  void layoutZeroWidthBitField(const FieldDecl *FD);
  void layoutVirtualBases(const CXXRecordDecl *RD);
  void finalizeLayout(const RecordDecl *RD);
  /// Gets the size and alignment of a base taking pragma pack and
  /// __declspec(align) into account.
  ElementInfo getAdjustedElementInfo(const ASTRecordLayout &Layout);
  /// Gets the size and alignment of a field taking pragma pack and
  /// __declspec(align) into account.  It also updates RequiredAlignment as a
  /// side effect because it is most convenient to do so here.
  ElementInfo getAdjustedElementInfo(const FieldDecl *FD);
  /// Places a field at an offset in CharUnits.
  void placeFieldAtOffset(CharUnits FieldOffset) {
    FieldOffsets.push_back(Context.toBits(FieldOffset));
  }
  /// Places a bitfield at a bit offset.
  void placeFieldAtBitOffset(uint64_t FieldOffset) {
    FieldOffsets.push_back(FieldOffset);
  }
  /// Compute the set of virtual bases for which vtordisps are required.
  void computeVtorDispSet(
      llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtorDispSet,
      const CXXRecordDecl *RD) const;
  const ASTContext &Context;
  /// The size of the record being laid out.
  CharUnits Size;
  /// The non-virtual size of the record layout.
  CharUnits NonVirtualSize;
  /// The data size of the record layout.
  CharUnits DataSize;
  /// The current alignment of the record layout.
  CharUnits Alignment;
  /// The maximum allowed field alignment. This is set by #pragma pack.
  CharUnits MaxFieldAlignment;
  /// The alignment that this record must obey.  This is imposed by
  /// __declspec(align()) on the record itself or one of its fields or bases.
  CharUnits RequiredAlignment;
  /// The size of the allocation of the currently active bitfield.
  /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield
  /// is true.
  CharUnits CurrentBitfieldSize;
  /// Offset to the virtual base table pointer (if one exists).
  CharUnits VBPtrOffset;
  /// Minimum record size possible.
  CharUnits MinEmptyStructSize;
  /// The size and alignment info of a pointer.
  ElementInfo PointerInfo;
  /// The primary base class (if one exists).
  const CXXRecordDecl *PrimaryBase;
  /// The class we share our vb-pointer with.
  const CXXRecordDecl *SharedVBPtrBase;
  /// The collection of field offsets.
  SmallVector<uint64_t, 16> FieldOffsets;
  /// Base classes and their offsets in the record.
  BaseOffsetsMapTy Bases;
  /// virtual base classes and their offsets in the record.
  ASTRecordLayout::VBaseOffsetsMapTy VBases;
  /// The number of remaining bits in our last bitfield allocation.
  /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield is
  /// true.
  unsigned RemainingBitsInField;
  bool IsUnion : 1;
  /// True if the last field laid out was a bitfield and was not 0
  /// width.
  bool LastFieldIsNonZeroWidthBitfield : 1;
  /// True if the class has its own vftable pointer.
  bool HasOwnVFPtr : 1;
  /// True if the class has a vbtable pointer.
  bool HasVBPtr : 1;
  /// True if the last sub-object within the type is zero sized or the
  /// object itself is zero sized.  This *does not* count members that are not
  /// records.  Only used for MS-ABI.
  bool EndsWithZeroSizedObject : 1;
  /// True if this class is zero sized or first base is zero sized or
  /// has this property.  Only used for MS-ABI.
  bool LeadsWithZeroSizedBase : 1;

  /// True if the external AST source provided a layout for this record.
  bool UseExternalLayout : 1;

  /// The layout provided by the external AST source. Only active if
  /// UseExternalLayout is true.
  ExternalLayout External;
};
} // namespace
2414
2415
/// Gets the size and alignment of a base class, taking pragma pack and the
/// base's required alignment into account.  Also folds the base's alignment
/// and required alignment into the record's running totals, and records
/// whether the base ends with a zero-sized object.
MicrosoftRecordLayoutBuilder::ElementInfo
MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
    const ASTRecordLayout &Layout) {
  ElementInfo Info;
  Info.Alignment = Layout.getAlignment();
  // Respect pragma pack.
  if (!MaxFieldAlignment.isZero())
    Info.Alignment = std::min(Info.Alignment, MaxFieldAlignment);
  // Track zero-sized subobjects here where it's already available.
  EndsWithZeroSizedObject = Layout.endsWithZeroSizedObject();
  // Respect required alignment, this is necessary because we may have adjusted
  // the alignment in the case of pragma pack.  Note that the required alignment
  // doesn't actually apply to the struct alignment at this point.
  Alignment = std::max(Alignment, Info.Alignment);
  RequiredAlignment = std::max(RequiredAlignment, Layout.getRequiredAlignment());
  Info.Alignment = std::max(Info.Alignment, Layout.getRequiredAlignment());
  Info.Size = Layout.getNonVirtualSize();
  return Info;
}
2434
2435
/// Gets the size and alignment of a field, taking pragma pack, attribute
/// packed, and __declspec(align) into account.  As a side effect this updates
/// the record's RequiredAlignment and EndsWithZeroSizedObject state.
MicrosoftRecordLayoutBuilder::ElementInfo
MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
    const FieldDecl *FD) {
  // Get the alignment of the field type's natural alignment, ignore any
  // alignment attributes.
  ElementInfo Info;
  std::tie(Info.Size, Info.Alignment) =
      Context.getTypeInfoInChars(FD->getType()->getUnqualifiedDesugaredType());
  // Respect align attributes on the field.
  CharUnits FieldRequiredAlignment =
      Context.toCharUnitsFromBits(FD->getMaxAlignment());
  // Respect align attributes on the type.
  if (Context.isAlignmentRequired(FD->getType()))
    FieldRequiredAlignment = std::max(
        Context.getTypeAlignInChars(FD->getType()), FieldRequiredAlignment);
  // Respect attributes applied to subobjects of the field.
  if (FD->isBitField())
    // For some reason __declspec align impacts alignment rather than required
    // alignment when it is applied to bitfields.
    Info.Alignment = std::max(Info.Alignment, FieldRequiredAlignment);
  else {
    // A record-typed field (or array of records) inherits the record's
    // required alignment and zero-sized-tail property.
    if (auto RT =
            FD->getType()->getBaseElementTypeUnsafe()->getAs<RecordType>()) {
      auto const &Layout = Context.getASTRecordLayout(RT->getDecl());
      EndsWithZeroSizedObject = Layout.endsWithZeroSizedObject();
      FieldRequiredAlignment = std::max(FieldRequiredAlignment,
                                        Layout.getRequiredAlignment());
    }
    // Capture required alignment as a side-effect.
    RequiredAlignment = std::max(RequiredAlignment, FieldRequiredAlignment);
  }
  // Respect pragma pack, attribute pack and declspec align.  Note that
  // required alignment (applied last) wins over pack/packed.
  if (!MaxFieldAlignment.isZero())
    Info.Alignment = std::min(Info.Alignment, MaxFieldAlignment);
  if (FD->hasAttr<PackedAttr>())
    Info.Alignment = CharUnits::One();
  Info.Alignment = std::max(Info.Alignment, FieldRequiredAlignment);
  return Info;
}
2474
2475
307
/// Lays out a plain C record: fields only, then rounds the size up to the
/// final alignment and finalizes.
void MicrosoftRecordLayoutBuilder::layout(const RecordDecl *RD) {
  // For C record layout, zero-sized records always have size 4.
  MinEmptyStructSize = CharUnits::fromQuantity(4);
  initializeLayout(RD);
  layoutFields(RD);
  DataSize = Size = Size.alignTo(Alignment);
  // __declspec(align()) on the record itself contributes to required
  // alignment.
  RequiredAlignment = std::max(
      RequiredAlignment, Context.toCharUnitsFromBits(RD->getMaxAlignment()));
  finalizeLayout(RD);
}
2485
2486
4.12k
/// Lays out a C++ record: non-virtual bases, fields, vbptr/vfptr injection,
/// then virtual bases.  The phase order here is the MS-ABI algorithm and must
/// not be changed.
void MicrosoftRecordLayoutBuilder::cxxLayout(const CXXRecordDecl *RD) {
  // The C++ standard says that empty structs have size 1.
  MinEmptyStructSize = CharUnits::One();
  initializeLayout(RD);
  initializeCXXLayout(RD);
  layoutNonVirtualBases(RD);
  layoutFields(RD);
  injectVBPtr(RD);
  injectVFPtr(RD);
  // A vfptr, or a vbptr we don't share with a base, adds pointer alignment.
  if (HasOwnVFPtr || (HasVBPtr && !SharedVBPtrBase))
    Alignment = std::max(Alignment, PointerInfo.Alignment);
  auto RoundingAlignment = Alignment;
  if (!MaxFieldAlignment.isZero())
    RoundingAlignment = std::min(RoundingAlignment, MaxFieldAlignment);
  // When an external source dictated the layout, trust its size as-is.
  if (!UseExternalLayout)
    Size = Size.alignTo(RoundingAlignment);
  NonVirtualSize = Size;
  RequiredAlignment = std::max(
      RequiredAlignment, Context.toCharUnitsFromBits(RD->getMaxAlignment()));
  layoutVirtualBases(RD);
  finalizeLayout(RD);
}
2508
2509
4.42k
/// Initializes size, alignment, required alignment, and the maximum field
/// alignment (from -fpack-struct, #pragma pack, and attribute packed), and
/// queries any external AST source for a pre-computed layout.
void MicrosoftRecordLayoutBuilder::initializeLayout(const RecordDecl *RD) {
  IsUnion = RD->isUnion();
  Size = CharUnits::Zero();
  Alignment = CharUnits::One();
  // In 64-bit mode we always perform an alignment step after laying out vbases.
  // In 32-bit mode we do not.  The check to see if we need to perform alignment
  // checks the RequiredAlignment field and performs alignment if it isn't 0.
  RequiredAlignment = Context.getTargetInfo().getTriple().isArch64Bit()
                          ? CharUnits::One()
                          : CharUnits::Zero();
  // Compute the maximum field alignment.
  MaxFieldAlignment = CharUnits::Zero();
  // Honor the default struct packing maximum alignment flag.
  if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct)
      MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
  // Honor the packing attribute.  The MS-ABI ignores pragma pack if it's
  // larger than the pointer size.
  if (const MaxFieldAlignmentAttr *MFAA = RD->getAttr<MaxFieldAlignmentAttr>()){
    unsigned PackedAlignment = MFAA->getAlignment();
    if (PackedAlignment <= Context.getTargetInfo().getPointerWidth(0))
      MaxFieldAlignment = Context.toCharUnitsFromBits(PackedAlignment);
  }
  // Packed attribute forces max field alignment to be 1.
  if (RD->hasAttr<PackedAttr>())
    MaxFieldAlignment = CharUnits::One();

  // Try to respect the external layout if present.
  UseExternalLayout = false;
  if (ExternalASTSource *Source = Context.getExternalSource())
    UseExternalLayout = Source->layoutRecordType(
        RD, External.Size, External.Align, External.FieldOffsets,
        External.BaseOffsets, External.VirtualBaseOffsets);
}
2542
2543
/// Resets the C++-specific layout state and computes the pointer size and
/// alignment used later when injecting the vfptr and vbptr.
void
MicrosoftRecordLayoutBuilder::initializeCXXLayout(const CXXRecordDecl *RD) {
  EndsWithZeroSizedObject = false;
  LeadsWithZeroSizedBase = false;
  HasOwnVFPtr = false;
  HasVBPtr = false;
  PrimaryBase = nullptr;
  SharedVBPtrBase = nullptr;
  // Calculate pointer size and alignment.  These are used for vfptr and vbptr
  // injection.
  PointerInfo.Size =
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
  PointerInfo.Alignment =
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
  // Respect pragma pack.
  if (!MaxFieldAlignment.isZero())
    PointerInfo.Alignment = std::min(PointerInfo.Alignment, MaxFieldAlignment);
}
2561
2562
/// Lays out all non-virtual bases in two passes (vfptr-carrying bases first,
/// then the rest), selects the primary base, decides whether this class needs
/// its own vfptr, and computes the vbptr offset where it is already known.
void
MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(const CXXRecordDecl *RD) {
  // The MS-ABI lays out all bases that contain leading vfptrs before it lays
  // out any bases that do not contain vfptrs.  We implement this as two passes
  // over the bases.  This approach guarantees that the primary base is laid out
  // first.  We use these passes to calculate some additional aggregated
  // information about the bases, such as required alignment and the presence of
  // zero sized members.
  const ASTRecordLayout *PreviousBaseLayout = nullptr;
  // Iterate through the bases and lay out the non-virtual ones.
  for (const CXXBaseSpecifier &Base : RD->bases()) {
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
    // Mark and skip virtual bases.
    if (Base.isVirtual()) {
      HasVBPtr = true;
      continue;
    }
    // Check for a base to share a VBPtr with.
    if (!SharedVBPtrBase && BaseLayout.hasVBPtr()) {
      SharedVBPtrBase = BaseDecl;
      HasVBPtr = true;
    }
    // Only lay out bases with extendable VFPtrs on the first pass.
    if (!BaseLayout.hasExtendableVFPtr())
      continue;
    // If we don't have a primary base, this one qualifies.
    if (!PrimaryBase) {
      PrimaryBase = BaseDecl;
      LeadsWithZeroSizedBase = BaseLayout.leadsWithZeroSizedBase();
    }
    // Lay out the base.
    layoutNonVirtualBase(RD, BaseDecl, BaseLayout, PreviousBaseLayout);
  }
  // Figure out if we need a fresh VFPtr for this class: a dynamic class with
  // no primary base needs one iff it introduces a new virtual function
  // (a virtual method that overrides nothing).
  if (!PrimaryBase && RD->isDynamicClass())
    for (CXXRecordDecl::method_iterator i = RD->method_begin(),
                                        e = RD->method_end();
         !HasOwnVFPtr && i != e; ++i)
      HasOwnVFPtr = i->isVirtual() && i->size_overridden_methods() == 0;
  // If we don't have a primary base then we have a leading object that could
  // itself lead with a zero-sized object, something we track.
  bool CheckLeadingLayout = !PrimaryBase;
  // Iterate through the bases and lay out the non-virtual ones.
  for (const CXXBaseSpecifier &Base : RD->bases()) {
    if (Base.isVirtual())
      continue;
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
    // Only lay out bases without extendable VFPtrs on the second pass.
    if (BaseLayout.hasExtendableVFPtr()) {
      VBPtrOffset = Bases[BaseDecl] + BaseLayout.getNonVirtualSize();
      continue;
    }
    // If this is the first layout, check to see if it leads with a zero sized
    // object.  If it does, so do we.
    if (CheckLeadingLayout) {
      CheckLeadingLayout = false;
      LeadsWithZeroSizedBase = BaseLayout.leadsWithZeroSizedBase();
    }
    // Lay out the base.
    layoutNonVirtualBase(RD, BaseDecl, BaseLayout, PreviousBaseLayout);
    // A fresh vbptr would go right after the lexicographically last
    // non-virtual base.
    VBPtrOffset = Bases[BaseDecl] + BaseLayout.getNonVirtualSize();
  }
  // Set our VBPtrOffset if we know it at this point.
  if (!HasVBPtr)
    VBPtrOffset = CharUnits::fromQuantity(-1);
  else if (SharedVBPtrBase) {
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(SharedVBPtrBase);
    VBPtrOffset = Bases[SharedVBPtrBase] + Layout.getVBPtrOffset();
  }
}
2634
2635
3.07k
static bool recordUsesEBO(const RecordDecl *RD) {
2636
3.07k
  if (!isa<CXXRecordDecl>(RD))
2637
16
    return false;
2638
3.05k
  if (RD->hasAttr<EmptyBasesAttr>())
2639
16
    return true;
2640
3.04k
  if (auto *LVA = RD->getAttr<LayoutVersionAttr>())
2641
0
    // TODO: Double check with the next version of MSVC.
2642
0
    if (LVA->getVersion() <= LangOptions::MSVC2015)
2643
0
      return false;
2644
3.04k
  // TODO: Some later version of MSVC will change the default behavior of the
2645
3.04k
  // compiler to enable EBO by default.  When this happens, we will need an
2646
3.04k
  // additional isCompatibleWithMSVC check.
2647
3.04k
  return false;
2648
3.04k
}
2649
2650
/// Lay out a single non-virtual base subobject at the current end of the
/// most-derived class (MDC), recording its offset in Bases and growing Size.
/// PreviousBaseLayout is updated so the caller can detect adjacent
/// zero-sized bases on the next iteration.
void MicrosoftRecordLayoutBuilder::layoutNonVirtualBase(
    const CXXRecordDecl *RD,
    const CXXRecordDecl *BaseDecl,
    const ASTRecordLayout &BaseLayout,
    const ASTRecordLayout *&PreviousBaseLayout) {
  // Insert padding between two bases if the left first one is zero sized or
  // contains a zero sized subobject and the right is zero sized or one leads
  // with a zero sized base.
  bool MDCUsesEBO = recordUsesEBO(RD);
  if (PreviousBaseLayout && PreviousBaseLayout->endsWithZeroSizedObject() &&
      BaseLayout.leadsWithZeroSizedBase() && !MDCUsesEBO)
    Size++;  // One byte of padding keeps the two subobjects distinct.
  ElementInfo Info = getAdjustedElementInfo(BaseLayout);
  CharUnits BaseOffset;

  // Respect the external AST source base offset, if present.
  bool FoundBase = false;
  if (UseExternalLayout) {
    FoundBase = External.getExternalNVBaseOffset(BaseDecl, BaseOffset);
    if (FoundBase) {
      assert(BaseOffset >= Size && "base offset already allocated");
      Size = BaseOffset;
    }
  }

  if (!FoundBase) {
    if (MDCUsesEBO && BaseDecl->isEmpty()) {
      // Empty bases collapse to offset zero when EBO is requested.
      assert(BaseLayout.getNonVirtualSize() == CharUnits::Zero());
      BaseOffset = CharUnits::Zero();
    } else {
      // Otherwise, lay the base out at the end of the MDC.
      BaseOffset = Size = Size.alignTo(Info.Alignment);
    }
  }
  Bases.insert(std::make_pair(BaseDecl, BaseOffset));
  // Only the non-virtual portion of the base occupies space here; its
  // virtual bases are laid out later with the MDC's virtual bases.
  Size += BaseLayout.getNonVirtualSize();
  PreviousBaseLayout = &BaseLayout;
}
2688
2689
4.42k
void MicrosoftRecordLayoutBuilder::layoutFields(const RecordDecl *RD) {
2690
4.42k
  LastFieldIsNonZeroWidthBitfield = false;
2691
4.42k
  for (const FieldDecl *Field : RD->fields())
2692
2.48k
    layoutField(Field);
2693
4.42k
}
2694
2695
2.48k
void MicrosoftRecordLayoutBuilder::layoutField(const FieldDecl *FD) {
2696
2.48k
  if (FD->isBitField()) {
2697
185
    layoutBitField(FD);
2698
185
    return;
2699
185
  }
2700
2.29k
  LastFieldIsNonZeroWidthBitfield = false;
2701
2.29k
  ElementInfo Info = getAdjustedElementInfo(FD);
2702
2.29k
  Alignment = std::max(Alignment, Info.Alignment);
2703
2.29k
  CharUnits FieldOffset;
2704
2.29k
  if (UseExternalLayout)
2705
6
    FieldOffset =
2706
6
        Context.toCharUnitsFromBits(External.getExternalFieldOffset(FD));
2707
2.29k
  else if (IsUnion)
2708
69
    FieldOffset = CharUnits::Zero();
2709
2.22k
  else
2710
2.22k
    FieldOffset = Size.alignTo(Info.Alignment);
2711
2.29k
  placeFieldAtOffset(FieldOffset);
2712
2.29k
  Size = std::max(Size, FieldOffset + Info.Size);
2713
2.29k
}
2714
2715
185
/// Lay out a bitfield under MS rules: consecutive bitfields of the same
/// formal type size share one storage unit; a type-size change or overflow
/// starts a fresh unit.
void MicrosoftRecordLayoutBuilder::layoutBitField(const FieldDecl *FD) {
  unsigned Width = FD->getBitWidthValue(Context);
  if (Width == 0) {
    // Zero-width bitfields have their own, special placement rules.
    layoutZeroWidthBitField(FD);
    return;
  }
  ElementInfo Info = getAdjustedElementInfo(FD);
  // Clamp the bitfield to a containable size for the sake of being able
  // to lay them out.  Sema will throw an error.
  if (Width > Context.toBits(Info.Size))
    Width = Context.toBits(Info.Size);
  // Check to see if this bitfield fits into an existing allocation.  Note:
  // MSVC refuses to pack bitfields of formal types with different sizes
  // into the same allocation.
  if (!UseExternalLayout && !IsUnion && LastFieldIsNonZeroWidthBitfield &&
      CurrentBitfieldSize == Info.Size && Width <= RemainingBitsInField) {
    // Reuse the tail of the current storage unit.
    placeFieldAtBitOffset(Context.toBits(Size) - RemainingBitsInField);
    RemainingBitsInField -= Width;
    return;
  }
  // Starting a new storage unit for this (and following) bitfields.
  LastFieldIsNonZeroWidthBitfield = true;
  CurrentBitfieldSize = Info.Size;
  if (UseExternalLayout) {
    auto FieldBitOffset = External.getExternalFieldOffset(FD);
    placeFieldAtBitOffset(FieldBitOffset);
    // Size covers the storage unit that contains the externally-placed bits.
    auto NewSize = Context.toCharUnitsFromBits(
        llvm::alignDown(FieldBitOffset, Context.toBits(Info.Alignment)) +
        Context.toBits(Info.Size));
    Size = std::max(Size, NewSize);
    Alignment = std::max(Alignment, Info.Alignment);
  } else if (IsUnion) {
    placeFieldAtOffset(CharUnits::Zero());
    Size = std::max(Size, Info.Size);
    // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
  } else {
    // Allocate a new block of memory and place the bitfield in it.
    CharUnits FieldOffset = Size.alignTo(Info.Alignment);
    placeFieldAtOffset(FieldOffset);
    Size = FieldOffset + Info.Size;
    Alignment = std::max(Alignment, Info.Alignment);
    // Leftover bits in the unit are available to subsequent bitfields.
    RemainingBitsInField = Context.toBits(Info.Size) - Width;
  }
}
2758
2759
/// Lay out a zero-width bitfield.  Under the MS ABI it only has an effect
/// (terminating the current bitfield run and realigning) when it follows a
/// non-zero-width bitfield; otherwise it is ignored for alignment purposes.
void
MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField(const FieldDecl *FD) {
  // Zero-width bitfields are ignored unless they follow a non-zero-width
  // bitfield.
  if (!LastFieldIsNonZeroWidthBitfield) {
    placeFieldAtOffset(IsUnion ? CharUnits::Zero() : Size);
    // TODO: Add a Sema warning that MS ignores alignment for zero
    // sized bitfields that occur after zero-size bitfields or non-bitfields.
    return;
  }
  // Close out the current bitfield allocation.
  LastFieldIsNonZeroWidthBitfield = false;
  ElementInfo Info = getAdjustedElementInfo(FD);
  if (IsUnion) {
    placeFieldAtOffset(CharUnits::Zero());
    Size = std::max(Size, Info.Size);
    // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
  } else {
    // Round up the current record size to the field's alignment boundary.
    CharUnits FieldOffset = Size.alignTo(Info.Alignment);
    placeFieldAtOffset(FieldOffset);
    // Note: Size is only realigned, not grown — the field occupies no bits.
    Size = FieldOffset;
    Alignment = std::max(Alignment, Info.Alignment);
  }
}
2783
2784
4.12k
/// Insert the virtual-base pointer (vbptr) at the previously computed
/// injection site, shifting every field and base laid out after that point
/// to make room.  No-op if the class has no vbptr of its own or shares one
/// with a base.
void MicrosoftRecordLayoutBuilder::injectVBPtr(const CXXRecordDecl *RD) {
  if (!HasVBPtr || SharedVBPtrBase)
    return;
  // Inject the VBPointer at the injection site.
  CharUnits InjectionSite = VBPtrOffset;
  // But before we do, make sure it's properly aligned.
  VBPtrOffset = VBPtrOffset.alignTo(PointerInfo.Alignment);
  // Determine where the first field should be laid out after the vbptr.
  CharUnits FieldStart = VBPtrOffset + PointerInfo.Size;
  // Shift everything after the vbptr down, unless we're using an external
  // layout.
  if (UseExternalLayout) {
    // It is possible that there were no fields or bases located after vbptr,
    // so the size was not adjusted before.
    if (Size < FieldStart)
      Size = FieldStart;
    return;
  }
  // Make sure that the amount we push the fields back by is a multiple of the
  // alignment.
  CharUnits Offset = (FieldStart - InjectionSite)
                         .alignTo(std::max(RequiredAlignment, Alignment));
  Size += Offset;
  // Fields are tracked in bits; bases in CharUnits.
  for (uint64_t &FieldOffset : FieldOffsets)
    FieldOffset += Context.toBits(Offset);
  for (BaseOffsetsMapTy::value_type &Base : Bases)
    if (Base.second >= InjectionSite)
      Base.second += Offset;
}
2813
2814
4.12k
/// Insert the class's own virtual-function pointer (vfptr) at offset zero,
/// pushing everything already laid out (fields, bases, vbptr) back by an
/// alignment-rounded pointer size.  No-op when the class has no vfptr of its
/// own.
void MicrosoftRecordLayoutBuilder::injectVFPtr(const CXXRecordDecl *RD) {
  if (!HasOwnVFPtr)
    return;
  // Make sure that the amount we push the struct back by is a multiple of the
  // alignment.
  CharUnits Offset =
      PointerInfo.Size.alignTo(std::max(RequiredAlignment, Alignment));
  // Push back the vbptr, but increase the size of the object and push back
  // regular fields by the offset only if not using external record layout.
  if (HasVBPtr)
    VBPtrOffset += Offset;

  if (UseExternalLayout) {
    // The class may have no bases or fields, but still have a vfptr
    // (e.g. it's an interface class). The size was not correctly set before
    // in this case.
    if (FieldOffsets.empty() && Bases.empty())
      Size += Offset;
    return;
  }

  Size += Offset;

  // If we're using an external layout, the fields offsets have already
  // accounted for this adjustment.
  for (uint64_t &FieldOffset : FieldOffsets)
    FieldOffset += Context.toBits(Offset);
  for (BaseOffsetsMapTy::value_type &Base : Bases)
    Base.second += Offset;
}
2844
2845
4.12k
/// Lay out all virtual bases at the end of the object, inserting a 4-byte
/// vtordisp slot before any virtual base that needs one and the usual
/// zero-sized-object padding between adjacent bases.
void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) {
  if (!HasVBPtr)
    return;
  // Vtordisps are always 4 bytes (even in 64-bit mode)
  CharUnits VtorDispSize = CharUnits::fromQuantity(4);
  CharUnits VtorDispAlignment = VtorDispSize;
  // vtordisps respect pragma pack.
  if (!MaxFieldAlignment.isZero())
    VtorDispAlignment = std::min(VtorDispAlignment, MaxFieldAlignment);
  // The alignment of the vtordisp is at least the required alignment of the
  // entire record.  This requirement may be present to support vtordisp
  // injection.
  for (const CXXBaseSpecifier &VBase : RD->vbases()) {
    const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
    RequiredAlignment =
        std::max(RequiredAlignment, BaseLayout.getRequiredAlignment());
  }
  VtorDispAlignment = std::max(VtorDispAlignment, RequiredAlignment);
  // Compute the vtordisp set.
  llvm::SmallPtrSet<const CXXRecordDecl *, 2> HasVtorDispSet;
  computeVtorDispSet(HasVtorDispSet, RD);
  // Iterate through the virtual bases and lay them out.
  const ASTRecordLayout *PreviousBaseLayout = nullptr;
  for (const CXXBaseSpecifier &VBase : RD->vbases()) {
    const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
    bool HasVtordisp = HasVtorDispSet.count(BaseDecl) > 0;
    // Insert padding between two bases if the left first one is zero sized or
    // contains a zero sized subobject and the right is zero sized or one leads
    // with a zero sized base.  The padding between virtual bases is 4
    // bytes (in both 32 and 64 bits modes) and always involves rounding up to
    // the required alignment, we don't know why.
    if ((PreviousBaseLayout && PreviousBaseLayout->endsWithZeroSizedObject() &&
         BaseLayout.leadsWithZeroSizedBase() && !recordUsesEBO(RD)) ||
        HasVtordisp) {
      Size = Size.alignTo(VtorDispAlignment) + VtorDispSize;
      Alignment = std::max(VtorDispAlignment, Alignment);
    }
    // Insert the virtual base.
    ElementInfo Info = getAdjustedElementInfo(BaseLayout);
    CharUnits BaseOffset;

    // Respect the external AST source base offset, if present.
    if (UseExternalLayout) {
      if (!External.getExternalVBaseOffset(BaseDecl, BaseOffset))
        BaseOffset = Size;
    } else
      BaseOffset = Size.alignTo(Info.Alignment);

    assert(BaseOffset >= Size && "base offset already allocated");

    VBases.insert(std::make_pair(BaseDecl,
        ASTRecordLayout::VBaseInfo(BaseOffset, HasVtordisp)));
    // Only the non-virtual portion contributes; shared virtual bases are not
    // duplicated.
    Size = BaseOffset + BaseLayout.getNonVirtualSize();
    PreviousBaseLayout = &BaseLayout;
  }
}
2903
2904
4.42k
/// Finalize the record: round the size up for required alignment, give
/// zero-sized records their minimum size, and let an external layout (if
/// any) override the computed size/alignment.
void MicrosoftRecordLayoutBuilder::finalizeLayout(const RecordDecl *RD) {
  // Respect required alignment.  Note that in 32-bit mode Required alignment
  // may be 0 and cause size not to be updated.
  DataSize = Size;
  if (!RequiredAlignment.isZero()) {
    Alignment = std::max(Alignment, RequiredAlignment);
    auto RoundingAlignment = Alignment;
    // pragma pack caps the rounding alignment, but required alignment
    // (e.g. __declspec(align)) wins back over it.
    if (!MaxFieldAlignment.isZero())
      RoundingAlignment = std::min(RoundingAlignment, MaxFieldAlignment);
    RoundingAlignment = std::max(RoundingAlignment, RequiredAlignment);
    Size = Size.alignTo(RoundingAlignment);
  }
  if (Size.isZero()) {
    // An EBO-enabled empty class is genuinely empty; anything else
    // zero-sized participates in zero-sized-object padding decisions.
    if (!recordUsesEBO(RD) || !cast<CXXRecordDecl>(RD)->isEmpty()) {
      EndsWithZeroSizedObject = true;
      LeadsWithZeroSizedBase = true;
    }
    // Zero-sized structures have size equal to their alignment if a
    // __declspec(align) came into play.
    if (RequiredAlignment >= MinEmptyStructSize)
      Size = Alignment;
    else
      Size = MinEmptyStructSize;
  }

  // An external (e.g. DWARF/PDB-provided) layout overrides what we computed.
  if (UseExternalLayout) {
    Size = Context.toCharUnitsFromBits(External.Size);
    if (External.Align)
      Alignment = Context.toCharUnitsFromBits(External.Align);
  }
}
2935
2936
// Recursively walks the non-virtual bases of a class and determines if any of
2937
// them are in the bases with overridden methods set.
2938
static bool
2939
RequiresVtordisp(const llvm::SmallPtrSetImpl<const CXXRecordDecl *> &
2940
                     BasesWithOverriddenMethods,
2941
665
                 const CXXRecordDecl *RD) {
2942
665
  if (BasesWithOverriddenMethods.count(RD))
2943
94
    return true;
2944
571
  // If any of a virtual bases non-virtual bases (recursively) requires a
2945
571
  // vtordisp than so does this virtual base.
2946
571
  for (const CXXBaseSpecifier &Base : RD->bases())
2947
87
    if (!Base.isVirtual() &&
2948
87
        RequiresVtordisp(BasesWithOverriddenMethods,
2949
52
                         Base.getType()->getAsCXXRecordDecl()))
2950
19
      return true;
2951
571
  
return false552
;
2952
571
}
2953
2954
/// Populate HasVtordispSet with the virtual bases of RD that require a
/// vtordisp slot, honoring the active /vdN flag or #pragma vtordisp mode.
void MicrosoftRecordLayoutBuilder::computeVtorDispSet(
    llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtordispSet,
    const CXXRecordDecl *RD) const {
  // /vd2 or #pragma vtordisp(2): Always use vtordisps for virtual bases with
  // vftables.
  if (RD->getMSVtorDispMode() == MSVtorDispMode::ForVFTable) {
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
      const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
      const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
      if (Layout.hasExtendableVFPtr())
        HasVtordispSet.insert(BaseDecl);
    }
    return;
  }

  // If any of our bases need a vtordisp for this type, so do we.  Check our
  // direct bases for vtordisp requirements.
  for (const CXXBaseSpecifier &Base : RD->bases()) {
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
    for (const auto &bi : Layout.getVBaseOffsetsMap())
      if (bi.second.hasVtorDisp())
        HasVtordispSet.insert(bi.first);
  }
  // We don't introduce any additional vtordisps if either:
  // * A user declared constructor or destructor aren't declared.
  // * #pragma vtordisp(0) or the /vd0 flag are in use.
  if ((!RD->hasUserDeclaredConstructor() && !RD->hasUserDeclaredDestructor()) ||
      RD->getMSVtorDispMode() == MSVtorDispMode::Never)
    return;
  // /vd1 or #pragma vtordisp(1): Try to guess based on whether we think it's
  // possible for a partially constructed object with virtual base overrides to
  // escape a non-trivial constructor.
  assert(RD->getMSVtorDispMode() == MSVtorDispMode::ForVBaseOverride);
  // Compute a set of base classes which define methods we override.  A virtual
  // base in this set will require a vtordisp.  A virtual base that transitively
  // contains one of these bases as a non-virtual base will also require a
  // vtordisp.
  llvm::SmallPtrSet<const CXXMethodDecl *, 8> Work;
  llvm::SmallPtrSet<const CXXRecordDecl *, 2> BasesWithOverriddenMethods;
  // Seed the working set with our non-destructor, non-pure virtual methods.
  for (const CXXMethodDecl *MD : RD->methods())
    if (MD->isVirtual() && !isa<CXXDestructorDecl>(MD) && !MD->isPure())
      Work.insert(MD);
  // Walk the override chains to their roots; each root's parent class is a
  // base whose vtable entries we replace.
  while (!Work.empty()) {
    const CXXMethodDecl *MD = *Work.begin();
    auto MethodRange = MD->overridden_methods();
    // If a virtual method has no-overrides it lives in its parent's vtable.
    if (MethodRange.begin() == MethodRange.end())
      BasesWithOverriddenMethods.insert(MD->getParent());
    else
      Work.insert(MethodRange.begin(), MethodRange.end());
    // We've finished processing this element, remove it from the working set.
    Work.erase(MD);
  }
  // For each of our virtual bases, check if it is in the set of overridden
  // bases or if it transitively contains a non-virtual base that is.
  for (const CXXBaseSpecifier &Base : RD->vbases()) {
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
    if (!HasVtordispSet.count(BaseDecl) &&
        RequiresVtordisp(BasesWithOverriddenMethods, BaseDecl))
      HasVtordispSet.insert(BaseDecl);
  }
}
3018
3019
/// getASTRecordLayout - Get or compute information about the layout of the
/// specified record (struct/union/class), which indicates its size and field
/// position information.
const ASTRecordLayout &
ASTContext::getASTRecordLayout(const RecordDecl *D) const {
  // These asserts test different things.  A record has a definition
  // as soon as we begin to parse the definition.  That definition is
  // not a complete definition (which is what isDefinition() tests)
  // until we *finish* parsing the definition.

  if (D->hasExternalLexicalStorage() && !D->getDefinition())
    getExternalSource()->CompleteType(const_cast<RecordDecl*>(D));

  D = D->getDefinition();
  assert(D && "Cannot get layout of forward declarations!");
  assert(!D->isInvalidDecl() && "Cannot get layout of invalid decl!");
  assert(D->isCompleteDefinition() && "Cannot layout type before complete!");

  // Look up this layout, if already laid out, return what we have.
  // Note that we can't save a reference to the entry because this function
  // is recursive.
  const ASTRecordLayout *Entry = ASTRecordLayouts[D];
  if (Entry) return *Entry;

  const ASTRecordLayout *NewEntry = nullptr;

  // Dispatch to the MS or Itanium layout builder based on the target ABI,
  // with a separate path for C++ classes vs. plain records in each.
  if (isMsLayout(*this)) {
    MicrosoftRecordLayoutBuilder Builder(*this);
    if (const auto *RD = dyn_cast<CXXRecordDecl>(D)) {
      Builder.cxxLayout(RD);
      NewEntry = new (*this) ASTRecordLayout(
          *this, Builder.Size, Builder.Alignment, Builder.Alignment,
          Builder.RequiredAlignment,
          Builder.HasOwnVFPtr, Builder.HasOwnVFPtr || Builder.PrimaryBase,
          Builder.VBPtrOffset, Builder.DataSize, Builder.FieldOffsets,
          Builder.NonVirtualSize, Builder.Alignment, CharUnits::Zero(),
          Builder.PrimaryBase, false, Builder.SharedVBPtrBase,
          Builder.EndsWithZeroSizedObject, Builder.LeadsWithZeroSizedBase,
          Builder.Bases, Builder.VBases);
    } else {
      Builder.layout(D);
      NewEntry = new (*this) ASTRecordLayout(
          *this, Builder.Size, Builder.Alignment, Builder.Alignment,
          Builder.RequiredAlignment,
          Builder.Size, Builder.FieldOffsets);
    }
  } else {
    if (const auto *RD = dyn_cast<CXXRecordDecl>(D)) {
      EmptySubobjectMap EmptySubobjects(*this, RD);
      ItaniumRecordLayoutBuilder Builder(*this, &EmptySubobjects);
      Builder.Layout(RD);

      // In certain situations, we are allowed to lay out objects in the
      // tail-padding of base classes.  This is ABI-dependent.
      // FIXME: this should be stored in the record layout.
      bool skipTailPadding =
          mustSkipTailPadding(getTargetInfo().getCXXABI(), RD);

      // FIXME: This should be done in FinalizeLayout.
      CharUnits DataSize =
          skipTailPadding ? Builder.getSize() : Builder.getDataSize();
      CharUnits NonVirtualSize =
          skipTailPadding ? DataSize : Builder.NonVirtualSize;
      NewEntry = new (*this) ASTRecordLayout(
          *this, Builder.getSize(), Builder.Alignment, Builder.UnadjustedAlignment,
          /*RequiredAlignment : used by MS-ABI)*/
          Builder.Alignment, Builder.HasOwnVFPtr, RD->isDynamicClass(),
          CharUnits::fromQuantity(-1), DataSize, Builder.FieldOffsets,
          NonVirtualSize, Builder.NonVirtualAlignment,
          EmptySubobjects.SizeOfLargestEmptySubobject, Builder.PrimaryBase,
          Builder.PrimaryBaseIsVirtual, nullptr, false, false, Builder.Bases,
          Builder.VBases);
    } else {
      ItaniumRecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/nullptr);
      Builder.Layout(D);

      NewEntry = new (*this) ASTRecordLayout(
          *this, Builder.getSize(), Builder.Alignment, Builder.UnadjustedAlignment,
          /*RequiredAlignment : used by MS-ABI)*/
          Builder.Alignment, Builder.getSize(), Builder.FieldOffsets);
    }
  }

  // Cache the result so recursive/repeated queries are O(1).
  ASTRecordLayouts[D] = NewEntry;

  if (getLangOpts().DumpRecordLayouts) {
    llvm::outs() << "\n*** Dumping AST Record Layout\n";
    DumpRecordLayout(D, llvm::outs(), getLangOpts().DumpRecordLayoutsSimple);
  }

  return *NewEntry;
}
3111
3112
193k
/// Return the key function of RD as currently known, computing and caching
/// it on first query.  Returns null for ABIs without key functions or when
/// the class has none.
const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) {
  if (!getTargetInfo().getCXXABI().hasKeyFunctions())
    return nullptr;

  assert(RD->getDefinition() && "Cannot get key function for forward decl!");
  RD = RD->getDefinition();

  // Beware:
  //  1) computing the key function might trigger deserialization, which might
  //     invalidate iterators into KeyFunctions
  //  2) 'get' on the LazyDeclPtr might also trigger deserialization and
  //     invalidate the LazyDeclPtr within the map itself
  LazyDeclPtr Entry = KeyFunctions[RD];
  const Decl *Result =
      Entry ? Entry.get(getExternalSource()) : computeKeyFunction(*this, RD);

  // Store it back if it changed.
  if (Entry.isOffset() || Entry.isValid() != bool(Result))
    KeyFunctions[RD] = const_cast<Decl*>(Result);

  return cast_or_null<CXXMethodDecl>(Result);
}
3134
3135
14
void ASTContext::setNonKeyFunction(const CXXMethodDecl *Method) {
3136
14
  assert(Method == Method->getFirstDecl() &&
3137
14
         "not working with method declaration from class definition");
3138
14
3139
14
  // Look up the cache entry.  Since we're working with the first
3140
14
  // declaration, its parent must be the class definition, which is
3141
14
  // the correct key for the KeyFunctions hash.
3142
14
  const auto &Map = KeyFunctions;
3143
14
  auto I = Map.find(Method->getParent());
3144
14
3145
14
  // If it's not cached, there's nothing to do.
3146
14
  if (I == Map.end()) 
return0
;
3147
14
3148
14
  // If it is cached, check whether it's the target method, and if so,
3149
14
  // remove it from the cache. Note, the call to 'get' might invalidate
3150
14
  // the iterator and the LazyDeclPtr object within the map.
3151
14
  LazyDeclPtr Ptr = I->second;
3152
14
  if (Ptr.get(getExternalSource()) == Method) {
3153
14
    // FIXME: remember that we did this for module / chained PCH state?
3154
14
    KeyFunctions.erase(Method->getParent());
3155
14
  }
3156
14
}
3157
3158
563
// Return FD's bit offset within the layout of its enclosing record.
static uint64_t getFieldOffset(const ASTContext &C, const FieldDecl *FD) {
  return C.getASTRecordLayout(FD->getParent())
      .getFieldOffset(FD->getFieldIndex());
}
3162
3163
523
/// Return the bit offset of VD, which is either a direct field or an
/// indirect field (a member reached through anonymous structs/unions).
uint64_t ASTContext::getFieldOffset(const ValueDecl *VD) const {
  if (const auto *FD = dyn_cast<FieldDecl>(VD))
    return ::getFieldOffset(*this, FD);

  // For an indirect field, accumulate the offsets along the chain of
  // anonymous members leading to the named field.
  uint64_t BitOffset = 0;
  for (const NamedDecl *ND : cast<IndirectFieldDecl>(VD)->chain())
    BitOffset += ::getFieldOffset(*this, cast<FieldDecl>(ND));
  return BitOffset;
}
3177
3178
/// Return the bit offset of the given Objective-C ivar, consulting the
/// implementation's layout when one is available (it may include
/// synthesized ivars) and the interface layout otherwise.
uint64_t ASTContext::lookupFieldBitOffset(const ObjCInterfaceDecl *OID,
                                          const ObjCImplementationDecl *ID,
                                          const ObjCIvarDecl *Ivar) const {
  const ObjCInterfaceDecl *Container = Ivar->getContainingInterface();

  // FIXME: We should eliminate the need to have ObjCImplementationDecl passed
  // in here; it should never be necessary because that should be the lexical
  // decl context for the ivar.

  // If we have an implementation (and the ivar is in it) then
  // look up in the implementation layout.
  const ASTRecordLayout *RL;
  if (ID && declaresSameEntity(ID->getClassInterface(), Container))
    RL = &getASTObjCImplementationLayout(ID);
  else
    RL = &getASTObjCInterfaceLayout(Container);

  // Compute field index.
  //
  // FIXME: The index here is closely tied to how ASTContext::getObjCLayout is
  // implemented. This should be fixed to get the information from the layout
  // directly.
  unsigned Index = 0;

  // Linear scan over the declared ivars to find Ivar's position.
  for (const ObjCIvarDecl *IVD = Container->all_declared_ivar_begin();
       IVD; IVD = IVD->getNextIvar()) {
    if (Ivar == IVD)
      break;
    ++Index;
  }
  assert(Index < RL->getFieldCount() && "Ivar is not inside record layout!");

  return RL->getFieldOffset(Index);
}
3212
3213
/// getObjCLayout - Get or compute information about the layout of the
/// given interface.
///
/// \param Impl - If given, also include the layout of the interface's
/// implementation. This may differ by including synthesized ivars.
const ASTRecordLayout &
ASTContext::getObjCLayout(const ObjCInterfaceDecl *D,
                          const ObjCImplementationDecl *Impl) const {
  // Retrieve the definition
  if (D->hasExternalLexicalStorage() && !D->getDefinition())
    getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl*>(D));
  D = D->getDefinition();
  assert(D && D->isThisDeclarationADefinition() && "Invalid interface decl!");

  // Look up this layout, if already laid out, return what we have.
  // Implementations and interfaces are cached under different keys because
  // their layouts can differ (synthesized ivars).
  const ObjCContainerDecl *Key =
    Impl ? (const ObjCContainerDecl*) Impl : (const ObjCContainerDecl*) D;
  if (const ASTRecordLayout *Entry = ObjCLayouts[Key])
    return *Entry;

  // Add in synthesized ivar count if laying out an implementation.
  if (Impl) {
    unsigned SynthCount = CountNonClassIvars(D);
    // If there aren't any synthesized ivars then reuse the interface
    // entry. Note we can't cache this because we simply free all
    // entries later; however we shouldn't look up implementations
    // frequently.
    if (SynthCount == 0)
      return getObjCLayout(D, nullptr);
  }

  // ObjC containers always use the Itanium builder, with no empty-subobject
  // tracking (there are no C++ bases involved).
  ItaniumRecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/nullptr);
  Builder.Layout(D);

  const ASTRecordLayout *NewEntry =
    new (*this) ASTRecordLayout(*this, Builder.getSize(),
                                Builder.Alignment,
                                Builder.UnadjustedAlignment,
                                /*RequiredAlignment : used by MS-ABI)*/
                                Builder.Alignment,
                                Builder.getDataSize(),
                                Builder.FieldOffsets);

  ObjCLayouts[Key] = NewEntry;

  return *NewEntry;
}
3260
3261
static void PrintOffset(raw_ostream &OS,
3262
4.78k
                        CharUnits Offset, unsigned IndentLevel) {
3263
4.78k
  OS << llvm::format("%10" PRId64 " | ", (int64_t)Offset.getQuantity());
3264
4.78k
  OS.indent(IndentLevel * 2);
3265
4.78k
}
3266
3267
// Print a bitfield position column as "<byte>:<first>-<last>" (or
// "<byte>:-" for a zero-width bitfield), right-justified, then the indent.
static void PrintBitFieldOffset(raw_ostream &OS, CharUnits Offset,
                                unsigned Begin, unsigned Width,
                                unsigned IndentLevel) {
  llvm::SmallString<10> Buffer;
  {
    // Build the "byte:bits" text in a scratch buffer so it can be
    // right-justified as a unit.
    llvm::raw_svector_ostream BufferOS(Buffer);
    BufferOS << Offset.getQuantity() << ':';
    if (Width == 0)
      BufferOS << '-';
    else
      BufferOS << Begin << '-' << (Begin + Width - 1);
  }

  OS << llvm::right_justify(Buffer, 10) << " | ";
  OS.indent(IndentLevel * 2);
}
3284
3285
1.90k
// Print an empty offset column (for rows that have no offset of their own)
// followed by the layout tree indent.
static void PrintIndentNoOffset(raw_ostream &OS, unsigned IndentLevel) {
  OS << "           | ";
  OS.indent(IndentLevel * 2);
}
3289
3290
/// Recursively pretty-print the layout of \p RD at \p Offset within the
/// outermost record being dumped.
///
/// \param OS stream the textual layout is written to.
/// \param RD record whose layout is printed; C++ records additionally get
///        their vptr/vbptr slots and base subobjects printed.
/// \param C context used to query the ASTRecordLayout and type names.
/// \param Offset absolute offset of this subobject from the start of the
///        outermost object; all printed offsets are absolute.
/// \param IndentLevel nesting depth; incremented for this record's members.
/// \param Description optional suffix (e.g. "(base)", or a field name when
///        recursing into a record-typed field); may be null.
/// \param PrintSizeInfo whether to emit the trailing sizeof/dsize/align
///        summary (only done for the outermost call).
/// \param IncludeVirtualBases whether to descend into virtual bases (false
///        when dumping a base subobject, which doesn't own its vbases).
static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD,
                             const ASTContext &C,
                             CharUnits Offset,
                             unsigned IndentLevel,
                             const char* Description,
                             bool PrintSizeInfo,
                             bool IncludeVirtualBases) {
  const ASTRecordLayout &Layout = C.getASTRecordLayout(RD);
  // Null for plain C records; gates all the C++-only sections below.
  auto CXXRD = dyn_cast<CXXRecordDecl>(RD);

  // Header line: offset, type name, optional description, empty marker.
  PrintOffset(OS, Offset, IndentLevel);
  OS << C.getTypeDeclType(const_cast<RecordDecl*>(RD)).getAsString();
  if (Description)
    OS << ' ' << Description;
  if (CXXRD && CXXRD->isEmpty())
    OS << " (empty)";
  OS << '\n';

  IndentLevel++;

  // Dump bases.
  if (CXXRD) {
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
    bool HasOwnVFPtr = Layout.hasOwnVFPtr();
    bool HasOwnVBPtr = Layout.hasOwnVBPtr();

    // Vtable pointer.
    // Itanium ABI: a dynamic class with no primary base introduces its own
    // vtable pointer at offset 0. MS ABI is handled by the vfptr branch.
    if (CXXRD->isDynamicClass() && !PrimaryBase && !isMsLayout(C)) {
      PrintOffset(OS, Offset, IndentLevel);
      OS << '(' << *RD << " vtable pointer)\n";
    } else if (HasOwnVFPtr) {
      PrintOffset(OS, Offset, IndentLevel);
      // vfptr (for Microsoft C++ ABI)
      OS << '(' << *RD << " vftable pointer)\n";
    }

    // Collect nvbases.
    SmallVector<const CXXRecordDecl *, 4> Bases;
    for (const CXXBaseSpecifier &Base : CXXRD->bases()) {
      assert(!Base.getType()->isDependentType() &&
             "Cannot layout class with dependent bases.");
      if (!Base.isVirtual())
        Bases.push_back(Base.getType()->getAsCXXRecordDecl());
    }

    // Sort nvbases by offset.
    // Declaration order need not match layout order, so sort by the offset
    // recorded in the layout; stable_sort keeps ties deterministic.
    llvm::stable_sort(
        Bases, [&](const CXXRecordDecl *L, const CXXRecordDecl *R) {
          return Layout.getBaseClassOffset(L) < Layout.getBaseClassOffset(R);
        });

    // Dump (non-virtual) bases
    for (const CXXRecordDecl *Base : Bases) {
      CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base);
      DumpRecordLayout(OS, Base, C, BaseOffset, IndentLevel,
                       Base == PrimaryBase ? "(primary base)" : "(base)",
                       /*PrintSizeInfo=*/false,
                       /*IncludeVirtualBases=*/false);
    }

    // vbptr (for Microsoft C++ ABI)
    if (HasOwnVBPtr) {
      PrintOffset(OS, Offset + Layout.getVBPtrOffset(), IndentLevel);
      OS << '(' << *RD << " vbtable pointer)\n";
    }
  }

  // Dump fields.
  uint64_t FieldNo = 0;
  for (RecordDecl::field_iterator I = RD->field_begin(),
         E = RD->field_end(); I != E; ++I, ++FieldNo) {
    const FieldDecl &Field = **I;
    // Field offsets in the layout are in bits, relative to this record.
    uint64_t LocalFieldOffsetInBits = Layout.getFieldOffset(FieldNo);
    CharUnits FieldOffset =
      Offset + C.toCharUnitsFromBits(LocalFieldOffsetInBits);

    // Recursively dump fields of record type.
    if (auto RT = Field.getType()->getAs<RecordType>()) {
      DumpRecordLayout(OS, RT->getDecl(), C, FieldOffset, IndentLevel,
                       Field.getName().data(),
                       /*PrintSizeInfo=*/false,
                       /*IncludeVirtualBases=*/true);
      continue;
    }

    if (Field.isBitField()) {
      // Begin is the bit position within the field's starting byte:
      // the bit offset minus the byte-aligned part already in FieldOffset.
      uint64_t LocalFieldByteOffsetInBits = C.toBits(FieldOffset - Offset);
      unsigned Begin = LocalFieldOffsetInBits - LocalFieldByteOffsetInBits;
      unsigned Width = Field.getBitWidthValue(C);
      PrintBitFieldOffset(OS, FieldOffset, Begin, Width, IndentLevel);
    } else {
      PrintOffset(OS, FieldOffset, IndentLevel);
    }
    OS << Field.getType().getAsString() << ' ' << Field << '\n';
  }

  // Dump virtual bases.
  if (CXXRD && IncludeVirtualBases) {
    const ASTRecordLayout::VBaseOffsetsMapTy &VtorDisps =
      Layout.getVBaseOffsetsMap();

    for (const CXXBaseSpecifier &Base : CXXRD->vbases()) {
      assert(Base.isVirtual() && "Found non-virtual class!");
      const CXXRecordDecl *VBase = Base.getType()->getAsCXXRecordDecl();

      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBase);

      // NOTE(review): the vtordisp slot is printed 4 bytes before the vbase
      // — assumes a fixed 4-byte vtordisp (MS ABI); confirm for all targets.
      if (VtorDisps.find(VBase)->second.hasVtorDisp()) {
        PrintOffset(OS, VBaseOffset - CharUnits::fromQuantity(4), IndentLevel);
        OS << "(vtordisp for vbase " << *VBase << ")\n";
      }

      DumpRecordLayout(OS, VBase, C, VBaseOffset, IndentLevel,
                       VBase == Layout.getPrimaryBase() ?
                         "(primary virtual base)" : "(virtual base)",
                       /*PrintSizeInfo=*/false,
                       /*IncludeVirtualBases=*/false);
    }
  }

  if (!PrintSizeInfo) return;

  // Trailing summary: sizeof/dsize/align, plus nvsize/nvalign for C++.
  PrintIndentNoOffset(OS, IndentLevel - 1);
  OS << "[sizeof=" << Layout.getSize().getQuantity();
  if (CXXRD && !isMsLayout(C))
    OS << ", dsize=" << Layout.getDataSize().getQuantity();
  OS << ", align=" << Layout.getAlignment().getQuantity();

  if (CXXRD) {
    OS << ",\n";
    PrintIndentNoOffset(OS, IndentLevel - 1);
    OS << " nvsize=" << Layout.getNonVirtualSize().getQuantity();
    OS << ", nvalign=" << Layout.getNonVirtualAlignment().getQuantity();
  }
  OS << "]\n";
}
3426
3427
void ASTContext::DumpRecordLayout(const RecordDecl *RD,
3428
                                  raw_ostream &OS,
3429
1.08k
                                  bool Simple) const {
3430
1.08k
  if (!Simple) {
3431
997
    ::DumpRecordLayout(OS, RD, *this, CharUnits(), 0, nullptr,
3432
997
                       /*PrintSizeInfo*/true,
3433
997
                       /*IncludeVirtualBases=*/true);
3434
997
    return;
3435
997
  }
3436
83
3437
83
  // The "simple" format is designed to be parsed by the
3438
83
  // layout-override testing code.  There shouldn't be any external
3439
83
  // uses of this format --- when LLDB overrides a layout, it sets up
3440
83
  // the data structures directly --- so feel free to adjust this as
3441
83
  // you like as long as you also update the rudimentary parser for it
3442
83
  // in libFrontend.
3443
83
3444
83
  const ASTRecordLayout &Info = getASTRecordLayout(RD);
3445
83
  OS << "Type: " << getTypeDeclType(RD).getAsString() << "\n";
3446
83
  OS << "\nLayout: ";
3447
83
  OS << "<ASTRecordLayout\n";
3448
83
  OS << "  Size:" << toBits(Info.getSize()) << "\n";
3449
83
  if (!isMsLayout(*this))
3450
72
    OS << "  DataSize:" << toBits(Info.getDataSize()) << "\n";
3451
83
  OS << "  Alignment:" << toBits(Info.getAlignment()) << "\n";
3452
83
  OS << "  FieldOffsets: [";
3453
270
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; 
++i187
) {
3454
187
    if (i) 
OS << ", "111
;
3455
187
    OS << Info.getFieldOffset(i);
3456
187
  }
3457
83
  OS << "]>\n";
3458
83
}