Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/tools/clang/lib/AST/RecordLayoutBuilder.cpp
Line
Count
Source (jump to first uncovered line)
1
//=== RecordLayoutBuilder.cpp - Helper class for building record layouts ---==//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
9
#include "clang/AST/RecordLayout.h"
10
#include "clang/AST/ASTContext.h"
11
#include "clang/AST/ASTDiagnostic.h"
12
#include "clang/AST/Attr.h"
13
#include "clang/AST/CXXInheritance.h"
14
#include "clang/AST/Decl.h"
15
#include "clang/AST/DeclCXX.h"
16
#include "clang/AST/DeclObjC.h"
17
#include "clang/AST/Expr.h"
18
#include "clang/Basic/TargetInfo.h"
19
#include "llvm/ADT/SmallSet.h"
20
#include "llvm/Support/Format.h"
21
#include "llvm/Support/MathExtras.h"
22
23
using namespace clang;
24
25
namespace {
26
27
/// BaseSubobjectInfo - Represents a single base subobject in a complete class.
28
/// For a class hierarchy like
29
///
30
/// class A { };
31
/// class B : A { };
32
/// class C : A, B { };
33
///
34
/// The BaseSubobjectInfo graph for C will have three BaseSubobjectInfo
35
/// instances, one for B and two for A.
36
///
37
/// If a base is virtual, it will only have one BaseSubobjectInfo allocated.
38
struct BaseSubobjectInfo {
39
  /// Class - The class for this base info.
40
  const CXXRecordDecl *Class;
41
42
  /// IsVirtual - Whether the BaseInfo represents a virtual base or not.
43
  bool IsVirtual;
44
45
  /// Bases - Information about the base subobjects.
46
  SmallVector<BaseSubobjectInfo*, 4> Bases;
47
48
  /// PrimaryVirtualBaseInfo - Holds the base info for the primary virtual base
49
  /// of this base info (if one exists).
50
  BaseSubobjectInfo *PrimaryVirtualBaseInfo;
51
52
  // FIXME: Document.
53
  const BaseSubobjectInfo *Derived;
54
};
55
56
/// Externally provided layout. Typically used when the AST source, such
57
/// as DWARF, lacks all the information that was available at compile time, such
58
/// as alignment attributes on fields and pragmas in effect.
59
struct ExternalLayout {
60
193k
  ExternalLayout() : Size(0), Align(0) {}
61
62
  /// Overall record size in bits.
63
  uint64_t Size;
64
65
  /// Overall record alignment in bits.
66
  uint64_t Align;
67
68
  /// Record field offsets in bits.
69
  llvm::DenseMap<const FieldDecl *, uint64_t> FieldOffsets;
70
71
  /// Direct, non-virtual base offsets.
72
  llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsets;
73
74
  /// Virtual base offsets.
75
  llvm::DenseMap<const CXXRecordDecl *, CharUnits> VirtualBaseOffsets;
76
77
  /// Get the offset of the given field. The external source must provide
78
  /// entries for all fields in the record.
79
85
  uint64_t getExternalFieldOffset(const FieldDecl *FD) {
80
85
    assert(FieldOffsets.count(FD) &&
81
85
           "Field does not have an external offset");
82
85
    return FieldOffsets[FD];
83
85
  }
84
85
4
  bool getExternalNVBaseOffset(const CXXRecordDecl *RD, CharUnits &BaseOffset) {
86
4
    auto Known = BaseOffsets.find(RD);
87
4
    if (Known == BaseOffsets.end())
88
4
      return false;
89
0
    BaseOffset = Known->second;
90
0
    return true;
91
0
  }
92
93
11
  bool getExternalVBaseOffset(const CXXRecordDecl *RD, CharUnits &BaseOffset) {
94
11
    auto Known = VirtualBaseOffsets.find(RD);
95
11
    if (Known == VirtualBaseOffsets.end())
96
11
      return false;
97
0
    BaseOffset = Known->second;
98
0
    return true;
99
0
  }
100
};
101
102
/// EmptySubobjectMap - Keeps track of which empty subobjects exist at different
103
/// offsets while laying out a C++ class.
104
class EmptySubobjectMap {
105
  const ASTContext &Context;
106
  uint64_t CharWidth;
107
108
  /// Class - The class whose empty entries we're keeping track of.
109
  const CXXRecordDecl *Class;
110
111
  /// EmptyClassOffsets - A map from offsets to empty record decls.
112
  typedef llvm::TinyPtrVector<const CXXRecordDecl *> ClassVectorTy;
113
  typedef llvm::DenseMap<CharUnits, ClassVectorTy> EmptyClassOffsetsMapTy;
114
  EmptyClassOffsetsMapTy EmptyClassOffsets;
115
116
  /// MaxEmptyClassOffset - The highest offset known to contain an empty
117
  /// base subobject.
118
  CharUnits MaxEmptyClassOffset;
119
120
  /// ComputeEmptySubobjectSizes - Compute the size of the largest base or
121
  /// member subobject that is empty.
122
  void ComputeEmptySubobjectSizes();
123
124
  void AddSubobjectAtOffset(const CXXRecordDecl *RD, CharUnits Offset);
125
126
  void UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
127
                                 CharUnits Offset, bool PlacingEmptyBase);
128
129
  void UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
130
                                  const CXXRecordDecl *Class, CharUnits Offset,
131
                                  bool PlacingOverlappingField);
132
  void UpdateEmptyFieldSubobjects(const FieldDecl *FD, CharUnits Offset,
133
                                  bool PlacingOverlappingField);
134
135
  /// AnyEmptySubobjectsBeyondOffset - Returns whether there are any empty
136
  /// subobjects beyond the given offset.
137
1.04M
  bool AnyEmptySubobjectsBeyondOffset(CharUnits Offset) const {
138
1.04M
    return Offset <= MaxEmptyClassOffset;
139
1.04M
  }
140
141
  CharUnits
142
125k
  getFieldOffset(const ASTRecordLayout &Layout, unsigned FieldNo) const {
143
125k
    uint64_t FieldOffset = Layout.getFieldOffset(FieldNo);
144
125k
    assert(FieldOffset % CharWidth == 0 &&
145
125k
           "Field offset not at char boundary!");
146
125k
147
125k
    return Context.toCharUnitsFromBits(FieldOffset);
148
125k
  }
149
150
protected:
151
  bool CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
152
                                 CharUnits Offset) const;
153
154
  bool CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
155
                                     CharUnits Offset);
156
157
  bool CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
158
                                      const CXXRecordDecl *Class,
159
                                      CharUnits Offset) const;
160
  bool CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
161
                                      CharUnits Offset) const;
162
163
public:
164
  /// This holds the size of the largest empty subobject (either a base
165
  /// or a member). Will be zero if the record being built doesn't contain
166
  /// any empty classes.
167
  CharUnits SizeOfLargestEmptySubobject;
168
169
  EmptySubobjectMap(const ASTContext &Context, const CXXRecordDecl *Class)
170
142k
  : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) {
171
142k
      ComputeEmptySubobjectSizes();
172
142k
  }
173
174
  /// CanPlaceBaseAtOffset - Return whether the given base class can be placed
175
  /// at the given offset.
176
  /// Returns false if placing the record will result in two components
177
  /// (direct or indirect) of the same type having the same offset.
178
  bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
179
                            CharUnits Offset);
180
181
  /// CanPlaceFieldAtOffset - Return whether a field can be placed at the given
182
  /// offset.
183
  bool CanPlaceFieldAtOffset(const FieldDecl *FD, CharUnits Offset);
184
};
185
186
142k
void EmptySubobjectMap::ComputeEmptySubobjectSizes() {
187
142k
  // Check the bases.
188
142k
  for (const CXXBaseSpecifier &Base : Class->bases()) {
189
35.1k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
190
35.1k
191
35.1k
    CharUnits EmptySize;
192
35.1k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
193
35.1k
    if (BaseDecl->isEmpty()) {
194
11.4k
      // If the class decl is empty, get its size.
195
11.4k
      EmptySize = Layout.getSize();
196
23.6k
    } else {
197
23.6k
      // Otherwise, we get the largest empty subobject for the decl.
198
23.6k
      EmptySize = Layout.getSizeOfLargestEmptySubobject();
199
23.6k
    }
200
35.1k
201
35.1k
    if (EmptySize > SizeOfLargestEmptySubobject)
202
14.3k
      SizeOfLargestEmptySubobject = EmptySize;
203
35.1k
  }
204
142k
205
142k
  // Check the fields.
206
382k
  for (const FieldDecl *FD : Class->fields()) {
207
382k
    const RecordType *RT =
208
382k
        Context.getBaseElementType(FD->getType())->getAs<RecordType>();
209
382k
210
382k
    // We only care about record types.
211
382k
    if (!RT)
212
328k
      continue;
213
53.8k
214
53.8k
    CharUnits EmptySize;
215
53.8k
    const CXXRecordDecl *MemberDecl = RT->getAsCXXRecordDecl();
216
53.8k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(MemberDecl);
217
53.8k
    if (MemberDecl->isEmpty()) {
218
1.11k
      // If the class decl is empty, get its size.
219
1.11k
      EmptySize = Layout.getSize();
220
52.7k
    } else {
221
52.7k
      // Otherwise, we get the largest empty subobject for the decl.
222
52.7k
      EmptySize = Layout.getSizeOfLargestEmptySubobject();
223
52.7k
    }
224
53.8k
225
53.8k
    if (EmptySize > SizeOfLargestEmptySubobject)
226
5.24k
      SizeOfLargestEmptySubobject = EmptySize;
227
53.8k
  }
228
142k
}
229
230
bool
231
EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
232
598k
                                             CharUnits Offset) const {
233
598k
  // We only need to check empty bases.
234
598k
  if (!RD->isEmpty())
235
572k
    return true;
236
25.9k
237
25.9k
  EmptyClassOffsetsMapTy::const_iterator I = EmptyClassOffsets.find(Offset);
238
25.9k
  if (I == EmptyClassOffsets.end())
239
23.5k
    return true;
240
2.39k
241
2.39k
  const ClassVectorTy &Classes = I->second;
242
2.39k
  if (llvm::find(Classes, RD) == Classes.end())
243
2.24k
    return true;
244
150
245
150
  // There is already an empty class of the same type at this offset.
246
150
  return false;
247
150
}
248
249
void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD,
250
582k
                                             CharUnits Offset) {
251
582k
  // We only care about empty bases.
252
582k
  if (!RD->isEmpty())
253
556k
    return;
254
25.9k
255
25.9k
  // If we have empty structures inside a union, we can assign both
256
25.9k
  // the same offset. Just avoid pushing them twice in the list.
257
25.9k
  ClassVectorTy &Classes = EmptyClassOffsets[Offset];
258
25.9k
  if (llvm::is_contained(Classes, RD))
259
1
    return;
260
25.9k
261
25.9k
  Classes.push_back(RD);
262
25.9k
263
25.9k
  // Update the empty class offset.
264
25.9k
  if (Offset > MaxEmptyClassOffset)
265
158
    MaxEmptyClassOffset = Offset;
266
25.9k
}
267
268
bool
269
EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
270
547k
                                                 CharUnits Offset) {
271
547k
  // We don't have to keep looking past the maximum offset that's known to
272
547k
  // contain an empty class.
273
547k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
274
471
    return true;
275
546k
276
546k
  if (!CanPlaceSubobjectAtOffset(Info->Class, Offset))
277
102
    return false;
278
546k
279
546k
  // Traverse all non-virtual bases.
280
546k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
281
546k
  for (const BaseSubobjectInfo *Base : Info->Bases) {
282
529k
    if (Base->IsVirtual)
283
204
      continue;
284
528k
285
528k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
286
528k
287
528k
    if (!CanPlaceBaseSubobjectAtOffset(Base, BaseOffset))
288
77
      return false;
289
528k
  }
290
546k
291
546k
  
if (546k
Info->PrimaryVirtualBaseInfo546k
) {
292
24
    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
293
24
294
24
    if (Info == PrimaryVirtualBaseInfo->Derived) {
295
24
      if (!CanPlaceBaseSubobjectAtOffset(PrimaryVirtualBaseInfo, Offset))
296
5
        return false;
297
546k
    }
298
24
  }
299
546k
300
546k
  // Traverse all member variables.
301
546k
  unsigned FieldNo = 0;
302
546k
  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
303
558k
       E = Info->Class->field_end(); I != E; 
++I, ++FieldNo11.5k
) {
304
11.5k
    if (I->isBitField())
305
4
      continue;
306
11.5k
307
11.5k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
308
11.5k
    if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
309
2
      return false;
310
11.5k
  }
311
546k
312
546k
  
return true546k
;
313
546k
}
314
315
void EmptySubobjectMap::UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
316
                                                  CharUnits Offset,
317
547k
                                                  bool PlacingEmptyBase) {
318
547k
  if (!PlacingEmptyBase && 
Offset >= SizeOfLargestEmptySubobject530k
) {
319
404
    // We know that the only empty subobjects that can conflict with empty
320
404
    // subobject of non-empty bases, are empty bases that can be placed at
321
404
    // offset zero. Because of this, we only need to keep track of empty base
322
404
    // subobjects with offsets less than the size of the largest empty
323
404
    // subobject for our class.
324
404
    return;
325
404
  }
326
546k
327
546k
  AddSubobjectAtOffset(Info->Class, Offset);
328
546k
329
546k
  // Traverse all non-virtual bases.
330
546k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
331
546k
  for (const BaseSubobjectInfo *Base : Info->Bases) {
332
529k
    if (Base->IsVirtual)
333
199
      continue;
334
528k
335
528k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
336
528k
    UpdateEmptyBaseSubobjects(Base, BaseOffset, PlacingEmptyBase);
337
528k
  }
338
546k
339
546k
  if (Info->PrimaryVirtualBaseInfo) {
340
19
    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
341
19
342
19
    if (Info == PrimaryVirtualBaseInfo->Derived)
343
19
      UpdateEmptyBaseSubobjects(PrimaryVirtualBaseInfo, Offset,
344
19
                                PlacingEmptyBase);
345
19
  }
346
546k
347
546k
  // Traverse all member variables.
348
546k
  unsigned FieldNo = 0;
349
546k
  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
350
558k
       E = Info->Class->field_end(); I != E; 
++I, ++FieldNo11.5k
) {
351
11.5k
    if (I->isBitField())
352
4
      continue;
353
11.5k
354
11.5k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
355
11.5k
    UpdateEmptyFieldSubobjects(*I, FieldOffset, PlacingEmptyBase);
356
11.5k
  }
357
546k
}
358
359
bool EmptySubobjectMap::CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
360
35.6k
                                             CharUnits Offset) {
361
35.6k
  // If we know this class doesn't have any empty subobjects we don't need to
362
35.6k
  // bother checking.
363
35.6k
  if (SizeOfLargestEmptySubobject.isZero())
364
17.3k
    return true;
365
18.2k
366
18.2k
  if (!CanPlaceBaseSubobjectAtOffset(Info, Offset))
367
104
    return false;
368
18.1k
369
18.1k
  // We are able to place the base at this offset. Make sure to update the
370
18.1k
  // empty base subobject map.
371
18.1k
  UpdateEmptyBaseSubobjects(Info, Offset, Info->Class->isEmpty());
372
18.1k
  return true;
373
18.1k
}
374
375
bool
376
EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
377
                                                  const CXXRecordDecl *Class,
378
52.6k
                                                  CharUnits Offset) const {
379
52.6k
  // We don't have to keep looking past the maximum offset that's known to
380
52.6k
  // contain an empty class.
381
52.6k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
382
607
    return true;
383
52.0k
384
52.0k
  if (!CanPlaceSubobjectAtOffset(RD, Offset))
385
48
    return false;
386
51.9k
387
51.9k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
388
51.9k
389
51.9k
  // Traverse all non-virtual bases.
390
51.9k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
391
16.7k
    if (Base.isVirtual())
392
31
      continue;
393
16.7k
394
16.7k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
395
16.7k
396
16.7k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
397
16.7k
    if (!CanPlaceFieldSubobjectAtOffset(BaseDecl, Class, BaseOffset))
398
5
      return false;
399
16.7k
  }
400
51.9k
401
51.9k
  
if (51.9k
RD == Class51.9k
) {
402
35.8k
    // This is the most derived class, traverse virtual bases as well.
403
35.8k
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
404
31
      const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
405
31
406
31
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
407
31
      if (!CanPlaceFieldSubobjectAtOffset(VBaseDecl, Class, VBaseOffset))
408
1
        return false;
409
31
    }
410
35.8k
  }
411
51.9k
412
51.9k
  // Traverse all member variables.
413
51.9k
  unsigned FieldNo = 0;
414
51.9k
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
415
118k
       I != E; 
++I, ++FieldNo66.9k
) {
416
66.9k
    if (I->isBitField())
417
449
      continue;
418
66.4k
419
66.4k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
420
66.4k
421
66.4k
    if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
422
1
      return false;
423
66.4k
  }
424
51.9k
425
51.9k
  
return true51.9k
;
426
51.9k
}
427
428
bool
429
EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
430
440k
                                                  CharUnits Offset) const {
431
440k
  // We don't have to keep looking past the maximum offset that's known to
432
440k
  // contain an empty class.
433
440k
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
434
307k
    return true;
435
132k
436
132k
  QualType T = FD->getType();
437
132k
  if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
438
34.3k
    return CanPlaceFieldSubobjectAtOffset(RD, RD, Offset);
439
98.3k
440
98.3k
  // If we have an array type we need to look at every element.
441
98.3k
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
442
16.5k
    QualType ElemTy = Context.getBaseElementType(AT);
443
16.5k
    const RecordType *RT = ElemTy->getAs<RecordType>();
444
16.5k
    if (!RT)
445
13.9k
      return true;
446
2.57k
447
2.57k
    const CXXRecordDecl *RD = RT->getAsCXXRecordDecl();
448
2.57k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
449
2.57k
450
2.57k
    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
451
2.57k
    CharUnits ElementOffset = Offset;
452
4.12k
    for (uint64_t I = 0; I != NumElements; 
++I1.54k
) {
453
3.01k
      // We don't have to keep looking past the maximum offset that's known to
454
3.01k
      // contain an empty class.
455
3.01k
      if (!AnyEmptySubobjectsBeyondOffset(ElementOffset))
456
1.46k
        return true;
457
1.55k
458
1.55k
      if (!CanPlaceFieldSubobjectAtOffset(RD, RD, ElementOffset))
459
4
        return false;
460
1.54k
461
1.54k
      ElementOffset += Layout.getSize();
462
1.54k
    }
463
2.57k
  }
464
98.3k
465
98.3k
  
return true82.8k
;
466
98.3k
}
467
468
bool
469
EmptySubobjectMap::CanPlaceFieldAtOffset(const FieldDecl *FD,
470
362k
                                         CharUnits Offset) {
471
362k
  if (!CanPlaceFieldSubobjectAtOffset(FD, Offset))
472
46
    return false;
473
362k
474
362k
  // We are able to place the member variable at this offset.
475
362k
  // Make sure to update the empty field subobject map.
476
362k
  UpdateEmptyFieldSubobjects(FD, Offset, FD->hasAttr<NoUniqueAddressAttr>());
477
362k
  return true;
478
362k
}
479
480
void EmptySubobjectMap::UpdateEmptyFieldSubobjects(
481
    const CXXRecordDecl *RD, const CXXRecordDecl *Class, CharUnits Offset,
482
85.9k
    bool PlacingOverlappingField) {
483
85.9k
  // We know that the only empty subobjects that can conflict with empty
484
85.9k
  // field subobjects are subobjects of empty bases and potentially-overlapping
485
85.9k
  // fields that can be placed at offset zero. Because of this, we only need to
486
85.9k
  // keep track of empty field subobjects with offsets less than the size of
487
85.9k
  // the largest empty subobject for our class.
488
85.9k
  //
489
85.9k
  // (Proof: we will only consider placing a subobject at offset zero or at
490
85.9k
  // >= the current dsize. The only cases where the earlier subobject can be
491
85.9k
  // placed beyond the end of dsize is if it's an empty base or a
492
85.9k
  // potentially-overlapping field.)
493
85.9k
  if (!PlacingOverlappingField && 
Offset >= SizeOfLargestEmptySubobject85.8k
)
494
50.4k
    return;
495
35.5k
496
35.5k
  AddSubobjectAtOffset(RD, Offset);
497
35.5k
498
35.5k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
499
35.5k
500
35.5k
  // Traverse all non-virtual bases.
501
35.5k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
502
15.0k
    if (Base.isVirtual())
503
29
      continue;
504
15.0k
505
15.0k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
506
15.0k
507
15.0k
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
508
15.0k
    UpdateEmptyFieldSubobjects(BaseDecl, Class, BaseOffset,
509
15.0k
                               PlacingOverlappingField);
510
15.0k
  }
511
35.5k
512
35.5k
  if (RD == Class) {
513
20.8k
    // This is the most derived class, traverse virtual bases as well.
514
20.8k
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
515
29
      const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
516
29
517
29
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
518
29
      UpdateEmptyFieldSubobjects(VBaseDecl, Class, VBaseOffset,
519
29
                                 PlacingOverlappingField);
520
29
    }
521
20.8k
  }
522
35.5k
523
35.5k
  // Traverse all member variables.
524
35.5k
  unsigned FieldNo = 0;
525
35.5k
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
526
71.3k
       I != E; 
++I, ++FieldNo35.7k
) {
527
35.7k
    if (I->isBitField())
528
2
      continue;
529
35.7k
530
35.7k
    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
531
35.7k
532
35.7k
    UpdateEmptyFieldSubobjects(*I, FieldOffset, PlacingOverlappingField);
533
35.7k
  }
534
35.5k
}
535
536
void EmptySubobjectMap::UpdateEmptyFieldSubobjects(
537
409k
    const FieldDecl *FD, CharUnits Offset, bool PlacingOverlappingField) {
538
409k
  QualType T = FD->getType();
539
409k
  if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl()) {
540
70.8k
    UpdateEmptyFieldSubobjects(RD, RD, Offset, PlacingOverlappingField);
541
70.8k
    return;
542
70.8k
  }
543
338k
544
338k
  // If we have an array type we need to update every element.
545
338k
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
546
28.1k
    QualType ElemTy = Context.getBaseElementType(AT);
547
28.1k
    const RecordType *RT = ElemTy->getAs<RecordType>();
548
28.1k
    if (!RT)
549
24.0k
      return;
550
4.11k
551
4.11k
    const CXXRecordDecl *RD = RT->getAsCXXRecordDecl();
552
4.11k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
553
4.11k
554
4.11k
    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
555
4.11k
    CharUnits ElementOffset = Offset;
556
4.11k
557
4.16k
    for (uint64_t I = 0; I != NumElements; 
++I51
) {
558
3.13k
      // We know that the only empty subobjects that can conflict with empty
559
3.13k
      // field subobjects are subobjects of empty bases that can be placed at
560
3.13k
      // offset zero. Because of this, we only need to keep track of empty field
561
3.13k
      // subobjects with offsets less than the size of the largest empty
562
3.13k
      // subobject for our class.
563
3.13k
      if (!PlacingOverlappingField &&
564
3.13k
          ElementOffset >= SizeOfLargestEmptySubobject)
565
3.08k
        return;
566
51
567
51
      UpdateEmptyFieldSubobjects(RD, RD, ElementOffset,
568
51
                                 PlacingOverlappingField);
569
51
      ElementOffset += Layout.getSize();
570
51
    }
571
4.11k
  }
572
338k
}
573
574
typedef llvm::SmallPtrSet<const CXXRecordDecl*, 4> ClassSetTy;
575
576
class ItaniumRecordLayoutBuilder {
577
protected:
578
  // FIXME: Remove this and make the appropriate fields public.
579
  friend class clang::ASTContext;
580
581
  const ASTContext &Context;
582
583
  EmptySubobjectMap *EmptySubobjects;
584
585
  /// Size - The current size of the record layout.
586
  uint64_t Size;
587
588
  /// Alignment - The current alignment of the record layout.
589
  CharUnits Alignment;
590
591
  /// The alignment if attribute packed is not used.
592
  CharUnits UnpackedAlignment;
593
594
  /// \brief The maximum of the alignments of top-level members.
595
  CharUnits UnadjustedAlignment;
596
597
  SmallVector<uint64_t, 16> FieldOffsets;
598
599
  /// Whether the external AST source has provided a layout for this
600
  /// record.
601
  unsigned UseExternalLayout : 1;
602
603
  /// Whether we need to infer alignment, even when we have an
604
  /// externally-provided layout.
605
  unsigned InferAlignment : 1;
606
607
  /// Packed - Whether the record is packed or not.
608
  unsigned Packed : 1;
609
610
  unsigned IsUnion : 1;
611
612
  unsigned IsMac68kAlign : 1;
613
614
  unsigned IsMsStruct : 1;
615
616
  /// UnfilledBitsInLastUnit - If the last field laid out was a bitfield,
617
  /// this contains the number of bits in the last unit that can be used for
618
  /// an adjacent bitfield if necessary.  The unit in question is usually
619
  /// a byte, but larger units are used if IsMsStruct.
620
  unsigned char UnfilledBitsInLastUnit;
621
  /// LastBitfieldTypeSize - If IsMsStruct, represents the size of the type
622
  /// of the previous field if it was a bitfield.
623
  unsigned char LastBitfieldTypeSize;
624
625
  /// MaxFieldAlignment - The maximum allowed field alignment. This is set by
626
  /// #pragma pack.
627
  CharUnits MaxFieldAlignment;
628
629
  /// DataSize - The data size of the record being laid out.
630
  uint64_t DataSize;
631
632
  CharUnits NonVirtualSize;
633
  CharUnits NonVirtualAlignment;
634
635
  /// If we've laid out a field but not included its tail padding in Size yet,
636
  /// this is the size up to the end of that field.
637
  CharUnits PaddedFieldSize;
638
639
  /// PrimaryBase - the primary base class (if one exists) of the class
640
  /// we're laying out.
641
  const CXXRecordDecl *PrimaryBase;
642
643
  /// PrimaryBaseIsVirtual - Whether the primary base of the class we're laying
644
  /// out is virtual.
645
  bool PrimaryBaseIsVirtual;
646
647
  /// HasOwnVFPtr - Whether the class provides its own vtable/vftbl
648
  /// pointer, as opposed to inheriting one from a primary base class.
649
  bool HasOwnVFPtr;
650
651
  /// the flag of field offset changing due to packed attribute.
652
  bool HasPackedField;
653
654
  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
655
656
  /// Bases - base classes and their offsets in the record.
657
  BaseOffsetsMapTy Bases;
658
659
  // VBases - virtual base classes and their offsets in the record.
660
  ASTRecordLayout::VBaseOffsetsMapTy VBases;
661
662
  /// IndirectPrimaryBases - Virtual base classes, direct or indirect, that are
663
  /// primary base classes for some other direct or indirect base class.
664
  CXXIndirectPrimaryBaseSet IndirectPrimaryBases;
665
666
  /// FirstNearlyEmptyVBase - The first nearly empty virtual base class in
667
  /// inheritance graph order. Used for determining the primary base class.
668
  const CXXRecordDecl *FirstNearlyEmptyVBase;
669
670
  /// VisitedVirtualBases - A set of all the visited virtual bases, used to
671
  /// avoid visiting virtual bases more than once.
672
  llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBases;
673
674
  /// Valid if UseExternalLayout is true.
675
  ExternalLayout External;
676
677
  ItaniumRecordLayoutBuilder(const ASTContext &Context,
678
                             EmptySubobjectMap *EmptySubobjects)
679
      : Context(Context), EmptySubobjects(EmptySubobjects), Size(0),
680
        Alignment(CharUnits::One()), UnpackedAlignment(CharUnits::One()),
681
        UnadjustedAlignment(CharUnits::One()),
682
        UseExternalLayout(false), InferAlignment(false), Packed(false),
683
        IsUnion(false), IsMac68kAlign(false), IsMsStruct(false),
684
        UnfilledBitsInLastUnit(0), LastBitfieldTypeSize(0),
685
        MaxFieldAlignment(CharUnits::Zero()), DataSize(0),
686
        NonVirtualSize(CharUnits::Zero()),
687
        NonVirtualAlignment(CharUnits::One()),
688
        PaddedFieldSize(CharUnits::Zero()), PrimaryBase(nullptr),
689
        PrimaryBaseIsVirtual(false), HasOwnVFPtr(false),
690
189k
        HasPackedField(false), FirstNearlyEmptyVBase(nullptr) {}
691
692
  void Layout(const RecordDecl *D);
693
  void Layout(const CXXRecordDecl *D);
694
  void Layout(const ObjCInterfaceDecl *D);
695
696
  void LayoutFields(const RecordDecl *D);
697
  void LayoutField(const FieldDecl *D, bool InsertExtraPadding);
698
  void LayoutWideBitField(uint64_t FieldSize, uint64_t TypeSize,
699
                          bool FieldPacked, const FieldDecl *D);
700
  void LayoutBitField(const FieldDecl *D);
701
702
0
  TargetCXXABI getCXXABI() const {
703
0
    return Context.getTargetInfo().getCXXABI();
704
0
  }
705
706
  /// BaseSubobjectInfoAllocator - Allocator for BaseSubobjectInfo objects.
707
  llvm::SpecificBumpPtrAllocator<BaseSubobjectInfo> BaseSubobjectInfoAllocator;
708
709
  typedef llvm::DenseMap<const CXXRecordDecl *, BaseSubobjectInfo *>
710
    BaseSubobjectInfoMapTy;
711
712
  /// VirtualBaseInfo - Map from all the (direct or indirect) virtual bases
713
  /// of the class we're laying out to their base subobject info.
714
  BaseSubobjectInfoMapTy VirtualBaseInfo;
715
716
  /// NonVirtualBaseInfo - Map from all the direct non-virtual bases of the
717
  /// class we're laying out to their base subobject info.
718
  BaseSubobjectInfoMapTy NonVirtualBaseInfo;
719
720
  /// ComputeBaseSubobjectInfo - Compute the base subobject information for the
721
  /// bases of the given class.
722
  void ComputeBaseSubobjectInfo(const CXXRecordDecl *RD);
723
724
  /// ComputeBaseSubobjectInfo - Compute the base subobject information for a
725
  /// single class and all of its base classes.
726
  BaseSubobjectInfo *ComputeBaseSubobjectInfo(const CXXRecordDecl *RD,
727
                                              bool IsVirtual,
728
                                              BaseSubobjectInfo *Derived);
729
730
  /// DeterminePrimaryBase - Determine the primary base of the given class.
731
  void DeterminePrimaryBase(const CXXRecordDecl *RD);
732
733
  void SelectPrimaryVBase(const CXXRecordDecl *RD);
734
735
  void EnsureVTablePointerAlignment(CharUnits UnpackedBaseAlign);
736
737
  /// LayoutNonVirtualBases - Determines the primary base class (if any) and
738
  /// lays it out. Will then proceed to lay out all non-virtual base clasess.
739
  void LayoutNonVirtualBases(const CXXRecordDecl *RD);
740
741
  /// LayoutNonVirtualBase - Lays out a single non-virtual base.
742
  void LayoutNonVirtualBase(const BaseSubobjectInfo *Base);
743
744
  void AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
745
                                    CharUnits Offset);
746
747
  /// LayoutVirtualBases - Lays out all the virtual bases.
748
  void LayoutVirtualBases(const CXXRecordDecl *RD,
749
                          const CXXRecordDecl *MostDerivedClass);
750
751
  /// LayoutVirtualBase - Lays out a single virtual base.
752
  void LayoutVirtualBase(const BaseSubobjectInfo *Base);
753
754
  /// LayoutBase - Will lay out a base and return the offset where it was
755
  /// placed, in chars.
756
  CharUnits LayoutBase(const BaseSubobjectInfo *Base);
757
758
  /// InitializeLayout - Initialize record layout for the given record decl.
759
  void InitializeLayout(const Decl *D);
760
761
  /// FinishLayout - Finalize record layout. Adjust record size based on the
762
  /// alignment.
763
  void FinishLayout(const NamedDecl *D);
764
765
  void UpdateAlignment(CharUnits NewAlignment, CharUnits UnpackedNewAlignment);
766
2.30k
  void UpdateAlignment(CharUnits NewAlignment) {
767
2.30k
    UpdateAlignment(NewAlignment, NewAlignment);
768
2.30k
  }
769
770
  /// Retrieve the externally-supplied field offset for the given
771
  /// field.
772
  ///
773
  /// \param Field The field whose offset is being queried.
774
  /// \param ComputedOffset The offset that we've computed for this field.
775
  uint64_t updateExternalFieldOffset(const FieldDecl *Field,
776
                                     uint64_t ComputedOffset);
777
778
  void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset,
779
                          uint64_t UnpackedOffset, unsigned UnpackedAlign,
780
                          bool isPacked, const FieldDecl *D);
781
782
  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID);
783
784
345k
  CharUnits getSize() const {
785
345k
    assert(Size % Context.getCharWidth() == 0);
786
345k
    return Context.toCharUnitsFromBits(Size);
787
345k
  }
788
1.84M
  uint64_t getSizeInBits() const { return Size; }
789
790
60.0k
  void setSize(CharUnits NewSize) { Size = Context.toBits(NewSize); }
791
972k
  void setSize(uint64_t NewSize) { Size = NewSize; }
792
793
0
  CharUnits getAligment() const { return Alignment; }
794
795
670k
  CharUnits getDataSize() const {
796
670k
    assert(DataSize % Context.getCharWidth() == 0);
797
670k
    return Context.toCharUnitsFromBits(DataSize);
798
670k
  }
799
1.24M
  uint64_t getDataSizeInBits() const { return DataSize; }
800
801
570k
  void setDataSize(CharUnits NewSize) { DataSize = Context.toBits(NewSize); }
802
52.9k
  void setDataSize(uint64_t NewSize) { DataSize = NewSize; }
803
804
  ItaniumRecordLayoutBuilder(const ItaniumRecordLayoutBuilder &) = delete;
805
  void operator=(const ItaniumRecordLayoutBuilder &) = delete;
806
};
807
} // end anonymous namespace
808
809
1.59k
void ItaniumRecordLayoutBuilder::SelectPrimaryVBase(const CXXRecordDecl *RD) {
810
1.59k
  for (const auto &I : RD->bases()) {
811
1.10k
    assert(!I.getType()->isDependentType() &&
812
1.10k
           "Cannot layout class with dependent bases.");
813
1.10k
814
1.10k
    const CXXRecordDecl *Base = I.getType()->getAsCXXRecordDecl();
815
1.10k
816
1.10k
    // Check if this is a nearly empty virtual base.
817
1.10k
    if (I.isVirtual() && 
Context.isNearlyEmpty(Base)707
) {
818
183
      // If it's not an indirect primary base, then we've found our primary
819
183
      // base.
820
183
      if (!IndirectPrimaryBases.count(Base)) {
821
180
        PrimaryBase = Base;
822
180
        PrimaryBaseIsVirtual = true;
823
180
        return;
824
180
      }
825
3
826
3
      // Is this the first nearly empty virtual base?
827
3
      if (!FirstNearlyEmptyVBase)
828
3
        FirstNearlyEmptyVBase = Base;
829
3
    }
830
1.10k
831
1.10k
    SelectPrimaryVBase(Base);
832
928
    if (PrimaryBase)
833
8
      return;
834
928
  }
835
1.59k
}
836
837
/// DeterminePrimaryBase - Determine the primary base of the given class.
838
142k
void ItaniumRecordLayoutBuilder::DeterminePrimaryBase(const CXXRecordDecl *RD) {
839
142k
  // If the class isn't dynamic, it won't have a primary base.
840
142k
  if (!RD->isDynamicClass())
841
127k
    return;
842
14.5k
843
14.5k
  // Compute all the primary virtual bases for all of our direct and
844
14.5k
  // indirect bases, and record all their primary virtual base classes.
845
14.5k
  RD->getIndirectPrimaryBases(IndirectPrimaryBases);
846
14.5k
847
14.5k
  // If the record has a dynamic base class, attempt to choose a primary base
848
14.5k
  // class. It is the first (in direct base class order) non-virtual dynamic
849
14.5k
  // base class, if one exists.
850
14.5k
  for (const auto &I : RD->bases()) {
851
10.9k
    // Ignore virtual bases.
852
10.9k
    if (I.isVirtual())
853
733
      continue;
854
10.1k
855
10.1k
    const CXXRecordDecl *Base = I.getType()->getAsCXXRecordDecl();
856
10.1k
857
10.1k
    if (Base->isDynamicClass()) {
858
10.0k
      // We found it.
859
10.0k
      PrimaryBase = Base;
860
10.0k
      PrimaryBaseIsVirtual = false;
861
10.0k
      return;
862
10.0k
    }
863
10.1k
  }
864
14.5k
865
14.5k
  // Under the Itanium ABI, if there is no non-virtual primary base class,
866
14.5k
  // try to compute the primary virtual base.  The primary virtual base is
867
14.5k
  // the first nearly empty virtual base that is not an indirect primary
868
14.5k
  // virtual base class, if one exists.
869
14.5k
  
if (4.51k
RD->getNumVBases() != 04.51k
) {
870
664
    SelectPrimaryVBase(RD);
871
664
    if (PrimaryBase)
872
180
      return;
873
4.33k
  }
874
4.33k
875
4.33k
  // Otherwise, it is the first indirect primary base class, if one exists.
876
4.33k
  if (FirstNearlyEmptyVBase) {
877
2
    PrimaryBase = FirstNearlyEmptyVBase;
878
2
    PrimaryBaseIsVirtual = true;
879
2
    return;
880
2
  }
881
4.33k
882
4.33k
  assert(!PrimaryBase && "Should not get here with a primary base!");
883
4.33k
}
884
885
BaseSubobjectInfo *ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo(
886
573k
    const CXXRecordDecl *RD, bool IsVirtual, BaseSubobjectInfo *Derived) {
887
573k
  BaseSubobjectInfo *Info;
888
573k
889
573k
  if (IsVirtual) {
890
1.54k
    // Check if we already have info about this virtual base.
891
1.54k
    BaseSubobjectInfo *&InfoSlot = VirtualBaseInfo[RD];
892
1.54k
    if (InfoSlot) {
893
270
      assert(InfoSlot->Class == RD && "Wrong class for virtual base info!");
894
270
      return InfoSlot;
895
270
    }
896
1.27k
897
1.27k
    // We don't, create it.
898
1.27k
    InfoSlot = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
899
1.27k
    Info = InfoSlot;
900
571k
  } else {
901
571k
    Info = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
902
571k
  }
903
573k
904
573k
  Info->Class = RD;
905
572k
  Info->IsVirtual = IsVirtual;
906
572k
  Info->Derived = nullptr;
907
572k
  Info->PrimaryVirtualBaseInfo = nullptr;
908
572k
909
572k
  const CXXRecordDecl *PrimaryVirtualBase = nullptr;
910
572k
  BaseSubobjectInfo *PrimaryVirtualBaseInfo = nullptr;
911
572k
912
572k
  // Check if this base has a primary virtual base.
913
572k
  if (RD->getNumVBases()) {
914
784
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
915
784
    if (Layout.isPrimaryBaseVirtual()) {
916
147
      // This base does have a primary virtual base.
917
147
      PrimaryVirtualBase = Layout.getPrimaryBase();
918
147
      assert(PrimaryVirtualBase && "Didn't have a primary virtual base!");
919
147
920
147
      // Now check if we have base subobject info about this primary base.
921
147
      PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
922
147
923
147
      if (PrimaryVirtualBaseInfo) {
924
46
        if (PrimaryVirtualBaseInfo->Derived) {
925
24
          // We did have info about this primary base, and it turns out that it
926
24
          // has already been claimed as a primary virtual base for another
927
24
          // base.
928
24
          PrimaryVirtualBase = nullptr;
929
24
        } else {
930
22
          // We can claim this base as our primary base.
931
22
          Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
932
22
          PrimaryVirtualBaseInfo->Derived = Info;
933
22
        }
934
46
      }
935
147
    }
936
784
  }
937
572k
938
572k
  // Now go through all direct bases.
939
572k
  for (const auto &I : RD->bases()) {
940
538k
    bool IsVirtual = I.isVirtual();
941
538k
942
538k
    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
943
538k
944
538k
    Info->Bases.push_back(ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, Info));
945
538k
  }
946
572k
947
572k
  if (PrimaryVirtualBase && 
!PrimaryVirtualBaseInfo123
) {
948
101
    // Traversing the bases must have created the base info for our primary
949
101
    // virtual base.
950
101
    PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
951
101
    assert(PrimaryVirtualBaseInfo &&
952
101
           "Did not create a primary virtual base!");
953
101
954
101
    // Claim the primary virtual base as our primary virtual base.
955
101
    Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
956
101
    PrimaryVirtualBaseInfo->Derived = Info;
957
101
  }
958
572k
959
572k
  return Info;
960
573k
}
961
962
void ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo(
963
142k
    const CXXRecordDecl *RD) {
964
142k
  for (const auto &I : RD->bases()) {
965
35.1k
    bool IsVirtual = I.isVirtual();
966
35.1k
967
35.1k
    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
968
35.1k
969
35.1k
    // Compute the base subobject info for this base.
970
35.1k
    BaseSubobjectInfo *Info = ComputeBaseSubobjectInfo(BaseDecl, IsVirtual,
971
35.1k
                                                       nullptr);
972
35.1k
973
35.1k
    if (IsVirtual) {
974
774
      // ComputeBaseInfo has already added this base for us.
975
774
      assert(VirtualBaseInfo.count(BaseDecl) &&
976
774
             "Did not add virtual base!");
977
34.3k
    } else {
978
34.3k
      // Add the base info to the map of non-virtual bases.
979
34.3k
      assert(!NonVirtualBaseInfo.count(BaseDecl) &&
980
34.3k
             "Non-virtual base already exists!");
981
34.3k
      NonVirtualBaseInfo.insert(std::make_pair(BaseDecl, Info));
982
34.3k
    }
983
35.1k
  }
984
142k
}
985
986
void ItaniumRecordLayoutBuilder::EnsureVTablePointerAlignment(
987
4.33k
    CharUnits UnpackedBaseAlign) {
988
4.33k
  CharUnits BaseAlign = Packed ? 
CharUnits::One()2
:
UnpackedBaseAlign4.33k
;
989
4.33k
990
4.33k
  // The maximum field alignment overrides base align.
991
4.33k
  if (!MaxFieldAlignment.isZero()) {
992
3
    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
993
3
    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
994
3
  }
995
4.33k
996
4.33k
  // Round up the current record size to pointer alignment.
997
4.33k
  setSize(getSize().alignTo(BaseAlign));
998
4.33k
999
4.33k
  // Update the alignment.
1000
4.33k
  UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1001
4.33k
}
1002
1003
void ItaniumRecordLayoutBuilder::LayoutNonVirtualBases(
1004
142k
    const CXXRecordDecl *RD) {
1005
142k
  // Then, determine the primary base class.
1006
142k
  DeterminePrimaryBase(RD);
1007
142k
1008
142k
  // Compute base subobject info.
1009
142k
  ComputeBaseSubobjectInfo(RD);
1010
142k
1011
142k
  // If we have a primary base class, lay it out.
1012
142k
  if (PrimaryBase) {
1013
10.2k
    if (PrimaryBaseIsVirtual) {
1014
182
      // If the primary virtual base was a primary virtual base of some other
1015
182
      // base class we'll have to steal it.
1016
182
      BaseSubobjectInfo *PrimaryBaseInfo = VirtualBaseInfo.lookup(PrimaryBase);
1017
182
      PrimaryBaseInfo->Derived = nullptr;
1018
182
1019
182
      // We have a virtual primary base, insert it as an indirect primary base.
1020
182
      IndirectPrimaryBases.insert(PrimaryBase);
1021
182
1022
182
      assert(!VisitedVirtualBases.count(PrimaryBase) &&
1023
182
             "vbase already visited!");
1024
182
      VisitedVirtualBases.insert(PrimaryBase);
1025
182
1026
182
      LayoutVirtualBase(PrimaryBaseInfo);
1027
10.0k
    } else {
1028
10.0k
      BaseSubobjectInfo *PrimaryBaseInfo =
1029
10.0k
        NonVirtualBaseInfo.lookup(PrimaryBase);
1030
10.0k
      assert(PrimaryBaseInfo &&
1031
10.0k
             "Did not find base info for non-virtual primary base!");
1032
10.0k
1033
10.0k
      LayoutNonVirtualBase(PrimaryBaseInfo);
1034
10.0k
    }
1035
10.2k
1036
10.2k
  // If this class needs a vtable/vf-table and didn't get one from a
1037
10.2k
  // primary base, add it in now.
1038
131k
  } else if (RD->isDynamicClass()) {
1039
4.33k
    assert(DataSize == 0 && "Vtable pointer must be at offset zero!");
1040
4.33k
    CharUnits PtrWidth =
1041
4.33k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
1042
4.33k
    CharUnits PtrAlign =
1043
4.33k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
1044
4.33k
    EnsureVTablePointerAlignment(PtrAlign);
1045
4.33k
    HasOwnVFPtr = true;
1046
4.33k
    setSize(getSize() + PtrWidth);
1047
4.33k
    setDataSize(getSize());
1048
4.33k
  }
1049
142k
1050
142k
  // Now lay out the non-virtual bases.
1051
142k
  for (const auto &I : RD->bases()) {
1052
35.1k
1053
35.1k
    // Ignore virtual bases.
1054
35.1k
    if (I.isVirtual())
1055
774
      continue;
1056
34.3k
1057
34.3k
    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
1058
34.3k
1059
34.3k
    // Skip the primary base, because we've already laid it out.  The
1060
34.3k
    // !PrimaryBaseIsVirtual check is required because we might have a
1061
34.3k
    // non-virtual base of the same type as a primary virtual base.
1062
34.3k
    if (BaseDecl == PrimaryBase && 
!PrimaryBaseIsVirtual10.0k
)
1063
10.0k
      continue;
1064
24.3k
1065
24.3k
    // Lay out the base.
1066
24.3k
    BaseSubobjectInfo *BaseInfo = NonVirtualBaseInfo.lookup(BaseDecl);
1067
24.3k
    assert(BaseInfo && "Did not find base info for non-virtual base!");
1068
24.3k
1069
24.3k
    LayoutNonVirtualBase(BaseInfo);
1070
24.3k
  }
1071
142k
}
1072
1073
void ItaniumRecordLayoutBuilder::LayoutNonVirtualBase(
1074
34.3k
    const BaseSubobjectInfo *Base) {
1075
34.3k
  // Layout the base.
1076
34.3k
  CharUnits Offset = LayoutBase(Base);
1077
34.3k
1078
34.3k
  // Add its base class offset.
1079
34.3k
  assert(!Bases.count(Base->Class) && "base offset already exists!");
1080
34.3k
  Bases.insert(std::make_pair(Base->Class, Offset));
1081
34.3k
1082
34.3k
  AddPrimaryVirtualBaseOffsets(Base, Offset);
1083
34.3k
}
1084
1085
void ItaniumRecordLayoutBuilder::AddPrimaryVirtualBaseOffsets(
1086
35.9k
    const BaseSubobjectInfo *Info, CharUnits Offset) {
1087
35.9k
  // This base isn't interesting, it has no virtual bases.
1088
35.9k
  if (!Info->Class->getNumVBases())
1089
35.1k
    return;
1090
784
1091
784
  // First, check if we have a virtual primary base to add offsets for.
1092
784
  if (Info->PrimaryVirtualBaseInfo) {
1093
123
    assert(Info->PrimaryVirtualBaseInfo->IsVirtual &&
1094
123
           "Primary virtual base is not virtual!");
1095
123
    if (Info->PrimaryVirtualBaseInfo->Derived == Info) {
1096
120
      // Add the offset.
1097
120
      assert(!VBases.count(Info->PrimaryVirtualBaseInfo->Class) &&
1098
120
             "primary vbase offset already exists!");
1099
120
      VBases.insert(std::make_pair(Info->PrimaryVirtualBaseInfo->Class,
1100
120
                                   ASTRecordLayout::VBaseInfo(Offset, false)));
1101
120
1102
120
      // Traverse the primary virtual base.
1103
120
      AddPrimaryVirtualBaseOffsets(Info->PrimaryVirtualBaseInfo, Offset);
1104
120
    }
1105
123
  }
1106
784
1107
784
  // Now go through all direct non-virtual bases.
1108
784
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
1109
1.06k
  for (const BaseSubobjectInfo *Base : Info->Bases) {
1110
1.06k
    if (Base->IsVirtual)
1111
770
      continue;
1112
294
1113
294
    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
1114
294
    AddPrimaryVirtualBaseOffsets(Base, BaseOffset);
1115
294
  }
1116
784
}
1117
1118
void ItaniumRecordLayoutBuilder::LayoutVirtualBases(
1119
142k
    const CXXRecordDecl *RD, const CXXRecordDecl *MostDerivedClass) {
1120
142k
  const CXXRecordDecl *PrimaryBase;
1121
142k
  bool PrimaryBaseIsVirtual;
1122
142k
1123
142k
  if (MostDerivedClass == RD) {
1124
142k
    PrimaryBase = this->PrimaryBase;
1125
142k
    PrimaryBaseIsVirtual = this->PrimaryBaseIsVirtual;
1126
142k
  } else {
1127
788
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
1128
788
    PrimaryBase = Layout.getPrimaryBase();
1129
788
    PrimaryBaseIsVirtual = Layout.isPrimaryBaseVirtual();
1130
788
  }
1131
142k
1132
142k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
1133
36.2k
    assert(!Base.getType()->isDependentType() &&
1134
36.2k
           "Cannot layout class with dependent bases.");
1135
36.2k
1136
36.2k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
1137
36.2k
1138
36.2k
    if (Base.isVirtual()) {
1139
1.54k
      if (PrimaryBase != BaseDecl || 
!PrimaryBaseIsVirtual315
) {
1140
1.23k
        bool IndirectPrimaryBase = IndirectPrimaryBases.count(BaseDecl);
1141
1.23k
1142
1.23k
        // Only lay out the virtual base if it's not an indirect primary base.
1143
1.23k
        if (!IndirectPrimaryBase) {
1144
1.19k
          // Only visit virtual bases once.
1145
1.19k
          if (!VisitedVirtualBases.insert(BaseDecl).second)
1146
226
            continue;
1147
972
1148
972
          const BaseSubobjectInfo *BaseInfo = VirtualBaseInfo.lookup(BaseDecl);
1149
972
          assert(BaseInfo && "Did not find virtual base info!");
1150
972
          LayoutVirtualBase(BaseInfo);
1151
972
        }
1152
1.23k
      }
1153
1.54k
    }
1154
36.2k
1155
36.2k
    
if (35.9k
!BaseDecl->getNumVBases()35.9k
) {
1156
35.2k
      // This base isn't interesting since it doesn't have any virtual bases.
1157
35.2k
      continue;
1158
35.2k
    }
1159
788
1160
788
    LayoutVirtualBases(BaseDecl, MostDerivedClass);
1161
788
  }
1162
142k
}
1163
1164
void ItaniumRecordLayoutBuilder::LayoutVirtualBase(
1165
1.15k
    const BaseSubobjectInfo *Base) {
1166
1.15k
  assert(!Base->Derived && "Trying to lay out a primary virtual base!");
1167
1.15k
1168
1.15k
  // Layout the base.
1169
1.15k
  CharUnits Offset = LayoutBase(Base);
1170
1.15k
1171
1.15k
  // Add its base class offset.
1172
1.15k
  assert(!VBases.count(Base->Class) && "vbase offset already exists!");
1173
1.15k
  VBases.insert(std::make_pair(Base->Class,
1174
1.15k
                       ASTRecordLayout::VBaseInfo(Offset, false)));
1175
1.15k
1176
1.15k
  AddPrimaryVirtualBaseOffsets(Base, Offset);
1177
1.15k
}
1178
1179
CharUnits
1180
35.5k
ItaniumRecordLayoutBuilder::LayoutBase(const BaseSubobjectInfo *Base) {
1181
35.5k
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Base->Class);
1182
35.5k
1183
35.5k
1184
35.5k
  CharUnits Offset;
1185
35.5k
1186
35.5k
  // Query the external layout to see if it provides an offset.
1187
35.5k
  bool HasExternalLayout = false;
1188
35.5k
  if (UseExternalLayout) {
1189
10
    // FIXME: This appears to be reversed.
1190
10
    if (Base->IsVirtual)
1191
2
      HasExternalLayout = External.getExternalNVBaseOffset(Base->Class, Offset);
1192
8
    else
1193
8
      HasExternalLayout = External.getExternalVBaseOffset(Base->Class, Offset);
1194
10
  }
1195
35.5k
1196
35.5k
  // Clang <= 6 incorrectly applied the 'packed' attribute to base classes.
1197
35.5k
  // Per GCC's documentation, it only applies to non-static data members.
1198
35.5k
  CharUnits UnpackedBaseAlign = Layout.getNonVirtualAlignment();
1199
35.5k
  CharUnits BaseAlign =
1200
35.5k
      (Packed && 
(37
(Context.getLangOpts().getClangABICompat() <=
1201
37
                   LangOptions::ClangABI::Ver6) ||
1202
37
                  
Context.getTargetInfo().getTriple().isPS4()30
))
1203
35.5k
          ? 
CharUnits::One()9
1204
35.5k
          : 
UnpackedBaseAlign35.5k
;
1205
35.5k
1206
35.5k
  // If we have an empty base class, try to place it at offset 0.
1207
35.5k
  if (Base->Class->isEmpty() &&
1208
35.5k
      
(11.5k
!HasExternalLayout11.5k
||
Offset == CharUnits::Zero()0
) &&
1209
35.5k
      
EmptySubobjects->CanPlaceBaseAtOffset(Base, CharUnits::Zero())11.5k
) {
1210
11.4k
    setSize(std::max(getSize(), Layout.getSize()));
1211
11.4k
    UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1212
11.4k
1213
11.4k
    return CharUnits::Zero();
1214
11.4k
  }
1215
24.0k
1216
24.0k
  // The maximum field alignment overrides base align.
1217
24.0k
  if (!MaxFieldAlignment.isZero()) {
1218
19
    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
1219
19
    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
1220
19
  }
1221
24.0k
1222
24.0k
  if (!HasExternalLayout) {
1223
24.0k
    // Round up the current record size to the base's alignment boundary.
1224
24.0k
    Offset = getDataSize().alignTo(BaseAlign);
1225
24.0k
1226
24.0k
    // Try to place the base.
1227
24.0k
    while (!EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset))
1228
36
      Offset += BaseAlign;
1229
24.0k
  } else {
1230
0
    bool Allowed = EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset);
1231
0
    (void)Allowed;
1232
0
    assert(Allowed && "Base subobject externally placed at overlapping offset");
1233
0
1234
0
    if (InferAlignment && Offset < getDataSize().alignTo(BaseAlign)) {
1235
0
      // The externally-supplied base offset is before the base offset we
1236
0
      // computed. Assume that the structure is packed.
1237
0
      Alignment = CharUnits::One();
1238
0
      InferAlignment = false;
1239
0
    }
1240
0
  }
1241
24.0k
1242
24.0k
  if (!Base->Class->isEmpty()) {
1243
23.9k
    // Update the data size.
1244
23.9k
    setDataSize(Offset + Layout.getNonVirtualSize());
1245
23.9k
1246
23.9k
    setSize(std::max(getSize(), getDataSize()));
1247
23.9k
  } else
1248
68
    setSize(std::max(getSize(), Offset + Layout.getSize()));
1249
24.0k
1250
24.0k
  // Remember max struct/class alignment.
1251
24.0k
  UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1252
24.0k
1253
24.0k
  return Offset;
1254
24.0k
}
1255
1256
189k
void ItaniumRecordLayoutBuilder::InitializeLayout(const Decl *D) {
1257
189k
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
1258
188k
    IsUnion = RD->isUnion();
1259
188k
    IsMsStruct = RD->isMsStruct(Context);
1260
188k
  }
1261
189k
1262
189k
  Packed = D->hasAttr<PackedAttr>();
1263
189k
1264
189k
  // Honor the default struct packing maximum alignment flag.
1265
189k
  if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct) {
1266
2
    MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
1267
2
  }
1268
189k
1269
189k
  // mac68k alignment supersedes maximum field alignment and attribute aligned,
1270
189k
  // and forces all structures to have 2-byte alignment. The IBM docs on it
1271
189k
  // allude to additional (more complicated) semantics, especially with regard
1272
189k
  // to bit-fields, but gcc appears not to follow that.
1273
189k
  if (D->hasAttr<AlignMac68kAttr>()) {
1274
12
    IsMac68kAlign = true;
1275
12
    MaxFieldAlignment = CharUnits::fromQuantity(2);
1276
12
    Alignment = CharUnits::fromQuantity(2);
1277
189k
  } else {
1278
189k
    if (const MaxFieldAlignmentAttr *MFAA = D->getAttr<MaxFieldAlignmentAttr>())
1279
1.15k
      MaxFieldAlignment = Context.toCharUnitsFromBits(MFAA->getAlignment());
1280
189k
1281
189k
    if (unsigned MaxAlign = D->getMaxAlignment())
1282
1.97k
      UpdateAlignment(Context.toCharUnitsFromBits(MaxAlign));
1283
189k
  }
1284
189k
1285
189k
  // If there is an external AST source, ask it for the various offsets.
1286
189k
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D))
1287
188k
    if (ExternalASTSource *Source = Context.getExternalSource()) {
1288
6.98k
      UseExternalLayout = Source->layoutRecordType(
1289
6.98k
          RD, External.Size, External.Align, External.FieldOffsets,
1290
6.98k
          External.BaseOffsets, External.VirtualBaseOffsets);
1291
6.98k
1292
6.98k
      // Update based on external alignment.
1293
6.98k
      if (UseExternalLayout) {
1294
30
        if (External.Align > 0) {
1295
30
          Alignment = Context.toCharUnitsFromBits(External.Align);
1296
30
        } else {
1297
0
          // The external source didn't have alignment information; infer it.
1298
0
          InferAlignment = true;
1299
0
        }
1300
30
      }
1301
6.98k
    }
1302
189k
}
1303
1304
45.9k
void ItaniumRecordLayoutBuilder::Layout(const RecordDecl *D) {
1305
45.9k
  InitializeLayout(D);
1306
45.9k
  LayoutFields(D);
1307
45.9k
1308
45.9k
  // Finally, round the size of the total struct up to the alignment of the
1309
45.9k
  // struct itself.
1310
45.9k
  FinishLayout(D);
1311
45.9k
}
1312
1313
142k
void ItaniumRecordLayoutBuilder::Layout(const CXXRecordDecl *RD) {
1314
142k
  InitializeLayout(RD);
1315
142k
1316
142k
  // Lay out the vtable and the non-virtual bases.
1317
142k
  LayoutNonVirtualBases(RD);
1318
142k
1319
142k
  LayoutFields(RD);
1320
142k
1321
142k
  NonVirtualSize = Context.toCharUnitsFromBits(
1322
142k
      llvm::alignTo(getSizeInBits(), Context.getTargetInfo().getCharAlign()));
1323
142k
  NonVirtualAlignment = Alignment;
1324
142k
1325
142k
  // Lay out the virtual bases and add the primary virtual base offsets.
1326
142k
  LayoutVirtualBases(RD, RD);
1327
142k
1328
142k
  // Finally, round the size of the total struct up to the alignment
1329
142k
  // of the struct itself.
1330
142k
  FinishLayout(RD);
1331
142k
1332
#ifndef NDEBUG
1333
  // Check that we have base offsets for all bases.
1334
  for (const CXXBaseSpecifier &Base : RD->bases()) {
1335
    if (Base.isVirtual())
1336
      continue;
1337
1338
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
1339
1340
    assert(Bases.count(BaseDecl) && "Did not find base offset!");
1341
  }
1342
1343
  // And all virtual bases.
1344
  for (const CXXBaseSpecifier &Base : RD->vbases()) {
1345
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
1346
1347
    assert(VBases.count(BaseDecl) && "Did not find base offset!");
1348
  }
1349
#endif
1350
}
1351
1352
927
void ItaniumRecordLayoutBuilder::Layout(const ObjCInterfaceDecl *D) {
1353
927
  if (ObjCInterfaceDecl *SD = D->getSuperClass()) {
1354
307
    const ASTRecordLayout &SL = Context.getASTObjCInterfaceLayout(SD);
1355
307
1356
307
    UpdateAlignment(SL.getAlignment());
1357
307
1358
307
    // We start laying out ivars not at the end of the superclass
1359
307
    // structure, but at the next byte following the last field.
1360
307
    setDataSize(SL.getDataSize());
1361
307
    setSize(getDataSize());
1362
307
  }
1363
927
1364
927
  InitializeLayout(D);
1365
927
  // Layout each ivar sequentially.
1366
1.95k
  for (const ObjCIvarDecl *IVD = D->all_declared_ivar_begin(); IVD;
1367
1.03k
       IVD = IVD->getNextIvar())
1368
1.03k
    LayoutField(IVD, false);
1369
927
1370
927
  // Finally, round the size of the total struct up to the alignment of the
1371
927
  // struct itself.
1372
927
  FinishLayout(D);
1373
927
}
1374
1375
188k
void ItaniumRecordLayoutBuilder::LayoutFields(const RecordDecl *D) {
1376
188k
  // Layout each field, for now, just sequentially, respecting alignment.  In
1377
188k
  // the future, this will need to be tweakable by targets.
1378
188k
  bool InsertExtraPadding = D->mayInsertExtraPadding(/*EmitRemark=*/true);
1379
188k
  bool HasFlexibleArrayMember = D->hasFlexibleArrayMember();
1380
781k
  for (auto I = D->field_begin(), End = D->field_end(); I != End; 
++I593k
) {
1381
593k
    auto Next(I);
1382
593k
    ++Next;
1383
593k
    LayoutField(*I,
1384
593k
                InsertExtraPadding && 
(42
Next != End42
||
!HasFlexibleArrayMember16
));
1385
593k
  }
1386
188k
}
1387
1388
// Rounds the specified size to have it a multiple of the char size.
1389
static uint64_t
1390
roundUpSizeToCharAlignment(uint64_t Size,
1391
51
                           const ASTContext &Context) {
1392
51
  uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
1393
51
  return llvm::alignTo(Size, CharAlignment);
1394
51
}
1395
1396
void ItaniumRecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize,
1397
                                                    uint64_t TypeSize,
1398
                                                    bool FieldPacked,
1399
19
                                                    const FieldDecl *D) {
1400
19
  assert(Context.getLangOpts().CPlusPlus &&
1401
19
         "Can only have wide bit-fields in C++!");
1402
19
1403
19
  // Itanium C++ ABI 2.4:
1404
19
  //   If sizeof(T)*8 < n, let T' be the largest integral POD type with
1405
19
  //   sizeof(T')*8 <= n.
1406
19
1407
19
  QualType IntegralPODTypes[] = {
1408
19
    Context.UnsignedCharTy, Context.UnsignedShortTy, Context.UnsignedIntTy,
1409
19
    Context.UnsignedLongTy, Context.UnsignedLongLongTy
1410
19
  };
1411
19
1412
19
  QualType Type;
1413
76
  for (const QualType &QT : IntegralPODTypes) {
1414
76
    uint64_t Size = Context.getTypeSize(QT);
1415
76
1416
76
    if (Size > FieldSize)
1417
11
      break;
1418
65
1419
65
    Type = QT;
1420
65
  }
1421
19
  assert(!Type.isNull() && "Did not find a type!");
1422
19
1423
19
  CharUnits TypeAlign = Context.getTypeAlignInChars(Type);
1424
19
1425
19
  // We're not going to use any of the unfilled bits in the last byte.
1426
19
  UnfilledBitsInLastUnit = 0;
1427
19
  LastBitfieldTypeSize = 0;
1428
19
1429
19
  uint64_t FieldOffset;
1430
19
  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
1431
19
1432
19
  if (IsUnion) {
1433
3
    uint64_t RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize,
1434
3
                                                           Context);
1435
3
    setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
1436
3
    FieldOffset = 0;
1437
16
  } else {
1438
16
    // The bitfield is allocated starting at the next offset aligned
1439
16
    // appropriately for T', with length n bits.
1440
16
    FieldOffset = llvm::alignTo(getDataSizeInBits(), Context.toBits(TypeAlign));
1441
16
1442
16
    uint64_t NewSizeInBits = FieldOffset + FieldSize;
1443
16
1444
16
    setDataSize(
1445
16
        llvm::alignTo(NewSizeInBits, Context.getTargetInfo().getCharAlign()));
1446
16
    UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
1447
16
  }
1448
19
1449
19
  // Place this field at the current location.
1450
19
  FieldOffsets.push_back(FieldOffset);
1451
19
1452
19
  CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, FieldOffset,
1453
19
                    Context.toBits(TypeAlign), FieldPacked, D);
1454
19
1455
19
  // Update the size.
1456
19
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1457
19
1458
19
  // Remember max struct/class alignment.
1459
19
  UpdateAlignment(TypeAlign);
1460
19
}
1461
1462
10.0k
void ItaniumRecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
1463
10.0k
  bool FieldPacked = Packed || 
D->hasAttr<PackedAttr>()9.56k
;
1464
10.0k
  uint64_t FieldSize = D->getBitWidthValue(Context);
1465
10.0k
  TypeInfo FieldInfo = Context.getTypeInfo(D->getType());
1466
10.0k
  uint64_t TypeSize = FieldInfo.Width;
1467
10.0k
  unsigned FieldAlign = FieldInfo.Align;
1468
10.0k
1469
10.0k
  // UnfilledBitsInLastUnit is the difference between the end of the
1470
10.0k
  // last allocated bitfield (i.e. the first bit offset available for
1471
10.0k
  // bitfields) and the end of the current data size in bits (i.e. the
1472
10.0k
  // first bit offset available for non-bitfields).  The current data
1473
10.0k
  // size in bits is always a multiple of the char size; additionally,
1474
10.0k
  // for ms_struct records it's also a multiple of the
1475
10.0k
  // LastBitfieldTypeSize (if set).
1476
10.0k
1477
10.0k
  // The struct-layout algorithm is dictated by the platform ABI,
1478
10.0k
  // which in principle could use almost any rules it likes.  In
1479
10.0k
  // practice, UNIXy targets tend to inherit the algorithm described
1480
10.0k
  // in the System V generic ABI.  The basic bitfield layout rule in
1481
10.0k
  // System V is to place bitfields at the next available bit offset
1482
10.0k
  // where the entire bitfield would fit in an aligned storage unit of
1483
10.0k
  // the declared type; it's okay if an earlier or later non-bitfield
1484
10.0k
  // is allocated in the same storage unit.  However, some targets
1485
10.0k
  // (those that !useBitFieldTypeAlignment(), e.g. ARM APCS) don't
1486
10.0k
  // require this storage unit to be aligned, and therefore always put
1487
10.0k
  // the bitfield at the next available bit offset.
1488
10.0k
1489
10.0k
  // ms_struct basically requests a complete replacement of the
1490
10.0k
  // platform ABI's struct-layout algorithm, with the high-level goal
1491
10.0k
  // of duplicating MSVC's layout.  For non-bitfields, this follows
1492
10.0k
  // the standard algorithm.  The basic bitfield layout rule is to
1493
10.0k
  // allocate an entire unit of the bitfield's declared type
1494
10.0k
  // (e.g. 'unsigned long'), then parcel it up among successive
1495
10.0k
  // bitfields whose declared types have the same size, making a new
1496
10.0k
  // unit as soon as the last can no longer store the whole value.
1497
10.0k
  // Since it completely replaces the platform ABI's algorithm,
1498
10.0k
  // settings like !useBitFieldTypeAlignment() do not apply.
1499
10.0k
1500
10.0k
  // A zero-width bitfield forces the use of a new storage unit for
1501
10.0k
  // later bitfields.  In general, this occurs by rounding up the
1502
10.0k
  // current size of the struct as if the algorithm were about to
1503
10.0k
  // place a non-bitfield of the field's formal type.  Usually this
1504
10.0k
  // does not change the alignment of the struct itself, but it does
1505
10.0k
  // on some targets (those that useZeroLengthBitfieldAlignment(),
1506
10.0k
  // e.g. ARM).  In ms_struct layout, zero-width bitfields are
1507
10.0k
  // ignored unless they follow a non-zero-width bitfield.
1508
10.0k
1509
10.0k
  // A field alignment restriction (e.g. from #pragma pack) or
1510
10.0k
  // specification (e.g. from __attribute__((aligned))) changes the
1511
10.0k
  // formal alignment of the field.  For System V, this alters the
1512
10.0k
  // required alignment of the notional storage unit that must contain
1513
10.0k
  // the bitfield.  For ms_struct, this only affects the placement of
1514
10.0k
  // new storage units.  In both cases, the effect of #pragma pack is
1515
10.0k
  // ignored on zero-width bitfields.
1516
10.0k
1517
10.0k
  // On System V, a packed field (e.g. from #pragma pack or
1518
10.0k
  // __attribute__((packed))) always uses the next available bit
1519
10.0k
  // offset.
1520
10.0k
1521
10.0k
  // In an ms_struct struct, the alignment of a fundamental type is
1522
10.0k
  // always equal to its size.  This is necessary in order to mimic
1523
10.0k
  // the i386 alignment rules on targets which might not fully align
1524
10.0k
  // all types (e.g. Darwin PPC32, where alignof(long long) == 4).
1525
10.0k
1526
10.0k
  // First, some simple bookkeeping to perform for ms_struct structs.
1527
10.0k
  if (IsMsStruct) {
1528
305
    // The field alignment for integer types is always the size.
1529
305
    FieldAlign = TypeSize;
1530
305
1531
305
    // If the previous field was not a bitfield, or was a bitfield
1532
305
    // with a different storage unit size, or if this field doesn't fit into
1533
305
    // the current storage unit, we're done with that storage unit.
1534
305
    if (LastBitfieldTypeSize != TypeSize ||
1535
305
        
UnfilledBitsInLastUnit < FieldSize55
) {
1536
270
      // Also, ignore zero-length bitfields after non-bitfields.
1537
270
      if (!LastBitfieldTypeSize && 
!FieldSize171
)
1538
93
        FieldAlign = 1;
1539
270
1540
270
      UnfilledBitsInLastUnit = 0;
1541
270
      LastBitfieldTypeSize = 0;
1542
270
    }
1543
305
  }
1544
10.0k
1545
10.0k
  // If the field is wider than its declared type, it follows
1546
10.0k
  // different rules in all cases.
1547
10.0k
  if (FieldSize > TypeSize) {
1548
19
    LayoutWideBitField(FieldSize, TypeSize, FieldPacked, D);
1549
19
    return;
1550
19
  }
1551
10.0k
1552
10.0k
  // Compute the next available bit offset.
1553
10.0k
  uint64_t FieldOffset =
1554
10.0k
    IsUnion ? 
056
:
(getDataSizeInBits() - UnfilledBitsInLastUnit)9.94k
;
1555
10.0k
1556
10.0k
  // Handle targets that don't honor bitfield type alignment.
1557
10.0k
  if (!IsMsStruct && 
!Context.getTargetInfo().useBitFieldTypeAlignment()9.69k
) {
1558
235
    // Some such targets do honor it on zero-width bitfields.
1559
235
    if (FieldSize == 0 &&
1560
235
        
Context.getTargetInfo().useZeroLengthBitfieldAlignment()47
) {
1561
45
      // The alignment to round up to is the max of the field's natural
1562
45
      // alignment and a target-specific fixed value (sometimes zero).
1563
45
      unsigned ZeroLengthBitfieldBoundary =
1564
45
        Context.getTargetInfo().getZeroLengthBitfieldBoundary();
1565
45
      FieldAlign = std::max(FieldAlign, ZeroLengthBitfieldBoundary);
1566
45
1567
45
    // If that doesn't apply, just ignore the field alignment.
1568
190
    } else {
1569
190
      FieldAlign = 1;
1570
190
    }
1571
235
  }
1572
10.0k
1573
10.0k
  // Remember the alignment we would have used if the field were not packed.
1574
10.0k
  unsigned UnpackedFieldAlign = FieldAlign;
1575
10.0k
1576
10.0k
  // Ignore the field alignment if the field is packed unless it has zero-size.
1577
10.0k
  if (!IsMsStruct && 
FieldPacked9.69k
&&
FieldSize != 0482
)
1578
467
    FieldAlign = 1;
1579
10.0k
1580
10.0k
  // But, if there's an 'aligned' attribute on the field, honor that.
1581
10.0k
  unsigned ExplicitFieldAlign = D->getMaxAlignment();
1582
10.0k
  if (ExplicitFieldAlign) {
1583
156
    FieldAlign = std::max(FieldAlign, ExplicitFieldAlign);
1584
156
    UnpackedFieldAlign = std::max(UnpackedFieldAlign, ExplicitFieldAlign);
1585
156
  }
1586
10.0k
1587
10.0k
  // But, if there's a #pragma pack in play, that takes precedent over
1588
10.0k
  // even the 'aligned' attribute, for non-zero-width bitfields.
1589
10.0k
  unsigned MaxFieldAlignmentInBits = Context.toBits(MaxFieldAlignment);
1590
10.0k
  if (!MaxFieldAlignment.isZero() && 
FieldSize696
) {
1591
690
    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignmentInBits);
1592
690
    if (FieldPacked)
1593
34
      FieldAlign = UnpackedFieldAlign;
1594
656
    else
1595
656
      FieldAlign = std::min(FieldAlign, MaxFieldAlignmentInBits);
1596
690
  }
1597
10.0k
1598
10.0k
  // But, ms_struct just ignores all of that in unions, even explicit
1599
10.0k
  // alignment attributes.
1600
10.0k
  if (IsMsStruct && 
IsUnion305
) {
1601
8
    FieldAlign = UnpackedFieldAlign = 1;
1602
8
  }
1603
10.0k
1604
10.0k
  // For purposes of diagnostics, we're going to simultaneously
1605
10.0k
  // compute the field offsets that we would have used if we weren't
1606
10.0k
  // adding any alignment padding or if the field weren't packed.
1607
10.0k
  uint64_t UnpaddedFieldOffset = FieldOffset;
1608
10.0k
  uint64_t UnpackedFieldOffset = FieldOffset;
1609
10.0k
1610
10.0k
  // Check if we need to add padding to fit the bitfield within an
1611
10.0k
  // allocation unit with the right size and alignment.  The rules are
1612
10.0k
  // somewhat different here for ms_struct structs.
1613
10.0k
  if (IsMsStruct) {
1614
305
    // If it's not a zero-width bitfield, and we can fit the bitfield
1615
305
    // into the active storage unit (and we haven't already decided to
1616
305
    // start a new storage unit), just do so, regardless of any other
1617
305
    // other consideration.  Otherwise, round up to the right alignment.
1618
305
    if (FieldSize == 0 || 
FieldSize > UnfilledBitsInLastUnit197
) {
1619
273
      FieldOffset = llvm::alignTo(FieldOffset, FieldAlign);
1620
273
      UnpackedFieldOffset =
1621
273
          llvm::alignTo(UnpackedFieldOffset, UnpackedFieldAlign);
1622
273
      UnfilledBitsInLastUnit = 0;
1623
273
    }
1624
305
1625
9.69k
  } else {
1626
9.69k
    // #pragma pack, with any value, suppresses the insertion of padding.
1627
9.69k
    bool AllowPadding = MaxFieldAlignment.isZero();
1628
9.69k
1629
9.69k
    // Compute the real offset.
1630
9.69k
    if (FieldSize == 0 ||
1631
9.69k
        
(9.34k
AllowPadding9.34k
&&
1632
9.34k
         
(FieldOffset & (FieldAlign-1)) + FieldSize > TypeSize8.75k
)) {
1633
408
      FieldOffset = llvm::alignTo(FieldOffset, FieldAlign);
1634
9.28k
    } else if (ExplicitFieldAlign &&
1635
9.28k
               
(127
MaxFieldAlignmentInBits == 0127
||
1636
127
                
ExplicitFieldAlign <= MaxFieldAlignmentInBits24
) &&
1637
9.28k
               
Context.getTargetInfo().useExplicitBitFieldAlignment()119
) {
1638
99
      // TODO: figure it out what needs to be done on targets that don't honor
1639
99
      // bit-field type alignment like ARM APCS ABI.
1640
99
      FieldOffset = llvm::alignTo(FieldOffset, ExplicitFieldAlign);
1641
99
    }
1642
9.69k
1643
9.69k
    // Repeat the computation for diagnostic purposes.
1644
9.69k
    if (FieldSize == 0 ||
1645
9.69k
        
(9.34k
AllowPadding9.34k
&&
1646
9.34k
         
(UnpackedFieldOffset & (UnpackedFieldAlign-1)) + FieldSize > TypeSize8.75k
))
1647
454
      UnpackedFieldOffset =
1648
454
          llvm::alignTo(UnpackedFieldOffset, UnpackedFieldAlign);
1649
9.24k
    else if (ExplicitFieldAlign &&
1650
9.24k
             
(122
MaxFieldAlignmentInBits == 0122
||
1651
122
              
ExplicitFieldAlign <= MaxFieldAlignmentInBits24
) &&
1652
9.24k
             
Context.getTargetInfo().useExplicitBitFieldAlignment()114
)
1653
95
      UnpackedFieldOffset =
1654
95
          llvm::alignTo(UnpackedFieldOffset, ExplicitFieldAlign);
1655
9.69k
  }
1656
10.0k
1657
10.0k
  // If we're using external layout, give the external layout a chance
1658
10.0k
  // to override this information.
1659
10.0k
  if (UseExternalLayout)
1660
25
    FieldOffset = updateExternalFieldOffset(D, FieldOffset);
1661
10.0k
1662
10.0k
  // Okay, place the bitfield at the calculated offset.
1663
10.0k
  FieldOffsets.push_back(FieldOffset);
1664
10.0k
1665
10.0k
  // Bookkeeping:
1666
10.0k
1667
10.0k
  // Anonymous members don't affect the overall record alignment,
1668
10.0k
  // except on targets where they do.
1669
10.0k
  if (!IsMsStruct &&
1670
10.0k
      
!Context.getTargetInfo().useZeroLengthBitfieldAlignment()9.69k
&&
1671
10.0k
      
!D->getIdentifier()4.08k
)
1672
473
    FieldAlign = UnpackedFieldAlign = 1;
1673
10.0k
1674
10.0k
  // Diagnose differences in layout due to padding or packing.
1675
10.0k
  if (!UseExternalLayout)
1676
9.97k
    CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, UnpackedFieldOffset,
1677
9.97k
                      UnpackedFieldAlign, FieldPacked, D);
1678
10.0k
1679
10.0k
  // Update DataSize to include the last byte containing (part of) the bitfield.
1680
10.0k
1681
10.0k
  // For unions, this is just a max operation, as usual.
1682
10.0k
  if (IsUnion) {
1683
56
    // For ms_struct, allocate the entire storage unit --- unless this
1684
56
    // is a zero-width bitfield, in which case just use a size of 1.
1685
56
    uint64_t RoundedFieldSize;
1686
56
    if (IsMsStruct) {
1687
8
      RoundedFieldSize =
1688
8
        (FieldSize ? 
TypeSize5
:
Context.getTargetInfo().getCharWidth()3
);
1689
8
1690
8
    // Otherwise, allocate just the number of bytes required to store
1691
8
    // the bitfield.
1692
48
    } else {
1693
48
      RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize, Context);
1694
48
    }
1695
56
    setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
1696
56
1697
56
  // For non-zero-width bitfields in ms_struct structs, allocate a new
1698
56
  // storage unit if necessary.
1699
9.94k
  } else if (IsMsStruct && 
FieldSize297
) {
1700
192
    // We should have cleared UnfilledBitsInLastUnit in every case
1701
192
    // where we changed storage units.
1702
192
    if (!UnfilledBitsInLastUnit) {
1703
160
      setDataSize(FieldOffset + TypeSize);
1704
160
      UnfilledBitsInLastUnit = TypeSize;
1705
160
    }
1706
192
    UnfilledBitsInLastUnit -= FieldSize;
1707
192
    LastBitfieldTypeSize = TypeSize;
1708
192
1709
192
  // Otherwise, bump the data size up to include the bitfield,
1710
192
  // including padding up to char alignment, and then remember how
1711
192
  // bits we didn't use.
1712
9.75k
  } else {
1713
9.75k
    uint64_t NewSizeInBits = FieldOffset + FieldSize;
1714
9.75k
    uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
1715
9.75k
    setDataSize(llvm::alignTo(NewSizeInBits, CharAlignment));
1716
9.75k
    UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
1717
9.75k
1718
9.75k
    // The only time we can get here for an ms_struct is if this is a
1719
9.75k
    // zero-width bitfield, which doesn't count as anything for the
1720
9.75k
    // purposes of unfilled bits.
1721
9.75k
    LastBitfieldTypeSize = 0;
1722
9.75k
  }
1723
10.0k
1724
10.0k
  // Update the size.
1725
10.0k
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1726
10.0k
1727
10.0k
  // Remember max struct/class alignment.
1728
10.0k
  UnadjustedAlignment =
1729
10.0k
      std::max(UnadjustedAlignment, Context.toCharUnitsFromBits(FieldAlign));
1730
10.0k
  UpdateAlignment(Context.toCharUnitsFromBits(FieldAlign),
1731
10.0k
                  Context.toCharUnitsFromBits(UnpackedFieldAlign));
1732
10.0k
}
1733
1734
void ItaniumRecordLayoutBuilder::LayoutField(const FieldDecl *D,
1735
594k
                                             bool InsertExtraPadding) {
1736
594k
  if (D->isBitField()) {
1737
10.0k
    LayoutBitField(D);
1738
10.0k
    return;
1739
10.0k
  }
1740
584k
1741
584k
  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
1742
584k
1743
584k
  // Reset the unfilled bits.
1744
584k
  UnfilledBitsInLastUnit = 0;
1745
584k
  LastBitfieldTypeSize = 0;
1746
584k
1747
584k
  auto *FieldClass = D->getType()->getAsCXXRecordDecl();
1748
584k
  bool PotentiallyOverlapping = D->hasAttr<NoUniqueAddressAttr>() && 
FieldClass56
;
1749
584k
  bool IsOverlappingEmptyField = PotentiallyOverlapping && 
FieldClass->isEmpty()56
;
1750
584k
  bool FieldPacked = Packed || 
D->hasAttr<PackedAttr>()577k
;
1751
584k
1752
584k
  CharUnits FieldOffset = (IsUnion || 
IsOverlappingEmptyField541k
)
1753
584k
                              ? 
CharUnits::Zero()43.0k
1754
584k
                              : 
getDataSize()541k
;
1755
584k
  CharUnits FieldSize;
1756
584k
  CharUnits FieldAlign;
1757
584k
  // The amount of this class's dsize occupied by the field.
1758
584k
  // This is equal to FieldSize unless we're permitted to pack
1759
584k
  // into the field's tail padding.
1760
584k
  CharUnits EffectiveFieldSize;
1761
584k
1762
584k
  if (D->getType()->isIncompleteArrayType()) {
1763
94
    // This is a flexible array member; we can't directly
1764
94
    // query getTypeInfo about these, so we figure it out here.
1765
94
    // Flexible array members don't have any size, but they
1766
94
    // have to be aligned appropriately for their element type.
1767
94
    EffectiveFieldSize = FieldSize = CharUnits::Zero();
1768
94
    const ArrayType* ATy = Context.getAsArrayType(D->getType());
1769
94
    FieldAlign = Context.getTypeAlignInChars(ATy->getElementType());
1770
584k
  } else if (const ReferenceType *RT = D->getType()->getAs<ReferenceType>()) {
1771
4.64k
    unsigned AS = Context.getTargetAddressSpace(RT->getPointeeType());
1772
4.64k
    EffectiveFieldSize = FieldSize =
1773
4.64k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(AS));
1774
4.64k
    FieldAlign =
1775
4.64k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(AS));
1776
580k
  } else {
1777
580k
    std::pair<CharUnits, CharUnits> FieldInfo =
1778
580k
      Context.getTypeInfoInChars(D->getType());
1779
580k
    EffectiveFieldSize = FieldSize = FieldInfo.first;
1780
580k
    FieldAlign = FieldInfo.second;
1781
580k
1782
580k
    // A potentially-overlapping field occupies its dsize or nvsize, whichever
1783
580k
    // is larger.
1784
580k
    if (PotentiallyOverlapping) {
1785
56
      const ASTRecordLayout &Layout = Context.getASTRecordLayout(FieldClass);
1786
56
      EffectiveFieldSize =
1787
56
          std::max(Layout.getNonVirtualSize(), Layout.getDataSize());
1788
56
    }
1789
580k
1790
580k
    if (IsMsStruct) {
1791
170
      // If MS bitfield layout is required, figure out what type is being
1792
170
      // laid out and align the field to the width of that type.
1793
170
1794
170
      // Resolve all typedefs down to their base type and round up the field
1795
170
      // alignment if necessary.
1796
170
      QualType T = Context.getBaseElementType(D->getType());
1797
170
      if (const BuiltinType *BTy = T->getAs<BuiltinType>()) {
1798
163
        CharUnits TypeSize = Context.getTypeSizeInChars(BTy);
1799
163
1800
163
        if (!llvm::isPowerOf2_64(TypeSize.getQuantity())) {
1801
4
          assert(
1802
4
              !Context.getTargetInfo().getTriple().isWindowsMSVCEnvironment() &&
1803
4
              "Non PowerOf2 size in MSVC mode");
1804
4
          // Base types with sizes that aren't a power of two don't work
1805
4
          // with the layout rules for MS structs. This isn't an issue in
1806
4
          // MSVC itself since there are no such base data types there.
1807
4
          // On e.g. x86_32 mingw and linux, long double is 12 bytes though.
1808
4
          // Any structs involving that data type obviously can't be ABI
1809
4
          // compatible with MSVC regardless of how it is laid out.
1810
4
1811
4
          // Since ms_struct can be mass enabled (via a pragma or via the
1812
4
          // -mms-bitfields command line parameter), this can trigger for
1813
4
          // structs that don't actually need MSVC compatibility, so we
1814
4
          // need to be able to sidestep the ms_struct layout for these types.
1815
4
1816
4
          // Since the combination of -mms-bitfields together with structs
1817
4
          // like max_align_t (which contains a long double) for mingw is
1818
4
          // quite comon (and GCC handles it silently), just handle it
1819
4
          // silently there. For other targets that have ms_struct enabled
1820
4
          // (most probably via a pragma or attribute), trigger a diagnostic
1821
4
          // that defaults to an error.
1822
4
          if (!Context.getTargetInfo().getTriple().isWindowsGNUEnvironment())
1823
2
            Diag(D->getLocation(), diag::warn_npot_ms_struct);
1824
4
        }
1825
163
        if (TypeSize > FieldAlign &&
1826
163
            
llvm::isPowerOf2_64(TypeSize.getQuantity())12
)
1827
8
          FieldAlign = TypeSize;
1828
163
      }
1829
170
    }
1830
580k
  }
1831
584k
1832
584k
  // The align if the field is not packed. This is to check if the attribute
1833
584k
  // was unnecessary (-Wpacked).
1834
584k
  CharUnits UnpackedFieldAlign = FieldAlign;
1835
584k
  CharUnits UnpackedFieldOffset = FieldOffset;
1836
584k
1837
584k
  if (FieldPacked)
1838
7.77k
    FieldAlign = CharUnits::One();
1839
584k
  CharUnits MaxAlignmentInChars =
1840
584k
    Context.toCharUnitsFromBits(D->getMaxAlignment());
1841
584k
  FieldAlign = std::max(FieldAlign, MaxAlignmentInChars);
1842
584k
  UnpackedFieldAlign = std::max(UnpackedFieldAlign, MaxAlignmentInChars);
1843
584k
1844
584k
  // The maximum field alignment overrides the aligned attribute.
1845
584k
  if (!MaxFieldAlignment.isZero()) {
1846
3.92k
    FieldAlign = std::min(FieldAlign, MaxFieldAlignment);
1847
3.92k
    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignment);
1848
3.92k
  }
1849
584k
1850
584k
  // Round up the current record size to the field's alignment boundary.
1851
584k
  FieldOffset = FieldOffset.alignTo(FieldAlign);
1852
584k
  UnpackedFieldOffset = UnpackedFieldOffset.alignTo(UnpackedFieldAlign);
1853
584k
1854
584k
  if (UseExternalLayout) {
1855
49
    FieldOffset = Context.toCharUnitsFromBits(
1856
49
                    updateExternalFieldOffset(D, Context.toBits(FieldOffset)));
1857
49
1858
49
    if (!IsUnion && 
EmptySubobjects44
) {
1859
17
      // Record the fact that we're placing a field at this offset.
1860
17
      bool Allowed = EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset);
1861
17
      (void)Allowed;
1862
17
      assert(Allowed && "Externally-placed field cannot be placed here");
1863
17
    }
1864
584k
  } else {
1865
584k
    if (!IsUnion && 
EmptySubobjects541k
) {
1866
362k
      // Check if we can place the field at this offset.
1867
362k
      while (!EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset)) {
1868
46
        // We couldn't place the field at the offset. Try again at a new offset.
1869
46
        // We try offset 0 (for an empty field) and then dsize(C) onwards.
1870
46
        if (FieldOffset == CharUnits::Zero() &&
1871
46
            
getDataSize() != CharUnits::Zero()39
)
1872
8
          FieldOffset = getDataSize().alignTo(FieldAlign);
1873
38
        else
1874
38
          FieldOffset += FieldAlign;
1875
46
      }
1876
362k
    }
1877
584k
  }
1878
584k
1879
584k
  // Place this field at the current location.
1880
584k
  FieldOffsets.push_back(Context.toBits(FieldOffset));
1881
584k
1882
584k
  if (!UseExternalLayout)
1883
584k
    CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFieldOffset,
1884
584k
                      Context.toBits(UnpackedFieldOffset),
1885
584k
                      Context.toBits(UnpackedFieldAlign), FieldPacked, D);
1886
584k
1887
584k
  if (InsertExtraPadding) {
1888
36
    CharUnits ASanAlignment = CharUnits::fromQuantity(8);
1889
36
    CharUnits ExtraSizeForAsan = ASanAlignment;
1890
36
    if (FieldSize % ASanAlignment)
1891
30
      ExtraSizeForAsan +=
1892
30
          ASanAlignment - CharUnits::fromQuantity(FieldSize % ASanAlignment);
1893
36
    EffectiveFieldSize = FieldSize = FieldSize + ExtraSizeForAsan;
1894
36
  }
1895
584k
1896
584k
  // Reserve space for this field.
1897
584k
  if (!IsOverlappingEmptyField) {
1898
584k
    uint64_t EffectiveFieldSizeInBits = Context.toBits(EffectiveFieldSize);
1899
584k
    if (IsUnion)
1900
42.9k
      setDataSize(std::max(getDataSizeInBits(), EffectiveFieldSizeInBits));
1901
541k
    else
1902
541k
      setDataSize(FieldOffset + EffectiveFieldSize);
1903
584k
1904
584k
    PaddedFieldSize = std::max(PaddedFieldSize, FieldOffset + FieldSize);
1905
584k
    setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1906
584k
  } else {
1907
42
    setSize(std::max(getSizeInBits(),
1908
42
                     (uint64_t)Context.toBits(FieldOffset + FieldSize)));
1909
42
  }
1910
584k
1911
584k
  // Remember max struct/class alignment.
1912
584k
  UnadjustedAlignment = std::max(UnadjustedAlignment, FieldAlign);
1913
584k
  UpdateAlignment(FieldAlign, UnpackedFieldAlign);
1914
584k
}
1915
1916
189k
void ItaniumRecordLayoutBuilder::FinishLayout(const NamedDecl *D) {
1917
189k
  // In C++, records cannot be of size 0.
1918
189k
  if (Context.getLangOpts().CPlusPlus && 
getSizeInBits() == 0142k
) {
1919
16.5k
    if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
1920
16.5k
      // Compatibility with gcc requires a class (pod or non-pod)
1921
16.5k
      // which is not empty but of size 0; such as having fields of
1922
16.5k
      // array of zero-length, remains of Size 0
1923
16.5k
      if (RD->isEmpty())
1924
15.4k
        setSize(CharUnits::One());
1925
16.5k
    }
1926
56
    else
1927
56
      setSize(CharUnits::One());
1928
16.5k
  }
1929
189k
1930
189k
  // If we have any remaining field tail padding, include that in the overall
1931
189k
  // size.
1932
189k
  setSize(std::max(getSizeInBits(), (uint64_t)Context.toBits(PaddedFieldSize)));
1933
189k
1934
189k
  // Finally, round the size of the record up to the alignment of the
1935
189k
  // record itself.
1936
189k
  uint64_t UnpaddedSize = getSizeInBits() - UnfilledBitsInLastUnit;
1937
189k
  uint64_t UnpackedSizeInBits =
1938
189k
      llvm::alignTo(getSizeInBits(), Context.toBits(UnpackedAlignment));
1939
189k
  uint64_t RoundedSize =
1940
189k
      llvm::alignTo(getSizeInBits(), Context.toBits(Alignment));
1941
189k
1942
189k
  if (UseExternalLayout) {
1943
30
    // If we're inferring alignment, and the external size is smaller than
1944
30
    // our size after we've rounded up to alignment, conservatively set the
1945
30
    // alignment to 1.
1946
30
    if (InferAlignment && 
External.Size < RoundedSize0
) {
1947
0
      Alignment = CharUnits::One();
1948
0
      InferAlignment = false;
1949
0
    }
1950
30
    setSize(External.Size);
1951
30
    return;
1952
30
  }
1953
189k
1954
189k
  // Set the size to the final size.
1955
189k
  setSize(RoundedSize);
1956
189k
1957
189k
  unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
1958
189k
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
1959
188k
    // Warn if padding was introduced to the struct/class/union.
1960
188k
    if (getSizeInBits() > UnpaddedSize) {
1961
17.0k
      unsigned PadSize = getSizeInBits() - UnpaddedSize;
1962
17.0k
      bool InBits = true;
1963
17.0k
      if (PadSize % CharBitNum == 0) {
1964
16.3k
        PadSize = PadSize / CharBitNum;
1965
16.3k
        InBits = false;
1966
16.3k
      }
1967
17.0k
      Diag(RD->getLocation(), diag::warn_padded_struct_size)
1968
17.0k
          << Context.getTypeDeclType(RD)
1969
17.0k
          << PadSize
1970
17.0k
          << (InBits ? 
1740
:
016.3k
); // (byte|bit)
1971
17.0k
    }
1972
188k
1973
188k
    // Warn if we packed it unnecessarily, when the unpacked alignment is not
1974
188k
    // greater than the one after packing, the size in bits doesn't change and
1975
188k
    // the offset of each field is identical.
1976
188k
    if (Packed && 
UnpackedAlignment <= Alignment1.92k
&&
1977
188k
        
UnpackedSizeInBits == getSizeInBits()170
&&
!HasPackedField170
)
1978
154
      Diag(D->getLocation(), diag::warn_unnecessary_packed)
1979
154
          << Context.getTypeDeclType(RD);
1980
188k
  }
1981
189k
}
1982
1983
void ItaniumRecordLayoutBuilder::UpdateAlignment(
1984
636k
    CharUnits NewAlignment, CharUnits UnpackedNewAlignment) {
1985
636k
  // The alignment is not modified when using 'mac68k' alignment or when
1986
636k
  // we have an externally-supplied layout that also provides overall alignment.
1987
636k
  if (IsMac68kAlign || 
(636k
UseExternalLayout636k
&&
!InferAlignment86
))
1988
110
    return;
1989
636k
1990
636k
  if (NewAlignment > Alignment) {
1991
169k
    assert(llvm::isPowerOf2_64(NewAlignment.getQuantity()) &&
1992
169k
           "Alignment not a power of 2");
1993
169k
    Alignment = NewAlignment;
1994
169k
  }
1995
636k
1996
636k
  if (UnpackedNewAlignment > UnpackedAlignment) {
1997
170k
    assert(llvm::isPowerOf2_64(UnpackedNewAlignment.getQuantity()) &&
1998
170k
           "Alignment not a power of 2");
1999
170k
    UnpackedAlignment = UnpackedNewAlignment;
2000
170k
  }
2001
636k
}
2002
2003
uint64_t
2004
ItaniumRecordLayoutBuilder::updateExternalFieldOffset(const FieldDecl *Field,
2005
74
                                                      uint64_t ComputedOffset) {
2006
74
  uint64_t ExternalFieldOffset = External.getExternalFieldOffset(Field);
2007
74
2008
74
  if (InferAlignment && 
ExternalFieldOffset < ComputedOffset0
) {
2009
0
    // The externally-supplied field offset is before the field offset we
2010
0
    // computed. Assume that the structure is packed.
2011
0
    Alignment = CharUnits::One();
2012
0
    InferAlignment = false;
2013
0
  }
2014
74
2015
74
  // Use the externally-supplied field offset.
2016
74
  return ExternalFieldOffset;
2017
74
}
2018
2019
/// Get diagnostic %select index for tag kind for
2020
/// field padding diagnostic message.
2021
/// WARNING: Indexes apply to particular diagnostics only!
2022
///
2023
/// \returns diagnostic %select index.
2024
36.4k
static unsigned getPaddingDiagFromTagKind(TagTypeKind Tag) {
2025
36.4k
  switch (Tag) {
2026
36.4k
  
case TTK_Struct: return 028.0k
;
2027
36.4k
  
case TTK_Interface: return 10
;
2028
36.4k
  
case TTK_Class: return 28.48k
;
2029
36.4k
  
default: 0
llvm_unreachable0
("Invalid tag kind for field padding diagnostic!");
2030
36.4k
  }
2031
36.4k
}
2032
2033
void ItaniumRecordLayoutBuilder::CheckFieldPadding(
2034
    uint64_t Offset, uint64_t UnpaddedOffset, uint64_t UnpackedOffset,
2035
594k
    unsigned UnpackedAlign, bool isPacked, const FieldDecl *D) {
2036
594k
  // We let objc ivars without warning, objc interfaces generally are not used
2037
594k
  // for padding tricks.
2038
594k
  if (isa<ObjCIvarDecl>(D))
2039
1.03k
    return;
2040
593k
2041
593k
  // Don't warn about structs created without a SourceLocation.  This can
2042
593k
  // be done by clients of the AST, such as codegen.
2043
593k
  if (D->getLocation().isInvalid())
2044
40.0k
    return;
2045
553k
2046
553k
  unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
2047
553k
2048
553k
  // Warn if padding was introduced to the struct/class.
2049
553k
  if (!IsUnion && 
Offset > UnpaddedOffset510k
) {
2050
36.4k
    unsigned PadSize = Offset - UnpaddedOffset;
2051
36.4k
    bool InBits = true;
2052
36.4k
    if (PadSize % CharBitNum == 0) {
2053
35.9k
      PadSize = PadSize / CharBitNum;
2054
35.9k
      InBits = false;
2055
35.9k
    }
2056
36.4k
    if (D->getIdentifier())
2057
36.1k
      Diag(D->getLocation(), diag::warn_padded_struct_field)
2058
36.1k
          << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
2059
36.1k
          << Context.getTypeDeclType(D->getParent())
2060
36.1k
          << PadSize
2061
36.1k
          << (InBits ? 
1495
:
035.6k
) // (byte|bit)
2062
36.1k
          << D->getIdentifier();
2063
325
    else
2064
325
      Diag(D->getLocation(), diag::warn_padded_struct_anon_field)
2065
325
          << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
2066
325
          << Context.getTypeDeclType(D->getParent())
2067
325
          << PadSize
2068
325
          << (InBits ? 
185
:
0240
); // (byte|bit)
2069
36.4k
 }
2070
553k
 if (isPacked && 
Offset != UnpackedOffset1.46k
) {
2071
191
   HasPackedField = true;
2072
191
 }
2073
553k
}
2074
2075
static const CXXMethodDecl *computeKeyFunction(ASTContext &Context,
2076
62.9k
                                               const CXXRecordDecl *RD) {
2077
62.9k
  // If a class isn't polymorphic it doesn't have a key function.
2078
62.9k
  if (!RD->isPolymorphic())
2079
26.6k
    return nullptr;
2080
36.3k
2081
36.3k
  // A class that is not externally visible doesn't have a key function. (Or
2082
36.3k
  // at least, there's no point to assigning a key function to such a class;
2083
36.3k
  // this doesn't affect the ABI.)
2084
36.3k
  if (!RD->isExternallyVisible())
2085
299
    return nullptr;
2086
36.0k
2087
36.0k
  // Template instantiations don't have key functions per Itanium C++ ABI 5.2.6.
2088
36.0k
  // Same behavior as GCC.
2089
36.0k
  TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
2090
36.0k
  if (TSK == TSK_ImplicitInstantiation ||
2091
36.0k
      
TSK == TSK_ExplicitInstantiationDeclaration24.7k
||
2092
36.0k
      
TSK == TSK_ExplicitInstantiationDefinition23.8k
)
2093
12.4k
    return nullptr;
2094
23.6k
2095
23.6k
  bool allowInlineFunctions =
2096
23.6k
    Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline();
2097
23.6k
2098
131k
  for (const CXXMethodDecl *MD : RD->methods()) {
2099
131k
    if (!MD->isVirtual())
2100
89.8k
      continue;
2101
41.9k
2102
41.9k
    if (MD->isPure())
2103
7.88k
      continue;
2104
34.0k
2105
34.0k
    // Ignore implicit member functions, they are always marked as inline, but
2106
34.0k
    // they don't have a body until they're defined.
2107
34.0k
    if (MD->isImplicit())
2108
3.28k
      continue;
2109
30.7k
2110
30.7k
    if (MD->isInlineSpecified())
2111
57
      continue;
2112
30.7k
2113
30.7k
    if (MD->hasInlineBody())
2114
15.6k
      continue;
2115
15.0k
2116
15.0k
    // Ignore inline deleted or defaulted functions.
2117
15.0k
    if (!MD->isUserProvided())
2118
9
      continue;
2119
15.0k
2120
15.0k
    // In certain ABIs, ignore functions with out-of-line inline definitions.
2121
15.0k
    if (!allowInlineFunctions) {
2122
11.2k
      const FunctionDecl *Def;
2123
11.2k
      if (MD->hasBody(Def) && 
Def->isInlineSpecified()4.01k
)
2124
98
        continue;
2125
14.9k
    }
2126
14.9k
2127
14.9k
    if (Context.getLangOpts().CUDA) {
2128
7
      // While compiler may see key method in this TU, during CUDA
2129
7
      // compilation we should ignore methods that are not accessible
2130
7
      // on this side of compilation.
2131
7
      if (Context.getLangOpts().CUDAIsDevice) {
2132
4
        // In device mode ignore methods without __device__ attribute.
2133
4
        if (!MD->hasAttr<CUDADeviceAttr>())
2134
2
          continue;
2135
3
      } else {
2136
3
        // In host mode ignore __device__-only methods.
2137
3
        if (!MD->hasAttr<CUDAHostAttr>() && MD->hasAttr<CUDADeviceAttr>())
2138
1
          continue;
2139
14.9k
      }
2140
7
    }
2141
14.9k
2142
14.9k
    // If the key function is dllimport but the class isn't, then the class has
2143
14.9k
    // no key function. The DLL that exports the key function won't export the
2144
14.9k
    // vtable in this case.
2145
14.9k
    if (MD->hasAttr<DLLImportAttr>() && 
!RD->hasAttr<DLLImportAttr>()54
)
2146
47
      return nullptr;
2147
14.9k
2148
14.9k
    // We found it.
2149
14.9k
    return MD;
2150
14.9k
  }
2151
23.6k
2152
23.6k
  
return nullptr8.69k
;
2153
23.6k
}
2154
2155
DiagnosticBuilder ItaniumRecordLayoutBuilder::Diag(SourceLocation Loc,
2156
53.7k
                                                   unsigned DiagID) {
2157
53.7k
  return Context.getDiagnostics().Report(Loc, DiagID);
2158
53.7k
}
2159
2160
/// Does the target C++ ABI require us to skip over the tail-padding
2161
/// of the given class (considering it as a base class) when allocating
2162
/// objects?
2163
142k
static bool mustSkipTailPadding(TargetCXXABI ABI, const CXXRecordDecl *RD) {
2164
142k
  switch (ABI.getTailPaddingUseRules()) {
2165
142k
  case TargetCXXABI::AlwaysUseTailPadding:
2166
0
    return false;
2167
142k
2168
142k
  case TargetCXXABI::UseTailPaddingUnlessPOD03:
2169
78.4k
    // FIXME: To the extent that this is meant to cover the Itanium ABI
2170
78.4k
    // rules, we should implement the restrictions about over-sized
2171
78.4k
    // bitfields:
2172
78.4k
    //
2173
78.4k
    // http://itanium-cxx-abi.github.io/cxx-abi/abi.html#POD :
2174
78.4k
    //   In general, a type is considered a POD for the purposes of
2175
78.4k
    //   layout if it is a POD type (in the sense of ISO C++
2176
78.4k
    //   [basic.types]). However, a POD-struct or POD-union (in the
2177
78.4k
    //   sense of ISO C++ [class]) with a bitfield member whose
2178
78.4k
    //   declared width is wider than the declared type of the
2179
78.4k
    //   bitfield is not a POD for the purpose of layout.  Similarly,
2180
78.4k
    //   an array type is not a POD for the purpose of layout if the
2181
78.4k
    //   element type of the array is not a POD for the purpose of
2182
78.4k
    //   layout.
2183
78.4k
    //
2184
78.4k
    //   Where references to the ISO C++ are made in this paragraph,
2185
78.4k
    //   the Technical Corrigendum 1 version of the standard is
2186
78.4k
    //   intended.
2187
78.4k
    return RD->isPOD();
2188
142k
2189
142k
  case TargetCXXABI::UseTailPaddingUnlessPOD11:
2190
63.6k
    // This is equivalent to RD->getTypeForDecl().isCXX11PODType(),
2191
63.6k
    // but with a lot of abstraction penalty stripped off.  This does
2192
63.6k
    // assume that these properties are set correctly even in C++98
2193
63.6k
    // mode; fortunately, that is true because we want to assign
2194
63.6k
    // consistently semantics to the type-traits intrinsics (or at
2195
63.6k
    // least as many of them as possible).
2196
63.6k
    return RD->isTrivial() && 
RD->isCXX11StandardLayout()28.8k
;
2197
0
  }
2198
0
2199
0
  llvm_unreachable("bad tail-padding use kind");
2200
0
}
2201
2202
194k
static bool isMsLayout(const ASTContext &Context) {
2203
194k
  return Context.getTargetInfo().getCXXABI().isMicrosoft();
2204
194k
}
2205
2206
// This section contains an implementation of struct layout that is, up to the
2207
// included tests, compatible with cl.exe (2013).  The layout produced is
2208
// significantly different than those produced by the Itanium ABI.  Here we note
2209
// the most important differences.
2210
//
2211
// * The alignment of bitfields in unions is ignored when computing the
2212
//   alignment of the union.
2213
// * The existence of zero-width bitfield that occurs after anything other than
2214
//   a non-zero length bitfield is ignored.
2215
// * There is no explicit primary base for the purposes of layout.  All bases
2216
//   with vfptrs are laid out first, followed by all bases without vfptrs.
2217
// * The Itanium equivalent vtable pointers are split into a vfptr (virtual
2218
//   function pointer) and a vbptr (virtual base pointer).  They can each be
2219
//   shared with a, non-virtual bases. These bases need not be the same.  vfptrs
2220
//   always occur at offset 0.  vbptrs can occur at an arbitrary offset and are
2221
//   placed after the lexicographically last non-virtual base.  This placement
2222
//   is always before fields but can be in the middle of the non-virtual bases
2223
//   due to the two-pass layout scheme for non-virtual-bases.
2224
// * Virtual bases sometimes require a 'vtordisp' field that is laid out before
2225
//   the virtual base and is used in conjunction with virtual overrides during
2226
//   construction and destruction.  This is always a 4 byte value and is used as
2227
//   an alternative to constructor vtables.
2228
// * vtordisps are allocated in a block of memory with size and alignment equal
2229
//   to the alignment of the completed structure (before applying __declspec(
2230
//   align())).  The vtordisp always occur at the end of the allocation block,
2231
//   immediately prior to the virtual base.
2232
// * vfptrs are injected after all bases and fields have been laid out.  In
2233
//   order to guarantee proper alignment of all fields, the vfptr injection
2234
//   pushes all bases and fields back by the alignment imposed by those bases
2235
//   and fields.  This can potentially add a significant amount of padding.
2236
//   vfptrs are always injected at offset 0.
2237
// * vbptrs are injected after all bases and fields have been laid out.  In
2238
//   order to guarantee proper alignment of all fields, the vfptr injection
2239
//   pushes all bases and fields back by the alignment imposed by those bases
2240
//   and fields.  This can potentially add a significant amount of padding.
2241
//   vbptrs are injected immediately after the last non-virtual base as
2242
//   lexicographically ordered in the code.  If this site isn't pointer aligned
2243
//   the vbptr is placed at the next properly aligned location.  Enough padding
2244
//   is added to guarantee a fit.
2245
// * The last zero sized non-virtual base can be placed at the end of the
2246
//   struct (potentially aliasing another object), or may alias with the first
2247
//   field, even if they are of the same type.
2248
// * The last zero size virtual base may be placed at the end of the struct
2249
//   potentially aliasing another object.
2250
// * The ABI attempts to avoid aliasing of zero sized bases by adding padding
2251
//   between bases or vbases with specific properties.  The criteria for
2252
//   additional padding between two bases is that the first base is zero sized
2253
//   or ends with a zero sized subobject and the second base is zero sized or
2254
//   trails with a zero sized base or field (sharing of vfptrs can reorder the
2255
//   layout of the so the leading base is not always the first one declared).
2256
//   This rule does take into account fields that are not records, so padding
2257
//   will occur even if the last field is, e.g. an int. The padding added for
2258
//   bases is 1 byte.  The padding added between vbases depends on the alignment
2259
//   of the object but is at least 4 bytes (in both 32 and 64 bit modes).
2260
// * There is no concept of non-virtual alignment, non-virtual alignment and
2261
//   alignment are always identical.
2262
// * There is a distinction between alignment and required alignment.
2263
//   __declspec(align) changes the required alignment of a struct.  This
2264
//   alignment is _always_ obeyed, even in the presence of #pragma pack. A
2265
//   record inherits required alignment from all of its fields and bases.
2266
// * __declspec(align) on bitfields has the effect of changing the bitfield's
2267
//   alignment instead of its required alignment.  This is the only known way
2268
//   to make the alignment of a struct bigger than 8.  Interestingly enough
2269
//   this alignment is also immune to the effects of #pragma pack and can be
2270
//   used to create structures with large alignment under #pragma pack.
2271
//   However, because it does not impact required alignment, such a structure,
2272
//   when used as a field or base, will not be aligned if #pragma pack is
2273
//   still active at the time of use.
2274
//
2275
// Known incompatibilities:
2276
// * all: #pragma pack between fields in a record
2277
// * 2010 and back: If the last field in a record is a bitfield, every object
2278
//   laid out after the record will have extra padding inserted before it.  The
2279
//   extra padding will have size equal to the size of the storage class of the
2280
//   bitfield.  0 sized bitfields don't exhibit this behavior and the extra
2281
//   padding can be avoided by adding a 0 sized bitfield after the non-zero-
2282
//   sized bitfield.
2283
// * 2012 and back: In 64-bit mode, if the alignment of a record is 16 or
2284
//   greater due to __declspec(align()) then a second layout phase occurs after
2285
//   The locations of the vf and vb pointers are known.  This layout phase
2286
//   suffers from the "last field is a bitfield" bug in 2010 and results in
2287
//   _every_ field getting padding put in front of it, potentially including the
2288
//   vfptr, leaving the vfprt at a non-zero location which results in a fault if
2289
//   anything tries to read the vftbl.  The second layout phase also treats
2290
//   bitfields as separate entities and gives them each storage rather than
2291
//   packing them.  Additionally, because this phase appears to perform a
2292
//   (an unstable) sort on the members before laying them out and because merged
2293
//   bitfields have the same address, the bitfields end up in whatever order
2294
//   the sort left them in, a behavior we could never hope to replicate.
2295
2296
namespace {
2297
struct MicrosoftRecordLayoutBuilder {
2298
  struct ElementInfo {
2299
    CharUnits Size;
2300
    CharUnits Alignment;
2301
  };
2302
  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
2303
4.28k
  MicrosoftRecordLayoutBuilder(const ASTContext &Context) : Context(Context) {}
2304
private:
2305
  MicrosoftRecordLayoutBuilder(const MicrosoftRecordLayoutBuilder &) = delete;
2306
  void operator=(const MicrosoftRecordLayoutBuilder &) = delete;
2307
public:
2308
  void layout(const RecordDecl *RD);
2309
  void cxxLayout(const CXXRecordDecl *RD);
2310
  /// Initializes size and alignment and honors some flags.
2311
  void initializeLayout(const RecordDecl *RD);
2312
  /// Initialized C++ layout, compute alignment and virtual alignment and
2313
  /// existence of vfptrs and vbptrs.  Alignment is needed before the vfptr is
2314
  /// laid out.
2315
  void initializeCXXLayout(const CXXRecordDecl *RD);
2316
  void layoutNonVirtualBases(const CXXRecordDecl *RD);
2317
  void layoutNonVirtualBase(const CXXRecordDecl *RD,
2318
                            const CXXRecordDecl *BaseDecl,
2319
                            const ASTRecordLayout &BaseLayout,
2320
                            const ASTRecordLayout *&PreviousBaseLayout);
2321
  void injectVFPtr(const CXXRecordDecl *RD);
2322
  void injectVBPtr(const CXXRecordDecl *RD);
2323
  /// Lays out the fields of the record.  Also rounds size up to
2324
  /// alignment.
2325
  void layoutFields(const RecordDecl *RD);
2326
  void layoutField(const FieldDecl *FD);
2327
  void layoutBitField(const FieldDecl *FD);
2328
  /// Lays out a single zero-width bit-field in the record and handles
2329
  /// special cases associated with zero-width bit-fields.
2330
  void layoutZeroWidthBitField(const FieldDecl *FD);
2331
  void layoutVirtualBases(const CXXRecordDecl *RD);
2332
  void finalizeLayout(const RecordDecl *RD);
2333
  /// Gets the size and alignment of a base taking pragma pack and
2334
  /// __declspec(align) into account.
2335
  ElementInfo getAdjustedElementInfo(const ASTRecordLayout &Layout);
2336
  /// Gets the size and alignment of a field taking pragma  pack and
2337
  /// __declspec(align) into account.  It also updates RequiredAlignment as a
2338
  /// side effect because it is most convenient to do so here.
2339
  ElementInfo getAdjustedElementInfo(const FieldDecl *FD);
2340
  /// Places a field at an offset in CharUnits.
2341
2.38k
  void placeFieldAtOffset(CharUnits FieldOffset) {
2342
2.38k
    FieldOffsets.push_back(Context.toBits(FieldOffset));
2343
2.38k
  }
2344
  /// Places a bitfield at a bit offset.
2345
30
  void placeFieldAtBitOffset(uint64_t FieldOffset) {
2346
30
    FieldOffsets.push_back(FieldOffset);
2347
30
  }
2348
  /// Compute the set of virtual bases for which vtordisps are required.
2349
  void computeVtorDispSet(
2350
      llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtorDispSet,
2351
      const CXXRecordDecl *RD) const;
2352
  const ASTContext &Context;
2353
  /// The size of the record being laid out.
2354
  CharUnits Size;
2355
  /// The non-virtual size of the record layout.
2356
  CharUnits NonVirtualSize;
2357
  /// The data size of the record layout.
2358
  CharUnits DataSize;
2359
  /// The current alignment of the record layout.
2360
  CharUnits Alignment;
2361
  /// The maximum allowed field alignment. This is set by #pragma pack.
2362
  CharUnits MaxFieldAlignment;
2363
  /// The alignment that this record must obey.  This is imposed by
2364
  /// __declspec(align()) on the record itself or one of its fields or bases.
2365
  CharUnits RequiredAlignment;
2366
  /// The size of the allocation of the currently active bitfield.
2367
  /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield
2368
  /// is true.
2369
  CharUnits CurrentBitfieldSize;
2370
  /// Offset to the virtual base table pointer (if one exists).
2371
  CharUnits VBPtrOffset;
2372
  /// Minimum record size possible.
2373
  CharUnits MinEmptyStructSize;
2374
  /// The size and alignment info of a pointer.
2375
  ElementInfo PointerInfo;
2376
  /// The primary base class (if one exists).
2377
  const CXXRecordDecl *PrimaryBase;
2378
  /// The class we share our vb-pointer with.
2379
  const CXXRecordDecl *SharedVBPtrBase;
2380
  /// The collection of field offsets.
2381
  SmallVector<uint64_t, 16> FieldOffsets;
2382
  /// Base classes and their offsets in the record.
2383
  BaseOffsetsMapTy Bases;
2384
  /// virtual base classes and their offsets in the record.
2385
  ASTRecordLayout::VBaseOffsetsMapTy VBases;
2386
  /// The number of remaining bits in our last bitfield allocation.
2387
  /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield is
2388
  /// true.
2389
  unsigned RemainingBitsInField;
2390
  bool IsUnion : 1;
2391
  /// True if the last field laid out was a bitfield and was not 0
2392
  /// width.
2393
  bool LastFieldIsNonZeroWidthBitfield : 1;
2394
  /// True if the class has its own vftable pointer.
2395
  bool HasOwnVFPtr : 1;
2396
  /// True if the class has a vbtable pointer.
2397
  bool HasVBPtr : 1;
2398
  /// True if the last sub-object within the type is zero sized or the
2399
  /// object itself is zero sized.  This *does not* count members that are not
2400
  /// records.  Only used for MS-ABI.
2401
  bool EndsWithZeroSizedObject : 1;
2402
  /// True if this class is zero sized or first base is zero sized or
2403
  /// has this property.  Only used for MS-ABI.
2404
  bool LeadsWithZeroSizedBase : 1;
2405
2406
  /// True if the external AST source provided a layout for this record.
2407
  bool UseExternalLayout : 1;
2408
2409
  /// The layout provided by the external AST source. Only active if
2410
  /// UseExternalLayout is true.
2411
  ExternalLayout External;
2412
};
2413
} // namespace
2414
2415
MicrosoftRecordLayoutBuilder::ElementInfo
2416
MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
2417
2.71k
    const ASTRecordLayout &Layout) {
2418
2.71k
  ElementInfo Info;
2419
2.71k
  Info.Alignment = Layout.getAlignment();
2420
2.71k
  // Respect pragma pack.
2421
2.71k
  if (!MaxFieldAlignment.isZero())
2422
34
    Info.Alignment = std::min(Info.Alignment, MaxFieldAlignment);
2423
2.71k
  // Track zero-sized subobjects here where it's already available.
2424
2.71k
  EndsWithZeroSizedObject = Layout.endsWithZeroSizedObject();
2425
2.71k
  // Respect required alignment, this is necessary because we may have adjusted
2426
2.71k
  // the alignment in the case of pragam pack.  Note that the required alignment
2427
2.71k
  // doesn't actually apply to the struct alignment at this point.
2428
2.71k
  Alignment = std::max(Alignment, Info.Alignment);
2429
2.71k
  RequiredAlignment = std::max(RequiredAlignment, Layout.getRequiredAlignment());
2430
2.71k
  Info.Alignment = std::max(Info.Alignment, Layout.getRequiredAlignment());
2431
2.71k
  Info.Size = Layout.getNonVirtualSize();
2432
2.71k
  return Info;
2433
2.71k
}
2434
2435
MicrosoftRecordLayoutBuilder::ElementInfo
2436
MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
2437
2.39k
    const FieldDecl *FD) {
2438
2.39k
  // Get the alignment of the field type's natural alignment, ignore any
2439
2.39k
  // alignment attributes.
2440
2.39k
  ElementInfo Info;
2441
2.39k
  std::tie(Info.Size, Info.Alignment) =
2442
2.39k
      Context.getTypeInfoInChars(FD->getType()->getUnqualifiedDesugaredType());
2443
2.39k
  // Respect align attributes on the field.
2444
2.39k
  CharUnits FieldRequiredAlignment =
2445
2.39k
      Context.toCharUnitsFromBits(FD->getMaxAlignment());
2446
2.39k
  // Respect align attributes on the type.
2447
2.39k
  if (Context.isAlignmentRequired(FD->getType()))
2448
67
    FieldRequiredAlignment = std::max(
2449
67
        Context.getTypeAlignInChars(FD->getType()), FieldRequiredAlignment);
2450
2.39k
  // Respect attributes applied to subobjects of the field.
2451
2.39k
  if (FD->isBitField())
2452
165
    // For some reason __declspec align impacts alignment rather than required
2453
165
    // alignment when it is applied to bitfields.
2454
165
    Info.Alignment = std::max(Info.Alignment, FieldRequiredAlignment);
2455
2.22k
  else {
2456
2.22k
    if (auto RT =
2457
165
            FD->getType()->getBaseElementTypeUnsafe()->getAs<RecordType>()) {
2458
165
      auto const &Layout = Context.getASTRecordLayout(RT->getDecl());
2459
165
      EndsWithZeroSizedObject = Layout.endsWithZeroSizedObject();
2460
165
      FieldRequiredAlignment = std::max(FieldRequiredAlignment,
2461
165
                                        Layout.getRequiredAlignment());
2462
165
    }
2463
2.22k
    // Capture required alignment as a side-effect.
2464
2.22k
    RequiredAlignment = std::max(RequiredAlignment, FieldRequiredAlignment);
2465
2.22k
  }
2466
2.39k
  // Respect pragma pack, attribute pack and declspec align
2467
2.39k
  if (!MaxFieldAlignment.isZero())
2468
252
    Info.Alignment = std::min(Info.Alignment, MaxFieldAlignment);
2469
2.39k
  if (FD->hasAttr<PackedAttr>())
2470
7
    Info.Alignment = CharUnits::One();
2471
2.39k
  Info.Alignment = std::max(Info.Alignment, FieldRequiredAlignment);
2472
2.39k
  return Info;
2473
2.39k
}
2474
2475
305
void MicrosoftRecordLayoutBuilder::layout(const RecordDecl *RD) {
2476
305
  // For C record layout, zero-sized records always have size 4.
2477
305
  MinEmptyStructSize = CharUnits::fromQuantity(4);
2478
305
  initializeLayout(RD);
2479
305
  layoutFields(RD);
2480
305
  DataSize = Size = Size.alignTo(Alignment);
2481
305
  RequiredAlignment = std::max(
2482
305
      RequiredAlignment, Context.toCharUnitsFromBits(RD->getMaxAlignment()));
2483
305
  finalizeLayout(RD);
2484
305
}
2485
2486
3.97k
void MicrosoftRecordLayoutBuilder::cxxLayout(const CXXRecordDecl *RD) {
2487
3.97k
  // The C++ standard says that empty structs have size 1.
2488
3.97k
  MinEmptyStructSize = CharUnits::One();
2489
3.97k
  initializeLayout(RD);
2490
3.97k
  initializeCXXLayout(RD);
2491
3.97k
  layoutNonVirtualBases(RD);
2492
3.97k
  layoutFields(RD);
2493
3.97k
  injectVBPtr(RD);
2494
3.97k
  injectVFPtr(RD);
2495
3.97k
  if (HasOwnVFPtr || 
(3.27k
HasVBPtr3.27k
&&
!SharedVBPtrBase727
))
2496
1.23k
    Alignment = std::max(Alignment, PointerInfo.Alignment);
2497
3.97k
  auto RoundingAlignment = Alignment;
2498
3.97k
  if (!MaxFieldAlignment.isZero())
2499
56
    RoundingAlignment = std::min(RoundingAlignment, MaxFieldAlignment);
2500
3.97k
  if (!UseExternalLayout)
2501
3.97k
    Size = Size.alignTo(RoundingAlignment);
2502
3.97k
  NonVirtualSize = Size;
2503
3.97k
  RequiredAlignment = std::max(
2504
3.97k
      RequiredAlignment, Context.toCharUnitsFromBits(RD->getMaxAlignment()));
2505
3.97k
  layoutVirtualBases(RD);
2506
3.97k
  finalizeLayout(RD);
2507
3.97k
}
2508
2509
4.28k
void MicrosoftRecordLayoutBuilder::initializeLayout(const RecordDecl *RD) {
2510
4.28k
  IsUnion = RD->isUnion();
2511
4.28k
  Size = CharUnits::Zero();
2512
4.28k
  Alignment = CharUnits::One();
2513
4.28k
  // In 64-bit mode we always perform an alignment step after laying out vbases.
2514
4.28k
  // In 32-bit mode we do not.  The check to see if we need to perform alignment
2515
4.28k
  // checks the RequiredAlignment field and performs alignment if it isn't 0.
2516
4.28k
  RequiredAlignment = Context.getTargetInfo().getTriple().isArch64Bit()
2517
4.28k
                          ? 
CharUnits::One()1.83k
2518
4.28k
                          : 
CharUnits::Zero()2.44k
;
2519
4.28k
  // Compute the maximum field alignment.
2520
4.28k
  MaxFieldAlignment = CharUnits::Zero();
2521
4.28k
  // Honor the default struct packing maximum alignment flag.
2522
4.28k
  if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct)
2523
0
      MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
2524
4.28k
  // Honor the packing attribute.  The MS-ABI ignores pragma pack if its larger
2525
4.28k
  // than the pointer size.
2526
4.28k
  if (const MaxFieldAlignmentAttr *MFAA = RD->getAttr<MaxFieldAlignmentAttr>()){
2527
122
    unsigned PackedAlignment = MFAA->getAlignment();
2528
122
    if (PackedAlignment <= Context.getTargetInfo().getPointerWidth(0))
2529
89
      MaxFieldAlignment = Context.toCharUnitsFromBits(PackedAlignment);
2530
122
  }
2531
4.28k
  // Packed attribute forces max field alignment to be 1.
2532
4.28k
  if (RD->hasAttr<PackedAttr>())
2533
61
    MaxFieldAlignment = CharUnits::One();
2534
4.28k
2535
4.28k
  // Try to respect the external layout if present.
2536
4.28k
  UseExternalLayout = false;
2537
4.28k
  if (ExternalASTSource *Source = Context.getExternalSource())
2538
47
    UseExternalLayout = Source->layoutRecordType(
2539
47
        RD, External.Size, External.Align, External.FieldOffsets,
2540
47
        External.BaseOffsets, External.VirtualBaseOffsets);
2541
4.28k
}
2542
2543
void
2544
3.97k
MicrosoftRecordLayoutBuilder::initializeCXXLayout(const CXXRecordDecl *RD) {
2545
3.97k
  EndsWithZeroSizedObject = false;
2546
3.97k
  LeadsWithZeroSizedBase = false;
2547
3.97k
  HasOwnVFPtr = false;
2548
3.97k
  HasVBPtr = false;
2549
3.97k
  PrimaryBase = nullptr;
2550
3.97k
  SharedVBPtrBase = nullptr;
2551
3.97k
  // Calculate pointer size and alignment.  These are used for vfptr and vbprt
2552
3.97k
  // injection.
2553
3.97k
  PointerInfo.Size =
2554
3.97k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
2555
3.97k
  PointerInfo.Alignment =
2556
3.97k
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
2557
3.97k
  // Respect pragma pack.
2558
3.97k
  if (!MaxFieldAlignment.isZero())
2559
56
    PointerInfo.Alignment = std::min(PointerInfo.Alignment, MaxFieldAlignment);
2560
3.97k
}
2561
2562
void
2563
3.97k
MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(const CXXRecordDecl *RD) {
2564
3.97k
  // The MS-ABI lays out all bases that contain leading vfptrs before it lays
2565
3.97k
  // out any bases that do not contain vfptrs.  We implement this as two passes
2566
3.97k
  // over the bases.  This approach guarantees that the primary base is laid out
2567
3.97k
  // first.  We use these passes to calculate some additional aggregated
2568
3.97k
  // information about the bases, such as required alignment and the presence of
2569
3.97k
  // zero sized members.
2570
3.97k
  const ASTRecordLayout *PreviousBaseLayout = nullptr;
2571
3.97k
  // Iterate through the bases and lay out the non-virtual ones.
2572
3.97k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
2573
2.35k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2574
2.35k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
2575
2.35k
    // Mark and skip virtual bases.
2576
2.35k
    if (Base.isVirtual()) {
2577
895
      HasVBPtr = true;
2578
895
      continue;
2579
895
    }
2580
1.46k
    // Check for a base to share a VBPtr with.
2581
1.46k
    if (!SharedVBPtrBase && 
BaseLayout.hasVBPtr()1.33k
) {
2582
223
      SharedVBPtrBase = BaseDecl;
2583
223
      HasVBPtr = true;
2584
223
    }
2585
1.46k
    // Only lay out bases with extendable VFPtrs on the first pass.
2586
1.46k
    if (!BaseLayout.hasExtendableVFPtr())
2587
1.02k
      continue;
2588
437
    // If we don't have a primary base, this one qualifies.
2589
437
    if (!PrimaryBase) {
2590
319
      PrimaryBase = BaseDecl;
2591
319
      LeadsWithZeroSizedBase = BaseLayout.leadsWithZeroSizedBase();
2592
319
    }
2593
437
    // Lay out the base.
2594
437
    layoutNonVirtualBase(RD, BaseDecl, BaseLayout, PreviousBaseLayout);
2595
437
  }
2596
3.97k
  // Figure out if we need a fresh VFPtr for this class.
2597
3.97k
  if (!PrimaryBase && 
RD->isDynamicClass()3.65k
)
2598
1.36k
    for (CXXRecordDecl::method_iterator i = RD->method_begin(),
2599
1.36k
                                        e = RD->method_end();
2600
5.61k
         !HasOwnVFPtr && 
i != e4.91k
;
++i4.25k
)
2601
4.25k
      HasOwnVFPtr = i->isVirtual() && 
i->size_overridden_methods() == 0935
;
2602
3.97k
  // If we don't have a primary base then we have a leading object that could
2603
3.97k
  // itself lead with a zero-sized object, something we track.
2604
3.97k
  bool CheckLeadingLayout = !PrimaryBase;
2605
3.97k
  // Iterate through the bases and lay out the non-virtual ones.
2606
3.97k
  for (const CXXBaseSpecifier &Base : RD->bases()) {
2607
2.35k
    if (Base.isVirtual())
2608
895
      continue;
2609
1.46k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2610
1.46k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
2611
1.46k
    // Only lay out bases without extendable VFPtrs on the second pass.
2612
1.46k
    if (BaseLayout.hasExtendableVFPtr()) {
2613
437
      VBPtrOffset = Bases[BaseDecl] + BaseLayout.getNonVirtualSize();
2614
437
      continue;
2615
437
    }
2616
1.02k
    // If this is the first layout, check to see if it leads with a zero sized
2617
1.02k
    // object.  If it does, so do we.
2618
1.02k
    if (CheckLeadingLayout) {
2619
650
      CheckLeadingLayout = false;
2620
650
      LeadsWithZeroSizedBase = BaseLayout.leadsWithZeroSizedBase();
2621
650
    }
2622
1.02k
    // Lay out the base.
2623
1.02k
    layoutNonVirtualBase(RD, BaseDecl, BaseLayout, PreviousBaseLayout);
2624
1.02k
    VBPtrOffset = Bases[BaseDecl] + BaseLayout.getNonVirtualSize();
2625
1.02k
  }
2626
3.97k
  // Set our VBPtroffset if we know it at this point.
2627
3.97k
  if (!HasVBPtr)
2628
3.14k
    VBPtrOffset = CharUnits::fromQuantity(-1);
2629
836
  else if (SharedVBPtrBase) {
2630
223
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(SharedVBPtrBase);
2631
223
    VBPtrOffset = Bases[SharedVBPtrBase] + Layout.getVBPtrOffset();
2632
223
  }
2633
3.97k
}
2634
2635
3.00k
static bool recordUsesEBO(const RecordDecl *RD) {
2636
3.00k
  if (!isa<CXXRecordDecl>(RD))
2637
16
    return false;
2638
2.98k
  if (RD->hasAttr<EmptyBasesAttr>())
2639
16
    return true;
2640
2.97k
  if (auto *LVA = RD->getAttr<LayoutVersionAttr>())
2641
0
    // TODO: Double check with the next version of MSVC.
2642
0
    if (LVA->getVersion() <= LangOptions::MSVC2015)
2643
0
      return false;
2644
2.97k
  // TODO: Some later version of MSVC will change the default behavior of the
2645
2.97k
  // compiler to enable EBO by default.  When this happens, we will need an
2646
2.97k
  // additional isCompatibleWithMSVC check.
2647
2.97k
  return false;
2648
2.97k
}
2649
2650
void MicrosoftRecordLayoutBuilder::layoutNonVirtualBase(
2651
    const CXXRecordDecl *RD,
2652
    const CXXRecordDecl *BaseDecl,
2653
    const ASTRecordLayout &BaseLayout,
2654
1.46k
    const ASTRecordLayout *&PreviousBaseLayout) {
2655
1.46k
  // Insert padding between two bases if the left first one is zero sized or
2656
1.46k
  // contains a zero sized subobject and the right is zero sized or one leads
2657
1.46k
  // with a zero sized base.
2658
1.46k
  bool MDCUsesEBO = recordUsesEBO(RD);
2659
1.46k
  if (PreviousBaseLayout && 
PreviousBaseLayout->endsWithZeroSizedObject()492
&&
2660
1.46k
      
BaseLayout.leadsWithZeroSizedBase()144
&&
!MDCUsesEBO111
)
2661
109
    Size++;
2662
1.46k
  ElementInfo Info = getAdjustedElementInfo(BaseLayout);
2663
1.46k
  CharUnits BaseOffset;
2664
1.46k
2665
1.46k
  // Respect the external AST source base offset, if present.
2666
1.46k
  bool FoundBase = false;
2667
1.46k
  if (UseExternalLayout) {
2668
2
    FoundBase = External.getExternalNVBaseOffset(BaseDecl, BaseOffset);
2669
2
    if (FoundBase) {
2670
0
      assert(BaseOffset >= Size && "base offset already allocated");
2671
0
      Size = BaseOffset;
2672
0
    }
2673
2
  }
2674
1.46k
2675
1.46k
  if (!FoundBase) {
2676
1.46k
    if (MDCUsesEBO && 
BaseDecl->isEmpty()12
) {
2677
4
      assert(BaseLayout.getNonVirtualSize() == CharUnits::Zero());
2678
4
      BaseOffset = CharUnits::Zero();
2679
1.45k
    } else {
2680
1.45k
      // Otherwise, lay the base out at the end of the MDC.
2681
1.45k
      BaseOffset = Size = Size.alignTo(Info.Alignment);
2682
1.45k
    }
2683
1.46k
  }
2684
1.46k
  Bases.insert(std::make_pair(BaseDecl, BaseOffset));
2685
1.46k
  Size += BaseLayout.getNonVirtualSize();
2686
1.46k
  PreviousBaseLayout = &BaseLayout;
2687
1.46k
}
2688
2689
4.28k
void MicrosoftRecordLayoutBuilder::layoutFields(const RecordDecl *RD) {
2690
4.28k
  LastFieldIsNonZeroWidthBitfield = false;
2691
4.28k
  for (const FieldDecl *Field : RD->fields())
2692
2.41k
    layoutField(Field);
2693
4.28k
}
2694
2695
2.41k
void MicrosoftRecordLayoutBuilder::layoutField(const FieldDecl *FD) {
2696
2.41k
  if (FD->isBitField()) {
2697
185
    layoutBitField(FD);
2698
185
    return;
2699
185
  }
2700
2.22k
  LastFieldIsNonZeroWidthBitfield = false;
2701
2.22k
  ElementInfo Info = getAdjustedElementInfo(FD);
2702
2.22k
  Alignment = std::max(Alignment, Info.Alignment);
2703
2.22k
  CharUnits FieldOffset;
2704
2.22k
  if (UseExternalLayout)
2705
6
    FieldOffset =
2706
6
        Context.toCharUnitsFromBits(External.getExternalFieldOffset(FD));
2707
2.22k
  else if (IsUnion)
2708
67
    FieldOffset = CharUnits::Zero();
2709
2.15k
  else
2710
2.15k
    FieldOffset = Size.alignTo(Info.Alignment);
2711
2.22k
  placeFieldAtOffset(FieldOffset);
2712
2.22k
  Size = std::max(Size, FieldOffset + Info.Size);
2713
2.22k
}
2714
2715
185
void MicrosoftRecordLayoutBuilder::layoutBitField(const FieldDecl *FD) {
2716
185
  unsigned Width = FD->getBitWidthValue(Context);
2717
185
  if (Width == 0) {
2718
42
    layoutZeroWidthBitField(FD);
2719
42
    return;
2720
42
  }
2721
143
  ElementInfo Info = getAdjustedElementInfo(FD);
2722
143
  // Clamp the bitfield to a containable size for the sake of being able
2723
143
  // to lay them out.  Sema will throw an error.
2724
143
  if (Width > Context.toBits(Info.Size))
2725
0
    Width = Context.toBits(Info.Size);
2726
143
  // Check to see if this bitfield fits into an existing allocation.  Note:
2727
143
  // MSVC refuses to pack bitfields of formal types with different sizes
2728
143
  // into the same allocation.
2729
143
  if (!UseExternalLayout && 
!IsUnion138
&&
LastFieldIsNonZeroWidthBitfield126
&&
2730
143
      
CurrentBitfieldSize == Info.Size58
&&
Width <= RemainingBitsInField49
) {
2731
25
    placeFieldAtBitOffset(Context.toBits(Size) - RemainingBitsInField);
2732
25
    RemainingBitsInField -= Width;
2733
25
    return;
2734
25
  }
2735
118
  LastFieldIsNonZeroWidthBitfield = true;
2736
118
  CurrentBitfieldSize = Info.Size;
2737
118
  if (UseExternalLayout) {
2738
5
    auto FieldBitOffset = External.getExternalFieldOffset(FD);
2739
5
    placeFieldAtBitOffset(FieldBitOffset);
2740
5
    auto NewSize = Context.toCharUnitsFromBits(
2741
5
        llvm::alignDown(FieldBitOffset, Context.toBits(Info.Alignment)) +
2742
5
        Context.toBits(Info.Size));
2743
5
    Size = std::max(Size, NewSize);
2744
5
    Alignment = std::max(Alignment, Info.Alignment);
2745
113
  } else if (IsUnion) {
2746
12
    placeFieldAtOffset(CharUnits::Zero());
2747
12
    Size = std::max(Size, Info.Size);
2748
12
    // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
2749
101
  } else {
2750
101
    // Allocate a new block of memory and place the bitfield in it.
2751
101
    CharUnits FieldOffset = Size.alignTo(Info.Alignment);
2752
101
    placeFieldAtOffset(FieldOffset);
2753
101
    Size = FieldOffset + Info.Size;
2754
101
    Alignment = std::max(Alignment, Info.Alignment);
2755
101
    RemainingBitsInField = Context.toBits(Info.Size) - Width;
2756
101
  }
2757
118
}
2758
2759
void
2760
42
MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField(const FieldDecl *FD) {
2761
42
  // Zero-width bitfields are ignored unless they follow a non-zero-width
2762
42
  // bitfield.
2763
42
  if (!LastFieldIsNonZeroWidthBitfield) {
2764
20
    placeFieldAtOffset(IsUnion ? 
CharUnits::Zero()4
:
Size16
);
2765
20
    // TODO: Add a Sema warning that MS ignores alignment for zero
2766
20
    // sized bitfields that occur after zero-size bitfields or non-bitfields.
2767
20
    return;
2768
20
  }
2769
22
  LastFieldIsNonZeroWidthBitfield = false;
2770
22
  ElementInfo Info = getAdjustedElementInfo(FD);
2771
22
  if (IsUnion) {
2772
8
    placeFieldAtOffset(CharUnits::Zero());
2773
8
    Size = std::max(Size, Info.Size);
2774
8
    // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
2775
14
  } else {
2776
14
    // Round up the current record size to the field's alignment boundary.
2777
14
    CharUnits FieldOffset = Size.alignTo(Info.Alignment);
2778
14
    placeFieldAtOffset(FieldOffset);
2779
14
    Size = FieldOffset;
2780
14
    Alignment = std::max(Alignment, Info.Alignment);
2781
14
  }
2782
22
}
2783
2784
3.97k
void MicrosoftRecordLayoutBuilder::injectVBPtr(const CXXRecordDecl *RD) {
2785
3.97k
  if (!HasVBPtr || 
SharedVBPtrBase836
)
2786
3.36k
    return;
2787
613
  // Inject the VBPointer at the injection site.
2788
613
  CharUnits InjectionSite = VBPtrOffset;
2789
613
  // But before we do, make sure it's properly aligned.
2790
613
  VBPtrOffset = VBPtrOffset.alignTo(PointerInfo.Alignment);
2791
613
  // Determine where the first field should be laid out after the vbptr.
2792
613
  CharUnits FieldStart = VBPtrOffset + PointerInfo.Size;
2793
613
  // Shift everything after the vbptr down, unless we're using an external
2794
613
  // layout.
2795
613
  if (UseExternalLayout) {
2796
2
    // It is possible that there were no fields or bases located after vbptr,
2797
2
    // so the size was not adjusted before.
2798
2
    if (Size < FieldStart)
2799
1
      Size = FieldStart;
2800
2
    return;
2801
2
  }
2802
611
  // Make sure that the amount we push the fields back by is a multiple of the
2803
611
  // alignment.
2804
611
  CharUnits Offset = (FieldStart - InjectionSite)
2805
611
                         .alignTo(std::max(RequiredAlignment, Alignment));
2806
611
  Size += Offset;
2807
611
  for (uint64_t &FieldOffset : FieldOffsets)
2808
356
    FieldOffset += Context.toBits(Offset);
2809
611
  for (BaseOffsetsMapTy::value_type &Base : Bases)
2810
257
    if (Base.second >= InjectionSite)
2811
64
      Base.second += Offset;
2812
611
}
2813
2814
3.97k
void MicrosoftRecordLayoutBuilder::injectVFPtr(const CXXRecordDecl *RD) {
2815
3.97k
  if (!HasOwnVFPtr)
2816
3.27k
    return;
2817
706
  // Make sure that the amount we push the struct back by is a multiple of the
2818
706
  // alignment.
2819
706
  CharUnits Offset =
2820
706
      PointerInfo.Size.alignTo(std::max(RequiredAlignment, Alignment));
2821
706
  // Push back the vbptr, but increase the size of the object and push back
2822
706
  // regular fields by the offset only if not using external record layout.
2823
706
  if (HasVBPtr)
2824
109
    VBPtrOffset += Offset;
2825
706
2826
706
  if (UseExternalLayout) {
2827
2
    // The class may have no bases or fields, but still have a vfptr
2828
2
    // (e.g. it's an interface class). The size was not correctly set before
2829
2
    // in this case.
2830
2
    if (FieldOffsets.empty() && 
Bases.empty()1
)
2831
1
      Size += Offset;
2832
2
    return;
2833
2
  }
2834
704
2835
704
  Size += Offset;
2836
704
2837
704
  // If we're using an external layout, the fields offsets have already
2838
704
  // accounted for this adjustment.
2839
704
  for (uint64_t &FieldOffset : FieldOffsets)
2840
155
    FieldOffset += Context.toBits(Offset);
2841
704
  for (BaseOffsetsMapTy::value_type &Base : Bases)
2842
80
    Base.second += Offset;
2843
704
}
2844
2845
3.97k
void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) {
2846
3.97k
  if (!HasVBPtr)
2847
3.14k
    return;
2848
836
  // Vtordisps are always 4 bytes (even in 64-bit mode)
2849
836
  CharUnits VtorDispSize = CharUnits::fromQuantity(4);
2850
836
  CharUnits VtorDispAlignment = VtorDispSize;
2851
836
  // vtordisps respect pragma pack.
2852
836
  if (!MaxFieldAlignment.isZero())
2853
16
    VtorDispAlignment = std::min(VtorDispAlignment, MaxFieldAlignment);
2854
836
  // The alignment of the vtordisp is at least the required alignment of the
2855
836
  // entire record.  This requirement may be present to support vtordisp
2856
836
  // injection.
2857
1.24k
  for (const CXXBaseSpecifier &VBase : RD->vbases()) {
2858
1.24k
    const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
2859
1.24k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
2860
1.24k
    RequiredAlignment =
2861
1.24k
        std::max(RequiredAlignment, BaseLayout.getRequiredAlignment());
2862
1.24k
  }
2863
836
  VtorDispAlignment = std::max(VtorDispAlignment, RequiredAlignment);
2864
836
  // Compute the vtordisp set.
2865
836
  llvm::SmallPtrSet<const CXXRecordDecl *, 2> HasVtorDispSet;
2866
836
  computeVtorDispSet(HasVtorDispSet, RD);
2867
836
  // Iterate through the virtual bases and lay them out.
2868
836
  const ASTRecordLayout *PreviousBaseLayout = nullptr;
2869
1.24k
  for (const CXXBaseSpecifier &VBase : RD->vbases()) {
2870
1.24k
    const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
2871
1.24k
    const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
2872
1.24k
    bool HasVtordisp = HasVtorDispSet.count(BaseDecl) > 0;
2873
1.24k
    // Insert padding between two bases if the left first one is zero sized or
2874
1.24k
    // contains a zero sized subobject and the right is zero sized or one leads
2875
1.24k
    // with a zero sized base.  The padding between virtual bases is 4
2876
1.24k
    // bytes (in both 32 and 64 bits modes) and always involves rounding up to
2877
1.24k
    // the required alignment, we don't know why.
2878
1.24k
    if ((PreviousBaseLayout && 
PreviousBaseLayout->endsWithZeroSizedObject()413
&&
2879
1.24k
         
BaseLayout.leadsWithZeroSizedBase()165
&&
!recordUsesEBO(RD)116
) ||
2880
1.24k
        
HasVtordisp1.13k
) {
2881
273
      Size = Size.alignTo(VtorDispAlignment) + VtorDispSize;
2882
273
      Alignment = std::max(VtorDispAlignment, Alignment);
2883
273
    }
2884
1.24k
    // Insert the virtual base.
2885
1.24k
    ElementInfo Info = getAdjustedElementInfo(BaseLayout);
2886
1.24k
    CharUnits BaseOffset;
2887
1.24k
2888
1.24k
    // Respect the external AST source base offset, if present.
2889
1.24k
    if (UseExternalLayout) {
2890
3
      if (!External.getExternalVBaseOffset(BaseDecl, BaseOffset))
2891
3
        BaseOffset = Size;
2892
3
    } else
2893
1.24k
      BaseOffset = Size.alignTo(Info.Alignment);
2894
1.24k
2895
1.24k
    assert(BaseOffset >= Size && "base offset already allocated");
2896
1.24k
2897
1.24k
    VBases.insert(std::make_pair(BaseDecl,
2898
1.24k
        ASTRecordLayout::VBaseInfo(BaseOffset, HasVtordisp)));
2899
1.24k
    Size = BaseOffset + BaseLayout.getNonVirtualSize();
2900
1.24k
    PreviousBaseLayout = &BaseLayout;
2901
1.24k
  }
2902
836
}
2903
2904
4.28k
void MicrosoftRecordLayoutBuilder::finalizeLayout(const RecordDecl *RD) {
2905
4.28k
  // Respect required alignment.  Note that in 32-bit mode Required alignment
2906
4.28k
  // may be 0 and cause size not to be updated.
2907
4.28k
  DataSize = Size;
2908
4.28k
  if (!RequiredAlignment.isZero()) {
2909
2.00k
    Alignment = std::max(Alignment, RequiredAlignment);
2910
2.00k
    auto RoundingAlignment = Alignment;
2911
2.00k
    if (!MaxFieldAlignment.isZero())
2912
129
      RoundingAlignment = std::min(RoundingAlignment, MaxFieldAlignment);
2913
2.00k
    RoundingAlignment = std::max(RoundingAlignment, RequiredAlignment);
2914
2.00k
    Size = Size.alignTo(RoundingAlignment);
2915
2.00k
  }
2916
4.28k
  if (Size.isZero()) {
2917
1.42k
    if (!recordUsesEBO(RD) || 
!cast<CXXRecordDecl>(RD)->isEmpty()4
) {
2918
1.42k
      EndsWithZeroSizedObject = true;
2919
1.42k
      LeadsWithZeroSizedBase = true;
2920
1.42k
    }
2921
1.42k
    // Zero-sized structures have size equal to their alignment if a
2922
1.42k
    // __declspec(align) came into play.
2923
1.42k
    if (RequiredAlignment >= MinEmptyStructSize)
2924
595
      Size = Alignment;
2925
831
    else
2926
831
      Size = MinEmptyStructSize;
2927
1.42k
  }
2928
4.28k
2929
4.28k
  if (UseExternalLayout) {
2930
8
    Size = Context.toCharUnitsFromBits(External.Size);
2931
8
    if (External.Align)
2932
4
      Alignment = Context.toCharUnitsFromBits(External.Align);
2933
8
  }
2934
4.28k
}
2935
2936
// Recursively walks the non-virtual bases of a class and determines if any of
2937
// them are in the bases with overridden methods set.
2938
static bool
2939
RequiresVtordisp(const llvm::SmallPtrSetImpl<const CXXRecordDecl *> &
2940
                     BasesWithOverriddenMethods,
2941
662
                 const CXXRecordDecl *RD) {
2942
662
  if (BasesWithOverriddenMethods.count(RD))
2943
93
    return true;
2944
569
  // If any of a virtual bases non-virtual bases (recursively) requires a
2945
569
  // vtordisp than so does this virtual base.
2946
569
  for (const CXXBaseSpecifier &Base : RD->bases())
2947
85
    if (!Base.isVirtual() &&
2948
85
        RequiresVtordisp(BasesWithOverriddenMethods,
2949
50
                         Base.getType()->getAsCXXRecordDecl()))
2950
18
      return true;
2951
569
  
return false551
;
2952
569
}
2953
2954
void MicrosoftRecordLayoutBuilder::computeVtorDispSet(
2955
    llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtordispSet,
2956
836
    const CXXRecordDecl *RD) const {
2957
836
  // /vd2 or #pragma vtordisp(2): Always use vtordisps for virtual bases with
2958
836
  // vftables.
2959
836
  if (RD->getMSVtorDispMode() == MSVtorDispAttr::ForVFTable) {
2960
25
    for (const CXXBaseSpecifier &Base : RD->vbases()) {
2961
25
      const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2962
25
      const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
2963
25
      if (Layout.hasExtendableVFPtr())
2964
23
        HasVtordispSet.insert(BaseDecl);
2965
25
    }
2966
20
    return;
2967
20
  }
2968
816
2969
816
  // If any of our bases need a vtordisp for this type, so do we.  Check our
2970
816
  // direct bases for vtordisp requirements.
2971
1.50k
  
for (const CXXBaseSpecifier &Base : RD->bases())816
{
2972
1.50k
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
2973
1.50k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
2974
1.50k
    for (const auto &bi : Layout.getVBaseOffsetsMap())
2975
476
      if (bi.second.hasVtorDisp())
2976
43
        HasVtordispSet.insert(bi.first);
2977
1.50k
  }
2978
816
  // We don't introduce any additional vtordisps if either:
2979
816
  // * A user declared constructor or destructor aren't declared.
2980
816
  // * #pragma vtordisp(0) or the /vd0 flag are in use.
2981
816
  if ((!RD->hasUserDeclaredConstructor() && 
!RD->hasUserDeclaredDestructor()459
) ||
2982
816
      
RD->getMSVtorDispMode() == MSVtorDispAttr::Never415
)
2983
406
    return;
2984
410
  // /vd1 or #pragma vtordisp(1): Try to guess based on whether we think it's
2985
410
  // possible for a partially constructed object with virtual base overrides to
2986
410
  // escape a non-trivial constructor.
2987
410
  assert(RD->getMSVtorDispMode() == MSVtorDispAttr::ForVBaseOverride);
2988
410
  // Compute a set of base classes which define methods we override.  A virtual
2989
410
  // base in this set will require a vtordisp.  A virtual base that transitively
2990
410
  // contains one of these bases as a non-virtual base will also require a
2991
410
  // vtordisp.
2992
410
  llvm::SmallPtrSet<const CXXMethodDecl *, 8> Work;
2993
410
  llvm::SmallPtrSet<const CXXRecordDecl *, 2> BasesWithOverriddenMethods;
2994
410
  // Seed the working set with our non-destructor, non-pure virtual methods.
2995
410
  for (const CXXMethodDecl *MD : RD->methods())
2996
2.06k
    if (MD->isVirtual() && 
!isa<CXXDestructorDecl>(MD)256
&&
!MD->isPure()180
)
2997
178
      Work.insert(MD);
2998
716
  while (!Work.empty()) {
2999
306
    const CXXMethodDecl *MD = *Work.begin();
3000
306
    auto MethodRange = MD->overridden_methods();
3001
306
    // If a virtual method has no-overrides it lives in its parent's vtable.
3002
306
    if (MethodRange.begin() == MethodRange.end())
3003
191
      BasesWithOverriddenMethods.insert(MD->getParent());
3004
115
    else
3005
115
      Work.insert(MethodRange.begin(), MethodRange.end());
3006
306
    // We've finished processing this element, remove it from the working set.
3007
306
    Work.erase(MD);
3008
306
  }
3009
410
  // For each of our virtual bases, check if it is in the set of overridden
3010
410
  // bases or if it transitively contains a non-virtual base that is.
3011
644
  for (const CXXBaseSpecifier &Base : RD->vbases()) {
3012
644
    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
3013
644
    if (!HasVtordispSet.count(BaseDecl) &&
3014
644
        
RequiresVtordisp(BasesWithOverriddenMethods, BaseDecl)612
)
3015
93
      HasVtordispSet.insert(BaseDecl);
3016
644
  }
3017
410
}
3018
3019
/// getASTRecordLayout - Get or compute information about the layout of the
3020
/// specified record (struct/union/class), which indicates its size and field
3021
/// position information.
3022
const ASTRecordLayout &
3023
3.83M
ASTContext::getASTRecordLayout(const RecordDecl *D) const {
3024
3.83M
  // These asserts test different things.  A record has a definition
3025
3.83M
  // as soon as we begin to parse the definition.  That definition is
3026
3.83M
  // not a complete definition (which is what isDefinition() tests)
3027
3.83M
  // until we *finish* parsing the definition.
3028
3.83M
3029
3.83M
  if (D->hasExternalLexicalStorage() && 
!D->getDefinition()18.0k
)
3030
0
    getExternalSource()->CompleteType(const_cast<RecordDecl*>(D));
3031
3.83M
3032
3.83M
  D = D->getDefinition();
3033
3.83M
  assert(D && "Cannot get layout of forward declarations!");
3034
3.83M
  assert(!D->isInvalidDecl() && "Cannot get layout of invalid decl!");
3035
3.83M
  assert(D->isCompleteDefinition() && "Cannot layout type before complete!");
3036
3.83M
3037
3.83M
  // Look up this layout, if already laid out, return what we have.
3038
3.83M
  // Note that we can't save a reference to the entry because this function
3039
3.83M
  // is recursive.
3040
3.83M
  const ASTRecordLayout *Entry = ASTRecordLayouts[D];
3041
3.83M
  if (Entry) 
return *Entry3.63M
;
3042
192k
3043
192k
  const ASTRecordLayout *NewEntry = nullptr;
3044
192k
3045
192k
  if (isMsLayout(*this)) {
3046
4.28k
    MicrosoftRecordLayoutBuilder Builder(*this);
3047
4.28k
    if (const auto *RD = dyn_cast<CXXRecordDecl>(D)) {
3048
3.97k
      Builder.cxxLayout(RD);
3049
3.97k
      NewEntry = new (*this) ASTRecordLayout(
3050
3.97k
          *this, Builder.Size, Builder.Alignment, Builder.Alignment,
3051
3.97k
          Builder.RequiredAlignment,
3052
3.97k
          Builder.HasOwnVFPtr, Builder.HasOwnVFPtr || 
Builder.PrimaryBase3.27k
,
3053
3.97k
          Builder.VBPtrOffset, Builder.DataSize, Builder.FieldOffsets,
3054
3.97k
          Builder.NonVirtualSize, Builder.Alignment, CharUnits::Zero(),
3055
3.97k
          Builder.PrimaryBase, false, Builder.SharedVBPtrBase,
3056
3.97k
          Builder.EndsWithZeroSizedObject, Builder.LeadsWithZeroSizedBase,
3057
3.97k
          Builder.Bases, Builder.VBases);
3058
3.97k
    } else {
3059
305
      Builder.layout(D);
3060
305
      NewEntry = new (*this) ASTRecordLayout(
3061
305
          *this, Builder.Size, Builder.Alignment, Builder.Alignment,
3062
305
          Builder.RequiredAlignment,
3063
305
          Builder.Size, Builder.FieldOffsets);
3064
305
    }
3065
188k
  } else {
3066
188k
    if (const auto *RD = dyn_cast<CXXRecordDecl>(D)) {
3067
142k
      EmptySubobjectMap EmptySubobjects(*this, RD);
3068
142k
      ItaniumRecordLayoutBuilder Builder(*this, &EmptySubobjects);
3069
142k
      Builder.Layout(RD);
3070
142k
3071
142k
      // In certain situations, we are allowed to lay out objects in the
3072
142k
      // tail-padding of base classes.  This is ABI-dependent.
3073
142k
      // FIXME: this should be stored in the record layout.
3074
142k
      bool skipTailPadding =
3075
142k
          mustSkipTailPadding(getTargetInfo().getCXXABI(), RD);
3076
142k
3077
142k
      // FIXME: This should be done in FinalizeLayout.
3078
142k
      CharUnits DataSize =
3079
142k
          skipTailPadding ? 
Builder.getSize()62.3k
:
Builder.getDataSize()79.8k
;
3080
142k
      CharUnits NonVirtualSize =
3081
142k
          skipTailPadding ? 
DataSize62.3k
:
Builder.NonVirtualSize79.8k
;
3082
142k
      NewEntry = new (*this) ASTRecordLayout(
3083
142k
          *this, Builder.getSize(), Builder.Alignment, Builder.UnadjustedAlignment,
3084
142k
          /*RequiredAlignment : used by MS-ABI)*/
3085
142k
          Builder.Alignment, Builder.HasOwnVFPtr, RD->isDynamicClass(),
3086
142k
          CharUnits::fromQuantity(-1), DataSize, Builder.FieldOffsets,
3087
142k
          NonVirtualSize, Builder.NonVirtualAlignment,
3088
142k
          EmptySubobjects.SizeOfLargestEmptySubobject, Builder.PrimaryBase,
3089
142k
          Builder.PrimaryBaseIsVirtual, nullptr, false, false, Builder.Bases,
3090
142k
          Builder.VBases);
3091
142k
    } else {
3092
45.9k
      ItaniumRecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/nullptr);
3093
45.9k
      Builder.Layout(D);
3094
45.9k
3095
45.9k
      NewEntry = new (*this) ASTRecordLayout(
3096
45.9k
          *this, Builder.getSize(), Builder.Alignment, Builder.UnadjustedAlignment,
3097
45.9k
          /*RequiredAlignment : used by MS-ABI)*/
3098
45.9k
          Builder.Alignment, Builder.getSize(), Builder.FieldOffsets);
3099
45.9k
    }
3100
188k
  }
3101
192k
3102
192k
  ASTRecordLayouts[D] = NewEntry;
3103
192k
3104
192k
  if (getLangOpts().DumpRecordLayouts) {
3105
1.08k
    llvm::outs() << "\n*** Dumping AST Record Layout\n";
3106
1.08k
    DumpRecordLayout(D, llvm::outs(), getLangOpts().DumpRecordLayoutsSimple);
3107
1.08k
  }
3108
192k
3109
192k
  return *NewEntry;
3110
192k
}
3111
3112
91.8k
const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) {
3113
91.8k
  if (!getTargetInfo().getCXXABI().hasKeyFunctions())
3114
3.17k
    return nullptr;
3115
88.7k
3116
88.7k
  assert(RD->getDefinition() && "Cannot get key function for forward decl!");
3117
88.7k
  RD = RD->getDefinition();
3118
88.7k
3119
88.7k
  // Beware:
3120
88.7k
  //  1) computing the key function might trigger deserialization, which might
3121
88.7k
  //     invalidate iterators into KeyFunctions
3122
88.7k
  //  2) 'get' on the LazyDeclPtr might also trigger deserialization and
3123
88.7k
  //     invalidate the LazyDeclPtr within the map itself
3124
88.7k
  LazyDeclPtr Entry = KeyFunctions[RD];
3125
88.7k
  const Decl *Result =
3126
88.7k
      Entry ? 
Entry.get(getExternalSource())25.7k
:
computeKeyFunction(*this, RD)62.9k
;
3127
88.7k
3128
88.7k
  // Store it back if it changed.
3129
88.7k
  if (Entry.isOffset() || Entry.isValid() != bool(Result))
3130
14.9k
    KeyFunctions[RD] = const_cast<Decl*>(Result);
3131
88.7k
3132
88.7k
  return cast_or_null<CXXMethodDecl>(Result);
3133
88.7k
}
3134
3135
14
void ASTContext::setNonKeyFunction(const CXXMethodDecl *Method) {
3136
14
  assert(Method == Method->getFirstDecl() &&
3137
14
         "not working with method declaration from class definition");
3138
14
3139
14
  // Look up the cache entry.  Since we're working with the first
3140
14
  // declaration, its parent must be the class definition, which is
3141
14
  // the correct key for the KeyFunctions hash.
3142
14
  const auto &Map = KeyFunctions;
3143
14
  auto I = Map.find(Method->getParent());
3144
14
3145
14
  // If it's not cached, there's nothing to do.
3146
14
  if (I == Map.end()) 
return0
;
3147
14
3148
14
  // If it is cached, check whether it's the target method, and if so,
3149
14
  // remove it from the cache. Note, the call to 'get' might invalidate
3150
14
  // the iterator and the LazyDeclPtr object within the map.
3151
14
  LazyDeclPtr Ptr = I->second;
3152
14
  if (Ptr.get(getExternalSource()) == Method) {
3153
14
    // FIXME: remember that we did this for module / chained PCH state?
3154
14
    KeyFunctions.erase(Method->getParent());
3155
14
  }
3156
14
}
3157
3158
580
static uint64_t getFieldOffset(const ASTContext &C, const FieldDecl *FD) {
3159
580
  const ASTRecordLayout &Layout = C.getASTRecordLayout(FD->getParent());
3160
580
  return Layout.getFieldOffset(FD->getFieldIndex());
3161
580
}
3162
3163
544
uint64_t ASTContext::getFieldOffset(const ValueDecl *VD) const {
3164
544
  uint64_t OffsetInBits;
3165
544
  if (const FieldDecl *FD = dyn_cast<FieldDecl>(VD)) {
3166
522
    OffsetInBits = ::getFieldOffset(*this, FD);
3167
522
  } else {
3168
22
    const IndirectFieldDecl *IFD = cast<IndirectFieldDecl>(VD);
3169
22
3170
22
    OffsetInBits = 0;
3171
22
    for (const NamedDecl *ND : IFD->chain())
3172
58
      OffsetInBits += ::getFieldOffset(*this, cast<FieldDecl>(ND));
3173
22
  }
3174
544
3175
544
  return OffsetInBits;
3176
544
}
3177
3178
uint64_t ASTContext::lookupFieldBitOffset(const ObjCInterfaceDecl *OID,
3179
                                          const ObjCImplementationDecl *ID,
3180
1.83k
                                          const ObjCIvarDecl *Ivar) const {
3181
1.83k
  const ObjCInterfaceDecl *Container = Ivar->getContainingInterface();
3182
1.83k
3183
1.83k
  // FIXME: We should eliminate the need to have ObjCImplementationDecl passed
3184
1.83k
  // in here; it should never be necessary because that should be the lexical
3185
1.83k
  // decl context for the ivar.
3186
1.83k
3187
1.83k
  // If we know have an implementation (and the ivar is in it) then
3188
1.83k
  // look up in the implementation layout.
3189
1.83k
  const ASTRecordLayout *RL;
3190
1.83k
  if (ID && 
declaresSameEntity(ID->getClassInterface(), Container)1.27k
)
3191
1.21k
    RL = &getASTObjCImplementationLayout(ID);
3192
617
  else
3193
617
    RL = &getASTObjCInterfaceLayout(Container);
3194
1.83k
3195
1.83k
  // Compute field index.
3196
1.83k
  //
3197
1.83k
  // FIXME: The index here is closely tied to how ASTContext::getObjCLayout is
3198
1.83k
  // implemented. This should be fixed to get the information from the layout
3199
1.83k
  // directly.
3200
1.83k
  unsigned Index = 0;
3201
1.83k
3202
1.83k
  for (const ObjCIvarDecl *IVD = Container->all_declared_ivar_begin();
3203
6.18k
       IVD; 
IVD = IVD->getNextIvar()4.35k
) {
3204
6.18k
    if (Ivar == IVD)
3205
1.83k
      break;
3206
4.35k
    ++Index;
3207
4.35k
  }
3208
1.83k
  assert(Index < RL->getFieldCount() && "Ivar is not inside record layout!");
3209
1.83k
3210
1.83k
  return RL->getFieldOffset(Index);
3211
1.83k
}
3212
3213
/// getObjCLayout - Get or compute information about the layout of the
3214
/// given interface.
3215
///
3216
/// \param Impl - If given, also include the layout of the interface's
3217
/// implementation. This may differ by including synthesized ivars.
3218
const ASTRecordLayout &
3219
ASTContext::getObjCLayout(const ObjCInterfaceDecl *D,
3220
4.23k
                          const ObjCImplementationDecl *Impl) const {
3221
4.23k
  // Retrieve the definition
3222
4.23k
  if (D->hasExternalLexicalStorage() && 
!D->getDefinition()1
)
3223
0
    getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl*>(D));
3224
4.23k
  D = D->getDefinition();
3225
4.23k
  assert(D && D->isThisDeclarationADefinition() && "Invalid interface decl!");
3226
4.23k
3227
4.23k
  // Look up this layout, if already laid out, return what we have.
3228
4.23k
  const ObjCContainerDecl *Key =
3229
4.23k
    Impl ? 
(const ObjCContainerDecl*) Impl1.85k
:
(const ObjCContainerDecl*) D2.37k
;
3230
4.23k
  if (const ASTRecordLayout *Entry = ObjCLayouts[Key])
3231
2.13k
    return *Entry;
3232
2.10k
3233
2.10k
  // Add in synthesized ivar count if laying out an implementation.
3234
2.10k
  if (Impl) {
3235
1.31k
    unsigned SynthCount = CountNonClassIvars(D);
3236
1.31k
    // If there aren't any synthesized ivars then reuse the interface
3237
1.31k
    // entry. Note we can't cache this because we simply free all
3238
1.31k
    // entries later; however we shouldn't look up implementations
3239
1.31k
    // frequently.
3240
1.31k
    if (SynthCount == 0)
3241
1.17k
      return getObjCLayout(D, nullptr);
3242
927
  }
3243
927
3244
927
  ItaniumRecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/nullptr);
3245
927
  Builder.Layout(D);
3246
927
3247
927
  const ASTRecordLayout *NewEntry =
3248
927
    new (*this) ASTRecordLayout(*this, Builder.getSize(),
3249
927
                                Builder.Alignment,
3250
927
                                Builder.UnadjustedAlignment,
3251
927
                                /*RequiredAlignment : used by MS-ABI)*/
3252
927
                                Builder.Alignment,
3253
927
                                Builder.getDataSize(),
3254
927
                                Builder.FieldOffsets);
3255
927
3256
927
  ObjCLayouts[Key] = NewEntry;
3257
927
3258
927
  return *NewEntry;
3259
927
}
3260
3261
static void PrintOffset(raw_ostream &OS,
3262
4.78k
                        CharUnits Offset, unsigned IndentLevel) {
3263
4.78k
  OS << llvm::format("%10" PRId64 " | ", (int64_t)Offset.getQuantity());
3264
4.78k
  OS.indent(IndentLevel * 2);
3265
4.78k
}
3266
3267
static void PrintBitFieldOffset(raw_ostream &OS, CharUnits Offset,
3268
                                unsigned Begin, unsigned Width,
3269
293
                                unsigned IndentLevel) {
3270
293
  llvm::SmallString<10> Buffer;
3271
293
  {
3272
293
    llvm::raw_svector_ostream BufferOS(Buffer);
3273
293
    BufferOS << Offset.getQuantity() << ':';
3274
293
    if (Width == 0) {
3275
48
      BufferOS << '-';
3276
245
    } else {
3277
245
      BufferOS << Begin << '-' << (Begin + Width - 1);
3278
245
    }
3279
293
  }
3280
293
3281
293
  OS << llvm::right_justify(Buffer, 10) << " | ";
3282
293
  OS.indent(IndentLevel * 2);
3283
293
}
3284
3285
1.90k
static void PrintIndentNoOffset(raw_ostream &OS, unsigned IndentLevel) {
3286
1.90k
  OS << "           | ";
3287
1.90k
  OS.indent(IndentLevel * 2);
3288
1.90k
}
3289
3290
static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD,
3291
                             const ASTContext &C,
3292
                             CharUnits Offset,
3293
                             unsigned IndentLevel,
3294
                             const char* Description,
3295
                             bool PrintSizeInfo,
3296
2.48k
                             bool IncludeVirtualBases) {
3297
2.48k
  const ASTRecordLayout &Layout = C.getASTRecordLayout(RD);
3298
2.48k
  auto CXXRD = dyn_cast<CXXRecordDecl>(RD);
3299
2.48k
3300
2.48k
  PrintOffset(OS, Offset, IndentLevel);
3301
2.48k
  OS << C.getTypeDeclType(const_cast<RecordDecl*>(RD)).getAsString();
3302
2.48k
  if (Description)
3303
1.48k
    OS << ' ' << Description;
3304
2.48k
  if (CXXRD && 
CXXRD->isEmpty()2.39k
)
3305
787
    OS << " (empty)";
3306
2.48k
  OS << '\n';
3307
2.48k
3308
2.48k
  IndentLevel++;
3309
2.48k
3310
2.48k
  // Dump bases.
3311
2.48k
  if (CXXRD) {
3312
2.39k
    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
3313
2.39k
    bool HasOwnVFPtr = Layout.hasOwnVFPtr();
3314
2.39k
    bool HasOwnVBPtr = Layout.hasOwnVBPtr();
3315
2.39k
3316
2.39k
    // Vtable pointer.
3317
2.39k
    if (CXXRD->isDynamicClass() && 
!PrimaryBase781
&&
!isMsLayout(C)726
) {
3318
7
      PrintOffset(OS, Offset, IndentLevel);
3319
7
      OS << '(' << *RD << " vtable pointer)\n";
3320
2.38k
    } else if (HasOwnVFPtr) {
3321
345
      PrintOffset(OS, Offset, IndentLevel);
3322
345
      // vfptr (for Microsoft C++ ABI)
3323
345
      OS << '(' << *RD << " vftable pointer)\n";
3324
345
    }
3325
2.39k
3326
2.39k
    // Collect nvbases.
3327
2.39k
    SmallVector<const CXXRecordDecl *, 4> Bases;
3328
2.39k
    for (const CXXBaseSpecifier &Base : CXXRD->bases()) {
3329
1.36k
      assert(!Base.getType()->isDependentType() &&
3330
1.36k
             "Cannot layout class with dependent bases.");
3331
1.36k
      if (!Base.isVirtual())
3332
733
        Bases.push_back(Base.getType()->getAsCXXRecordDecl());
3333
1.36k
    }
3334
2.39k
3335
2.39k
    // Sort nvbases by offset.
3336
2.39k
    llvm::stable_sort(
3337
2.39k
        Bases, [&](const CXXRecordDecl *L, const CXXRecordDecl *R) {
3338
304
          return Layout.getBaseClassOffset(L) < Layout.getBaseClassOffset(R);
3339
304
        });
3340
2.39k
3341
2.39k
    // Dump (non-virtual) bases
3342
2.39k
    for (const CXXRecordDecl *Base : Bases) {
3343
733
      CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base);
3344
733
      DumpRecordLayout(OS, Base, C, BaseOffset, IndentLevel,
3345
733
                       Base == PrimaryBase ? 
"(primary base)"55
:
"(base)"678
,
3346
733
                       /*PrintSizeInfo=*/false,
3347
733
                       /*IncludeVirtualBases=*/false);
3348
733
    }
3349
2.39k
3350
2.39k
    // vbptr (for Microsoft C++ ABI)
3351
2.39k
    if (HasOwnVBPtr) {
3352
406
      PrintOffset(OS, Offset + Layout.getVBPtrOffset(), IndentLevel);
3353
406
      OS << '(' << *RD << " vbtable pointer)\n";
3354
406
    }
3355
2.39k
  }
3356
2.48k
3357
2.48k
  // Dump fields.
3358
2.48k
  uint64_t FieldNo = 0;
3359
2.48k
  for (RecordDecl::field_iterator I = RD->field_begin(),
3360
4.38k
         E = RD->field_end(); I != E; 
++I, ++FieldNo1.89k
) {
3361
1.89k
    const FieldDecl &Field = **I;
3362
1.89k
    uint64_t LocalFieldOffsetInBits = Layout.getFieldOffset(FieldNo);
3363
1.89k
    CharUnits FieldOffset =
3364
1.89k
      Offset + C.toCharUnitsFromBits(LocalFieldOffsetInBits);
3365
1.89k
3366
1.89k
    // Recursively dump fields of record type.
3367
1.89k
    if (auto RT = Field.getType()->getAs<RecordType>()) {
3368
139
      DumpRecordLayout(OS, RT->getDecl(), C, FieldOffset, IndentLevel,
3369
139
                       Field.getName().data(),
3370
139
                       /*PrintSizeInfo=*/false,
3371
139
                       /*IncludeVirtualBases=*/true);
3372
139
      continue;
3373
139
    }
3374
1.75k
3375
1.75k
    if (Field.isBitField()) {
3376
293
      uint64_t LocalFieldByteOffsetInBits = C.toBits(FieldOffset - Offset);
3377
293
      unsigned Begin = LocalFieldOffsetInBits - LocalFieldByteOffsetInBits;
3378
293
      unsigned Width = Field.getBitWidthValue(C);
3379
293
      PrintBitFieldOffset(OS, FieldOffset, Begin, Width, IndentLevel);
3380
1.46k
    } else {
3381
1.46k
      PrintOffset(OS, FieldOffset, IndentLevel);
3382
1.46k
    }
3383
1.75k
    OS << Field.getType().getAsString() << ' ' << Field << '\n';
3384
1.75k
  }
3385
2.48k
3386
2.48k
  // Dump virtual bases.
3387
2.48k
  if (CXXRD && 
IncludeVirtualBases2.39k
) {
3388
1.04k
    const ASTRecordLayout::VBaseOffsetsMapTy &VtorDisps =
3389
1.04k
      Layout.getVBaseOffsetsMap();
3390
1.04k
3391
1.04k
    for (const CXXBaseSpecifier &Base : CXXRD->vbases()) {
3392
616
      assert(Base.isVirtual() && "Found non-virtual class!");
3393
616
      const CXXRecordDecl *VBase = Base.getType()->getAsCXXRecordDecl();
3394
616
3395
616
      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBase);
3396
616
3397
616
      if (VtorDisps.find(VBase)->second.hasVtorDisp()) {
3398
74
        PrintOffset(OS, VBaseOffset - CharUnits::fromQuantity(4), IndentLevel);
3399
74
        OS << "(vtordisp for vbase " << *VBase << ")\n";
3400
74
      }
3401
616
3402
616
      DumpRecordLayout(OS, VBase, C, VBaseOffset, IndentLevel,
3403
616
                       VBase == Layout.getPrimaryBase() ?
3404
616
                         
"(primary virtual base)"0
: "(virtual base)",
3405
616
                       /*PrintSizeInfo=*/false,
3406
616
                       /*IncludeVirtualBases=*/false);
3407
616
    }
3408
1.04k
  }
3409
2.48k
3410
2.48k
  if (!PrintSizeInfo) 
return1.48k
;
3411
997
3412
997
  PrintIndentNoOffset(OS, IndentLevel - 1);
3413
997
  OS << "[sizeof=" << Layout.getSize().getQuantity();
3414
997
  if (CXXRD && 
!isMsLayout(C)907
)
3415
42
    OS << ", dsize=" << Layout.getDataSize().getQuantity();
3416
997
  OS << ", align=" << Layout.getAlignment().getQuantity();
3417
997
3418
997
  if (CXXRD) {
3419
907
    OS << ",\n";
3420
907
    PrintIndentNoOffset(OS, IndentLevel - 1);
3421
907
    OS << " nvsize=" << Layout.getNonVirtualSize().getQuantity();
3422
907
    OS << ", nvalign=" << Layout.getNonVirtualAlignment().getQuantity();
3423
907
  }
3424
997
  OS << "]\n";
3425
997
}
3426
3427
void ASTContext::DumpRecordLayout(const RecordDecl *RD,
3428
                                  raw_ostream &OS,
3429
1.08k
                                  bool Simple) const {
3430
1.08k
  if (!Simple) {
3431
997
    ::DumpRecordLayout(OS, RD, *this, CharUnits(), 0, nullptr,
3432
997
                       /*PrintSizeInfo*/true,
3433
997
                       /*IncludeVirtualBases=*/true);
3434
997
    return;
3435
997
  }
3436
83
3437
83
  // The "simple" format is designed to be parsed by the
3438
83
  // layout-override testing code.  There shouldn't be any external
3439
83
  // uses of this format --- when LLDB overrides a layout, it sets up
3440
83
  // the data structures directly --- so feel free to adjust this as
3441
83
  // you like as long as you also update the rudimentary parser for it
3442
83
  // in libFrontend.
3443
83
3444
83
  const ASTRecordLayout &Info = getASTRecordLayout(RD);
3445
83
  OS << "Type: " << getTypeDeclType(RD).getAsString() << "\n";
3446
83
  OS << "\nLayout: ";
3447
83
  OS << "<ASTRecordLayout\n";
3448
83
  OS << "  Size:" << toBits(Info.getSize()) << "\n";
3449
83
  if (!isMsLayout(*this))
3450
72
    OS << "  DataSize:" << toBits(Info.getDataSize()) << "\n";
3451
83
  OS << "  Alignment:" << toBits(Info.getAlignment()) << "\n";
3452
83
  OS << "  FieldOffsets: [";
3453
270
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; 
++i187
) {
3454
187
    if (i) 
OS << ", "111
;
3455
187
    OS << Info.getFieldOffset(i);
3456
187
  }
3457
83
  OS << "]>\n";
3458
83
}