Coverage Report

Created: 2019-07-24 05:18

/Users/buildslave/jenkins/workspace/clang-stage2-coverage-R/llvm/tools/clang/lib/CodeGen/CGClass.cpp
Line
Count
Source (jump to first uncovered line)
1
//===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// This contains code dealing with C++ code generation of classes
10
//
11
//===----------------------------------------------------------------------===//
12
13
#include "CGBlocks.h"
14
#include "CGCXXABI.h"
15
#include "CGDebugInfo.h"
16
#include "CGRecordLayout.h"
17
#include "CodeGenFunction.h"
18
#include "TargetInfo.h"
19
#include "clang/AST/CXXInheritance.h"
20
#include "clang/AST/DeclTemplate.h"
21
#include "clang/AST/EvaluatedExprVisitor.h"
22
#include "clang/AST/RecordLayout.h"
23
#include "clang/AST/StmtCXX.h"
24
#include "clang/Basic/CodeGenOptions.h"
25
#include "clang/Basic/TargetBuiltins.h"
26
#include "clang/CodeGen/CGFunctionInfo.h"
27
#include "llvm/IR/Intrinsics.h"
28
#include "llvm/IR/Metadata.h"
29
#include "llvm/Transforms/Utils/SanitizerStats.h"
30
31
using namespace clang;
32
using namespace CodeGen;
33
34
/// Return the best known alignment for an unknown pointer to a
35
/// particular class.
36
732k
CharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) {
37
732k
  if (!RD->isCompleteDefinition())
38
0
    return CharUnits::One(); // Hopefully won't be used anywhere.
39
732k
40
732k
  auto &layout = getContext().getASTRecordLayout(RD);
41
732k
42
732k
  // If the class is final, then we know that the pointer points to an
43
732k
  // object of that type and can use the full alignment.
44
732k
  if (RD->hasAttr<FinalAttr>()) {
45
1.69k
    return layout.getAlignment();
46
1.69k
47
1.69k
  // Otherwise, we have to assume it could be a subclass.
48
731k
  } else {
49
731k
    return layout.getNonVirtualAlignment();
50
731k
  }
51
732k
}
52
53
/// Return the best known alignment for a pointer to a virtual base,
54
/// given the alignment of a pointer to the derived class.
55
CharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign,
56
                                           const CXXRecordDecl *derivedClass,
57
2.12k
                                           const CXXRecordDecl *vbaseClass) {
58
2.12k
  // The basic idea here is that an underaligned derived pointer might
59
2.12k
  // indicate an underaligned base pointer.
60
2.12k
61
2.12k
  assert(vbaseClass->isCompleteDefinition());
62
2.12k
  auto &baseLayout = getContext().getASTRecordLayout(vbaseClass);
63
2.12k
  CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment();
64
2.12k
65
2.12k
  return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass,
66
2.12k
                                   expectedVBaseAlign);
67
2.12k
}
68
69
CharUnits
70
CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign,
71
                                         const CXXRecordDecl *baseDecl,
72
2.32k
                                         CharUnits expectedTargetAlign) {
73
2.32k
  // If the base is an incomplete type (which is, alas, possible with
74
2.32k
  // member pointers), be pessimistic.
75
2.32k
  if (!baseDecl->isCompleteDefinition())
76
5
    return std::min(actualBaseAlign, expectedTargetAlign);
77
2.32k
78
2.32k
  auto &baseLayout = getContext().getASTRecordLayout(baseDecl);
79
2.32k
  CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment();
80
2.32k
81
2.32k
  // If the class is properly aligned, assume the target offset is, too.
82
2.32k
  //
83
2.32k
  // This actually isn't necessarily the right thing to do --- if the
84
2.32k
  // class is a complete object, but it's only properly aligned for a
85
2.32k
  // base subobject, then the alignments of things relative to it are
86
2.32k
  // probably off as well.  (Note that this requires the alignment of
87
2.32k
  // the target to be greater than the NV alignment of the derived
88
2.32k
  // class.)
89
2.32k
  //
90
2.32k
  // However, our approach to this kind of under-alignment can only
91
2.32k
  // ever be best effort; after all, we're never going to propagate
92
2.32k
  // alignments through variables or parameters.  Note, in particular,
93
2.32k
  // that constructing a polymorphic type in an address that's less
94
2.32k
  // than pointer-aligned will generally trap in the constructor,
95
2.32k
  // unless we someday add some sort of attribute to change the
96
2.32k
  // assumed alignment of 'this'.  So our goal here is pretty much
97
2.32k
  // just to allow the user to explicitly say that a pointer is
98
2.32k
  // under-aligned and then safely access its fields and vtables.
99
2.32k
  if (actualBaseAlign >= expectedBaseAlign) {
100
2.32k
    return expectedTargetAlign;
101
2.32k
  }
102
0
103
0
  // Otherwise, we might be offset by an arbitrary multiple of the
104
0
  // actual alignment.  The correct adjustment is to take the min of
105
0
  // the two alignments.
106
0
  return std::min(actualBaseAlign, expectedTargetAlign);
107
0
}
108
109
96.9k
Address CodeGenFunction::LoadCXXThisAddress() {
110
96.9k
  assert(CurFuncDecl && "loading 'this' without a func declaration?");
111
96.9k
  assert(isa<CXXMethodDecl>(CurFuncDecl));
112
96.9k
113
96.9k
  // Lazily compute CXXThisAlignment.
114
96.9k
  if (CXXThisAlignment.isZero()) {
115
70.0k
    // Just use the best known alignment for the parent.
116
70.0k
    // TODO: if we're currently emitting a complete-object ctor/dtor,
117
70.0k
    // we can always use the complete-object alignment.
118
70.0k
    auto RD = cast<CXXMethodDecl>(CurFuncDecl)->getParent();
119
70.0k
    CXXThisAlignment = CGM.getClassPointerAlignment(RD);
120
70.0k
  }
121
96.9k
122
96.9k
  return Address(LoadCXXThis(), CXXThisAlignment);
123
96.9k
}
124
125
/// Emit the address of a field using a member data pointer.
126
///
127
/// \param E Only used for emergency diagnostics
128
Address
129
CodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base,
130
                                                 llvm::Value *memberPtr,
131
                                      const MemberPointerType *memberPtrType,
132
                                                 LValueBaseInfo *BaseInfo,
133
98
                                                 TBAAAccessInfo *TBAAInfo) {
134
98
  // Ask the ABI to compute the actual address.
135
98
  llvm::Value *ptr =
136
98
    CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base,
137
98
                                                 memberPtr, memberPtrType);
138
98
139
98
  QualType memberType = memberPtrType->getPointeeType();
140
98
  CharUnits memberAlign = getNaturalTypeAlignment(memberType, BaseInfo,
141
98
                                                  TBAAInfo);
142
98
  memberAlign =
143
98
    CGM.getDynamicOffsetAlignment(base.getAlignment(),
144
98
                            memberPtrType->getClass()->getAsCXXRecordDecl(),
145
98
                                  memberAlign);
146
98
  return Address(ptr, memberAlign);
147
98
}
148
149
CharUnits CodeGenModule::computeNonVirtualBaseClassOffset(
150
    const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start,
151
82.4k
    CastExpr::path_const_iterator End) {
152
82.4k
  CharUnits Offset = CharUnits::Zero();
153
82.4k
154
82.4k
  const ASTContext &Context = getContext();
155
82.4k
  const CXXRecordDecl *RD = DerivedClass;
156
82.4k
157
175k
  for (CastExpr::path_const_iterator I = Start; I != End; 
++I92.6k
) {
158
92.6k
    const CXXBaseSpecifier *Base = *I;
159
92.6k
    assert(!Base->isVirtual() && "Should not see virtual bases here!");
160
92.6k
161
92.6k
    // Get the layout.
162
92.6k
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
163
92.6k
164
92.6k
    const CXXRecordDecl *BaseDecl =
165
92.6k
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
166
92.6k
167
92.6k
    // Add the offset.
168
92.6k
    Offset += Layout.getBaseClassOffset(BaseDecl);
169
92.6k
170
92.6k
    RD = BaseDecl;
171
92.6k
  }
172
82.4k
173
82.4k
  return Offset;
174
82.4k
}
175
176
llvm::Constant *
177
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
178
                                   CastExpr::path_const_iterator PathBegin,
179
3.12k
                                   CastExpr::path_const_iterator PathEnd) {
180
3.12k
  assert(PathBegin != PathEnd && "Base path should not be empty!");
181
3.12k
182
3.12k
  CharUnits Offset =
183
3.12k
      computeNonVirtualBaseClassOffset(ClassDecl, PathBegin, PathEnd);
184
3.12k
  if (Offset.isZero())
185
3.08k
    return nullptr;
186
37
187
37
  llvm::Type *PtrDiffTy =
188
37
  Types.ConvertType(getContext().getPointerDiffType());
189
37
190
37
  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
191
37
}
192
193
/// Gets the address of a direct base class within a complete object.
194
/// This should only be used for (1) non-virtual bases or (2) virtual bases
195
/// when the type is known to be complete (e.g. in complete destructors).
196
///
197
/// The object pointed to by 'This' is assumed to be non-null.
198
Address
199
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This,
200
                                                   const CXXRecordDecl *Derived,
201
                                                   const CXXRecordDecl *Base,
202
19.3k
                                                   bool BaseIsVirtual) {
203
19.3k
  // 'this' must be a pointer (in some address space) to Derived.
204
19.3k
  assert(This.getElementType() == ConvertType(Derived));
205
19.3k
206
19.3k
  // Compute the offset of the virtual base.
207
19.3k
  CharUnits Offset;
208
19.3k
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
209
19.3k
  if (BaseIsVirtual)
210
1.18k
    Offset = Layout.getVBaseClassOffset(Base);
211
18.1k
  else
212
18.1k
    Offset = Layout.getBaseClassOffset(Base);
213
19.3k
214
19.3k
  // Shift and cast down to the base type.
215
19.3k
  // TODO: for complete types, this should be possible with a GEP.
216
19.3k
  Address V = This;
217
19.3k
  if (!Offset.isZero()) {
218
2.54k
    V = Builder.CreateElementBitCast(V, Int8Ty);
219
2.54k
    V = Builder.CreateConstInBoundsByteGEP(V, Offset);
220
2.54k
  }
221
19.3k
  V = Builder.CreateElementBitCast(V, ConvertType(Base));
222
19.3k
223
19.3k
  return V;
224
19.3k
}
225
226
static Address
227
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr,
228
                                CharUnits nonVirtualOffset,
229
                                llvm::Value *virtualOffset,
230
                                const CXXRecordDecl *derivedClass,
231
5.07k
                                const CXXRecordDecl *nearestVBase) {
232
5.07k
  // Assert that we have something to do.
233
5.07k
  assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr);
234
5.07k
235
5.07k
  // Compute the offset from the static and dynamic components.
236
5.07k
  llvm::Value *baseOffset;
237
5.07k
  if (!nonVirtualOffset.isZero()) {
238
3.08k
    baseOffset = llvm::ConstantInt::get(CGF.PtrDiffTy,
239
3.08k
                                        nonVirtualOffset.getQuantity());
240
3.08k
    if (virtualOffset) {
241
34
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
242
34
    }
243
3.08k
  } else {
244
1.99k
    baseOffset = virtualOffset;
245
1.99k
  }
246
5.07k
247
5.07k
  // Apply the base offset.
248
5.07k
  llvm::Value *ptr = addr.getPointer();
249
5.07k
  ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8PtrTy);
250
5.07k
  ptr = CGF.Builder.CreateInBoundsGEP(ptr, baseOffset, "add.ptr");
251
5.07k
252
5.07k
  // If we have a virtual component, the alignment of the result will
253
5.07k
  // be relative only to the known alignment of that vbase.
254
5.07k
  CharUnits alignment;
255
5.07k
  if (virtualOffset) {
256
2.02k
    assert(nearestVBase && "virtual offset without vbase?");
257
2.02k
    alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(),
258
2.02k
                                          derivedClass, nearestVBase);
259
3.04k
  } else {
260
3.04k
    alignment = addr.getAlignment();
261
3.04k
  }
262
5.07k
  alignment = alignment.alignmentAtOffset(nonVirtualOffset);
263
5.07k
264
5.07k
  return Address(ptr, alignment);
265
5.07k
}
266
267
Address CodeGenFunction::GetAddressOfBaseClass(
268
    Address Value, const CXXRecordDecl *Derived,
269
    CastExpr::path_const_iterator PathBegin,
270
    CastExpr::path_const_iterator PathEnd, bool NullCheckValue,
271
79.2k
    SourceLocation Loc) {
272
79.2k
  assert(PathBegin != PathEnd && "Base path should not be empty!");
273
79.2k
274
79.2k
  CastExpr::path_const_iterator Start = PathBegin;
275
79.2k
  const CXXRecordDecl *VBase = nullptr;
276
79.2k
277
79.2k
  // Sema has done some convenient canonicalization here: if the
278
79.2k
  // access path involved any virtual steps, the conversion path will
279
79.2k
  // *start* with a step down to the correct virtual base subobject,
280
79.2k
  // and hence will not require any further steps.
281
79.2k
  if ((*Start)->isVirtual()) {
282
1.43k
    VBase =
283
1.43k
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
284
1.43k
    ++Start;
285
1.43k
  }
286
79.2k
287
79.2k
  // Compute the static offset of the ultimate destination within its
288
79.2k
  // allocating subobject (the virtual base, if there is one, or else
289
79.2k
  // the "complete" object that we see).
290
79.2k
  CharUnits NonVirtualOffset = CGM.computeNonVirtualBaseClassOffset(
291
79.2k
      VBase ? 
VBase1.43k
:
Derived77.8k
, Start, PathEnd);
292
79.2k
293
79.2k
  // If there's a virtual step, we can sometimes "devirtualize" it.
294
79.2k
  // For now, that's limited to when the derived type is final.
295
79.2k
  // TODO: "devirtualize" this for accesses to known-complete objects.
296
79.2k
  if (VBase && 
Derived->hasAttr<FinalAttr>()1.43k
) {
297
2
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
298
2
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
299
2
    NonVirtualOffset += vBaseOffset;
300
2
    VBase = nullptr; // we no longer have a virtual step
301
2
  }
302
79.2k
303
79.2k
  // Get the base pointer type.
304
79.2k
  llvm::Type *BasePtrTy =
305
79.2k
      ConvertType((PathEnd[-1])->getType())
306
79.2k
          ->getPointerTo(Value.getType()->getPointerAddressSpace());
307
79.2k
308
79.2k
  QualType DerivedTy = getContext().getRecordType(Derived);
309
79.2k
  CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived);
310
79.2k
311
79.2k
  // If the static offset is zero and we don't have a virtual step,
312
79.2k
  // just do a bitcast; null checks are unnecessary.
313
79.2k
  if (NonVirtualOffset.isZero() && 
!VBase76.9k
) {
314
75.5k
    if (sanitizePerformTypeCheck()) {
315
19
      SanitizerSet SkippedChecks;
316
19
      SkippedChecks.set(SanitizerKind::Null, !NullCheckValue);
317
19
      EmitTypeCheck(TCK_Upcast, Loc, Value.getPointer(),
318
19
                    DerivedTy, DerivedAlign, SkippedChecks);
319
19
    }
320
75.5k
    return Builder.CreateBitCast(Value, BasePtrTy);
321
75.5k
  }
322
3.71k
323
3.71k
  llvm::BasicBlock *origBB = nullptr;
324
3.71k
  llvm::BasicBlock *endBB = nullptr;
325
3.71k
326
3.71k
  // Skip over the offset (and the vtable load) if we're supposed to
327
3.71k
  // null-check the pointer.
328
3.71k
  if (NullCheckValue) {
329
30
    origBB = Builder.GetInsertBlock();
330
30
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
331
30
    endBB = createBasicBlock("cast.end");
332
30
333
30
    llvm::Value *isNull = Builder.CreateIsNull(Value.getPointer());
334
30
    Builder.CreateCondBr(isNull, endBB, notNullBB);
335
30
    EmitBlock(notNullBB);
336
30
  }
337
3.71k
338
3.71k
  if (sanitizePerformTypeCheck()) {
339
3
    SanitizerSet SkippedChecks;
340
3
    SkippedChecks.set(SanitizerKind::Null, true);
341
3
    EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : 
TCK_Upcast0
, Loc,
342
3
                  Value.getPointer(), DerivedTy, DerivedAlign, SkippedChecks);
343
3
  }
344
3.71k
345
3.71k
  // Compute the virtual offset.
346
3.71k
  llvm::Value *VirtualOffset = nullptr;
347
3.71k
  if (VBase) {
348
1.43k
    VirtualOffset =
349
1.43k
      CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived, VBase);
350
1.43k
  }
351
3.71k
352
3.71k
  // Apply both offsets.
353
3.71k
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
354
3.71k
                                          VirtualOffset, Derived, VBase);
355
3.71k
356
3.71k
  // Cast to the destination type.
357
3.71k
  Value = Builder.CreateBitCast(Value, BasePtrTy);
358
3.71k
359
3.71k
  // Build a phi if we needed a null check.
360
3.71k
  if (NullCheckValue) {
361
30
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
362
30
    Builder.CreateBr(endBB);
363
30
    EmitBlock(endBB);
364
30
365
30
    llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result");
366
30
    PHI->addIncoming(Value.getPointer(), notNullBB);
367
30
    PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB);
368
30
    Value = Address(PHI, Value.getAlignment());
369
30
  }
370
3.71k
371
3.71k
  return Value;
372
3.71k
}
373
374
Address
375
CodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr,
376
                                          const CXXRecordDecl *Derived,
377
                                        CastExpr::path_const_iterator PathBegin,
378
                                          CastExpr::path_const_iterator PathEnd,
379
3.06k
                                          bool NullCheckValue) {
380
3.06k
  assert(PathBegin != PathEnd && "Base path should not be empty!");
381
3.06k
382
3.06k
  QualType DerivedTy =
383
3.06k
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
384
3.06k
  llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();
385
3.06k
386
3.06k
  llvm::Value *NonVirtualOffset =
387
3.06k
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);
388
3.06k
389
3.06k
  if (!NonVirtualOffset) {
390
3.05k
    // No offset, we can just cast back.
391
3.05k
    return Builder.CreateBitCast(BaseAddr, DerivedPtrTy);
392
3.05k
  }
393
11
394
11
  llvm::BasicBlock *CastNull = nullptr;
395
11
  llvm::BasicBlock *CastNotNull = nullptr;
396
11
  llvm::BasicBlock *CastEnd = nullptr;
397
11
398
11
  if (NullCheckValue) {
399
5
    CastNull = createBasicBlock("cast.null");
400
5
    CastNotNull = createBasicBlock("cast.notnull");
401
5
    CastEnd = createBasicBlock("cast.end");
402
5
403
5
    llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr.getPointer());
404
5
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
405
5
    EmitBlock(CastNotNull);
406
5
  }
407
11
408
11
  // Apply the offset.
409
11
  llvm::Value *Value = Builder.CreateBitCast(BaseAddr.getPointer(), Int8PtrTy);
410
11
  Value = Builder.CreateInBoundsGEP(Value, Builder.CreateNeg(NonVirtualOffset),
411
11
                                    "sub.ptr");
412
11
413
11
  // Just cast.
414
11
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);
415
11
416
11
  // Produce a PHI if we had a null-check.
417
11
  if (NullCheckValue) {
418
5
    Builder.CreateBr(CastEnd);
419
5
    EmitBlock(CastNull);
420
5
    Builder.CreateBr(CastEnd);
421
5
    EmitBlock(CastEnd);
422
5
423
5
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
424
5
    PHI->addIncoming(Value, CastNotNull);
425
5
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
426
5
    Value = PHI;
427
5
  }
428
11
429
11
  return Address(Value, CGM.getClassPointerAlignment(Derived));
430
11
}
431
432
llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD,
433
                                              bool ForVirtualBase,
434
52.9k
                                              bool Delegating) {
435
52.9k
  if (!CGM.getCXXABI().NeedsVTTParameter(GD)) {
436
52.2k
    // This constructor/destructor does not need a VTT parameter.
437
52.2k
    return nullptr;
438
52.2k
  }
439
666
440
666
  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent();
441
666
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();
442
666
443
666
  llvm::Value *VTT;
444
666
445
666
  uint64_t SubVTTIndex;
446
666
447
666
  if (Delegating) {
448
2
    // If this is a delegating constructor call, just load the VTT.
449
2
    return LoadCXXVTT();
450
664
  } else if (RD == Base) {
451
178
    // If the record matches the base, this is the complete ctor/dtor
452
178
    // variant calling the base variant in a class with virtual bases.
453
178
    assert(!CGM.getCXXABI().NeedsVTTParameter(CurGD) &&
454
178
           "doing no-op VTT offset in base dtor/ctor?");
455
178
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
456
178
    SubVTTIndex = 0;
457
486
  } else {
458
486
    const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
459
486
    CharUnits BaseOffset = ForVirtualBase ?
460
7
      Layout.getVBaseClassOffset(Base) :
461
486
      
Layout.getBaseClassOffset(Base)479
;
462
486
463
486
    SubVTTIndex =
464
486
      CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
465
486
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
466
486
  }
467
666
468
666
  
if (664
CGM.getCXXABI().NeedsVTTParameter(CurGD)664
) {
469
218
    // A VTT parameter was passed to the constructor, use it.
470
218
    VTT = LoadCXXVTT();
471
218
    VTT = Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
472
446
  } else {
473
446
    // We're the complete constructor, so get the VTT by name.
474
446
    VTT = CGM.getVTables().GetAddrOfVTT(RD);
475
446
    VTT = Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
476
446
  }
477
664
478
664
  return VTT;
479
666
}
480
481
namespace {
482
  /// Call the destructor for a direct base class.
483
  struct CallBaseDtor final : EHScopeStack::Cleanup {
484
    const CXXRecordDecl *BaseClass;
485
    bool BaseIsVirtual;
486
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
487
5.94k
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}
488
489
4.82k
    void Emit(CodeGenFunction &CGF, Flags flags) override {
490
4.82k
      const CXXRecordDecl *DerivedClass =
491
4.82k
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();
492
4.82k
493
4.82k
      const CXXDestructorDecl *D = BaseClass->getDestructor();
494
4.82k
      // We are already inside a destructor, so presumably the object being
495
4.82k
      // destroyed should have the expected type.
496
4.82k
      QualType ThisTy = D->getThisObjectType();
497
4.82k
      Address Addr =
498
4.82k
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(),
499
4.82k
                                                  DerivedClass, BaseClass,
500
4.82k
                                                  BaseIsVirtual);
501
4.82k
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual,
502
4.82k
                                /*Delegating=*/false, Addr, ThisTy);
503
4.82k
    }
504
  };
505
506
  /// A visitor which checks whether an initializer uses 'this' in a
507
  /// way which requires the vtable to be properly set.
508
  struct DynamicThisUseChecker : ConstEvaluatedExprVisitor<DynamicThisUseChecker> {
509
    typedef ConstEvaluatedExprVisitor<DynamicThisUseChecker> super;
510
511
    bool UsesThis;
512
513
14.5k
    DynamicThisUseChecker(const ASTContext &C) : super(C), UsesThis(false) {}
514
515
    // Black-list all explicit and implicit references to 'this'.
516
    //
517
    // Do we need to worry about external references to 'this' derived
518
    // from arbitrary code?  If so, then anything which runs arbitrary
519
    // external code might potentially access the vtable.
520
267
    void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; }
521
  };
522
} // end anonymous namespace
523
524
14.5k
static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
525
14.5k
  DynamicThisUseChecker Checker(C);
526
14.5k
  Checker.Visit(Init);
527
14.5k
  return Checker.UsesThis;
528
14.5k
}
529
530
static void EmitBaseInitializer(CodeGenFunction &CGF,
531
                                const CXXRecordDecl *ClassDecl,
532
14.5k
                                CXXCtorInitializer *BaseInit) {
533
14.5k
  assert(BaseInit->isBaseInitializer() &&
534
14.5k
         "Must have base initializer!");
535
14.5k
536
14.5k
  Address ThisPtr = CGF.LoadCXXThisAddress();
537
14.5k
538
14.5k
  const Type *BaseType = BaseInit->getBaseClass();
539
14.5k
  CXXRecordDecl *BaseClassDecl =
540
14.5k
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());
541
14.5k
542
14.5k
  bool isBaseVirtual = BaseInit->isBaseVirtual();
543
14.5k
544
14.5k
  // If the initializer for the base (other than the constructor
545
14.5k
  // itself) accesses 'this' in any way, we need to initialize the
546
14.5k
  // vtables.
547
14.5k
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
548
267
    CGF.InitializeVTablePointers(ClassDecl);
549
14.5k
550
14.5k
  // We can pretend to be a complete class because it only matters for
551
14.5k
  // virtual bases, and we only do virtual bases for complete ctors.
552
14.5k
  Address V =
553
14.5k
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
554
14.5k
                                              BaseClassDecl,
555
14.5k
                                              isBaseVirtual);
556
14.5k
  AggValueSlot AggSlot =
557
14.5k
      AggValueSlot::forAddr(
558
14.5k
          V, Qualifiers(),
559
14.5k
          AggValueSlot::IsDestructed,
560
14.5k
          AggValueSlot::DoesNotNeedGCBarriers,
561
14.5k
          AggValueSlot::IsNotAliased,
562
14.5k
          CGF.getOverlapForBaseInit(ClassDecl, BaseClassDecl, isBaseVirtual));
563
14.5k
564
14.5k
  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);
565
14.5k
566
14.5k
  if (CGF.CGM.getLangOpts().Exceptions &&
567
14.5k
      
!BaseClassDecl->hasTrivialDestructor()7.41k
)
568
2.50k
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
569
2.50k
                                          isBaseVirtual);
570
14.5k
}
571
572
180k
static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) {
573
180k
  auto *CD = dyn_cast<CXXConstructorDecl>(D);
574
180k
  if (!(CD && 
CD->isCopyOrMoveConstructor()180k
) &&
575
180k
      
!D->isCopyAssignmentOperator()155k
&&
!D->isMoveAssignmentOperator()154k
)
576
154k
    return false;
577
25.9k
578
25.9k
  // We can emit a memcpy for a trivial copy or move constructor/assignment.
579
25.9k
  if (D->isTrivial() && 
!D->getParent()->mayInsertExtraPadding()13.7k
)
580
13.7k
    return true;
581
12.2k
582
12.2k
  // We *must* emit a memcpy for a defaulted union copy or move op.
583
12.2k
  if (D->getParent()->isUnion() && 
D->isDefaulted()4
)
584
2
    return true;
585
12.2k
586
12.2k
  return false;
587
12.2k
}
588
589
static void EmitLValueForAnyFieldInitialization(CodeGenFunction &CGF,
590
                                                CXXCtorInitializer *MemberInit,
591
32.7k
                                                LValue &LHS) {
592
32.7k
  FieldDecl *Field = MemberInit->getAnyMember();
593
32.7k
  if (MemberInit->isIndirectMemberInitializer()) {
594
125
    // If we are initializing an anonymous union field, drill down to the field.
595
125
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
596
125
    for (const auto *I : IndirectField->chain())
597
258
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(I));
598
32.6k
  } else {
599
32.6k
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
600
32.6k
  }
601
32.7k
}
602
603
static void EmitMemberInitializer(CodeGenFunction &CGF,
604
                                  const CXXRecordDecl *ClassDecl,
605
                                  CXXCtorInitializer *MemberInit,
606
                                  const CXXConstructorDecl *Constructor,
607
32.7k
                                  FunctionArgList &Args) {
608
32.7k
  ApplyDebugLocation Loc(CGF, MemberInit->getSourceLocation());
609
32.7k
  assert(MemberInit->isAnyMemberInitializer() &&
610
32.7k
         "Must have member initializer!");
611
32.7k
  assert(MemberInit->getInit() && "Must have initializer!");
612
32.7k
613
32.7k
  // non-static data member initializers.
614
32.7k
  FieldDecl *Field = MemberInit->getAnyMember();
615
32.7k
  QualType FieldType = Field->getType();
616
32.7k
617
32.7k
  llvm::Value *ThisPtr = CGF.LoadCXXThis();
618
32.7k
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
619
32.7k
  LValue LHS;
620
32.7k
621
32.7k
  // If a base constructor is being emitted, create an LValue that has the
622
32.7k
  // non-virtual alignment.
623
32.7k
  if (CGF.CurGD.getCtorType() == Ctor_Base)
624
32.5k
    LHS = CGF.MakeNaturalAlignPointeeAddrLValue(ThisPtr, RecordTy);
625
246
  else
626
246
    LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);
627
32.7k
628
32.7k
  EmitLValueForAnyFieldInitialization(CGF, MemberInit, LHS);
629
32.7k
630
32.7k
  // Special case: if we are in a copy or move constructor, and we are copying
631
32.7k
  // an array of PODs or classes with trivial copy constructors, ignore the
632
32.7k
  // AST and perform the copy we know is equivalent.
633
32.7k
  // FIXME: This is hacky at best... if we had a bit more explicit information
634
32.7k
  // in the AST, we could generalize it more easily.
635
32.7k
  const ConstantArrayType *Array
636
32.7k
    = CGF.getContext().getAsConstantArrayType(FieldType);
637
32.7k
  if (Array && 
Constructor->isDefaulted()480
&&
638
32.7k
      
Constructor->isCopyOrMoveConstructor()96
) {
639
23
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
640
23
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
641
23
    if (BaseElementTy.isPODType(CGF.getContext()) ||
642
23
        
(17
CE17
&&
isMemcpyEquivalentSpecialMember(CE->getConstructor())0
)) {
643
6
      unsigned SrcArgIndex =
644
6
          CGF.CGM.getCXXABI().getSrcArgforCopyCtor(Constructor, Args);
645
6
      llvm::Value *SrcPtr
646
6
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
647
6
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
648
6
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);
649
6
650
6
      // Copy the aggregate.
651
6
      CGF.EmitAggregateCopy(LHS, Src, FieldType, CGF.getOverlapForFieldInit(Field),
652
6
                            LHS.isVolatileQualified());
653
6
      // Ensure that we destroy the objects if an exception is thrown later in
654
6
      // the constructor.
655
6
      QualType::DestructionKind dtorKind = FieldType.isDestructedType();
656
6
      if (CGF.needsEHCleanup(dtorKind))
657
0
        CGF.pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
658
6
      return;
659
6
    }
660
32.7k
  }
661
32.7k
662
32.7k
  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit());
663
32.7k
}
664
665
void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
666
33.4k
                                              Expr *Init) {
667
33.4k
  QualType FieldType = Field->getType();
668
33.4k
  switch (getEvaluationKind(FieldType)) {
669
33.4k
  case TEK_Scalar:
670
20.6k
    if (LHS.isSimple()) {
671
20.5k
      EmitExprAsInit(Init, Field, LHS, false);
672
20.5k
    } else {
673
74
      RValue RHS = RValue::get(EmitScalarExpr(Init));
674
74
      EmitStoreThroughLValue(RHS, LHS);
675
74
    }
676
20.6k
    break;
677
33.4k
  case TEK_Complex:
678
6
    EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
679
6
    break;
680
33.4k
  case TEK_Aggregate: {
681
12.8k
    AggValueSlot Slot =
682
12.8k
        AggValueSlot::forLValue(
683
12.8k
            LHS,
684
12.8k
            AggValueSlot::IsDestructed,
685
12.8k
            AggValueSlot::DoesNotNeedGCBarriers,
686
12.8k
            AggValueSlot::IsNotAliased,
687
12.8k
            getOverlapForFieldInit(Field),
688
12.8k
            AggValueSlot::IsNotZeroed,
689
12.8k
            // Checks are made by the code that calls constructor.
690
12.8k
            AggValueSlot::IsSanitizerChecked);
691
12.8k
    EmitAggExpr(Init, Slot);
692
12.8k
    break;
693
33.4k
  }
694
33.4k
  }
695
33.4k
696
33.4k
  // Ensure that we destroy this object if an exception is thrown
697
33.4k
  // later in the constructor.
698
33.4k
  QualType::DestructionKind dtorKind = FieldType.isDestructedType();
699
33.4k
  if (needsEHCleanup(dtorKind))
700
2.39k
    pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
701
33.4k
}
702
703
/// Checks whether the given constructor is a valid subject for the
704
/// complete-to-base constructor delegation optimization, i.e.
705
/// emitting the complete constructor as a simple call to the base
706
/// constructor.
707
bool CodeGenFunction::IsConstructorDelegationValid(
708
23.2k
    const CXXConstructorDecl *Ctor) {
709
23.2k
710
23.2k
  // Currently we disable the optimization for classes with virtual
711
23.2k
  // bases because (1) the addresses of parameter variables need to be
712
23.2k
  // consistent across all initializers but (2) the delegate function
713
23.2k
  // call necessarily creates a second copy of the parameter variable.
714
23.2k
  //
715
23.2k
  // The limiting example (purely theoretical AFAIK):
716
23.2k
  //   struct A { A(int &c) { c++; } };
717
23.2k
  //   struct B : virtual A {
718
23.2k
  //     B(int count) : A(count) { printf("%d\n", count); }
719
23.2k
  //   };
720
23.2k
  // ...although even this example could in principle be emitted as a
721
23.2k
  // delegation since the address of the parameter doesn't escape.
722
23.2k
  if (Ctor->getParent()->getNumVBases()) {
723
663
    // TODO: white-list trivial vbase initializers.  This case wouldn't
724
663
    // be subject to the restrictions below.
725
663
726
663
    // TODO: white-list cases where:
727
663
    //  - there are no non-reference parameters to the constructor
728
663
    //  - the initializers don't access any non-reference parameters
729
663
    //  - the initializers don't take the address of non-reference
730
663
    //    parameters
731
663
    //  - etc.
732
663
    // If we ever add any of the above cases, remember that:
733
663
    //  - function-try-blocks will always blacklist this optimization
734
663
    //  - we need to perform the constructor prologue and cleanup in
735
663
    //    EmitConstructorBody.
736
663
737
663
    return false;
738
663
  }
739
22.5k
740
22.5k
  // We also disable the optimization for variadic functions because
741
22.5k
  // it's impossible to "re-pass" varargs.
742
22.5k
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
743
10
    return false;
744
22.5k
745
22.5k
  // FIXME: Decide if we can do a delegation of a delegating constructor.
746
22.5k
  if (Ctor->isDelegatingConstructor())
747
42
    return false;
748
22.5k
749
22.5k
  return true;
750
22.5k
}
751
752
// Emit code in ctor (Prologue==true) or dtor (Prologue==false)
753
// to poison the extra field paddings inserted under
754
// -fsanitize-address-field-padding=1|2.
755
72.6k
void CodeGenFunction::EmitAsanPrologueOrEpilogue(bool Prologue) {
756
72.6k
  ASTContext &Context = getContext();
757
72.6k
  const CXXRecordDecl *ClassDecl =
758
72.6k
      Prologue ? 
cast<CXXConstructorDecl>(CurGD.getDecl())->getParent()52.0k
759
72.6k
               : 
cast<CXXDestructorDecl>(CurGD.getDecl())->getParent()20.5k
;
760
72.6k
  if (!ClassDecl->mayInsertExtraPadding()) 
return72.6k
;
761
41
762
41
  struct SizeAndOffset {
763
41
    uint64_t Size;
764
41
    uint64_t Offset;
765
41
  };
766
41
767
41
  unsigned PtrSize = CGM.getDataLayout().getPointerSizeInBits();
768
41
  const ASTRecordLayout &Info = Context.getASTRecordLayout(ClassDecl);
769
41
770
41
  // Populate sizes and offsets of fields.
771
41
  SmallVector<SizeAndOffset, 16> SSV(Info.getFieldCount());
772
153
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; 
++i112
)
773
112
    SSV[i].Offset =
774
112
        Context.toCharUnitsFromBits(Info.getFieldOffset(i)).getQuantity();
775
41
776
41
  size_t NumFields = 0;
777
112
  for (const auto *Field : ClassDecl->fields()) {
778
112
    const FieldDecl *D = Field;
779
112
    std::pair<CharUnits, CharUnits> FieldInfo =
780
112
        Context.getTypeInfoInChars(D->getType());
781
112
    CharUnits FieldSize = FieldInfo.first;
782
112
    assert(NumFields < SSV.size());
783
112
    SSV[NumFields].Size = D->isBitField() ? 
00
: FieldSize.getQuantity();
784
112
    NumFields++;
785
112
  }
786
41
  assert(NumFields == SSV.size());
787
41
  if (SSV.size() <= 1) 
return0
;
788
41
789
41
  // We will insert calls to __asan_* run-time functions.
790
41
  // LLVM AddressSanitizer pass may decide to inline them later.
791
41
  llvm::Type *Args[2] = {IntPtrTy, IntPtrTy};
792
41
  llvm::FunctionType *FTy =
793
41
      llvm::FunctionType::get(CGM.VoidTy, Args, false);
794
41
  llvm::FunctionCallee F = CGM.CreateRuntimeFunction(
795
41
      FTy, Prologue ? 
"__asan_poison_intra_object_redzone"22
796
41
                    : 
"__asan_unpoison_intra_object_redzone"19
);
797
41
798
41
  llvm::Value *ThisPtr = LoadCXXThis();
799
41
  ThisPtr = Builder.CreatePtrToInt(ThisPtr, IntPtrTy);
800
41
  uint64_t TypeSize = Info.getNonVirtualSize().getQuantity();
801
41
  // For each field check if it has sufficient padding,
802
41
  // if so (un)poison it with a call.
803
153
  for (size_t i = 0; i < SSV.size(); 
i++112
) {
804
112
    uint64_t AsanAlignment = 8;
805
112
    uint64_t NextField = i == SSV.size() - 1 ? 
TypeSize41
:
SSV[i + 1].Offset71
;
806
112
    uint64_t PoisonSize = NextField - SSV[i].Offset - SSV[i].Size;
807
112
    uint64_t EndOffset = SSV[i].Offset + SSV[i].Size;
808
112
    if (PoisonSize < AsanAlignment || 
!SSV[i].Size94
||
809
112
        
(NextField % AsanAlignment) != 094
)
810
18
      continue;
811
94
    Builder.CreateCall(
812
94
        F, {Builder.CreateAdd(ThisPtr, Builder.getIntN(PtrSize, EndOffset)),
813
94
            Builder.getIntN(PtrSize, PoisonSize)});
814
94
  }
815
41
}
816
817
/// EmitConstructorBody - Emits the body of the current constructor.
818
52.0k
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
819
52.0k
  EmitAsanPrologueOrEpilogue(true);
820
52.0k
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
821
52.0k
  CXXCtorType CtorType = CurGD.getCtorType();
822
52.0k
823
52.0k
  assert((CGM.getTarget().getCXXABI().hasConstructorVariants() ||
824
52.0k
          CtorType == Ctor_Complete) &&
825
52.0k
         "can only generate complete ctor for this ABI");
826
52.0k
827
52.0k
  // Before we go any further, try the complete->base constructor
828
52.0k
  // delegation optimization.
829
52.0k
  if (CtorType == Ctor_Complete && 
IsConstructorDelegationValid(Ctor)23.1k
&&
830
52.0k
      
CGM.getTarget().getCXXABI().hasConstructorVariants()22.4k
) {
831
21.7k
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args, Ctor->getEndLoc());
832
21.7k
    return;
833
21.7k
  }
834
30.3k
835
30.3k
  const FunctionDecl *Definition = nullptr;
836
30.3k
  Stmt *Body = Ctor->getBody(Definition);
837
30.3k
  assert(Definition == Ctor && "emitting wrong constructor body");
838
30.3k
839
30.3k
  // Enter the function-try-block before the constructor prologue if
840
30.3k
  // applicable.
841
30.3k
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
842
30.3k
  if (IsTryBody)
843
3
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
844
30.3k
845
30.3k
  incrementProfileCounter(Body);
846
30.3k
847
30.3k
  RunCleanupsScope RunCleanups(*this);
848
30.3k
849
30.3k
  // TODO: in restricted cases, we can emit the vbase initializers of
850
30.3k
  // a complete ctor and then delegate to the base ctor.
851
30.3k
852
30.3k
  // Emit the constructor prologue, i.e. the base and member
853
30.3k
  // initializers.
854
30.3k
  EmitCtorPrologue(Ctor, CtorType, Args);
855
30.3k
856
30.3k
  // Emit the body of the statement.
857
30.3k
  if (IsTryBody)
858
3
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
859
30.3k
  else if (Body)
860
30.3k
    EmitStmt(Body);
861
30.3k
862
30.3k
  // Emit any cleanup blocks associated with the member or base
863
30.3k
  // initializers, which includes (along the exceptional path) the
864
30.3k
  // destructors for those members and bases that were fully
865
30.3k
  // constructed.
866
30.3k
  RunCleanups.ForceCleanup();
867
30.3k
868
30.3k
  if (IsTryBody)
869
3
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
870
30.3k
}
871
872
namespace {
873
  /// RAII object to indicate that codegen is copying the value representation
874
  /// instead of the object representation. Useful when copying a struct or
875
  /// class which has uninitialized members and we're only performing
876
  /// lvalue-to-rvalue conversion on the object but not its members.
877
  class CopyingValueRepresentation {
878
  public:
879
    explicit CopyingValueRepresentation(CodeGenFunction &CGF)
880
312
        : CGF(CGF), OldSanOpts(CGF.SanOpts) {
881
312
      CGF.SanOpts.set(SanitizerKind::Bool, false);
882
312
      CGF.SanOpts.set(SanitizerKind::Enum, false);
883
312
    }
884
312
    ~CopyingValueRepresentation() {
885
312
      CGF.SanOpts = OldSanOpts;
886
312
    }
887
  private:
888
    CodeGenFunction &CGF;
889
    SanitizerSet OldSanOpts;
890
  };
891
} // end anonymous namespace
892
893
namespace {
894
  class FieldMemcpyizer {
895
  public:
896
    FieldMemcpyizer(CodeGenFunction &CGF, const CXXRecordDecl *ClassDecl,
897
                    const VarDecl *SrcRec)
898
      : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec),
899
        RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)),
900
        FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0),
901
31.1k
        LastFieldOffset(0), LastAddedFieldIndex(0) {}
902
903
2.11k
    bool isMemcpyableField(FieldDecl *F) const {
904
2.11k
      // Never memcpy fields when we are adding poisoned paddings.
905
2.11k
      if (CGF.getContext().getLangOpts().SanitizeAddressFieldPadding)
906
8
        return false;
907
2.10k
      Qualifiers Qual = F->getType().getQualifiers();
908
2.10k
      if (Qual.hasVolatile() || 
Qual.hasObjCLifetime()2.10k
)
909
7
        return false;
910
2.10k
      return true;
911
2.10k
    }
912
913
2.09k
    void addMemcpyableField(FieldDecl *F) {
914
2.09k
      if (!FirstField)
915
617
        addInitialField(F);
916
1.48k
      else
917
1.48k
        addNextField(F);
918
2.09k
    }
919
920
305
    CharUnits getMemcpySize(uint64_t FirstByteOffset) const {
921
305
      ASTContext &Ctx = CGF.getContext();
922
305
      unsigned LastFieldSize =
923
305
          LastField->isBitField()
924
305
              ? 
LastField->getBitWidthValue(Ctx)6
925
305
              : Ctx.toBits(
926
299
                    Ctx.getTypeInfoDataSizeInChars(LastField->getType()).first);
927
305
      uint64_t MemcpySizeBits = LastFieldOffset + LastFieldSize -
928
305
                                FirstByteOffset + Ctx.getCharWidth() - 1;
929
305
      CharUnits MemcpySize = Ctx.toCharUnitsFromBits(MemcpySizeBits);
930
305
      return MemcpySize;
931
305
    }
932
933
2.38k
    void emitMemcpy() {
934
2.38k
      // Give the subclass a chance to bail out if it feels the memcpy isn't
935
2.38k
      // worth it (e.g. Hasn't aggregated enough data).
936
2.38k
      if (!FirstField) {
937
2.07k
        return;
938
2.07k
      }
939
305
940
305
      uint64_t FirstByteOffset;
941
305
      if (FirstField->isBitField()) {
942
12
        const CGRecordLayout &RL =
943
12
          CGF.getTypes().getCGRecordLayout(FirstField->getParent());
944
12
        const CGBitFieldInfo &BFInfo = RL.getBitFieldInfo(FirstField);
945
12
        // FirstFieldOffset is not appropriate for bitfields,
946
12
        // we need to use the storage offset instead.
947
12
        FirstByteOffset = CGF.getContext().toBits(BFInfo.StorageOffset);
948
293
      } else {
949
293
        FirstByteOffset = FirstFieldOffset;
950
293
      }
951
305
952
305
      CharUnits MemcpySize = getMemcpySize(FirstByteOffset);
953
305
      QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
954
305
      Address ThisPtr = CGF.LoadCXXThisAddress();
955
305
      LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy);
956
305
      LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField);
957
305
      llvm::Value *SrcPtr = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec));
958
305
      LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
959
305
      LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField);
960
305
961
305
      emitMemcpyIR(Dest.isBitField() ? 
Dest.getBitFieldAddress()12
:
Dest.getAddress()293
,
962
305
                   Src.isBitField() ? 
Src.getBitFieldAddress()12
:
Src.getAddress()293
,
963
305
                   MemcpySize);
964
305
      reset();
965
305
    }
966
967
65.1k
    void reset() {
968
65.1k
      FirstField = nullptr;
969
65.1k
    }
970
971
  protected:
972
    CodeGenFunction &CGF;
973
    const CXXRecordDecl *ClassDecl;
974
975
  private:
976
305
    void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) {
977
305
      llvm::PointerType *DPT = DestPtr.getType();
978
305
      llvm::Type *DBP =
979
305
        llvm::Type::getInt8PtrTy(CGF.getLLVMContext(), DPT->getAddressSpace());
980
305
      DestPtr = CGF.Builder.CreateBitCast(DestPtr, DBP);
981
305
982
305
      llvm::PointerType *SPT = SrcPtr.getType();
983
305
      llvm::Type *SBP =
984
305
        llvm::Type::getInt8PtrTy(CGF.getLLVMContext(), SPT->getAddressSpace());
985
305
      SrcPtr = CGF.Builder.CreateBitCast(SrcPtr, SBP);
986
305
987
305
      CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity());
988
305
    }
989
990
617
    void addInitialField(FieldDecl *F) {
991
617
      FirstField = F;
992
617
      LastField = F;
993
617
      FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex());
994
617
      LastFieldOffset = FirstFieldOffset;
995
617
      LastAddedFieldIndex = F->getFieldIndex();
996
617
    }
997
998
1.48k
    void addNextField(FieldDecl *F) {
999
1.48k
      // For the most part, the following invariant will hold:
1000
1.48k
      //   F->getFieldIndex() == LastAddedFieldIndex + 1
1001
1.48k
      // The one exception is that Sema won't add a copy-initializer for an
1002
1.48k
      // unnamed bitfield, which will show up here as a gap in the sequence.
1003
1.48k
      assert(F->getFieldIndex() >= LastAddedFieldIndex + 1 &&
1004
1.48k
             "Cannot aggregate fields out of order.");
1005
1.48k
      LastAddedFieldIndex = F->getFieldIndex();
1006
1.48k
1007
1.48k
      // The 'first' and 'last' fields are chosen by offset, rather than field
1008
1.48k
      // index. This allows the code to support bitfields, as well as regular
1009
1.48k
      // fields.
1010
1.48k
      uint64_t FOffset = RecLayout.getFieldOffset(F->getFieldIndex());
1011
1.48k
      if (FOffset < FirstFieldOffset) {
1012
0
        FirstField = F;
1013
0
        FirstFieldOffset = FOffset;
1014
1.48k
      } else if (FOffset >= LastFieldOffset) {
1015
1.48k
        LastField = F;
1016
1.48k
        LastFieldOffset = FOffset;
1017
1.48k
      }
1018
1.48k
    }
1019
1020
    const VarDecl *SrcRec;
1021
    const ASTRecordLayout &RecLayout;
1022
    FieldDecl *FirstField;
1023
    FieldDecl *LastField;
1024
    uint64_t FirstFieldOffset, LastFieldOffset;
1025
    unsigned LastAddedFieldIndex;
1026
  };
1027
1028
  class ConstructorMemcpyizer : public FieldMemcpyizer {
1029
  private:
1030
    /// Get source argument for copy constructor. Returns null if not a copy
1031
    /// constructor.
1032
    static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF,
1033
                                               const CXXConstructorDecl *CD,
1034
30.3k
                                               FunctionArgList &Args) {
1035
30.3k
      if (CD->isCopyOrMoveConstructor() && 
CD->isDefaulted()1.64k
)
1036
606
        return Args[CGF.CGM.getCXXABI().getSrcArgforCopyCtor(CD, Args)];
1037
29.7k
      return nullptr;
1038
29.7k
    }
1039
1040
    // Returns true if a CXXCtorInitializer represents a member initialization
1041
    // that can be rolled into a memcpy.
1042
34.1k
    bool isMemberInitMemcpyable(CXXCtorInitializer *MemberInit) const {
1043
34.1k
      if (!MemcpyableCtor)
1044
31.6k
        return false;
1045
2.47k
      FieldDecl *Field = MemberInit->getMember();
1046
2.47k
      assert(Field && "No field for member init.");
1047
2.47k
      QualType FieldType = Field->getType();
1048
2.47k
      CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
1049
2.47k
1050
2.47k
      // Bail out on non-memcpyable, not-trivially-copyable members.
1051
2.47k
      if (!(CE && 
isMemcpyEquivalentSpecialMember(CE->getConstructor())967
) &&
1052
2.47k
          
!(2.35k
FieldType.isTriviallyCopyableType(CGF.getContext())2.35k
||
1053
2.35k
            
FieldType->isReferenceType()898
))
1054
890
        return false;
1055
1.58k
1056
1.58k
      // Bail out on volatile fields.
1057
1.58k
      if (!isMemcpyableField(Field))
1058
6
        return false;
1059
1.57k
1060
1.57k
      // Otherwise we're good.
1061
1.57k
      return true;
1062
1.57k
    }
1063
1064
  public:
1065
    ConstructorMemcpyizer(CodeGenFunction &CGF, const CXXConstructorDecl *CD,
1066
                          FunctionArgList &Args)
1067
      : FieldMemcpyizer(CGF, CD->getParent(), getTrivialCopySource(CGF, CD, Args)),
1068
        ConstructorDecl(CD),
1069
        MemcpyableCtor(CD->isDefaulted() &&
1070
                       CD->isCopyOrMoveConstructor() &&
1071
                       CGF.getLangOpts().getGC() == LangOptions::NonGC),
1072
30.3k
        Args(Args) { }
1073
1074
34.1k
    void addMemberInitializer(CXXCtorInitializer *MemberInit) {
1075
34.1k
      if (isMemberInitMemcpyable(MemberInit)) {
1076
1.57k
        AggregatedInits.push_back(MemberInit);
1077
1.57k
        addMemcpyableField(MemberInit->getMember());
1078
32.5k
      } else {
1079
32.5k
        emitAggregatedInits();
1080
32.5k
        EmitMemberInitializer(CGF, ConstructorDecl->getParent(), MemberInit,
1081
32.5k
                              ConstructorDecl, Args);
1082
32.5k
      }
1083
34.1k
    }
1084
1085
62.9k
    void emitAggregatedInits() {
1086
62.9k
      if (AggregatedInits.size() <= 1) {
1087
62.7k
        // This memcpy is too small to be worthwhile. Fall back on default
1088
62.7k
        // codegen.
1089
62.7k
        if (!AggregatedInits.empty()) {
1090
193
          CopyingValueRepresentation CVR(CGF);
1091
193
          EmitMemberInitializer(CGF, ConstructorDecl->getParent(),
1092
193
                                AggregatedInits[0], ConstructorDecl, Args);
1093
193
          AggregatedInits.clear();
1094
193
        }
1095
62.7k
        reset();
1096
62.7k
        return;
1097
62.7k
      }
1098
210
1099
210
      pushEHDestructors();
1100
210
      emitMemcpy();
1101
210
      AggregatedInits.clear();
1102
210
    }
1103
1104
210
    void pushEHDestructors() {
1105
210
      Address ThisPtr = CGF.LoadCXXThisAddress();
1106
210
      QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
1107
210
      LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy);
1108
210
1109
1.59k
      for (unsigned i = 0; i < AggregatedInits.size(); 
++i1.38k
) {
1110
1.38k
        CXXCtorInitializer *MemberInit = AggregatedInits[i];
1111
1.38k
        QualType FieldType = MemberInit->getAnyMember()->getType();
1112
1.38k
        QualType::DestructionKind dtorKind = FieldType.isDestructedType();
1113
1.38k
        if (!CGF.needsEHCleanup(dtorKind))
1114
1.37k
          continue;
1115
7
        LValue FieldLHS = LHS;
1116
7
        EmitLValueForAnyFieldInitialization(CGF, MemberInit, FieldLHS);
1117
7
        CGF.pushEHDestroy(dtorKind, FieldLHS.getAddress(), FieldType);
1118
7
      }
1119
210
    }
1120
1121
30.3k
    void finish() {
1122
30.3k
      emitAggregatedInits();
1123
30.3k
    }
1124
1125
  private:
1126
    const CXXConstructorDecl *ConstructorDecl;
1127
    bool MemcpyableCtor;
1128
    FunctionArgList &Args;
1129
    SmallVector<CXXCtorInitializer*, 16> AggregatedInits;
1130
  };
1131
1132
  class AssignmentMemcpyizer : public FieldMemcpyizer {
1133
  private:
1134
    // Returns the memcpyable field copied by the given statement, if one
1135
    // exists. Otherwise returns null.
1136
1.92k
    FieldDecl *getMemcpyableField(Stmt *S) {
1137
1.92k
      if (!AssignmentsMemcpyable)
1138
11
        return nullptr;
1139
1.91k
      if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) {
1140
485
        // Recognise trivial assignments.
1141
485
        if (BO->getOpcode() != BO_Assign)
1142
0
          return nullptr;
1143
485
        MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS());
1144
485
        if (!ME)
1145
0
          return nullptr;
1146
485
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
1147
485
        if (!Field || !isMemcpyableField(Field))
1148
9
          return nullptr;
1149
476
        Stmt *RHS = BO->getRHS();
1150
476
        if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS))
1151
476
          RHS = EC->getSubExpr();
1152
476
        if (!RHS)
1153
0
          return nullptr;
1154
476
        if (MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS)) {
1155
474
          if (ME2->getMemberDecl() == Field)
1156
474
            return Field;
1157
2
        }
1158
2
        return nullptr;
1159
1.42k
      } else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) {
1160
618
        CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl());
1161
618
        if (!(MD && isMemcpyEquivalentSpecialMember(MD)))
1162
445
          return nullptr;
1163
173
        MemberExpr *IOA = dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument());
1164
173
        if (!IOA)
1165
145
          return nullptr;
1166
28
        FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl());
1167
28
        if (!Field || !isMemcpyableField(Field))
1168
0
          return nullptr;
1169
28
        MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0));
1170
28
        if (!Arg0 || 
Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl())27
)
1171
1
          return nullptr;
1172
27
        return Field;
1173
811
      } else if (CallExpr *CE = dyn_cast<CallExpr>(S)) {
1174
21
        FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl());
1175
21
        if (!FD || FD->getBuiltinID() != Builtin::BI__builtin_memcpy)
1176
0
          return nullptr;
1177
21
        Expr *DstPtr = CE->getArg(0);
1178
21
        if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr))
1179
21
          DstPtr = DC->getSubExpr();
1180
21
        UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr);
1181
21
        if (!DUO || DUO->getOpcode() != UO_AddrOf)
1182
0
          return nullptr;
1183
21
        MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr());
1184
21
        if (!ME)
1185
0
          return nullptr;
1186
21
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
1187
21
        if (!Field || !isMemcpyableField(Field))
1188
0
          return nullptr;
1189
21
        Expr *SrcPtr = CE->getArg(1);
1190
21
        if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr))
1191
21
          SrcPtr = SC->getSubExpr();
1192
21
        UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr);
1193
21
        if (!SUO || SUO->getOpcode() != UO_AddrOf)
1194
0
          return nullptr;
1195
21
        MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr());
1196
21
        if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl()))
1197
0
          return nullptr;
1198
21
        return Field;
1199
21
      }
1200
790
1201
790
      return nullptr;
1202
790
    }
1203
1204
    bool AssignmentsMemcpyable;
1205
    SmallVector<Stmt*, 16> AggregatedStmts;
1206
1207
  public:
1208
    AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD,
1209
                         FunctionArgList &Args)
1210
      : FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]),
1211
771
        AssignmentsMemcpyable(CGF.getLangOpts().getGC() == LangOptions::NonGC) {
1212
771
      assert(Args.size() == 2);
1213
771
    }
1214
1215
1.92k
    void emitAssignment(Stmt *S) {
1216
1.92k
      FieldDecl *F = getMemcpyableField(S);
1217
1.92k
      if (F) {
1218
522
        addMemcpyableField(F);
1219
522
        AggregatedStmts.push_back(S);
1220
1.40k
      } else {
1221
1.40k
        emitAggregatedStmts();
1222
1.40k
        CGF.EmitStmt(S);
1223
1.40k
      }
1224
1.92k
    }
1225
1226
2.17k
    void emitAggregatedStmts() {
1227
2.17k
      if (AggregatedStmts.size() <= 1) {
1228
2.07k
        if (!AggregatedStmts.empty()) {
1229
119
          CopyingValueRepresentation CVR(CGF);
1230
119
          CGF.EmitStmt(AggregatedStmts[0]);
1231
119
        }
1232
2.07k
        reset();
1233
2.07k
      }
1234
2.17k
1235
2.17k
      emitMemcpy();
1236
2.17k
      AggregatedStmts.clear();
1237
2.17k
    }
1238
1239
771
    void finish() {
1240
771
      emitAggregatedStmts();
1241
771
    }
1242
  };
1243
} // end anonymous namespace
1244
1245
27
static bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) {
1246
27
  const Type *BaseType = BaseInit->getBaseClass();
1247
27
  const auto *BaseClassDecl =
1248
27
          cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());
1249
27
  return BaseClassDecl->isDynamicClass();
1250
27
}
1251
1252
/// EmitCtorPrologue - This routine generates necessary code to initialize
1253
/// base classes and non-static data members belonging to this constructor.
1254
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
1255
                                       CXXCtorType CtorType,
1256
30.4k
                                       FunctionArgList &Args) {
1257
30.4k
  if (CD->isDelegatingConstructor())
1258
49
    return EmitDelegatingCXXConstructorCall(CD, Args);
1259
30.3k
1260
30.3k
  const CXXRecordDecl *ClassDecl = CD->getParent();
1261
30.3k
1262
30.3k
  CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
1263
30.3k
                                          E = CD->init_end();
1264
30.3k
1265
30.3k
  // Virtual base initializers first, if any. They aren't needed if:
1266
30.3k
  // - This is a base ctor variant
1267
30.3k
  // - There are no vbases
1268
30.3k
  // - The class is abstract, so a complete object of it cannot be constructed
1269
30.3k
  //
1270
30.3k
  // The check for an abstract class is necessary because sema may not have
1271
30.3k
  // marked virtual base destructors referenced.
1272
30.3k
  bool ConstructVBases = CtorType != Ctor_Base &&
1273
30.3k
                         
ClassDecl->getNumVBases() != 01.47k
&&
1274
30.3k
                         
!ClassDecl->isAbstract()671
;
1275
30.3k
1276
30.3k
  // In the Microsoft C++ ABI, there are no constructor variants. Instead, the
1277
30.3k
  // constructor of a class with virtual bases takes an additional parameter to
1278
30.3k
  // conditionally construct the virtual bases. Emit that check here.
1279
30.3k
  llvm::BasicBlock *BaseCtorContinueBB = nullptr;
1280
30.3k
  if (ConstructVBases &&
1281
30.3k
      
!CGM.getTarget().getCXXABI().hasConstructorVariants()668
) {
1282
377
    BaseCtorContinueBB =
1283
377
        CGM.getCXXABI().EmitCtorCompleteObjectHandler(*this, ClassDecl);
1284
377
    assert(BaseCtorContinueBB);
1285
377
  }
1286
30.3k
1287
30.3k
  llvm::Value *const OldThis = CXXThisValue;
1288
31.5k
  for (; B != E && 
(*B)->isBaseInitializer()25.5k
&&
(*B)->isBaseVirtual()12.4k
;
B++1.13k
) {
1289
1.13k
    if (!ConstructVBases)
1290
283
      continue;
1291
856
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
1292
856
        
CGM.getCodeGenOpts().OptimizationLevel > 03
&&
1293
856
        
isInitializerOfDynamicClass(*B)3
)
1294
1
      CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
1295
856
    EmitBaseInitializer(*this, ClassDecl, *B);
1296
856
  }
1297
30.3k
1298
30.3k
  if (BaseCtorContinueBB) {
1299
377
    // Complete object handler should continue to the remaining initializers.
1300
377
    Builder.CreateBr(BaseCtorContinueBB);
1301
377
    EmitBlock(BaseCtorContinueBB);
1302
377
  }
1303
30.3k
1304
30.3k
  // Then, non-virtual base initializers.
1305
44.0k
  for (; B != E && 
(*B)->isBaseInitializer()30.9k
;
B++13.6k
) {
1306
13.6k
    assert(!(*B)->isBaseVirtual());
1307
13.6k
1308
13.6k
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
1309
13.6k
        
CGM.getCodeGenOpts().OptimizationLevel > 024
&&
1310
13.6k
        
isInitializerOfDynamicClass(*B)24
)
1311
23
      CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
1312
13.6k
    EmitBaseInitializer(*this, ClassDecl, *B);
1313
13.6k
  }
1314
30.3k
1315
30.3k
  CXXThisValue = OldThis;
1316
30.3k
1317
30.3k
  InitializeVTablePointers(ClassDecl);
1318
30.3k
1319
30.3k
  // And finally, initialize class members.
1320
30.3k
  FieldConstructionScope FCS(*this, LoadCXXThisAddress());
1321
30.3k
  ConstructorMemcpyizer CM(*this, CD, Args);
1322
64.5k
  for (; B != E; 
B++34.1k
) {
1323
34.1k
    CXXCtorInitializer *Member = (*B);
1324
34.1k
    assert(!Member->isBaseInitializer());
1325
34.1k
    assert(Member->isAnyMemberInitializer() &&
1326
34.1k
           "Delegating initializer on non-delegating constructor");
1327
34.1k
    CM.addMemberInitializer(Member);
1328
34.1k
  }
1329
30.3k
  CM.finish();
1330
30.3k
}
1331
1332
static bool
1333
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);
1334
1335
static bool
1336
HasTrivialDestructorBody(ASTContext &Context,
1337
                         const CXXRecordDecl *BaseClassDecl,
1338
                         const CXXRecordDecl *MostDerivedClassDecl)
1339
908
{
1340
908
  // If the destructor is trivial we don't have to check anything else.
1341
908
  if (BaseClassDecl->hasTrivialDestructor())
1342
428
    return true;
1343
480
1344
480
  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
1345
345
    return false;
1346
135
1347
135
  // Check fields.
1348
135
  for (const auto *Field : BaseClassDecl->fields())
1349
144
    if (!FieldHasTrivialDestructorBody(Context, Field))
1350
97
      return false;
1351
135
1352
135
  // Check non-virtual bases.
1353
135
  
for (const auto &I : BaseClassDecl->bases())38
{
1354
30
    if (I.isVirtual())
1355
1
      continue;
1356
29
1357
29
    const CXXRecordDecl *NonVirtualBase =
1358
29
      cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
1359
29
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
1360
29
                                  MostDerivedClassDecl))
1361
16
      return false;
1362
29
  }
1363
38
1364
38
  
if (22
BaseClassDecl == MostDerivedClassDecl22
) {
1365
15
    // Check virtual bases.
1366
15
    for (const auto &I : BaseClassDecl->vbases()) {
1367
1
      const CXXRecordDecl *VirtualBase =
1368
1
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
1369
1
      if (!HasTrivialDestructorBody(Context, VirtualBase,
1370
1
                                    MostDerivedClassDecl))
1371
1
        return false;
1372
1
    }
1373
15
  }
1374
22
1375
22
  
return true21
;
1376
22
}
1377
1378
static bool
1379
FieldHasTrivialDestructorBody(ASTContext &Context,
1380
                                          const FieldDecl *Field)
1381
2.43k
{
1382
2.43k
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());
1383
2.43k
1384
2.43k
  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
1385
2.43k
  if (!RT)
1386
1.55k
    return true;
1387
881
1388
881
  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
1389
881
1390
881
  // The destructor for an implicit anonymous union member is never invoked.
1391
881
  if (FieldClassDecl->isUnion() && 
FieldClassDecl->isAnonymousStructOrUnion()3
)
1392
3
    return false;
1393
878
1394
878
  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
1395
878
}
1396
1397
/// CanSkipVTablePointerInitialization - Check whether we need to initialize
1398
/// any vtable pointers before calling this destructor.
1399
static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF,
1400
11.1k
                                               const CXXDestructorDecl *Dtor) {
1401
11.1k
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
1402
11.1k
  if (!ClassDecl->isDynamicClass())
1403
8.51k
    return true;
1404
2.63k
1405
2.63k
  if (!Dtor->hasTrivialBody())
1406
391
    return false;
1407
2.24k
1408
2.24k
  // Check the fields.
1409
2.24k
  for (const auto *Field : ClassDecl->fields())
1410
2.21k
    if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field))
1411
342
      return false;
1412
2.24k
1413
2.24k
  
return true1.90k
;
1414
2.24k
}
1415
1416
/// EmitDestructorBody - Emits the body of the current destructor.
1417
23.2k
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
1418
23.2k
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
1419
23.2k
  CXXDtorType DtorType = CurGD.getDtorType();
1420
23.2k
1421
23.2k
  // For an abstract class, non-base destructors are never used (and can't
1422
23.2k
  // be emitted in general, because vbase dtors may not have been validated
1423
23.2k
  // by Sema), but the Itanium ABI doesn't make them optional and Clang may
1424
23.2k
  // in fact emit references to them from other compilations, so emit them
1425
23.2k
  // as functions containing a trap instruction.
1426
23.2k
  if (DtorType != Dtor_Base && 
Dtor->getParent()->isAbstract()12.1k
) {
1427
937
    llvm::CallInst *TrapCall = EmitTrapCall(llvm::Intrinsic::trap);
1428
937
    TrapCall->setDoesNotReturn();
1429
937
    TrapCall->setDoesNotThrow();
1430
937
    Builder.CreateUnreachable();
1431
937
    Builder.ClearInsertionPoint();
1432
937
    return;
1433
937
  }
1434
22.3k
1435
22.3k
  Stmt *Body = Dtor->getBody();
1436
22.3k
  if (Body)
1437
22.2k
    incrementProfileCounter(Body);
1438
22.3k
1439
22.3k
  // The call to operator delete in a deleting destructor happens
1440
22.3k
  // outside of the function-try-block, which means it's always
1441
22.3k
  // possible to delegate the destructor body to the complete
1442
22.3k
  // destructor.  Do so.
1443
22.3k
  if (DtorType == Dtor_Deleting) {
1444
1.79k
    RunCleanupsScope DtorEpilogue(*this);
1445
1.79k
    EnterDtorCleanups(Dtor, Dtor_Deleting);
1446
1.79k
    if (HaveInsertPoint()) {
1447
1.79k
      QualType ThisTy = Dtor->getThisObjectType();
1448
1.79k
      EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
1449
1.79k
                            /*Delegating=*/false, LoadCXXThisAddress(), ThisTy);
1450
1.79k
    }
1451
1.79k
    return;
1452
1.79k
  }
1453
20.5k
1454
20.5k
  // If the body is a function-try-block, enter the try before
1455
20.5k
  // anything else.
1456
20.5k
  bool isTryBody = (Body && 
isa<CXXTryStmt>(Body)20.5k
);
1457
20.5k
  if (isTryBody)
1458
12
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
1459
20.5k
  EmitAsanPrologueOrEpilogue(false);
1460
20.5k
1461
20.5k
  // Enter the epilogue cleanups.
1462
20.5k
  RunCleanupsScope DtorEpilogue(*this);
1463
20.5k
1464
20.5k
  // If this is the complete variant, just invoke the base variant;
1465
20.5k
  // the epilogue will destruct the virtual bases.  But we can't do
1466
20.5k
  // this optimization if the body is a function-try-block, because
1467
20.5k
  // we'd introduce *two* handler blocks.  In the Microsoft ABI, we
1468
20.5k
  // always delegate because we might not have a definition in this TU.
1469
20.5k
  switch (DtorType) {
1470
20.5k
  
case Dtor_Comdat: 0
llvm_unreachable0
("not expecting a COMDAT");
1471
20.5k
  
case Dtor_Deleting: 0
llvm_unreachable0
("already handled deleting case");
1472
20.5k
1473
20.5k
  case Dtor_Complete:
1474
9.41k
    assert((Body || getTarget().getCXXABI().isMicrosoft()) &&
1475
9.41k
           "can't emit a dtor without a body for non-Microsoft ABIs");
1476
9.41k
1477
9.41k
    // Enter the cleanup scopes for virtual bases.
1478
9.41k
    EnterDtorCleanups(Dtor, Dtor_Complete);
1479
9.41k
1480
9.41k
    if (!isTryBody) {
1481
9.40k
      QualType ThisTy = Dtor->getThisObjectType();
1482
9.40k
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
1483
9.40k
                            /*Delegating=*/false, LoadCXXThisAddress(), ThisTy);
1484
9.40k
      break;
1485
9.40k
    }
1486
6
1487
6
    // Fallthrough: act like we're in the base variant.
1488
6
    LLVM_FALLTHROUGH;
1489
6
1490
11.1k
  case Dtor_Base:
1491
11.1k
    assert(Body);
1492
11.1k
1493
11.1k
    // Enter the cleanup scopes for fields and non-virtual bases.
1494
11.1k
    EnterDtorCleanups(Dtor, Dtor_Base);
1495
11.1k
1496
11.1k
    // Initialize the vtable pointers before entering the body.
1497
11.1k
    if (!CanSkipVTablePointerInitialization(*this, Dtor)) {
1498
733
      // Insert the llvm.launder.invariant.group intrinsic before initializing
1499
733
      // the vptrs to cancel any previous assumptions we might have made.
1500
733
      if (CGM.getCodeGenOpts().StrictVTablePointers &&
1501
733
          
CGM.getCodeGenOpts().OptimizationLevel > 02
)
1502
2
        CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
1503
733
      InitializeVTablePointers(Dtor->getParent());
1504
733
    }
1505
11.1k
1506
11.1k
    if (isTryBody)
1507
12
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
1508
11.1k
    else if (Body)
1509
11.1k
      EmitStmt(Body);
1510
0
    else {
1511
0
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
1512
0
      // nothing to do besides what's in the epilogue
1513
0
    }
1514
11.1k
    // -fapple-kext must inline any call to this dtor into
1515
11.1k
    // the caller's body.
1516
11.1k
    if (getLangOpts().AppleKext)
1517
4
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
1518
11.1k
1519
11.1k
    break;
1520
20.5k
  }
1521
20.5k
1522
20.5k
  // Jump out through the epilogue cleanups.
1523
20.5k
  DtorEpilogue.ForceCleanup();
1524
20.5k
1525
20.5k
  // Exit the try if applicable.
1526
20.5k
  if (isTryBody)
1527
12
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
1528
20.5k
}
1529
1530
771
void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) {
1531
771
  const CXXMethodDecl *AssignOp = cast<CXXMethodDecl>(CurGD.getDecl());
1532
771
  const Stmt *RootS = AssignOp->getBody();
1533
771
  assert(isa<CompoundStmt>(RootS) &&
1534
771
         "Body of an implicit assignment operator should be compound stmt.");
1535
771
  const CompoundStmt *RootCS = cast<CompoundStmt>(RootS);
1536
771
1537
771
  LexicalScope Scope(*this, RootCS->getSourceRange());
1538
771
1539
771
  incrementProfileCounter(RootCS);
1540
771
  AssignmentMemcpyizer AM(*this, AssignOp, Args);
1541
771
  for (auto *I : RootCS->body())
1542
1.92k
    AM.emitAssignment(I);
1543
771
  AM.finish();
1544
771
}
1545
1546
namespace {
1547
  llvm::Value *LoadThisForDtorDelete(CodeGenFunction &CGF,
1548
2.01k
                                     const CXXDestructorDecl *DD) {
1549
2.01k
    if (Expr *ThisArg = DD->getOperatorDeleteThisArg())
1550
4
      return CGF.EmitScalarExpr(ThisArg);
1551
2.00k
    return CGF.LoadCXXThis();
1552
2.00k
  }
1553
1554
  /// Call the operator delete associated with the current destructor.
1555
  struct CallDtorDelete final : EHScopeStack::Cleanup {
1556
1.59k
    CallDtorDelete() {}
1557
1558
1.81k
    void Emit(CodeGenFunction &CGF, Flags flags) override {
1559
1.81k
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
1560
1.81k
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
1561
1.81k
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
1562
1.81k
                         LoadThisForDtorDelete(CGF, Dtor),
1563
1.81k
                         CGF.getContext().getTagDeclType(ClassDecl));
1564
1.81k
    }
1565
  };
1566
1567
  void EmitConditionalDtorDeleteCall(CodeGenFunction &CGF,
1568
                                     llvm::Value *ShouldDeleteCondition,
1569
199
                                     bool ReturnAfterDelete) {
1570
199
    llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete");
1571
199
    llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue");
1572
199
    llvm::Value *ShouldCallDelete
1573
199
      = CGF.Builder.CreateIsNull(ShouldDeleteCondition);
1574
199
    CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB);
1575
199
1576
199
    CGF.EmitBlock(callDeleteBB);
1577
199
    const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
1578
199
    const CXXRecordDecl *ClassDecl = Dtor->getParent();
1579
199
    CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
1580
199
                       LoadThisForDtorDelete(CGF, Dtor),
1581
199
                       CGF.getContext().getTagDeclType(ClassDecl));
1582
199
    assert(Dtor->getOperatorDelete()->isDestroyingOperatorDelete() ==
1583
199
               ReturnAfterDelete &&
1584
199
           "unexpected value for ReturnAfterDelete");
1585
199
    if (ReturnAfterDelete)
1586
2
      CGF.EmitBranchThroughCleanup(CGF.ReturnBlock);
1587
197
    else
1588
197
      CGF.Builder.CreateBr(continueBB);
1589
199
1590
199
    CGF.EmitBlock(continueBB);
1591
199
  }
1592
1593
  struct CallDtorDeleteConditional final : EHScopeStack::Cleanup {
1594
    llvm::Value *ShouldDeleteCondition;
1595
1596
  public:
1597
    CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition)
1598
197
        : ShouldDeleteCondition(ShouldDeleteCondition) {
1599
197
      assert(ShouldDeleteCondition != nullptr);
1600
197
    }
1601
1602
197
    void Emit(CodeGenFunction &CGF, Flags flags) override {
1603
197
      EmitConditionalDtorDeleteCall(CGF, ShouldDeleteCondition,
1604
197
                                    /*ReturnAfterDelete*/false);
1605
197
    }
1606
  };
1607
1608
  class DestroyField  final : public EHScopeStack::Cleanup {
1609
    const FieldDecl *field;
1610
    CodeGenFunction::Destroyer *destroyer;
1611
    bool useEHCleanupForArray;
1612
1613
  public:
1614
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
1615
                 bool useEHCleanupForArray)
1616
        : field(field), destroyer(destroyer),
1617
3.75k
          useEHCleanupForArray(useEHCleanupForArray) {}
1618
1619
4.10k
    void Emit(CodeGenFunction &CGF, Flags flags) override {
1620
4.10k
      // Find the address of the field.
1621
4.10k
      Address thisValue = CGF.LoadCXXThisAddress();
1622
4.10k
      QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
1623
4.10k
      LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
1624
4.10k
      LValue LV = CGF.EmitLValueForField(ThisLV, field);
1625
4.10k
      assert(LV.isSimple());
1626
4.10k
1627
4.10k
      CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
1628
4.10k
                      flags.isForNormalCleanup() && 
useEHCleanupForArray3.75k
);
1629
4.10k
    }
1630
  };
1631
1632
 static void EmitSanitizerDtorCallback(CodeGenFunction &CGF, llvm::Value *Ptr,
1633
56
             CharUnits::QuantityType PoisonSize) {
1634
56
   CodeGenFunction::SanitizerScope SanScope(&CGF);
1635
56
   // Pass in void pointer and size of region as arguments to runtime
1636
56
   // function
1637
56
   llvm::Value *Args[] = {CGF.Builder.CreateBitCast(Ptr, CGF.VoidPtrTy),
1638
56
                          llvm::ConstantInt::get(CGF.SizeTy, PoisonSize)};
1639
56
1640
56
   llvm::Type *ArgTypes[] = {CGF.VoidPtrTy, CGF.SizeTy};
1641
56
1642
56
   llvm::FunctionType *FnType =
1643
56
       llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false);
1644
56
   llvm::FunctionCallee Fn =
1645
56
       CGF.CGM.CreateRuntimeFunction(FnType, "__sanitizer_dtor_callback");
1646
56
   CGF.EmitNounwindRuntimeCall(Fn, Args);
1647
56
 }
1648
1649
  class SanitizeDtorMembers final : public EHScopeStack::Cleanup {
1650
    const CXXDestructorDecl *Dtor;
1651
1652
  public:
1653
38
    SanitizeDtorMembers(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}
1654
1655
    // Generate function call for handling object poisoning.
1656
    // Disables tail call elimination, to prevent the current stack frame
1657
    // from disappearing from the stack trace.
1658
38
    void Emit(CodeGenFunction &CGF, Flags flags) override {
1659
38
      const ASTRecordLayout &Layout =
1660
38
          CGF.getContext().getASTRecordLayout(Dtor->getParent());
1661
38
1662
38
      // Nothing to poison.
1663
38
      if (Layout.getFieldCount() == 0)
1664
0
        return;
1665
38
1666
38
      // Prevent the current stack frame from disappearing from the stack trace.
1667
38
      CGF.CurFn->addFnAttr("disable-tail-calls", "true");
1668
38
1669
38
      // Construct pointer to region to begin poisoning, and calculate poison
1670
38
      // size, so that only members declared in this class are poisoned.
1671
38
      ASTContext &Context = CGF.getContext();
1672
38
      unsigned fieldIndex = 0;
1673
38
      int startIndex = -1;
1674
38
      // RecordDecl::field_iterator Field;
1675
68
      for (const FieldDecl *Field : Dtor->getParent()->fields()) {
1676
68
        // Poison field if it is trivial
1677
68
        if (FieldHasTrivialDestructorBody(Context, Field)) {
1678
62
          // Start sanitizing at this field
1679
62
          if (startIndex < 0)
1680
40
            startIndex = fieldIndex;
1681
62
1682
62
          // Currently on the last field, and it must be poisoned with the
1683
62
          // current block.
1684
62
          if (fieldIndex == Layout.getFieldCount() - 1) {
1685
36
            PoisonMembers(CGF, startIndex, Layout.getFieldCount());
1686
36
          }
1687
62
        } else 
if (6
startIndex >= 06
) {
1688
4
          // No longer within a block of memory to poison, so poison the block
1689
4
          PoisonMembers(CGF, startIndex, fieldIndex);
1690
4
          // Re-set the start index
1691
4
          startIndex = -1;
1692
4
        }
1693
68
        fieldIndex += 1;
1694
68
      }
1695
38
    }
1696
1697
  private:
1698
    /// \param layoutStartOffset index of the ASTRecordLayout field to
1699
    ///     start poisoning (inclusive)
1700
    /// \param layoutEndOffset index of the ASTRecordLayout field to
1701
    ///     end poisoning (exclusive)
1702
    void PoisonMembers(CodeGenFunction &CGF, unsigned layoutStartOffset,
1703
40
                     unsigned layoutEndOffset) {
1704
40
      ASTContext &Context = CGF.getContext();
1705
40
      const ASTRecordLayout &Layout =
1706
40
          Context.getASTRecordLayout(Dtor->getParent());
1707
40
1708
40
      llvm::ConstantInt *OffsetSizePtr = llvm::ConstantInt::get(
1709
40
          CGF.SizeTy,
1710
40
          Context.toCharUnitsFromBits(Layout.getFieldOffset(layoutStartOffset))
1711
40
              .getQuantity());
1712
40
1713
40
      llvm::Value *OffsetPtr = CGF.Builder.CreateGEP(
1714
40
          CGF.Builder.CreateBitCast(CGF.LoadCXXThis(), CGF.Int8PtrTy),
1715
40
          OffsetSizePtr);
1716
40
1717
40
      CharUnits::QuantityType PoisonSize;
1718
40
      if (layoutEndOffset >= Layout.getFieldCount()) {
1719
36
        PoisonSize = Layout.getNonVirtualSize().getQuantity() -
1720
36
                     Context.toCharUnitsFromBits(
1721
36
                                Layout.getFieldOffset(layoutStartOffset))
1722
36
                         .getQuantity();
1723
36
      } else {
1724
4
        PoisonSize = Context.toCharUnitsFromBits(
1725
4
                                Layout.getFieldOffset(layoutEndOffset) -
1726
4
                                Layout.getFieldOffset(layoutStartOffset))
1727
4
                         .getQuantity();
1728
4
      }
1729
40
1730
40
      if (PoisonSize == 0)
1731
2
        return;
1732
38
1733
38
      EmitSanitizerDtorCallback(CGF, OffsetPtr, PoisonSize);
1734
38
    }
1735
  };
1736
1737
 class SanitizeDtorVTable final : public EHScopeStack::Cleanup {
1738
    const CXXDestructorDecl *Dtor;
1739
1740
  public:
1741
18
    SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}
1742
1743
    // Generate function call for handling vtable pointer poisoning.
1744
18
    void Emit(CodeGenFunction &CGF, Flags flags) override {
1745
18
      assert(Dtor->getParent()->isDynamicClass());
1746
18
      (void)Dtor;
1747
18
      ASTContext &Context = CGF.getContext();
1748
18
      // Poison vtable and vtable ptr if they exist for this class.
1749
18
      llvm::Value *VTablePtr = CGF.LoadCXXThis();
1750
18
1751
18
      CharUnits::QuantityType PoisonSize =
1752
18
          Context.toCharUnitsFromBits(CGF.PointerWidthInBits).getQuantity();
1753
18
      // Pass in void pointer and size of region as arguments to runtime
1754
18
      // function
1755
18
      EmitSanitizerDtorCallback(CGF, VTablePtr, PoisonSize);
1756
18
    }
1757
 };
1758
} // end anonymous namespace
1759
1760
/// Emit all code that comes at the end of class's
1761
/// destructor. This is to call destructors on members and base classes
1762
/// in reverse order of their construction.
1763
///
1764
/// For a deleting destructor, this also handles the case where a destroying
1765
/// operator delete completely overrides the definition.
1766
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
1767
22.3k
                                        CXXDtorType DtorType) {
1768
22.3k
  assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) &&
1769
22.3k
         "Should not emit dtor epilogue for non-exported trivial dtor!");
1770
22.3k
1771
22.3k
  // The deleting-destructor phase just needs to call the appropriate
1772
22.3k
  // operator delete that Sema picked up.
1773
22.3k
  if (DtorType == Dtor_Deleting) {
1774
1.79k
    assert(DD->getOperatorDelete() &&
1775
1.79k
           "operator delete missing - EnterDtorCleanups");
1776
1.79k
    if (CXXStructorImplicitParamValue) {
1777
199
      // If there is an implicit param to the deleting dtor, it's a boolean
1778
199
      // telling whether this is a deleting destructor.
1779
199
      if (DD->getOperatorDelete()->isDestroyingOperatorDelete())
1780
2
        EmitConditionalDtorDeleteCall(*this, CXXStructorImplicitParamValue,
1781
2
                                      /*ReturnAfterDelete*/true);
1782
197
      else
1783
197
        EHStack.pushCleanup<CallDtorDeleteConditional>(
1784
197
            NormalAndEHCleanup, CXXStructorImplicitParamValue);
1785
1.59k
    } else {
1786
1.59k
      if (DD->getOperatorDelete()->isDestroyingOperatorDelete()) {
1787
2
        const CXXRecordDecl *ClassDecl = DD->getParent();
1788
2
        EmitDeleteCall(DD->getOperatorDelete(),
1789
2
                       LoadThisForDtorDelete(*this, DD),
1790
2
                       getContext().getTagDeclType(ClassDecl));
1791
2
        EmitBranchThroughCleanup(ReturnBlock);
1792
1.59k
      } else {
1793
1.59k
        EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
1794
1.59k
      }
1795
1.59k
    }
1796
1.79k
    return;
1797
1.79k
  }
1798
20.5k
1799
20.5k
  const CXXRecordDecl *ClassDecl = DD->getParent();
1800
20.5k
1801
20.5k
  // Unions have no bases and do not call field destructors.
1802
20.5k
  if (ClassDecl->isUnion())
1803
4
    return;
1804
20.5k
1805
20.5k
  // The complete-destructor phase just destructs all the virtual bases.
1806
20.5k
  if (DtorType == Dtor_Complete) {
1807
9.41k
    // Poison the vtable pointer such that access after the base
1808
9.41k
    // and member destructors are invoked is invalid.
1809
9.41k
    if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
1810
9.41k
        
SanOpts.has(SanitizerKind::Memory)39
&&
ClassDecl->getNumVBases()38
&&
1811
9.41k
        
ClassDecl->isPolymorphic()4
)
1812
4
      EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);
1813
9.41k
1814
9.41k
    // We push them in the forward order so that they'll be popped in
1815
9.41k
    // the reverse order.
1816
9.41k
    for (const auto &Base : ClassDecl->vbases()) {
1817
306
      CXXRecordDecl *BaseClassDecl
1818
306
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());
1819
306
1820
306
      // Ignore trivial destructors.
1821
306
      if (BaseClassDecl->hasTrivialDestructor())
1822
44
        continue;
1823
262
1824
262
      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
1825
262
                                        BaseClassDecl,
1826
262
                                        /*BaseIsVirtual*/ true);
1827
262
    }
1828
9.41k
1829
9.41k
    return;
1830
9.41k
  }
1831
11.1k
1832
11.1k
  assert(DtorType == Dtor_Base);
1833
11.1k
  // Poison the vtable pointer if it has no virtual bases, but inherits
1834
11.1k
  // virtual functions.
1835
11.1k
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
1836
11.1k
      
SanOpts.has(SanitizerKind::Memory)39
&&
!ClassDecl->getNumVBases()38
&&
1837
11.1k
      
ClassDecl->isPolymorphic()34
)
1838
14
    EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);
1839
11.1k
1840
11.1k
  // Destroy non-virtual bases.
1841
11.1k
  for (const auto &Base : ClassDecl->bases()) {
1842
4.99k
    // Ignore virtual bases.
1843
4.99k
    if (Base.isVirtual())
1844
105
      continue;
1845
4.88k
1846
4.88k
    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();
1847
4.88k
1848
4.88k
    // Ignore trivial destructors.
1849
4.88k
    if (BaseClassDecl->hasTrivialDestructor())
1850
1.71k
      continue;
1851
3.17k
1852
3.17k
    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
1853
3.17k
                                      BaseClassDecl,
1854
3.17k
                                      /*BaseIsVirtual*/ false);
1855
3.17k
  }
1856
11.1k
1857
11.1k
  // Poison fields such that access after their destructors are
1858
11.1k
  // invoked, and before the base class destructor runs, is invalid.
1859
11.1k
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
1860
11.1k
      
SanOpts.has(SanitizerKind::Memory)39
)
1861
38
    EHStack.pushCleanup<SanitizeDtorMembers>(NormalAndEHCleanup, DD);
1862
11.1k
1863
11.1k
  // Destroy direct fields.
1864
21.3k
  for (const auto *Field : ClassDecl->fields()) {
1865
21.3k
    QualType type = Field->getType();
1866
21.3k
    QualType::DestructionKind dtorKind = type.isDestructedType();
1867
21.3k
    if (!dtorKind) 
continue17.5k
;
1868
3.75k
1869
3.75k
    // Anonymous union members do not have their destructors called.
1870
3.75k
    const RecordType *RT = type->getAsUnionType();
1871
3.75k
    if (RT && 
RT->getDecl()->isAnonymousStructOrUnion()3
)
continue3
;
1872
3.75k
1873
3.75k
    CleanupKind cleanupKind = getCleanupKind(dtorKind);
1874
3.75k
    EHStack.pushCleanup<DestroyField>(cleanupKind, Field,
1875
3.75k
                                      getDestroyer(dtorKind),
1876
3.75k
                                      cleanupKind & EHCleanup);
1877
3.75k
  }
1878
11.1k
}
1879
1880
/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
1881
/// constructor for each of several members of an array.
1882
///
1883
/// \param ctor the constructor to call for each element
1884
/// \param arrayType the type of the array to initialize
1885
/// \param arrayBegin an arrayType*
1886
/// \param zeroInitialize true if each element should be
1887
///   zero-initialized before it is constructed
1888
void CodeGenFunction::EmitCXXAggrConstructorCall(
1889
    const CXXConstructorDecl *ctor, const ArrayType *arrayType,
1890
    Address arrayBegin, const CXXConstructExpr *E, bool NewPointerIsChecked,
1891
1.10k
    bool zeroInitialize) {
1892
1.10k
  QualType elementType;
1893
1.10k
  llvm::Value *numElements =
1894
1.10k
    emitArrayLength(arrayType, elementType, arrayBegin);
1895
1.10k
1896
1.10k
  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin, E,
1897
1.10k
                             NewPointerIsChecked, zeroInitialize);
1898
1.10k
}
1899
1900
/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
1901
/// constructor for each of several members of an array.
1902
///
1903
/// \param ctor the constructor to call for each element
1904
/// \param numElements the number of elements in the array;
1905
///   may be zero
1906
/// \param arrayBase a T*, where T is the type constructed by ctor
1907
/// \param zeroInitialize true if each element should be
1908
///   zero-initialized before it is constructed
1909
void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
1910
                                                 llvm::Value *numElements,
1911
                                                 Address arrayBase,
1912
                                                 const CXXConstructExpr *E,
1913
                                                 bool NewPointerIsChecked,
1914
1.20k
                                                 bool zeroInitialize) {
1915
1.20k
  // It's legal for numElements to be zero.  This can happen both
1916
1.20k
  // dynamically, because x can be zero in 'new A[x]', and statically,
1917
1.20k
  // because of GCC extensions that permit zero-length arrays.  There
1918
1.20k
  // are probably legitimate places where we could assume that this
1919
1.20k
  // doesn't happen, but it's not clear that it's worth it.
1920
1.20k
  llvm::BranchInst *zeroCheckBranch = nullptr;
1921
1.20k
1922
1.20k
  // Optimize for a constant count.
1923
1.20k
  llvm::ConstantInt *constantCount
1924
1.20k
    = dyn_cast<llvm::ConstantInt>(numElements);
1925
1.20k
  if (constantCount) {
1926
1.14k
    // Just skip out if the constant count is zero.
1927
1.14k
    if (constantCount->isZero()) 
return0
;
1928
58
1929
58
  // Otherwise, emit the check.
1930
58
  } else {
1931
58
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
1932
58
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
1933
58
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
1934
58
    EmitBlock(loopBB);
1935
58
  }
1936
1.20k
1937
1.20k
  // Find the end of the array.
1938
1.20k
  llvm::Value *arrayBegin = arrayBase.getPointer();
1939
1.20k
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
1940
1.20k
                                                    "arrayctor.end");
1941
1.20k
1942
1.20k
  // Enter the loop, setting up a phi for the current location to initialize.
1943
1.20k
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
1944
1.20k
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
1945
1.20k
  EmitBlock(loopBB);
1946
1.20k
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
1947
1.20k
                                         "arrayctor.cur");
1948
1.20k
  cur->addIncoming(arrayBegin, entryBB);
1949
1.20k
1950
1.20k
  // Inside the loop body, emit the constructor call on the array element.
1951
1.20k
1952
1.20k
  // The alignment of the base, adjusted by the size of a single element,
1953
1.20k
  // provides a conservative estimate of the alignment of every element.
1954
1.20k
  // (This assumes we never start tracking offsetted alignments.)
1955
1.20k
  //
1956
1.20k
  // Note that these are complete objects and so we don't need to
1957
1.20k
  // use the non-virtual size or alignment.
1958
1.20k
  QualType type = getContext().getTypeDeclType(ctor->getParent());
1959
1.20k
  CharUnits eltAlignment =
1960
1.20k
    arrayBase.getAlignment()
1961
1.20k
             .alignmentOfArrayElement(getContext().getTypeSizeInChars(type));
1962
1.20k
  Address curAddr = Address(cur, eltAlignment);
1963
1.20k
1964
1.20k
  // Zero initialize the storage, if requested.
1965
1.20k
  if (zeroInitialize)
1966
2
    EmitNullInitialization(curAddr, type);
1967
1.20k
1968
1.20k
  // C++ [class.temporary]p4:
1969
1.20k
  // There are two contexts in which temporaries are destroyed at a different
1970
1.20k
  // point than the end of the full-expression. The first context is when a
1971
1.20k
  // default constructor is called to initialize an element of an array.
1972
1.20k
  // If the constructor has one or more default arguments, the destruction of
1973
1.20k
  // every temporary created in a default argument expression is sequenced
1974
1.20k
  // before the construction of the next array element, if any.
1975
1.20k
1976
1.20k
  {
1977
1.20k
    RunCleanupsScope Scope(*this);
1978
1.20k
1979
1.20k
    // Evaluate the constructor and its arguments in a regular
1980
1.20k
    // partial-destroy cleanup.
1981
1.20k
    if (getLangOpts().Exceptions &&
1982
1.20k
        
!ctor->getParent()->hasTrivialDestructor()357
) {
1983
43
      Destroyer *destroyer = destroyCXXObject;
1984
43
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment,
1985
43
                                     *destroyer);
1986
43
    }
1987
1.20k
    auto currAVS = AggValueSlot::forAddr(
1988
1.20k
        curAddr, type.getQualifiers(), AggValueSlot::IsDestructed,
1989
1.20k
        AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
1990
1.20k
        AggValueSlot::DoesNotOverlap, AggValueSlot::IsNotZeroed,
1991
1.20k
        NewPointerIsChecked ? 
AggValueSlot::IsSanitizerChecked417
1992
1.20k
                            : 
AggValueSlot::IsNotSanitizerChecked783
);
1993
1.20k
    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false,
1994
1.20k
                           /*Delegating=*/false, currAVS, E);
1995
1.20k
  }
1996
1.20k
1997
1.20k
  // Go to the next element.
1998
1.20k
  llvm::Value *next =
1999
1.20k
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
2000
1.20k
                              "arrayctor.next");
2001
1.20k
  cur->addIncoming(next, Builder.GetInsertBlock());
2002
1.20k
2003
1.20k
  // Check whether that's the end of the loop.
2004
1.20k
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
2005
1.20k
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
2006
1.20k
  Builder.CreateCondBr(done, contBB, loopBB);
2007
1.20k
2008
1.20k
  // Patch the earlier check to skip over the loop.
2009
1.20k
  if (zeroCheckBranch) 
zeroCheckBranch->setSuccessor(0, contBB)58
;
2010
1.20k
2011
1.20k
  EmitBlock(contBB);
2012
1.20k
}
2013
2014
void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
2015
                                       Address addr,
2016
36.6k
                                       QualType type) {
2017
36.6k
  const RecordType *rtype = type->castAs<RecordType>();
2018
36.6k
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
2019
36.6k
  const CXXDestructorDecl *dtor = record->getDestructor();
2020
36.6k
  assert(!dtor->isTrivial());
2021
36.6k
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
2022
36.6k
                            /*Delegating=*/false, addr, type);
2023
36.6k
}
2024
2025
void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
2026
                                             CXXCtorType Type,
2027
                                             bool ForVirtualBase,
2028
                                             bool Delegating,
2029
                                             AggValueSlot ThisAVS,
2030
85.3k
                                             const CXXConstructExpr *E) {
2031
85.3k
  CallArgList Args;
2032
85.3k
  Address This = ThisAVS.getAddress();
2033
85.3k
  LangAS SlotAS = ThisAVS.getQualifiers().getAddressSpace();
2034
85.3k
  QualType ThisType = D->getThisType();
2035
85.3k
  LangAS ThisAS = ThisType.getTypePtr()->getPointeeType().getAddressSpace();
2036
85.3k
  llvm::Value *ThisPtr = This.getPointer();
2037
85.3k
2038
85.3k
  if (SlotAS != ThisAS) {
2039
29
    unsigned TargetThisAS = getContext().getTargetAddressSpace(ThisAS);
2040
29
    llvm::Type *NewType =
2041
29
        ThisPtr->getType()->getPointerElementType()->getPointerTo(TargetThisAS);
2042
29
    ThisPtr = getTargetHooks().performAddrSpaceCast(*this, This.getPointer(),
2043
29
                                                    ThisAS, SlotAS, NewType);
2044
29
  }
2045
85.3k
2046
85.3k
  // Push the this ptr.
2047
85.3k
  Args.add(RValue::get(ThisPtr), D->getThisType());
2048
85.3k
2049
85.3k
  // If this is a trivial constructor, emit a memcpy now before we lose
2050
85.3k
  // the alignment information on the argument.
2051
85.3k
  // FIXME: It would be better to preserve alignment information into CallArg.
2052
85.3k
  if (isMemcpyEquivalentSpecialMember(D)) {
2053
13.4k
    assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");
2054
13.4k
2055
13.4k
    const Expr *Arg = E->getArg(0);
2056
13.4k
    LValue Src = EmitLValue(Arg);
2057
13.4k
    QualType DestTy = getContext().getTypeDeclType(D->getParent());
2058
13.4k
    LValue Dest = MakeAddrLValue(This, DestTy);
2059
13.4k
    EmitAggregateCopyCtor(Dest, Src, ThisAVS.mayOverlap());
2060
13.4k
    return;
2061
13.4k
  }
2062
71.9k
2063
71.9k
  // Add the rest of the user-supplied arguments.
2064
71.9k
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
2065
71.9k
  EvaluationOrder Order = E->isListInitialization()
2066
71.9k
                              ? 
EvaluationOrder::ForceLeftToRight768
2067
71.9k
                              : 
EvaluationOrder::Default71.1k
;
2068
71.9k
  EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor(),
2069
71.9k
               /*ParamsToSkip*/ 0, Order);
2070
71.9k
2071
71.9k
  EmitCXXConstructorCall(D, Type, ForVirtualBase, Delegating, This, Args,
2072
71.9k
                         ThisAVS.mayOverlap(), E->getExprLoc(),
2073
71.9k
                         ThisAVS.isSanitizerChecked());
2074
71.9k
}
2075
2076
static bool canEmitDelegateCallArgs(CodeGenFunction &CGF,
2077
                                    const CXXConstructorDecl *Ctor,
2078
126
                                    CXXCtorType Type, CallArgList &Args) {
2079
126
  // We can't forward a variadic call.
2080
126
  if (Ctor->isVariadic())
2081
31
    return false;
2082
95
2083
95
  if (CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
2084
32
    // If the parameters are callee-cleanup, it's not safe to forward.
2085
32
    for (auto *P : Ctor->parameters())
2086
56
      if (P->getType().isDestructedType())
2087
16
        return false;
2088
32
2089
32
    // Likewise if they're inalloca.
2090
32
    const CGFunctionInfo &Info =
2091
16
        CGF.CGM.getTypes().arrangeCXXConstructorCall(Args, Ctor, Type, 0, 0);
2092
16
    if (Info.usesInAlloca())
2093
0
      return false;
2094
79
  }
2095
79
2096
79
  // Anything else should be OK.
2097
79
  return true;
2098
79
}
2099
2100
void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
2101
                                             CXXCtorType Type,
2102
                                             bool ForVirtualBase,
2103
                                             bool Delegating,
2104
                                             Address This,
2105
                                             CallArgList &Args,
2106
                                             AggValueSlot::Overlap_t Overlap,
2107
                                             SourceLocation Loc,
2108
93.8k
                                             bool NewPointerIsChecked) {
2109
93.8k
  const CXXRecordDecl *ClassDecl = D->getParent();
2110
93.8k
2111
93.8k
  if (!NewPointerIsChecked)
2112
58.4k
    EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, Loc, This.getPointer(),
2113
58.4k
                  getContext().getRecordType(ClassDecl), CharUnits::Zero());
2114
93.8k
2115
93.8k
  if (D->isTrivial() && 
D->isDefaultConstructor()41
) {
2116
14
    assert(Args.size() == 1 && "trivial default ctor with args");
2117
14
    return;
2118
14
  }
2119
93.8k
2120
93.8k
  // If this is a trivial constructor, just emit what's needed. If this is a
2121
93.8k
  // union copy constructor, we must emit a memcpy, because the AST does not
2122
93.8k
  // model that copy.
2123
93.8k
  if (isMemcpyEquivalentSpecialMember(D)) {
2124
24
    assert(Args.size() == 2 && "unexpected argcount for trivial ctor");
2125
24
2126
24
    QualType SrcTy = D->getParamDecl(0)->getType().getNonReferenceType();
2127
24
    Address Src(Args[1].getRValue(*this).getScalarVal(),
2128
24
                getNaturalTypeAlignment(SrcTy));
2129
24
    LValue SrcLVal = MakeAddrLValue(Src, SrcTy);
2130
24
    QualType DestTy = getContext().getTypeDeclType(ClassDecl);
2131
24
    LValue DestLVal = MakeAddrLValue(This, DestTy);
2132
24
    EmitAggregateCopyCtor(DestLVal, SrcLVal, Overlap);
2133
24
    return;
2134
24
  }
2135
93.7k
2136
93.7k
  bool PassPrototypeArgs = true;
2137
93.7k
  // Check whether we can actually emit the constructor before trying to do so.
2138
93.7k
  if (auto Inherited = D->getInheritedConstructor()) {
2139
136
    PassPrototypeArgs = getTypes().inheritingCtorHasParams(Inherited, Type);
2140
136
    if (PassPrototypeArgs && 
!canEmitDelegateCallArgs(*this, D, Type, Args)126
) {
2141
47
      EmitInlinedInheritingCXXConstructorCall(D, Type, ForVirtualBase,
2142
47
                                              Delegating, Args);
2143
47
      return;
2144
47
    }
2145
93.7k
  }
2146
93.7k
2147
93.7k
  // Insert any ABI-specific implicit constructor arguments.
2148
93.7k
  CGCXXABI::AddedStructorArgs ExtraArgs =
2149
93.7k
      CGM.getCXXABI().addImplicitConstructorArgs(*this, D, Type, ForVirtualBase,
2150
93.7k
                                                 Delegating, Args);
2151
93.7k
2152
93.7k
  // Emit the call.
2153
93.7k
  llvm::Constant *CalleePtr = CGM.getAddrOfCXXStructor(GlobalDecl(D, Type));
2154
93.7k
  const CGFunctionInfo &Info = CGM.getTypes().arrangeCXXConstructorCall(
2155
93.7k
      Args, D, Type, ExtraArgs.Prefix, ExtraArgs.Suffix, PassPrototypeArgs);
2156
93.7k
  CGCallee Callee = CGCallee::forDirect(CalleePtr, GlobalDecl(D, Type));
2157
93.7k
  EmitCall(Info, Callee, ReturnValueSlot(), Args);
2158
93.7k
2159
93.7k
  // Generate vtable assumptions if we're constructing a complete object
2160
93.7k
  // with a vtable.  We don't do this for base subobjects for two reasons:
2161
93.7k
  // first, it's incorrect for classes with virtual bases, and second, we're
2162
93.7k
  // about to overwrite the vptrs anyway.
2163
93.7k
  // We also have to make sure if we can refer to vtable:
2164
93.7k
  // - Otherwise we can refer to vtable if it's safe to speculatively emit.
2165
93.7k
  // FIXME: If vtable is used by ctor/dtor, or if vtable is external and we are
2166
93.7k
  // sure that definition of vtable is not hidden,
2167
93.7k
  // then we are always safe to refer to it.
2168
93.7k
  // FIXME: It looks like InstCombine is very inefficient on dealing with
2169
93.7k
  // assumes. Make assumption loads require -fstrict-vtable-pointers temporarily.
2170
93.7k
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
2171
93.7k
      
ClassDecl->isDynamicClass()76.2k
&&
Type != Ctor_Base8.42k
&&
2172
93.7k
      
CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl)3.41k
&&
2173
93.7k
      
CGM.getCodeGenOpts().StrictVTablePointers698
)
2174
56
    EmitVTableAssumptionLoads(ClassDecl, This);
2175
93.7k
}
2176
2177
void CodeGenFunction::EmitInheritedCXXConstructorCall(
2178
    const CXXConstructorDecl *D, bool ForVirtualBase, Address This,
2179
107
    bool InheritedFromVBase, const CXXInheritedCtorInitExpr *E) {
2180
107
  CallArgList Args;
2181
107
  CallArg ThisArg(RValue::get(This.getPointer()), D->getThisType());
2182
107
2183
107
  // Forward the parameters.
2184
107
  if (InheritedFromVBase &&
2185
107
      
CGM.getTarget().getCXXABI().hasConstructorVariants()17
) {
2186
10
    // Nothing to do; this construction is not responsible for constructing
2187
10
    // the base class containing the inherited constructor.
2188
10
    // FIXME: Can we just pass undef's for the remaining arguments if we don't
2189
10
    // have constructor variants?
2190
10
    Args.push_back(ThisArg);
2191
97
  } else if (!CXXInheritedCtorInitExprArgs.empty()) {
2192
47
    // The inheriting constructor was inlined; just inject its arguments.
2193
47
    assert(CXXInheritedCtorInitExprArgs.size() >= D->getNumParams() &&
2194
47
           "wrong number of parameters for inherited constructor call");
2195
47
    Args = CXXInheritedCtorInitExprArgs;
2196
47
    Args[0] = ThisArg;
2197
50
  } else {
2198
50
    // The inheriting constructor was not inlined. Emit delegating arguments.
2199
50
    Args.push_back(ThisArg);
2200
50
    const auto *OuterCtor = cast<CXXConstructorDecl>(CurCodeDecl);
2201
50
    assert(OuterCtor->getNumParams() == D->getNumParams());
2202
50
    assert(!OuterCtor->isVariadic() && "should have been inlined");
2203
50
2204
131
    for (const auto *Param : OuterCtor->parameters()) {
2205
131
      assert(getContext().hasSameUnqualifiedType(
2206
131
          OuterCtor->getParamDecl(Param->getFunctionScopeIndex())->getType(),
2207
131
          Param->getType()));
2208
131
      EmitDelegateCallArg(Args, Param, E->getLocation());
2209
131
2210
131
      // Forward __attribute__(pass_object_size).
2211
131
      if (Param->hasAttr<PassObjectSizeAttr>()) {
2212
22
        auto *POSParam = SizeArguments[Param];
2213
22
        assert(POSParam && "missing pass_object_size value for forwarding");
2214
22
        EmitDelegateCallArg(Args, POSParam, E->getLocation());
2215
22
      }
2216
131
    }
2217
50
  }
2218
107
2219
107
  EmitCXXConstructorCall(D, Ctor_Base, ForVirtualBase, /*Delegating*/false,
2220
107
                         This, Args, AggValueSlot::MayOverlap,
2221
107
                         E->getLocation(), /*NewPointerIsChecked*/true);
2222
107
}
2223
2224
void CodeGenFunction::EmitInlinedInheritingCXXConstructorCall(
2225
    const CXXConstructorDecl *Ctor, CXXCtorType CtorType, bool ForVirtualBase,
2226
47
    bool Delegating, CallArgList &Args) {
2227
47
  GlobalDecl GD(Ctor, CtorType);
2228
47
  InlinedInheritingConstructorScope Scope(*this, GD);
2229
47
  ApplyInlineDebugLocation DebugScope(*this, GD);
2230
47
  RunCleanupsScope RunCleanups(*this);
2231
47
2232
47
  // Save the arguments to be passed to the inherited constructor.
2233
47
  CXXInheritedCtorInitExprArgs = Args;
2234
47
2235
47
  FunctionArgList Params;
2236
47
  QualType RetType = BuildFunctionArgList(CurGD, Params);
2237
47
  FnRetTy = RetType;
2238
47
2239
47
  // Insert any ABI-specific implicit constructor arguments.
2240
47
  CGM.getCXXABI().addImplicitConstructorArgs(*this, Ctor, CtorType,
2241
47
                                             ForVirtualBase, Delegating, Args);
2242
47
2243
47
  // Emit a simplified prolog. We only need to emit the implicit params.
2244
47
  assert(Args.size() >= Params.size() && "too few arguments for call");
2245
332
  for (unsigned I = 0, N = Args.size(); I != N; 
++I285
) {
2246
285
    if (I < Params.size() && 
isa<ImplicitParamDecl>(Params[I])231
) {
2247
61
      const RValue &RV = Args[I].getRValue(*this);
2248
61
      assert(!RV.isComplex() && "complex indirect params not supported");
2249
61
      ParamValue Val = RV.isScalar()
2250
61
                           ? ParamValue::forDirect(RV.getScalarVal())
2251
61
                           : 
ParamValue::forIndirect(RV.getAggregateAddress())0
;
2252
61
      EmitParmDecl(*Params[I], Val, I + 1);
2253
61
    }
2254
285
  }
2255
47
2256
47
  // Create a return value slot if the ABI implementation wants one.
2257
47
  // FIXME: This is dumb, we should ask the ABI not to try to set the return
2258
47
  // value instead.
2259
47
  if (!RetType->isVoidType())
2260
28
    ReturnValue = CreateIRTemp(RetType, "retval.inhctor");
2261
47
2262
47
  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
2263
47
  CXXThisValue = CXXABIThisValue;
2264
47
2265
47
  // Directly emit the constructor initializers.
2266
47
  EmitCtorPrologue(Ctor, CtorType, Params);
2267
47
}
2268
2269
62
void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) {
2270
62
  llvm::Value *VTableGlobal =
2271
62
      CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass);
2272
62
  if (!VTableGlobal)
2273
0
    return;
2274
62
2275
62
  // We can just use the base offset in the complete class.
2276
62
  CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset();
2277
62
2278
62
  if (!NonVirtualOffset.isZero())
2279
4
    This =
2280
4
        ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr,
2281
4
                                        Vptr.VTableClass, Vptr.NearestVBase);
2282
62
2283
62
  llvm::Value *VPtrValue =
2284
62
      GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass);
2285
62
  llvm::Value *Cmp =
2286
62
      Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables");
2287
62
  Builder.CreateAssumption(Cmp);
2288
62
}
2289
2290
void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl,
2291
56
                                                Address This) {
2292
56
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl))
2293
56
    for (const VPtr &Vptr : getVTablePointers(ClassDecl))
2294
62
      EmitVTableAssumptionLoad(Vptr, This);
2295
56
}
2296
2297
void
2298
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
2299
                                                Address This, Address Src,
2300
79
                                                const CXXConstructExpr *E) {
2301
79
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
2302
79
2303
79
  CallArgList Args;
2304
79
2305
79
  // Push the this ptr.
2306
79
  Args.add(RValue::get(This.getPointer()), D->getThisType());
2307
79
2308
79
  // Push the src ptr.
2309
79
  QualType QT = *(FPT->param_type_begin());
2310
79
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
2311
79
  Src = Builder.CreateBitCast(Src, t);
2312
79
  Args.add(RValue::get(Src.getPointer()), QT);
2313
79
2314
79
  // Skip over first argument (Src).
2315
79
  EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(),
2316
79
               /*ParamsToSkip*/ 1);
2317
79
2318
79
  EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase*/false,
2319
79
                         /*Delegating*/false, This, Args,
2320
79
                         AggValueSlot::MayOverlap, E->getExprLoc(),
2321
79
                         /*NewPointerIsChecked*/false);
2322
79
}
2323
2324
void
2325
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
2326
                                                CXXCtorType CtorType,
2327
                                                const FunctionArgList &Args,
2328
21.7k
                                                SourceLocation Loc) {
2329
21.7k
  CallArgList DelegateArgs;
2330
21.7k
2331
21.7k
  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
2332
21.7k
  assert(I != E && "no parameters to constructor");
2333
21.7k
2334
21.7k
  // this
2335
21.7k
  Address This = LoadCXXThisAddress();
2336
21.7k
  DelegateArgs.add(RValue::get(This.getPointer()), (*I)->getType());
2337
21.7k
  ++I;
2338
21.7k
2339
21.7k
  // FIXME: The location of the VTT parameter in the parameter list is
2340
21.7k
  // specific to the Itanium ABI and shouldn't be hardcoded here.
2341
21.7k
  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
2342
0
    assert(I != E && "cannot skip vtt parameter, already done with args");
2343
0
    assert((*I)->getType()->isPointerType() &&
2344
0
           "skipping parameter not of vtt type");
2345
0
    ++I;
2346
0
  }
2347
21.7k
2348
21.7k
  // Explicit arguments.
2349
44.1k
  for (; I != E; 
++I22.4k
) {
2350
22.4k
    const VarDecl *param = *I;
2351
22.4k
    // FIXME: per-argument source location
2352
22.4k
    EmitDelegateCallArg(DelegateArgs, param, Loc);
2353
22.4k
  }
2354
21.7k
2355
21.7k
  EmitCXXConstructorCall(Ctor, CtorType, /*ForVirtualBase=*/false,
2356
21.7k
                         /*Delegating=*/true, This, DelegateArgs,
2357
21.7k
                         AggValueSlot::MayOverlap, Loc,
2358
21.7k
                         /*NewPointerIsChecked=*/true);
2359
21.7k
}
2360
2361
namespace {
2362
  struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup {
2363
    const CXXDestructorDecl *Dtor;
2364
    Address Addr;
2365
    CXXDtorType Type;
2366
2367
    CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr,
2368
                           CXXDtorType Type)
2369
17
      : Dtor(D), Addr(Addr), Type(Type) {}
2370
2371
4
    void Emit(CodeGenFunction &CGF, Flags flags) override {
2372
4
      // We are calling the destructor from within the constructor.
2373
4
      // Therefore, "this" should have the expected type.
2374
4
      QualType ThisTy = Dtor->getThisObjectType();
2375
4
      CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
2376
4
                                /*Delegating=*/true, Addr, ThisTy);
2377
4
    }
2378
  };
2379
} // end anonymous namespace
2380
2381
void
2382
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
2383
49
                                                  const FunctionArgList &Args) {
2384
49
  assert(Ctor->isDelegatingConstructor());
2385
49
2386
49
  Address ThisPtr = LoadCXXThisAddress();
2387
49
2388
49
  AggValueSlot AggSlot =
2389
49
    AggValueSlot::forAddr(ThisPtr, Qualifiers(),
2390
49
                          AggValueSlot::IsDestructed,
2391
49
                          AggValueSlot::DoesNotNeedGCBarriers,
2392
49
                          AggValueSlot::IsNotAliased,
2393
49
                          AggValueSlot::MayOverlap,
2394
49
                          AggValueSlot::IsNotZeroed,
2395
49
                          // Checks are made by the code that calls constructor.
2396
49
                          AggValueSlot::IsSanitizerChecked);
2397
49
2398
49
  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);
2399
49
2400
49
  const CXXRecordDecl *ClassDecl = Ctor->getParent();
2401
49
  if (CGM.getLangOpts().Exceptions && 
!ClassDecl->hasTrivialDestructor()21
) {
2402
17
    CXXDtorType Type =
2403
17
      CurGD.getCtorType() == Ctor_Complete ? 
Dtor_Complete14
:
Dtor_Base3
;
2404
17
2405
17
    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
2406
17
                                                ClassDecl->getDestructor(),
2407
17
                                                ThisPtr, Type);
2408
17
  }
2409
49
}
2410
2411
void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
2412
                                            CXXDtorType Type,
2413
                                            bool ForVirtualBase,
2414
                                            bool Delegating, Address This,
2415
53.5k
                                            QualType ThisTy) {
2416
53.5k
  CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase,
2417
53.5k
                                     Delegating, This, ThisTy);
2418
53.5k
}
2419
2420
namespace {
2421
  struct CallLocalDtor final : EHScopeStack::Cleanup {
2422
    const CXXDestructorDecl *Dtor;
2423
    Address Addr;
2424
    QualType Ty;
2425
2426
    CallLocalDtor(const CXXDestructorDecl *D, Address Addr, QualType Ty)
2427
10
        : Dtor(D), Addr(Addr), Ty(Ty) {}
2428
2429
10
    void Emit(CodeGenFunction &CGF, Flags flags) override {
2430
10
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
2431
10
                                /*ForVirtualBase=*/false,
2432
10
                                /*Delegating=*/false, Addr, Ty);
2433
10
    }
2434
  };
2435
} // end anonymous namespace
2436
2437
void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
2438
10
                                            QualType T, Address Addr) {
2439
10
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr, T);
2440
10
}
2441
2442
21
void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) {
2443
21
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
2444
21
  if (!ClassDecl) 
return0
;
2445
21
  if (ClassDecl->hasTrivialDestructor()) 
return11
;
2446
10
2447
10
  const CXXDestructorDecl *D = ClassDecl->getDestructor();
2448
10
  assert(D && D->isUsed() && "destructor not marked as used!");
2449
10
  PushDestructorCleanup(D, T, Addr);
2450
10
}
2451
2452
7.57k
void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) {
2453
7.57k
  // Compute the address point.
2454
7.57k
  llvm::Value *VTableAddressPoint =
2455
7.57k
      CGM.getCXXABI().getVTableAddressPointInStructor(
2456
7.57k
          *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase);
2457
7.57k
2458
7.57k
  if (!VTableAddressPoint)
2459
714
    return;
2460
6.86k
2461
6.86k
  // Compute where to store the address point.
2462
6.86k
  llvm::Value *VirtualOffset = nullptr;
2463
6.86k
  CharUnits NonVirtualOffset = CharUnits::Zero();
2464
6.86k
2465
6.86k
  if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) {
2466
589
    // We need to use the virtual base offset offset because the virtual base
2467
589
    // might have a different offset in the most derived class.
2468
589
2469
589
    VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset(
2470
589
        *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase);
2471
589
    NonVirtualOffset = Vptr.OffsetFromNearestVBase;
2472
6.27k
  } else {
2473
6.27k
    // We can just use the base offset in the complete class.
2474
6.27k
    NonVirtualOffset = Vptr.Base.getBaseOffset();
2475
6.27k
  }
2476
6.86k
2477
6.86k
  // Apply the offsets.
2478
6.86k
  Address VTableField = LoadCXXThisAddress();
2479
6.86k
2480
6.86k
  if (!NonVirtualOffset.isZero() || 
VirtualOffset6.07k
)
2481
1.34k
    VTableField = ApplyNonVirtualAndVirtualOffset(
2482
1.34k
        *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass,
2483
1.34k
        Vptr.NearestVBase);
2484
6.86k
2485
6.86k
  // Finally, store the address point. Use the same LLVM types as the field to
2486
6.86k
  // support optimization.
2487
6.86k
  llvm::Type *VTablePtrTy =
2488
6.86k
      llvm::FunctionType::get(CGM.Int32Ty, /*isVarArg=*/true)
2489
6.86k
          ->getPointerTo()
2490
6.86k
          ->getPointerTo();
2491
6.86k
  VTableField = Builder.CreateBitCast(VTableField, VTablePtrTy->getPointerTo());
2492
6.86k
  VTableAddressPoint = Builder.CreateBitCast(VTableAddressPoint, VTablePtrTy);
2493
6.86k
2494
6.86k
  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
2495
6.86k
  TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTablePtrTy);
2496
6.86k
  CGM.DecorateInstructionWithTBAA(Store, TBAAInfo);
2497
6.86k
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
2498
6.86k
      
CGM.getCodeGenOpts().StrictVTablePointers5.25k
)
2499
54
    CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass);
2500
6.86k
}
2501
2502
CodeGenFunction::VPtrsVector
2503
5.84k
CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) {
2504
5.84k
  CodeGenFunction::VPtrsVector VPtrsResult;
2505
5.84k
  VisitedVirtualBasesSetTy VBases;
2506
5.84k
  getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()),
2507
5.84k
                    /*NearestVBase=*/nullptr,
2508
5.84k
                    /*OffsetFromNearestVBase=*/CharUnits::Zero(),
2509
5.84k
                    /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases,
2510
5.84k
                    VPtrsResult);
2511
5.84k
  return VPtrsResult;
2512
5.84k
}
2513
2514
void CodeGenFunction::getVTablePointers(BaseSubobject Base,
2515
                                        const CXXRecordDecl *NearestVBase,
2516
                                        CharUnits OffsetFromNearestVBase,
2517
                                        bool BaseIsNonVirtualPrimaryBase,
2518
                                        const CXXRecordDecl *VTableClass,
2519
                                        VisitedVirtualBasesSetTy &VBases,
2520
13.8k
                                        VPtrsVector &Vptrs) {
2521
13.8k
  // If this base is a non-virtual primary base the address point has already
2522
13.8k
  // been set.
2523
13.8k
  if (!BaseIsNonVirtualPrimaryBase) {
2524
7.63k
    // Initialize the vtable pointer for this base.
2525
7.63k
    VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass};
2526
7.63k
    Vptrs.push_back(Vptr);
2527
7.63k
  }
2528
13.8k
2529
13.8k
  const CXXRecordDecl *RD = Base.getBase();
2530
13.8k
2531
13.8k
  // Traverse bases.
2532
13.8k
  for (const auto &I : RD->bases()) {
2533
9.11k
    CXXRecordDecl *BaseDecl
2534
9.11k
      = cast<CXXRecordDecl>(I.getType()->getAs<RecordType>()->getDecl());
2535
9.11k
2536
9.11k
    // Ignore classes without a vtable.
2537
9.11k
    if (!BaseDecl->isDynamicClass())
2538
904
      continue;
2539
8.21k
2540
8.21k
    CharUnits BaseOffset;
2541
8.21k
    CharUnits BaseOffsetFromNearestVBase;
2542
8.21k
    bool BaseDeclIsNonVirtualPrimaryBase;
2543
8.21k
2544
8.21k
    if (I.isVirtual()) {
2545
1.25k
      // Check if we've visited this virtual base before.
2546
1.25k
      if (!VBases.insert(BaseDecl).second)
2547
213
        continue;
2548
1.03k
2549
1.03k
      const ASTRecordLayout &Layout =
2550
1.03k
        getContext().getASTRecordLayout(VTableClass);
2551
1.03k
2552
1.03k
      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
2553
1.03k
      BaseOffsetFromNearestVBase = CharUnits::Zero();
2554
1.03k
      BaseDeclIsNonVirtualPrimaryBase = false;
2555
6.96k
    } else {
2556
6.96k
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
2557
6.96k
2558
6.96k
      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
2559
6.96k
      BaseOffsetFromNearestVBase =
2560
6.96k
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
2561
6.96k
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
2562
6.96k
    }
2563
8.21k
2564
8.21k
    getVTablePointers(
2565
7.99k
        BaseSubobject(BaseDecl, BaseOffset),
2566
7.99k
        I.isVirtual() ? 
BaseDecl1.03k
:
NearestVBase6.96k
, BaseOffsetFromNearestVBase,
2567
7.99k
        BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs);
2568
7.99k
  }
2569
13.8k
}
2570
2571
31.3k
void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
2572
31.3k
  // Ignore classes without a vtable.
2573
31.3k
  if (!RD->isDynamicClass())
2574
25.5k
    return;
2575
5.79k
2576
5.79k
  // Initialize the vtable pointers for this class and all of its bases.
2577
5.79k
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD))
2578
5.79k
    for (const VPtr &Vptr : getVTablePointers(RD))
2579
7.57k
      InitializeVTablePointer(Vptr);
2580
5.79k
2581
5.79k
  if (RD->getNumVBases())
2582
1.21k
    CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD);
2583
5.79k
}
2584
2585
llvm::Value *CodeGenFunction::GetVTablePtr(Address This,
2586
                                           llvm::Type *VTableTy,
2587
12.6k
                                           const CXXRecordDecl *RD) {
2588
12.6k
  Address VTablePtrSrc = Builder.CreateElementBitCast(This, VTableTy);
2589
12.6k
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
2590
12.6k
  TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTableTy);
2591
12.6k
  CGM.DecorateInstructionWithTBAA(VTable, TBAAInfo);
2592
12.6k
2593
12.6k
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
2594
12.6k
      
CGM.getCodeGenOpts().StrictVTablePointers12.0k
)
2595
118
    CGM.DecorateInstructionWithInvariantGroup(VTable, RD);
2596
12.6k
2597
12.6k
  return VTable;
2598
12.6k
}
2599
2600
// If a class has a single non-virtual base and does not introduce or override
2601
// virtual member functions or fields, it will have the same layout as its base.
2602
// This function returns the least derived such class.
2603
//
2604
// Casting an instance of a base class to such a derived class is technically
2605
// undefined behavior, but it is a relatively common hack for introducing member
2606
// functions on class instances with specific properties (e.g. llvm::Operator)
2607
// that works under most compilers and should not have security implications, so
2608
// we allow it by default. It can be disabled with -fsanitize=cfi-cast-strict.
2609
static const CXXRecordDecl *
2610
59
LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) {
2611
59
  if (!RD->field_empty())
2612
1
    return RD;
2613
58
2614
58
  if (RD->getNumVBases() != 0)
2615
12
    return RD;
2616
46
2617
46
  if (RD->getNumBases() != 1)
2618
31
    return RD;
2619
15
2620
28
  
for (const CXXMethodDecl *MD : RD->methods())15
{
2621
28
    if (MD->isVirtual()) {
2622
9
      // Virtual member functions are only ok if they are implicit destructors
2623
9
      // because the implicit destructor will have the same semantics as the
2624
9
      // base class's destructor if no fields are added.
2625
9
      if (isa<CXXDestructorDecl>(MD) && 
MD->isImplicit()0
)
2626
0
        continue;
2627
9
      return RD;
2628
9
    }
2629
28
  }
2630
15
2631
15
  return LeastDerivedClassWithSameLayout(
2632
6
      RD->bases_begin()->getType()->getAsCXXRecordDecl());
2633
15
}
2634
2635
void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
2636
                                                   llvm::Value *VTable,
2637
10.4k
                                                   SourceLocation Loc) {
2638
10.4k
  if (SanOpts.has(SanitizerKind::CFIVCall))
2639
33
    EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc);
2640
10.4k
  else if (CGM.getCodeGenOpts().WholeProgramVTables &&
2641
10.4k
           
CGM.HasHiddenLTOVisibility(RD)52
) {
2642
38
    llvm::Metadata *MD =
2643
38
        CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
2644
38
    llvm::Value *TypeId =
2645
38
        llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);
2646
38
2647
38
    llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
2648
38
    llvm::Value *TypeTest =
2649
38
        Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test),
2650
38
                           {CastedVTable, TypeId});
2651
38
    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest);
2652
38
  }
2653
10.4k
}
2654
2655
void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD,
2656
                                                llvm::Value *VTable,
2657
                                                CFITypeCheckKind TCK,
2658
40
                                                SourceLocation Loc) {
2659
40
  if (!SanOpts.has(SanitizerKind::CFICastStrict))
2660
38
    RD = LeastDerivedClassWithSameLayout(RD);
2661
40
2662
40
  EmitVTablePtrCheck(RD, VTable, TCK, Loc);
2663
40
}
2664
2665
void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T,
2666
                                                llvm::Value *Derived,
2667
                                                bool MayBeNull,
2668
                                                CFITypeCheckKind TCK,
2669
21
                                                SourceLocation Loc) {
2670
21
  if (!getLangOpts().CPlusPlus)
2671
0
    return;
2672
21
2673
21
  auto *ClassTy = T->getAs<RecordType>();
2674
21
  if (!ClassTy)
2675
0
    return;
2676
21
2677
21
  const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(ClassTy->getDecl());
2678
21
2679
21
  if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass())
2680
0
    return;
2681
21
2682
21
  if (!SanOpts.has(SanitizerKind::CFICastStrict))
2683
15
    ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl);
2684
21
2685
21
  llvm::BasicBlock *ContBlock = nullptr;
2686
21
2687
21
  if (MayBeNull) {
2688
14
    llvm::Value *DerivedNotNull =
2689
14
        Builder.CreateIsNotNull(Derived, "cast.nonnull");
2690
14
2691
14
    llvm::BasicBlock *CheckBlock = createBasicBlock("cast.check");
2692
14
    ContBlock = createBasicBlock("cast.cont");
2693
14
2694
14
    Builder.CreateCondBr(DerivedNotNull, CheckBlock, ContBlock);
2695
14
2696
14
    EmitBlock(CheckBlock);
2697
14
  }
2698
21
2699
21
  llvm::Value *VTable;
2700
21
  std::tie(VTable, ClassDecl) = CGM.getCXXABI().LoadVTablePtr(
2701
21
      *this, Address(Derived, getPointerAlign()), ClassDecl);
2702
21
2703
21
  EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc);
2704
21
2705
21
  if (MayBeNull) {
2706
14
    Builder.CreateBr(ContBlock);
2707
14
    EmitBlock(ContBlock);
2708
14
  }
2709
21
}
2710
2711
void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,
2712
                                         llvm::Value *VTable,
2713
                                         CFITypeCheckKind TCK,
2714
61
                                         SourceLocation Loc) {
2715
61
  if (!CGM.getCodeGenOpts().SanitizeCfiCrossDso &&
2716
61
      
!CGM.HasHiddenLTOVisibility(RD)59
)
2717
0
    return;
2718
61
2719
61
  SanitizerMask M;
2720
61
  llvm::SanitizerStatKind SSK;
2721
61
  switch (TCK) {
2722
61
  case CFITCK_VCall:
2723
33
    M = SanitizerKind::CFIVCall;
2724
33
    SSK = llvm::SanStat_CFI_VCall;
2725
33
    break;
2726
61
  case CFITCK_NVCall:
2727
7
    M = SanitizerKind::CFINVCall;
2728
7
    SSK = llvm::SanStat_CFI_NVCall;
2729
7
    break;
2730
61
  case CFITCK_DerivedCast:
2731
6
    M = SanitizerKind::CFIDerivedCast;
2732
6
    SSK = llvm::SanStat_CFI_DerivedCast;
2733
6
    break;
2734
61
  case CFITCK_UnrelatedCast:
2735
15
    M = SanitizerKind::CFIUnrelatedCast;
2736
15
    SSK = llvm::SanStat_CFI_UnrelatedCast;
2737
15
    break;
2738
61
  case CFITCK_ICall:
2739
0
  case CFITCK_NVMFCall:
2740
0
  case CFITCK_VMFCall:
2741
0
    llvm_unreachable("unexpected sanitizer kind");
2742
61
  }
2743
61
2744
61
  std::string TypeName = RD->getQualifiedNameAsString();
2745
61
  if (getContext().getSanitizerBlacklist().isBlacklistedType(M, TypeName))
2746
2
    return;
2747
59
2748
59
  SanitizerScope SanScope(this);
2749
59
  EmitSanitizerStatReport(SSK);
2750
59
2751
59
  llvm::Metadata *MD =
2752
59
      CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
2753
59
  llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD);
2754
59
2755
59
  llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
2756
59
  llvm::Value *TypeTest = Builder.CreateCall(
2757
59
      CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, TypeId});
2758
59
2759
59
  llvm::Constant *StaticData[] = {
2760
59
      llvm::ConstantInt::get(Int8Ty, TCK),
2761
59
      EmitCheckSourceLocation(Loc),
2762
59
      EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)),
2763
59
  };
2764
59
2765
59
  auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD);
2766
59
  if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && 
CrossDsoTypeId2
) {
2767
2
    EmitCfiSlowPathCheck(M, TypeTest, CrossDsoTypeId, CastedVTable, StaticData);
2768
2
    return;
2769
2
  }
2770
57
2771
57
  if (CGM.getCodeGenOpts().SanitizeTrap.has(M)) {
2772
28
    EmitTrapCheck(TypeTest);
2773
28
    return;
2774
28
  }
2775
29
2776
29
  llvm::Value *AllVtables = llvm::MetadataAsValue::get(
2777
29
      CGM.getLLVMContext(),
2778
29
      llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
2779
29
  llvm::Value *ValidVtable = Builder.CreateCall(
2780
29
      CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, AllVtables});
2781
29
  EmitCheck(std::make_pair(TypeTest, M), SanitizerHandler::CFICheckFail,
2782
29
            StaticData, {CastedVTable, ValidVtable});
2783
29
}
2784
2785
10.6k
bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) {
2786
10.6k
  if (!CGM.getCodeGenOpts().WholeProgramVTables ||
2787
10.6k
      
!SanOpts.has(SanitizerKind::CFIVCall)64
||
2788
10.6k
      
!CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIVCall)12
||
2789
10.6k
      
!CGM.HasHiddenLTOVisibility(RD)11
)
2790
10.6k
    return false;
2791
11
2792
11
  std::string TypeName = RD->getQualifiedNameAsString();
2793
11
  return !getContext().getSanitizerBlacklist().isBlacklistedType(
2794
11
      SanitizerKind::CFIVCall, TypeName);
2795
11
}
2796
2797
llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad(
2798
11
    const CXXRecordDecl *RD, llvm::Value *VTable, uint64_t VTableByteOffset) {
2799
11
  SanitizerScope SanScope(this);
2800
11
2801
11
  EmitSanitizerStatReport(llvm::SanStat_CFI_VCall);
2802
11
2803
11
  llvm::Metadata *MD =
2804
11
      CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
2805
11
  llvm::Value *TypeId = llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);
2806
11
2807
11
  llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
2808
11
  llvm::Value *CheckedLoad = Builder.CreateCall(
2809
11
      CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),
2810
11
      {CastedVTable, llvm::ConstantInt::get(Int32Ty, VTableByteOffset),
2811
11
       TypeId});
2812
11
  llvm::Value *CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);
2813
11
2814
11
  EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIVCall),
2815
11
            SanitizerHandler::CFICheckFail, nullptr, nullptr);
2816
11
2817
11
  return Builder.CreateBitCast(
2818
11
      Builder.CreateExtractValue(CheckedLoad, 0),
2819
11
      cast<llvm::PointerType>(VTable->getType())->getElementType());
2820
11
}
2821
2822
void CodeGenFunction::EmitForwardingCallToLambda(
2823
                                      const CXXMethodDecl *callOperator,
2824
98
                                      CallArgList &callArgs) {
2825
98
  // Get the address of the call operator.
2826
98
  const CGFunctionInfo &calleeFnInfo =
2827
98
    CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
2828
98
  llvm::Constant *calleePtr =
2829
98
    CGM.GetAddrOfFunction(GlobalDecl(callOperator),
2830
98
                          CGM.getTypes().GetFunctionType(calleeFnInfo));
2831
98
2832
98
  // Prepare the return slot.
2833
98
  const FunctionProtoType *FPT =
2834
98
    callOperator->getType()->castAs<FunctionProtoType>();
2835
98
  QualType resultType = FPT->getReturnType();
2836
98
  ReturnValueSlot returnSlot;
2837
98
  if (!resultType->isVoidType() &&
2838
98
      
calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect21
&&
2839
98
      
!hasScalarEvaluationKind(calleeFnInfo.getReturnType())1
)
2840
1
    returnSlot = ReturnValueSlot(ReturnValue, resultType.isVolatileQualified());
2841
98
2842
98
  // We don't need to separately arrange the call arguments because
2843
98
  // the call can't be variadic anyway --- it's impossible to forward
2844
98
  // variadic arguments.
2845
98
2846
98
  // Now emit our call.
2847
98
  auto callee = CGCallee::forDirect(calleePtr, GlobalDecl(callOperator));
2848
98
  RValue RV = EmitCall(calleeFnInfo, callee, returnSlot, callArgs);
2849
98
2850
98
  // If necessary, copy the returned value into the slot.
2851
98
  if (!resultType->isVoidType() && 
returnSlot.isNull()21
) {
2852
20
    if (getLangOpts().ObjCAutoRefCount && 
resultType->isObjCRetainableType()5
) {
2853
2
      RV = RValue::get(EmitARCRetainAutoreleasedReturnValue(RV.getScalarVal()));
2854
2
    }
2855
20
    EmitReturnOfRValue(RV, resultType);
2856
20
  } else
2857
78
    EmitBranchThroughCleanup(ReturnBlock);
2858
98
}
2859
2860
13
void CodeGenFunction::EmitLambdaBlockInvokeBody() {
2861
13
  const BlockDecl *BD = BlockInfo->getBlockDecl();
2862
13
  const VarDecl *variable = BD->capture_begin()->getVariable();
2863
13
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();
2864
13
  const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
2865
13
2866
13
  if (CallOp->isVariadic()) {
2867
0
    // FIXME: Making this work correctly is nasty because it requires either
2868
0
    // cloning the body of the call operator or making the call operator
2869
0
    // forward.
2870
0
    CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function");
2871
0
    return;
2872
0
  }
2873
13
2874
13
  // Start building arguments for forwarding call
2875
13
  CallArgList CallArgs;
2876
13
2877
13
  QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda));
2878
13
  Address ThisPtr = GetAddrOfBlockDecl(variable);
2879
13
  CallArgs.add(RValue::get(ThisPtr.getPointer()), ThisType);
2880
13
2881
13
  // Add the rest of the parameters.
2882
13
  for (auto param : BD->parameters())
2883
1
    EmitDelegateCallArg(CallArgs, param, param->getBeginLoc());
2884
13
2885
13
  assert(!Lambda->isGenericLambda() &&
2886
13
            "generic lambda interconversion to block not implemented");
2887
13
  EmitForwardingCallToLambda(CallOp, CallArgs);
2888
13
}
2889
2890
85
void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
2891
85
  const CXXRecordDecl *Lambda = MD->getParent();
2892
85
2893
85
  // Start building arguments for forwarding call
2894
85
  CallArgList CallArgs;
2895
85
2896
85
  QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda));
2897
85
  llvm::Value *ThisPtr = llvm::UndefValue::get(getTypes().ConvertType(ThisType));
2898
85
  CallArgs.add(RValue::get(ThisPtr), ThisType);
2899
85
2900
85
  // Add the rest of the parameters.
2901
85
  for (auto Param : MD->parameters())
2902
50
    EmitDelegateCallArg(CallArgs, Param, Param->getBeginLoc());
2903
85
2904
85
  const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
2905
85
  // For a generic lambda, find the corresponding call operator specialization
2906
85
  // to which the call to the static-invoker shall be forwarded.
2907
85
  if (Lambda->isGenericLambda()) {
2908
0
    assert(MD->isFunctionTemplateSpecialization());
2909
0
    const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs();
2910
0
    FunctionTemplateDecl *CallOpTemplate = CallOp->getDescribedFunctionTemplate();
2911
0
    void *InsertPos = nullptr;
2912
0
    FunctionDecl *CorrespondingCallOpSpecialization =
2913
0
        CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos);
2914
0
    assert(CorrespondingCallOpSpecialization);
2915
0
    CallOp = cast<CXXMethodDecl>(CorrespondingCallOpSpecialization);
2916
0
  }
2917
85
  EmitForwardingCallToLambda(CallOp, CallArgs);
2918
85
}
2919
2920
85
void CodeGenFunction::EmitLambdaStaticInvokeBody(const CXXMethodDecl *MD) {
2921
85
  if (MD->isVariadic()) {
2922
0
    // FIXME: Making this work correctly is nasty because it requires either
2923
0
    // cloning the body of the call operator or making the call operator forward.
2924
0
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
2925
0
    return;
2926
0
  }
2927
85
2928
85
  EmitLambdaDelegatingInvokeBody(MD);
2929
85
}