/Users/buildslave/jenkins/sharedspace/clang-stage2-coverage-R@2/llvm/tools/clang/lib/CodeGen/CGClass.cpp
Line | Count | Source |
1 | | //===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===// |
2 | | // |
3 | | // The LLVM Compiler Infrastructure |
4 | | // |
5 | | // This file is distributed under the University of Illinois Open Source |
6 | | // License. See LICENSE.TXT for details. |
7 | | // |
8 | | //===----------------------------------------------------------------------===// |
9 | | // |
10 | | // This contains code dealing with C++ code generation of classes |
11 | | // |
12 | | //===----------------------------------------------------------------------===// |
13 | | |
14 | | #include "CGBlocks.h" |
15 | | #include "CGCXXABI.h" |
16 | | #include "CGDebugInfo.h" |
17 | | #include "CGRecordLayout.h" |
18 | | #include "CodeGenFunction.h" |
19 | | #include "clang/AST/CXXInheritance.h" |
20 | | #include "clang/AST/DeclTemplate.h" |
21 | | #include "clang/AST/EvaluatedExprVisitor.h" |
22 | | #include "clang/AST/RecordLayout.h" |
23 | | #include "clang/AST/StmtCXX.h" |
24 | | #include "clang/Basic/TargetBuiltins.h" |
25 | | #include "clang/CodeGen/CGFunctionInfo.h" |
26 | | #include "clang/Frontend/CodeGenOptions.h" |
27 | | #include "llvm/IR/Intrinsics.h" |
28 | | #include "llvm/IR/Metadata.h" |
29 | | #include "llvm/Transforms/Utils/SanitizerStats.h" |
30 | | |
31 | | using namespace clang; |
32 | | using namespace CodeGen; |
33 | | |
34 | | /// Return the best known alignment for an unknown pointer to a |
35 | | /// particular class. |
36 | 157k | CharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) { |
37 | 157k | if (!RD->isCompleteDefinition()) |
38 | 0 | return CharUnits::One(); // Hopefully won't be used anywhere. |
39 | 157k | |
40 | 157k | auto &layout = getContext().getASTRecordLayout(RD); |
41 | 157k | |
42 | 157k | // If the class is final, then we know that the pointer points to an |
43 | 157k | // object of that type and can use the full alignment. |
44 | 157k | if (RD->hasAttr<FinalAttr>()) { |
45 | 562 | return layout.getAlignment(); |
46 | 562 | |
47 | 562 | // Otherwise, we have to assume it could be a subclass. |
48 | 0 | } else { |
49 | 156k | return layout.getNonVirtualAlignment(); |
50 | 156k | } |
51 | 0 | } |
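 | | // Illustrative sketch (hypothetical types, not from this file): the two |
 | | // alignments can differ when a class has an overaligned virtual base: |
 | | //   struct alignas(32) VB { virtual ~VB(); }; |
 | | //   struct D : virtual VB {};        // full alignment 32; non-virtual |
 | | //                                    // alignment perhaps just 8 (vptr) |
 | | //   struct F final : virtual VB {};  // an F* always addresses a complete F |
 | | // An unknown D* may point at a base subobject inside a larger object, so |
 | | // only the non-virtual alignment is safe; 'final' rules that out for F. |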
52 | | |
53 | | /// Return the best known alignment for a pointer to a virtual base, |
54 | | /// given the alignment of a pointer to the derived class. |
55 | | CharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign, |
56 | | const CXXRecordDecl *derivedClass, |
57 | 470 | const CXXRecordDecl *vbaseClass) { |
58 | 470 | // The basic idea here is that an underaligned derived pointer might |
59 | 470 | // indicate an underaligned base pointer. |
60 | 470 | |
61 | 470 | assert(vbaseClass->isCompleteDefinition()); |
62 | 470 | auto &baseLayout = getContext().getASTRecordLayout(vbaseClass); |
63 | 470 | CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment(); |
64 | 470 | |
65 | 470 | return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass, |
66 | 470 | expectedVBaseAlign); |
67 | 470 | } |
68 | | |
69 | | CharUnits |
70 | | CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign, |
71 | | const CXXRecordDecl *baseDecl, |
72 | 623 | CharUnits expectedTargetAlign) { |
73 | 623 | // If the base is an incomplete type (which is, alas, possible with |
74 | 623 | // member pointers), be pessimistic. |
75 | 623 | if (!baseDecl->isCompleteDefinition()) |
76 | 4 | return std::min(actualBaseAlign, expectedTargetAlign); |
77 | 619 | |
78 | 619 | auto &baseLayout = getContext().getASTRecordLayout(baseDecl); |
79 | 619 | CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment(); |
80 | 619 | |
81 | 619 | // If the class is properly aligned, assume the target offset is, too. |
82 | 619 | // |
83 | 619 | // This actually isn't necessarily the right thing to do --- if the |
84 | 619 | // class is a complete object, but it's only properly aligned for a |
85 | 619 | // base subobject, then the alignments of things relative to it are |
86 | 619 | // probably off as well. (Note that this requires the alignment of |
87 | 619 | // the target to be greater than the NV alignment of the derived |
88 | 619 | // class.) |
89 | 619 | // |
90 | 619 | // However, our approach to this kind of under-alignment can only |
91 | 619 | // ever be best effort; after all, we're never going to propagate |
92 | 619 | // alignments through variables or parameters. Note, in particular, |
93 | 619 | // that constructing a polymorphic type in an address that's less |
94 | 619 | // than pointer-aligned will generally trap in the constructor, |
95 | 619 | // unless we someday add some sort of attribute to change the |
96 | 619 | // assumed alignment of 'this'. So our goal here is pretty much |
97 | 619 | // just to allow the user to explicitly say that a pointer is |
98 | 619 | // under-aligned and then safely access its fields and vtables. |
99 | 619 | if (actualBaseAlign >= expectedBaseAlign) { |
100 | 619 | return expectedTargetAlign; |
101 | 619 | } |
102 | 0 |
103 | 0 | // Otherwise, we might be offset by an arbitrary multiple of the |
104 | 0 | // actual alignment. The correct adjustment is to take the min of |
105 | 0 | // the two alignments. |
106 | 0 | return std::min(actualBaseAlign, expectedTargetAlign); |
107 | 0 | } |
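 | | // Worked illustration with assumed numbers: if the base's expected |
 | | // non-virtual alignment is 16 but the pointer is only known 4-aligned, |
 | | // the base may start at any multiple of 4, so a target nominally |
 | | // 16-aligned within it can only be assumed |
 | | //   min(actualBaseAlign, expectedTargetAlign) = min(4, 16) = 4 |
 | | // aligned, which is the pessimistic fallback returned above. |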
108 | | |
109 | 38.2k | Address CodeGenFunction::LoadCXXThisAddress() { |
110 | 38.2k | assert(CurFuncDecl && "loading 'this' without a func declaration?"); |
111 | 38.2k | assert(isa<CXXMethodDecl>(CurFuncDecl)); |
112 | 38.2k | |
113 | 38.2k | // Lazily compute CXXThisAlignment. |
114 | 38.2k | if (CXXThisAlignment.isZero()) { |
115 | 24.8k | // Just use the best known alignment for the parent. |
116 | 24.8k | // TODO: if we're currently emitting a complete-object ctor/dtor, |
117 | 24.8k | // we can always use the complete-object alignment. |
118 | 24.8k | auto RD = cast<CXXMethodDecl>(CurFuncDecl)->getParent(); |
119 | 24.8k | CXXThisAlignment = CGM.getClassPointerAlignment(RD); |
120 | 24.8k | } |
121 | 38.2k | |
122 | 38.2k | return Address(LoadCXXThis(), CXXThisAlignment); |
123 | 38.2k | } |
124 | | |
125 | | /// Emit the address of a field using a member data pointer. |
126 | | /// |
127 | | /// \param E Only used for emergency diagnostics |
128 | | Address |
129 | | CodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base, |
130 | | llvm::Value *memberPtr, |
131 | | const MemberPointerType *memberPtrType, |
132 | 74 | LValueBaseInfo *BaseInfo) { |
133 | 74 | // Ask the ABI to compute the actual address. |
134 | 74 | llvm::Value *ptr = |
135 | 74 | CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base, |
136 | 74 | memberPtr, memberPtrType); |
137 | 74 | |
138 | 74 | QualType memberType = memberPtrType->getPointeeType(); |
139 | 74 | CharUnits memberAlign = getNaturalTypeAlignment(memberType, BaseInfo); |
140 | 74 | memberAlign = |
141 | 74 | CGM.getDynamicOffsetAlignment(base.getAlignment(), |
142 | 74 | memberPtrType->getClass()->getAsCXXRecordDecl(), |
143 | 74 | memberAlign); |
144 | 74 | return Address(ptr, memberAlign); |
145 | 74 | } |
146 | | |
147 | | CharUnits CodeGenModule::computeNonVirtualBaseClassOffset( |
148 | | const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start, |
149 | 16.7k | CastExpr::path_const_iterator End) { |
150 | 16.7k | CharUnits Offset = CharUnits::Zero(); |
151 | 16.7k | |
152 | 16.7k | const ASTContext &Context = getContext(); |
153 | 16.7k | const CXXRecordDecl *RD = DerivedClass; |
154 | 16.7k | |
155 | 36.1k | for (CastExpr::path_const_iterator I = Start; I != End; ++I) { |
156 | 19.3k | const CXXBaseSpecifier *Base = *I; |
157 | 19.3k | assert(!Base->isVirtual() && "Should not see virtual bases here!"); |
158 | 19.3k | |
159 | 19.3k | // Get the layout. |
160 | 19.3k | const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD); |
161 | 19.3k | |
162 | 19.3k | const CXXRecordDecl *BaseDecl = |
163 | 19.3k | cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl()); |
164 | 19.3k | |
165 | 19.3k | // Add the offset. |
166 | 19.3k | Offset += Layout.getBaseClassOffset(BaseDecl); |
167 | 19.3k | |
168 | 19.3k | RD = BaseDecl; |
169 | 19.3k | } |
170 | 16.7k | |
171 | 16.7k | return Offset; |
172 | 16.7k | } |
173 | | |
174 | | llvm::Constant * |
175 | | CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl, |
176 | | CastExpr::path_const_iterator PathBegin, |
177 | 179 | CastExpr::path_const_iterator PathEnd) { |
178 | 179 | assert(PathBegin != PathEnd && "Base path should not be empty!"); |
179 | 179 | |
180 | 179 | CharUnits Offset = |
181 | 179 | computeNonVirtualBaseClassOffset(ClassDecl, PathBegin, PathEnd); |
182 | 179 | if (Offset.isZero()) |
183 | 143 | return nullptr; |
184 | 36 | |
185 | 36 | llvm::Type *PtrDiffTy = |
186 | 36 | Types.ConvertType(getContext().getPointerDiffType()); |
187 | 36 | |
188 | 36 | return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity()); |
189 | 36 | } |
190 | | |
191 | | /// Gets the address of a direct base class within a complete object. |
192 | | /// This should only be used for (1) non-virtual bases or (2) virtual bases |
193 | | /// when the type is known to be complete (e.g. in complete destructors). |
194 | | /// |
195 | | /// The object pointed to by 'This' is assumed to be non-null. |
196 | | Address |
197 | | CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This, |
198 | | const CXXRecordDecl *Derived, |
199 | | const CXXRecordDecl *Base, |
200 | 7.90k | bool BaseIsVirtual) { |
201 | 7.90k | // 'this' must be a pointer (in some address space) to Derived. |
202 | 7.90k | assert(This.getElementType() == ConvertType(Derived)); |
203 | 7.90k | |
204 | 7.90k | // Compute the offset of the virtual base. |
205 | 7.90k | CharUnits Offset; |
206 | 7.90k | const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived); |
207 | 7.90k | if (BaseIsVirtual) |
208 | 805 | Offset = Layout.getVBaseClassOffset(Base); |
209 | 7.90k | else |
210 | 7.10k | Offset = Layout.getBaseClassOffset(Base); |
211 | 7.90k | |
212 | 7.90k | // Shift and cast down to the base type. |
213 | 7.90k | // TODO: for complete types, this should be possible with a GEP. |
214 | 7.90k | Address V = This; |
215 | 7.90k | if (!Offset.isZero()) { |
216 | 2.16k | V = Builder.CreateElementBitCast(V, Int8Ty); |
217 | 2.16k | V = Builder.CreateConstInBoundsByteGEP(V, Offset); |
218 | 2.16k | } |
219 | 7.90k | V = Builder.CreateElementBitCast(V, ConvertType(Base)); |
220 | 7.90k | |
221 | 7.90k | return V; |
222 | 7.90k | } |
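 | | // Usage sketch (hypothetical hierarchy): in a complete constructor or |
 | | // destructor of |
 | | //   struct C : B, virtual V { /* ... */ }; |
 | | // both the B subobject (BaseIsVirtual = false) and the V subobject |
 | | // (BaseIsVirtual = true) sit at offsets the complete-object layout of C |
 | | // fixes statically, so no vtable lookup is needed for either. |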
223 | | |
224 | | static Address |
225 | | ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr, |
226 | | CharUnits nonVirtualOffset, |
227 | | llvm::Value *virtualOffset, |
228 | | const CXXRecordDecl *derivedClass, |
229 | 2.41k | const CXXRecordDecl *nearestVBase) { |
230 | 2.41k | // Assert that we have something to do. |
231 | 2.41k | assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr); |
232 | 2.41k | |
233 | 2.41k | // Compute the offset from the static and dynamic components. |
234 | 2.41k | llvm::Value *baseOffset; |
235 | 2.41k | if (!nonVirtualOffset.isZero()) { |
236 | 2.07k | baseOffset = llvm::ConstantInt::get(CGF.PtrDiffTy, |
237 | 2.07k | nonVirtualOffset.getQuantity()); |
238 | 2.07k | if (virtualOffset) { |
239 | 34 | baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset); |
240 | 34 | } |
241 | 2.41k | } else { |
242 | 337 | baseOffset = virtualOffset; |
243 | 337 | } |
244 | 2.41k | |
245 | 2.41k | // Apply the base offset. |
246 | 2.41k | llvm::Value *ptr = addr.getPointer(); |
247 | 2.41k | ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8PtrTy); |
248 | 2.41k | ptr = CGF.Builder.CreateInBoundsGEP(ptr, baseOffset, "add.ptr"); |
249 | 2.41k | |
250 | 2.41k | // If we have a virtual component, the alignment of the result will |
251 | 2.41k | // be relative only to the known alignment of that vbase. |
252 | 2.41k | CharUnits alignment; |
253 | 2.41k | if (virtualOffset) { |
254 | 371 | assert(nearestVBase && "virtual offset without vbase?"); |
255 | 371 | alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(), |
256 | 371 | derivedClass, nearestVBase); |
257 | 2.41k | } else { |
258 | 2.04k | alignment = addr.getAlignment(); |
259 | 2.04k | } |
260 | 2.41k | alignment = alignment.alignmentAtOffset(nonVirtualOffset); |
261 | 2.41k | |
262 | 2.41k | return Address(ptr, alignment); |
263 | 2.41k | } |
264 | | |
265 | | Address CodeGenFunction::GetAddressOfBaseClass( |
266 | | Address Value, const CXXRecordDecl *Derived, |
267 | | CastExpr::path_const_iterator PathBegin, |
268 | | CastExpr::path_const_iterator PathEnd, bool NullCheckValue, |
269 | 16.5k | SourceLocation Loc) { |
270 | 16.5k | assert(PathBegin != PathEnd && "Base path should not be empty!"); |
271 | 16.5k | |
272 | 16.5k | CastExpr::path_const_iterator Start = PathBegin; |
273 | 16.5k | const CXXRecordDecl *VBase = nullptr; |
274 | 16.5k | |
275 | 16.5k | // Sema has done some convenient canonicalization here: if the |
276 | 16.5k | // access path involved any virtual steps, the conversion path will |
277 | 16.5k | // *start* with a step down to the correct virtual base subobject, |
278 | 16.5k | // and hence will not require any further steps. |
279 | 16.5k | if ((*Start)->isVirtual()) { |
280 | 73 | VBase = |
281 | 73 | cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl()); |
282 | 73 | ++Start; |
283 | 73 | } |
284 | 16.5k | |
285 | 16.5k | // Compute the static offset of the ultimate destination within its |
286 | 16.5k | // allocating subobject (the virtual base, if there is one, or else |
287 | 16.5k | // the "complete" object that we see). |
288 | 16.5k | CharUnits NonVirtualOffset = CGM.computeNonVirtualBaseClassOffset( |
289 | 16.5k | VBase ? VBase : Derived, Start, PathEnd); |
290 | 16.5k | |
291 | 16.5k | // If there's a virtual step, we can sometimes "devirtualize" it. |
292 | 16.5k | // For now, that's limited to when the derived type is final. |
293 | 16.5k | // TODO: "devirtualize" this for accesses to known-complete objects. |
294 | 16.5k | if (VBase && Derived->hasAttr<FinalAttr>()) { |
295 | 2 | const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived); |
296 | 2 | CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase); |
297 | 2 | NonVirtualOffset += vBaseOffset; |
298 | 2 | VBase = nullptr; // we no longer have a virtual step |
299 | 2 | } |
300 | 16.5k | |
301 | 16.5k | // Get the base pointer type. |
302 | 16.5k | llvm::Type *BasePtrTy = |
303 | 16.5k | ConvertType((PathEnd[-1])->getType())->getPointerTo(); |
304 | 16.5k | |
305 | 16.5k | QualType DerivedTy = getContext().getRecordType(Derived); |
306 | 16.5k | CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived); |
307 | 16.5k | |
308 | 16.5k | // If the static offset is zero and we don't have a virtual step, |
309 | 16.5k | // just do a bitcast; null checks are unnecessary. |
310 | 16.5k | if (NonVirtualOffset.isZero() && !VBase) { |
311 | 15.0k | if (sanitizePerformTypeCheck()) { |
312 | 19 | SanitizerSet SkippedChecks; |
313 | 19 | SkippedChecks.set(SanitizerKind::Null, !NullCheckValue); |
314 | 19 | EmitTypeCheck(TCK_Upcast, Loc, Value.getPointer(), |
315 | 19 | DerivedTy, DerivedAlign, SkippedChecks); |
316 | 19 | } |
317 | 15.0k | return Builder.CreateBitCast(Value, BasePtrTy); |
318 | 15.0k | } |
319 | 1.51k | |
320 | 1.51k | llvm::BasicBlock *origBB = nullptr; |
321 | 1.51k | llvm::BasicBlock *endBB = nullptr; |
322 | 1.51k | |
323 | 1.51k | // Skip over the offset (and the vtable load) if we're supposed to |
324 | 1.51k | // null-check the pointer. |
325 | 1.51k | if (NullCheckValue) { |
326 | 32 | origBB = Builder.GetInsertBlock(); |
327 | 32 | llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull"); |
328 | 32 | endBB = createBasicBlock("cast.end"); |
329 | 32 | |
330 | 32 | llvm::Value *isNull = Builder.CreateIsNull(Value.getPointer()); |
331 | 32 | Builder.CreateCondBr(isNull, endBB, notNullBB); |
332 | 32 | EmitBlock(notNullBB); |
333 | 32 | } |
334 | 1.51k | |
335 | 1.51k | if (sanitizePerformTypeCheck()) { |
336 | 3 | SanitizerSet SkippedChecks; |
337 | 3 | SkippedChecks.set(SanitizerKind::Null, true); |
338 | 3 | EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc, |
339 | 3 | Value.getPointer(), DerivedTy, DerivedAlign, SkippedChecks); |
340 | 3 | } |
341 | 1.51k | |
342 | 1.51k | // Compute the virtual offset. |
343 | 1.51k | llvm::Value *VirtualOffset = nullptr; |
344 | 1.51k | if (VBase) { |
345 | 71 | VirtualOffset = |
346 | 71 | CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived, VBase); |
347 | 71 | } |
348 | 1.51k | |
349 | 1.51k | // Apply both offsets. |
350 | 1.51k | Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset, |
351 | 1.51k | VirtualOffset, Derived, VBase); |
352 | 1.51k | |
353 | 1.51k | // Cast to the destination type. |
354 | 1.51k | Value = Builder.CreateBitCast(Value, BasePtrTy); |
355 | 1.51k | |
356 | 1.51k | // Build a phi if we needed a null check. |
357 | 1.51k | if (NullCheckValue) { |
358 | 32 | llvm::BasicBlock *notNullBB = Builder.GetInsertBlock(); |
359 | 32 | Builder.CreateBr(endBB); |
360 | 32 | EmitBlock(endBB); |
361 | 32 | |
362 | 32 | llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result"); |
363 | 32 | PHI->addIncoming(Value.getPointer(), notNullBB); |
364 | 32 | PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB); |
365 | 32 | Value = Address(PHI, Value.getAlignment()); |
366 | 32 | } |
367 | 16.5k | |
368 | 16.5k | return Value; |
369 | 16.5k | } |
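 | | // Sketch of the canonicalization relied on above (hypothetical types): |
 | | //   struct A {}; struct B : virtual A {}; struct C : B {}; |
 | | // For the conversion C* -> A*, Sema emits the path [A] rather than |
 | | // [B, A]: the single virtual step lands directly in the A subobject, so |
 | | // at most one vbase-offset load plus one constant GEP is ever needed. |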
370 | | |
371 | | Address |
372 | | CodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr, |
373 | | const CXXRecordDecl *Derived, |
374 | | CastExpr::path_const_iterator PathBegin, |
375 | | CastExpr::path_const_iterator PathEnd, |
376 | 120 | bool NullCheckValue) { |
377 | 120 | assert(PathBegin != PathEnd && "Base path should not be empty!"); |
378 | 120 | |
379 | 120 | QualType DerivedTy = |
380 | 120 | getContext().getCanonicalType(getContext().getTagDeclType(Derived)); |
381 | 120 | llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo(); |
382 | 120 | |
383 | 120 | llvm::Value *NonVirtualOffset = |
384 | 120 | CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd); |
385 | 120 | |
386 | 120 | if (!NonVirtualOffset) { |
387 | 110 | // No offset, we can just cast back. |
388 | 110 | return Builder.CreateBitCast(BaseAddr, DerivedPtrTy); |
389 | 110 | } |
390 | 10 | |
391 | 10 | llvm::BasicBlock *CastNull = nullptr; |
392 | 10 | llvm::BasicBlock *CastNotNull = nullptr; |
393 | 10 | llvm::BasicBlock *CastEnd = nullptr; |
394 | 10 | |
395 | 10 | if (NullCheckValue) { |
396 | 5 | CastNull = createBasicBlock("cast.null"); |
397 | 5 | CastNotNull = createBasicBlock("cast.notnull"); |
398 | 5 | CastEnd = createBasicBlock("cast.end"); |
399 | 5 | |
400 | 5 | llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr.getPointer()); |
401 | 5 | Builder.CreateCondBr(IsNull, CastNull, CastNotNull); |
402 | 5 | EmitBlock(CastNotNull); |
403 | 5 | } |
404 | 10 | |
405 | 10 | // Apply the offset. |
406 | 10 | llvm::Value *Value = Builder.CreateBitCast(BaseAddr.getPointer(), Int8PtrTy); |
407 | 10 | Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset), |
408 | 10 | "sub.ptr"); |
409 | 10 | |
410 | 10 | // Just cast. |
411 | 10 | Value = Builder.CreateBitCast(Value, DerivedPtrTy); |
412 | 10 | |
413 | 10 | // Produce a PHI if we had a null-check. |
414 | 10 | if (NullCheckValue) { |
415 | 5 | Builder.CreateBr(CastEnd); |
416 | 5 | EmitBlock(CastNull); |
417 | 5 | Builder.CreateBr(CastEnd); |
418 | 5 | EmitBlock(CastEnd); |
419 | 5 | |
420 | 5 | llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2); |
421 | 5 | PHI->addIncoming(Value, CastNotNull); |
422 | 5 | PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull); |
423 | 5 | Value = PHI; |
424 | 5 | } |
425 | 120 | |
426 | 120 | return Address(Value, CGM.getClassPointerAlignment(Derived)); |
427 | 120 | } |
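 | | // Usage sketch: this is the adjustment behind downcasts such as |
 | | //   static_cast<Derived*>(basePtr) |
 | | // for a non-virtual base at a constant offset: the offset is subtracted, |
 | | // and with NullCheckValue set a null basePtr stays null via the PHI. |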
428 | | |
429 | | llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD, |
430 | | bool ForVirtualBase, |
431 | 20.8k | bool Delegating) { |
432 | 20.8k | if (!CGM.getCXXABI().NeedsVTTParameter(GD)) { |
433 | 20.7k | // This constructor/destructor does not need a VTT parameter. |
434 | 20.7k | return nullptr; |
435 | 20.7k | } |
436 | 151 | |
437 | 151 | const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent(); |
438 | 151 | const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent(); |
439 | 151 | |
440 | 151 | llvm::Value *VTT; |
441 | 151 | |
442 | 151 | uint64_t SubVTTIndex; |
443 | 151 | |
444 | 151 | if (Delegating) { |
445 | 2 | // If this is a delegating constructor call, just load the VTT. |
446 | 2 | return LoadCXXVTT(); |
447 | 149 | } else if (RD == Base) { |
448 | 55 | // If the record matches the base, this is the complete ctor/dtor |
449 | 55 | // variant calling the base variant in a class with virtual bases. |
450 | 55 | assert(!CGM.getCXXABI().NeedsVTTParameter(CurGD) && |
451 | 55 | "doing no-op VTT offset in base dtor/ctor?"); |
452 | 55 | assert(!ForVirtualBase && "Can't have same class as virtual base!"); |
453 | 55 | SubVTTIndex = 0; |
454 | 149 | } else { |
455 | 94 | const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD); |
456 | 94 | CharUnits BaseOffset = ForVirtualBase ? |
457 | 6 | Layout.getVBaseClassOffset(Base) : |
458 | 88 | Layout.getBaseClassOffset(Base); |
459 | 149 | |
460 | 149 | SubVTTIndex = |
461 | 149 | CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset)); |
462 | 149 | assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!"); |
463 | 149 | } |
464 | 151 | |
465 | 149 | if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) { |
466 | 19 | // A VTT parameter was passed to the constructor, use it. |
467 | 19 | VTT = LoadCXXVTT(); |
468 | 19 | VTT = Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex); |
469 | 149 | } else { |
470 | 130 | // We're the complete constructor, so get the VTT by name. |
471 | 130 | VTT = CGM.getVTables().GetAddrOfVTT(RD); |
472 | 130 | VTT = Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex); |
473 | 130 | } |
474 | 149 | |
475 | 149 | return VTT; |
476 | 20.8k | } |
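 | | // Hypothetical hierarchy where the VTT comes into play: |
 | | //   struct V { virtual ~V(); }; |
 | | //   struct A : virtual V {};  struct B : virtual V {}; |
 | | //   struct D : A, B {}; |
 | | // While D's complete constructor runs, the base-variant constructors of |
 | | // A and B receive a sub-VTT so they install D's construction vtables |
 | | // rather than their own complete-object vtables. |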
477 | | |
478 | | namespace { |
479 | | /// Call the destructor for a direct base class. |
480 | | struct CallBaseDtor final : EHScopeStack::Cleanup { |
481 | | const CXXRecordDecl *BaseClass; |
482 | | bool BaseIsVirtual; |
483 | | CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual) |
484 | 4.05k | : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {} |
485 | | |
486 | 3.73k | void Emit(CodeGenFunction &CGF, Flags flags) override { |
487 | 3.73k | const CXXRecordDecl *DerivedClass = |
488 | 3.73k | cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent(); |
489 | 3.73k | |
490 | 3.73k | const CXXDestructorDecl *D = BaseClass->getDestructor(); |
491 | 3.73k | Address Addr = |
492 | 3.73k | CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(), |
493 | 3.73k | DerivedClass, BaseClass, |
494 | 3.73k | BaseIsVirtual); |
495 | 3.73k | CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, |
496 | 3.73k | /*Delegating=*/false, Addr); |
497 | 3.73k | } |
498 | | }; |
499 | | |
500 | | /// A visitor which checks whether an initializer uses 'this' in a |
501 | | /// way which requires the vtable to be properly set. |
502 | | struct DynamicThisUseChecker : ConstEvaluatedExprVisitor<DynamicThisUseChecker> { |
503 | | typedef ConstEvaluatedExprVisitor<DynamicThisUseChecker> super; |
504 | | |
505 | | bool UsesThis; |
506 | | |
507 | 4.16k | DynamicThisUseChecker(const ASTContext &C) : super(C), UsesThis(false) {} |
508 | | |
509 | | // Black-list all explicit and implicit references to 'this'. |
510 | | // |
511 | | // Do we need to worry about external references to 'this' derived |
512 | | // from arbitrary code? If so, then anything which runs arbitrary |
513 | | // external code might potentially access the vtable. |
514 | 1 | void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; } |
515 | | }; |
516 | | } // end anonymous namespace |
517 | | |
518 | 4.16k | static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) { |
519 | 4.16k | DynamicThisUseChecker Checker(C); |
520 | 4.16k | Checker.Visit(Init); |
521 | 4.16k | return Checker.UsesThis; |
522 | 4.16k | } |
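 | | // Example of an initializer this flags (hypothetical): |
 | | //   struct Base { Base(int); }; |
 | | //   struct Derived : Base { |
 | | //     int seed();                        // arbitrary code using 'this' |
 | | //     Derived() : Base(this->seed()) {}  // 'this' escapes into the init |
 | | //   }; |
 | | // Since seed() might touch the vtable, the caller below installs the |
 | | // vtable pointers before evaluating such a base initializer. |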
523 | | |
524 | | static void EmitBaseInitializer(CodeGenFunction &CGF, |
525 | | const CXXRecordDecl *ClassDecl, |
526 | | CXXCtorInitializer *BaseInit, |
527 | 4.27k | CXXCtorType CtorType) { |
528 | 4.27k | assert(BaseInit->isBaseInitializer() && |
529 | 4.27k | "Must have base initializer!"); |
530 | 4.27k | |
531 | 4.27k | Address ThisPtr = CGF.LoadCXXThisAddress(); |
532 | 4.27k | |
533 | 4.27k | const Type *BaseType = BaseInit->getBaseClass(); |
534 | 4.27k | CXXRecordDecl *BaseClassDecl = |
535 | 4.27k | cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl()); |
536 | 4.27k | |
537 | 4.27k | bool isBaseVirtual = BaseInit->isBaseVirtual(); |
538 | 4.27k | |
539 | 4.27k | // The base constructor doesn't construct virtual bases. |
540 | 4.27k | if (CtorType == Ctor_Base && isBaseVirtual) |
541 | 109 | return; |
542 | 4.16k | |
543 | 4.16k | // If the initializer for the base (other than the constructor |
544 | 4.16k | // itself) accesses 'this' in any way, we need to initialize the |
545 | 4.16k | // vtables. |
546 | 4.16k | if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit())) |
547 | 1 | CGF.InitializeVTablePointers(ClassDecl); |
548 | 4.16k | |
549 | 4.16k | // We can pretend to be a complete class because it only matters for |
550 | 4.16k | // virtual bases, and we only do virtual bases for complete ctors. |
551 | 4.16k | Address V = |
552 | 4.16k | CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl, |
553 | 4.16k | BaseClassDecl, |
554 | 4.16k | isBaseVirtual); |
555 | 4.16k | AggValueSlot AggSlot = |
556 | 4.16k | AggValueSlot::forAddr(V, Qualifiers(), |
557 | 4.16k | AggValueSlot::IsDestructed, |
558 | 4.16k | AggValueSlot::DoesNotNeedGCBarriers, |
559 | 4.16k | AggValueSlot::IsNotAliased); |
560 | 4.16k | |
561 | 4.16k | CGF.EmitAggExpr(BaseInit->getInit(), AggSlot); |
562 | 4.16k | |
563 | 4.16k | if (CGF.CGM.getLangOpts().Exceptions && |
564 | 2.13k | !BaseClassDecl->hasTrivialDestructor()) |
565 | 1.68k | CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl, |
566 | 1.68k | isBaseVirtual); |
567 | 4.27k | } |
568 | | |
569 | 40.3k | static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) { |
570 | 40.3k | auto *CD = dyn_cast<CXXConstructorDecl>(D); |
571 | 40.3k | if (!(CD && CD->isCopyOrMoveConstructor()) && |
572 | 40.3k | !D->isCopyAssignmentOperator() && !D->isMoveAssignmentOperator()) |
573 | 34.8k | return false; |
574 | 5.53k | |
575 | 5.53k | // We can emit a memcpy for a trivial copy or move constructor/assignment. |
576 | 5.53k | if (D->isTrivial() && !D->getParent()->mayInsertExtraPadding()) |
577 | 1.47k | return true; |
578 | 4.06k | |
579 | 4.06k | // We *must* emit a memcpy for a defaulted union copy or move op. |
580 | 4.06k | if (D->getParent()->isUnion() && D->isDefaulted()) |
581 | 2 | return true; |
582 | 4.06k | |
583 | 4.06k | return false; |
584 | 4.06k | } |
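 | | // Illustrative cases: |
 | | //   struct P { int x, y; };                // trivial copy -> memcpy |
 | | //   union U { int i; float f; };           // defaulted union copy -> |
 | | //                                          // memcpy (mandatory) |
 | | //   struct NT { NT(const NT &); int x; };  // user-provided -> memberwise |
 | | // Classes with mayInsertExtraPadding() (ASan field padding) are likewise |
 | | // excluded even when the operation is trivial. |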
585 | | |
586 | | static void EmitLValueForAnyFieldInitialization(CodeGenFunction &CGF, |
587 | | CXXCtorInitializer *MemberInit, |
588 | 8.15k | LValue &LHS) { |
589 | 8.15k | FieldDecl *Field = MemberInit->getAnyMember(); |
590 | 8.15k | if (MemberInit->isIndirectMemberInitializer()) { |
591 | 26 | // If we are initializing an anonymous union field, drill down to the field. |
592 | 26 | IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember(); |
593 | 26 | for (const auto *I : IndirectField->chain()) |
594 | 60 | LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(I)); |
595 | 8.15k | } else { |
596 | 8.13k | LHS = CGF.EmitLValueForFieldInitialization(LHS, Field); |
597 | 8.13k | } |
598 | 8.15k | } |
599 | | |
600 | | static void EmitMemberInitializer(CodeGenFunction &CGF, |
601 | | const CXXRecordDecl *ClassDecl, |
602 | | CXXCtorInitializer *MemberInit, |
603 | | const CXXConstructorDecl *Constructor, |
604 | 8.14k | FunctionArgList &Args) { |
605 | 8.14k | ApplyDebugLocation Loc(CGF, MemberInit->getSourceLocation()); |
606 | 8.14k | assert(MemberInit->isAnyMemberInitializer() && |
607 | 8.14k | "Must have member initializer!"); |
608 | 8.14k | assert(MemberInit->getInit() && "Must have initializer!"); |
609 | 8.14k | |
610 | 8.14k | // non-static data member initializers. |
611 | 8.14k | FieldDecl *Field = MemberInit->getAnyMember(); |
612 | 8.14k | QualType FieldType = Field->getType(); |
613 | 8.14k | |
614 | 8.14k | llvm::Value *ThisPtr = CGF.LoadCXXThis(); |
615 | 8.14k | QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl); |
616 | 8.14k | LValue LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy); |
617 | 8.14k | |
618 | 8.14k | EmitLValueForAnyFieldInitialization(CGF, MemberInit, LHS); |
619 | 8.14k | |
620 | 8.14k | // Special case: if we are in a copy or move constructor, and we are copying |
621 | 8.14k | // an array of PODs or classes with trivial copy constructors, ignore the |
622 | 8.14k | // AST and perform the copy we know is equivalent. |
623 | 8.14k | // FIXME: This is hacky at best... if we had a bit more explicit information |
624 | 8.14k | // in the AST, we could generalize it more easily. |
625 | 8.14k | const ConstantArrayType *Array |
626 | 8.14k | = CGF.getContext().getAsConstantArrayType(FieldType); |
627 | 8.14k | if (Array && Constructor->isDefaulted() && |
628 | 8.14k | Constructor->isCopyOrMoveConstructor()) { |
629 | 21 | QualType BaseElementTy = CGF.getContext().getBaseElementType(Array); |
630 | 21 | CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit()); |
631 | 21 | if (BaseElementTy.isPODType(CGF.getContext()) || |
632 | 21 | (CE && isMemcpyEquivalentSpecialMember(CE->getConstructor()))) { |
633 | 6 | unsigned SrcArgIndex = |
634 | 6 | CGF.CGM.getCXXABI().getSrcArgforCopyCtor(Constructor, Args); |
635 | 6 | llvm::Value *SrcPtr |
636 | 6 | = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex])); |
637 | 6 | LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy); |
638 | 6 | LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field); |
639 | 6 | |
640 | 6 | // Copy the aggregate. |
641 | 6 | CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType, |
642 | 6 | LHS.isVolatileQualified()); |
643 | 6 | // Ensure that we destroy the objects if an exception is thrown later in |
644 | 6 | // the constructor. |
645 | 6 | QualType::DestructionKind dtorKind = FieldType.isDestructedType(); |
646 | 6 | if (CGF.needsEHCleanup(dtorKind)) |
647 | 0 | CGF.pushEHDestroy(dtorKind, LHS.getAddress(), FieldType); |
648 | 6 | return; |
649 | 6 | } |
650 | 8.13k | } |
651 | 8.13k | |
652 | 8.13k | CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit()); |
653 | 8.13k | } |
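 | | // The array special case above fires for code like (hypothetical): |
 | | //   struct S { int buf[64]; };   // implicitly defaulted copy constructor |
 | | // Rather than emitting a per-element loop from the AST, the copy of |
 | | // 'buf' becomes one EmitAggregateCopy, i.e. a single memcpy for this POD |
 | | // element type. |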
654 | | |
655 | | void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS, |
656 | 8.92k | Expr *Init) { |
657 | 8.92k | QualType FieldType = Field->getType(); |
658 | 8.92k | switch (getEvaluationKind(FieldType)) { |
659 | 6.25k | case TEK_Scalar: |
660 | 6.25k | if (LHS.isSimple()) { |
661 | 6.21k | EmitExprAsInit(Init, Field, LHS, false); |
662 | 6.25k | } else { |
663 | 37 | RValue RHS = RValue::get(EmitScalarExpr(Init)); |
664 | 37 | EmitStoreThroughLValue(RHS, LHS); |
665 | 37 | } |
666 | 6.25k | break; |
667 | 4 | case TEK_Complex: |
668 | 4 | EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true); |
669 | 4 | break; |
670 | 2.67k | case TEK_Aggregate: { |
671 | 2.67k | AggValueSlot Slot = |
672 | 2.67k | AggValueSlot::forLValue(LHS, |
673 | 2.67k | AggValueSlot::IsDestructed, |
674 | 2.67k | AggValueSlot::DoesNotNeedGCBarriers, |
675 | 2.67k | AggValueSlot::IsNotAliased); |
676 | 2.67k | EmitAggExpr(Init, Slot); |
677 | 2.67k | break; |
678 | 8.92k | } |
679 | 8.92k | } |
680 | 8.92k | |
681 | 8.92k | // Ensure that we destroy this object if an exception is thrown |
682 | 8.92k | // later in the constructor. |
683 | 8.92k | QualType::DestructionKind dtorKind = FieldType.isDestructedType(); |
684 | 8.92k | if (needsEHCleanup(dtorKind)) |
685 | 1.52k | pushEHDestroy(dtorKind, LHS.getAddress(), FieldType); |
686 | 8.92k | } |
687 | | |
688 | | /// Checks whether the given constructor is a valid subject for the |
689 | | /// complete-to-base constructor delegation optimization, i.e. |
690 | | /// emitting the complete constructor as a simple call to the base |
691 | | /// constructor. |
692 | | bool CodeGenFunction::IsConstructorDelegationValid( |
693 | 7.26k | const CXXConstructorDecl *Ctor) { |
694 | 7.26k | |
695 | 7.26k | // Currently we disable the optimization for classes with virtual |
696 | 7.26k | // bases because (1) the addresses of parameter variables need to be |
697 | 7.26k | // consistent across all initializers but (2) the delegate function |
698 | 7.26k | // call necessarily creates a second copy of the parameter variable. |
699 | 7.26k | // |
700 | 7.26k | // The limiting example (purely theoretical AFAIK): |
701 | 7.26k | // struct A { A(int &c) { c++; } }; |
702 | 7.26k | // struct B : virtual A { |
703 | 7.26k | // B(int count) : A(count) { printf("%d\n", count); } |
704 | 7.26k | // }; |
705 | 7.26k | // ...although even this example could in principle be emitted as a |
706 | 7.26k | // delegation since the address of the parameter doesn't escape. |
707 | 7.26k | if (Ctor->getParent()->getNumVBases()) { |
708 | 490 | // TODO: white-list trivial vbase initializers. This case wouldn't |
709 | 490 | // be subject to the restrictions below. |
710 | 490 | |
711 | 490 | // TODO: white-list cases where: |
712 | 490 | // - there are no non-reference parameters to the constructor |
713 | 490 | // - the initializers don't access any non-reference parameters |
714 | 490 | // - the initializers don't take the address of non-reference |
715 | 490 | // parameters |
716 | 490 | // - etc. |
717 | 490 | // If we ever add any of the above cases, remember that: |
718 | 490 | // - function-try-blocks will always blacklist this optimization |
719 | 490 | // - we need to perform the constructor prologue and cleanup in |
720 | 490 | // EmitConstructorBody. |
721 | 490 | |
722 | 490 | return false; |
723 | 490 | } |
724 | 6.77k | |
725 | 6.77k | // We also disable the optimization for variadic functions because |
726 | 6.77k | // it's impossible to "re-pass" varargs. |
727 | 6.77k | if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic()) |
728 | 9 | return false; |
729 | 6.76k | |
730 | 6.76k | // FIXME: Decide if we can do a delegation of a delegating constructor. |
731 | 6.76k | if (Ctor->isDelegatingConstructor()) |
732 | 3 | return false; |
733 | 6.76k | |
734 | 6.76k | return true; |
735 | 6.76k | } |
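 | | // When this returns true, EmitConstructorBody below can emit the |
 | | // complete-object constructor as a plain call to the base variant, e.g. |
 | | //   struct A { A(int i); };   // no vbases, non-variadic, non-delegating |
 | | // gets a complete ctor that just forwards its arguments to the base |
 | | // ctor instead of duplicating the whole body. |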
736 | | |
737 | | // Emit code in ctor (Prologue==true) or dtor (Prologue==false) |
738 | | // to poison the extra field paddings inserted under |
739 | | // -fsanitize-address-field-padding=1|2. |
740 | 24.1k | void CodeGenFunction::EmitAsanPrologueOrEpilogue(bool Prologue) { |
741 | 24.1k | ASTContext &Context = getContext(); |
742 | 24.1k | const CXXRecordDecl *ClassDecl = |
743 | 14.9k | Prologue ? cast<CXXConstructorDecl>(CurGD.getDecl())->getParent() |
744 | 9.23k | : cast<CXXDestructorDecl>(CurGD.getDecl())->getParent(); |
745 | 24.1k | if (!ClassDecl->mayInsertExtraPadding()) return; |
746 | 41 | |
747 | 41 | struct SizeAndOffset { |
748 | 41 | uint64_t Size; |
749 | 41 | uint64_t Offset; |
750 | 41 | }; |
751 | 41 | |
752 | 41 | unsigned PtrSize = CGM.getDataLayout().getPointerSizeInBits(); |
753 | 41 | const ASTRecordLayout &Info = Context.getASTRecordLayout(ClassDecl); |
754 | 41 | |
755 | 41 | // Populate sizes and offsets of fields. |
756 | 41 | SmallVector<SizeAndOffset, 16> SSV(Info.getFieldCount()); |
757 | 153 | for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i) |
758 | 112 | SSV[i].Offset = |
759 | 112 | Context.toCharUnitsFromBits(Info.getFieldOffset(i)).getQuantity(); |
760 | 41 | |
761 | 41 | size_t NumFields = 0; |
762 | 112 | for (const auto *Field : ClassDecl->fields()) { |
763 | 112 | const FieldDecl *D = Field; |
764 | 112 | std::pair<CharUnits, CharUnits> FieldInfo = |
765 | 112 | Context.getTypeInfoInChars(D->getType()); |
766 | 112 | CharUnits FieldSize = FieldInfo.first; |
767 | 112 | assert(NumFields < SSV.size()); |
768 | 112 | SSV[NumFields].Size = D->isBitField() ? 0 : FieldSize.getQuantity(); |
769 | 112 | NumFields++; |
770 | 112 | } |
771 | 41 | assert(NumFields == SSV.size()); |
772 | 41 | if (SSV.size() <= 1) return; |
773 | 41 | |
774 | 41 | // We will insert calls to __asan_* run-time functions. |
775 | 41 | // LLVM AddressSanitizer pass may decide to inline them later. |
776 | 41 | llvm::Type *Args[2] = {IntPtrTy, IntPtrTy}; |
777 | 41 | llvm::FunctionType *FTy = |
778 | 41 | llvm::FunctionType::get(CGM.VoidTy, Args, false); |
779 | 41 | llvm::Constant *F = CGM.CreateRuntimeFunction( |
780 | 22 | FTy, Prologue ? "__asan_poison_intra_object_redzone" |
781 | 19 | : "__asan_unpoison_intra_object_redzone"); |
782 | 41 | |
783 | 41 | llvm::Value *ThisPtr = LoadCXXThis(); |
784 | 41 | ThisPtr = Builder.CreatePtrToInt(ThisPtr, IntPtrTy); |
785 | 41 | uint64_t TypeSize = Info.getNonVirtualSize().getQuantity(); |
786 | 41 | // For each field check if it has sufficient padding, |
787 | 41 | // if so (un)poison it with a call. |
788 | 153 | for (size_t i = 0; i < SSV.size(); i++) { |
789 | 112 | uint64_t AsanAlignment = 8; |
790 | 112 | uint64_t NextField = i == SSV.size() - 1 ? TypeSize : SSV[i + 1].Offset; |
791 | 112 | uint64_t PoisonSize = NextField - SSV[i].Offset - SSV[i].Size; |
792 | 112 | uint64_t EndOffset = SSV[i].Offset + SSV[i].Size; |
793 | 112 | if (PoisonSize < AsanAlignment || !SSV[i].Size || |
794 | 94 | (NextField % AsanAlignment) != 0) |
795 | 18 | continue; |
796 | 94 | Builder.CreateCall( |
797 | 94 | F, {Builder.CreateAdd(ThisPtr, Builder.getIntN(PtrSize, EndOffset)), |
798 | 94 | Builder.getIntN(PtrSize, PoisonSize)}); |
799 | 94 | } |
800 | 24.1k | } |
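 | | // Shape of what gets emitted (illustrative): for each sufficiently |
 | | // large, suitably aligned padding gap between consecutive fields, the |
 | | // constructor receives a call |
 | | //   __asan_poison_intra_object_redzone(this + field_end, padding_size); |
 | | // and the destructor the matching __asan_unpoison_intra_object_redzone |
 | | // call, using the field offsets and sizes collected in SSV above. |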
801 | | |
802 | | /// EmitConstructorBody - Emits the body of the current constructor. |
803 | 14.9k | void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) { |
804 | 14.9k | EmitAsanPrologueOrEpilogue(true); |
805 | 14.9k | const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl()); |
806 | 14.9k | CXXCtorType CtorType = CurGD.getCtorType(); |
807 | 14.9k | |
808 | 14.9k | assert((CGM.getTarget().getCXXABI().hasConstructorVariants() || |
809 | 14.9k | CtorType == Ctor_Complete) && |
810 | 14.9k | "can only generate complete ctor for this ABI"); |
811 | 14.9k | if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) && |
812 | 14.9k | CGM.getTarget().getCXXABI().hasConstructorVariants()) { |
813 | 14.9k | // delegation optimization. |
814 | 14.9k | if (CtorType == Ctor_Complete && 14.9k IsConstructorDelegationValid(Ctor)7.24k && |
815 | 14.9k | CGM.getTarget().getCXXABI().hasConstructorVariants()6.74k ) { |
816 | 6.06k | EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args, Ctor->getLocEnd()); |
817 | 6.06k | return; |
818 | 6.06k | } |
819 | 8.84k | |
820 | 8.84k | const FunctionDecl *Definition = nullptr; |
821 | 8.84k | Stmt *Body = Ctor->getBody(Definition); |
822 | 8.84k | assert(Definition == Ctor && "emitting wrong constructor body"); |
823 | 8.84k | |
824 | 8.84k | // Enter the function-try-block before the constructor prologue if |
825 | 8.84k | // applicable. |
826 | 8.84k | bool IsTryBody = (Body && isa<CXXTryStmt>(Body)); |
827 | 8.84k | if (IsTryBody) |
828 | 3 | EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true); |
829 | 8.84k | |
830 | 8.84k | incrementProfileCounter(Body); |
831 | 8.84k | |
832 | 8.84k | RunCleanupsScope RunCleanups(*this); |
833 | 8.84k | |
834 | 8.84k | // TODO: in restricted cases, we can emit the vbase initializers of |
835 | 8.84k | // a complete ctor and then delegate to the base ctor. |
836 | 8.84k | |
837 | 8.84k | // Emit the constructor prologue, i.e. the base and member |
838 | 8.84k | // initializers. |
839 | 8.84k | EmitCtorPrologue(Ctor, CtorType, Args); |
840 | 8.84k | |
841 | 8.84k | // Emit the body of the statement. |
842 | 8.84k | if (IsTryBody) |
843 | 3 | EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock()); |
844 | 8.83k | else if (Body) |
845 | 8.83k | EmitStmt(Body); |
846 | 8.84k | |
847 | 8.84k | // Emit any cleanup blocks associated with the member or base |
848 | 8.84k | // initializers, which includes (along the exceptional path) the |
849 | 8.84k | // destructors for those members and bases that were fully |
850 | 8.84k | // constructed. |
851 | 8.84k | RunCleanups.ForceCleanup(); |
852 | 8.84k | |
853 | 8.84k | if (IsTryBody) |
854 | 3 | ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true); |
855 | 14.9k | } |
856 | | |
857 | | namespace { |
858 | | /// RAII object to indicate that codegen is copying the value representation |
859 | | /// instead of the object representation. Useful when copying a struct or |
860 | | /// class which has uninitialized members and we're only performing |
861 | | /// lvalue-to-rvalue conversion on the object but not its members. |
862 | | class CopyingValueRepresentation { |
863 | | public: |
864 | | explicit CopyingValueRepresentation(CodeGenFunction &CGF) |
865 | 175 | : CGF(CGF), OldSanOpts(CGF.SanOpts) { |
866 | 175 | CGF.SanOpts.set(SanitizerKind::Bool, false); |
867 | 175 | CGF.SanOpts.set(SanitizerKind::Enum, false); |
868 | 175 | } |
869 | 175 | ~CopyingValueRepresentation() { |
870 | 175 | CGF.SanOpts = OldSanOpts; |
871 | 175 | } |
872 | | private: |
873 | | CodeGenFunction &CGF; |
874 | | SanitizerSet OldSanOpts; |
875 | | }; |
876 | | } // end anonymous namespace |
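 | | // Rationale sketch: when a defaulted copy is emitted member by member, a |
 | | // load from, say, |
 | | //   struct S { bool flag; };   // 'flag' possibly uninitialized |
 | | // would otherwise trip -fsanitize=bool even though only the value |
 | | // representation is being transferred; the RAII object above turns the |
 | | // Bool and Enum checks off for the duration of that copy. |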
877 | | |
878 | | namespace { |
879 | | class FieldMemcpyizer { |
880 | | public: |
881 | | FieldMemcpyizer(CodeGenFunction &CGF, const CXXRecordDecl *ClassDecl, |
882 | | const VarDecl *SrcRec) |
883 | | : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec), |
884 | | RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)), |
885 | | FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0), |
886 | 9.46k | LastFieldOffset(0), LastAddedFieldIndex(0) {} |
887 | | |
888 | 1.24k | bool isMemcpyableField(FieldDecl *F) const { |
889 | 1.24k | // Never memcpy fields when we are adding poisoned paddings. |
890 | 1.24k | if (CGF.getContext().getLangOpts().SanitizeAddressFieldPadding) |
891 | 8 | return false; |
892 | 1.23k | Qualifiers Qual = F->getType().getQualifiers(); |
893 | 1.23k | if (Qual.hasVolatile() || Qual.hasObjCLifetime()) |
894 | 6 | return false; |
895 | 1.23k | return true; |
896 | 1.23k | } |
897 | | |
898 | 1.22k | void addMemcpyableField(FieldDecl *F) { |
899 | 1.22k | if (!FirstField) |
900 | 438 | addInitialField(F); |
901 | 1.22k | else |
902 | 791 | addNextField(F); |
903 | 1.22k | } |
904 | | |
905 | 263 | CharUnits getMemcpySize(uint64_t FirstByteOffset) const { |
906 | 263 | unsigned LastFieldSize = |
907 | 263 | LastField->isBitField() ? |
908 | 6 | LastField->getBitWidthValue(CGF.getContext()) : |
909 | 257 | CGF.getContext().getTypeSize(LastField->getType()); |
910 | 263 | uint64_t MemcpySizeBits = |
911 | 263 | LastFieldOffset + LastFieldSize - FirstByteOffset + |
912 | 263 | CGF.getContext().getCharWidth() - 1; |
913 | 263 | CharUnits MemcpySize = |
914 | 263 | CGF.getContext().toCharUnitsFromBits(MemcpySizeBits); |
915 | 263 | return MemcpySize; |
916 | 263 | } |
917 | | |
918 | 1.72k | void emitMemcpy() { |
919 | 1.72k | // Give the subclass a chance to bail out if it feels the memcpy isn't |
920 | 1.72k | // worth it (e.g. hasn't aggregated enough data). |
921 | 1.72k | if (!FirstField) { |
922 | 1.45k | return; |
923 | 1.45k | } |
924 | 263 | |
925 | 263 | uint64_t FirstByteOffset; |
926 | 263 | if (FirstField->isBitField()) { |
927 | 12 | const CGRecordLayout &RL = |
928 | 12 | CGF.getTypes().getCGRecordLayout(FirstField->getParent()); |
929 | 12 | const CGBitFieldInfo &BFInfo = RL.getBitFieldInfo(FirstField); |
930 | 12 | // FirstFieldOffset is not appropriate for bitfields, |
931 | 12 | // we need to use the storage offset instead. |
932 | 12 | FirstByteOffset = CGF.getContext().toBits(BFInfo.StorageOffset); |
933 | 263 | } else { |
934 | 251 | FirstByteOffset = FirstFieldOffset; |
935 | 251 | } |
936 | 263 | |
937 | 263 | CharUnits MemcpySize = getMemcpySize(FirstByteOffset); |
938 | 263 | QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl); |
939 | 263 | Address ThisPtr = CGF.LoadCXXThisAddress(); |
940 | 263 | LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy); |
941 | 263 | LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField); |
942 | 263 | llvm::Value *SrcPtr = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec)); |
943 | 263 | LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy); |
944 | 263 | LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField); |
945 | 263 | |
946 | 263 | emitMemcpyIR(Dest.isBitField() ? Dest.getBitFieldAddress() : Dest.getAddress(), |
947 | 263 | Src.isBitField() ? Src.getBitFieldAddress() : Src.getAddress(), |
948 | 1.72k | MemcpySize); |
949 | 1.72k | reset(); |
950 | 1.72k | } |
951 | | |
952 | 18.4k | void reset() { |
953 | 18.4k | FirstField = nullptr; |
954 | 18.4k | } |
955 | | |
956 | | protected: |
957 | | CodeGenFunction &CGF; |
958 | | const CXXRecordDecl *ClassDecl; |
959 | | |
960 | | private: |
961 | 263 | void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) { |
962 | 263 | llvm::PointerType *DPT = DestPtr.getType(); |
963 | 263 | llvm::Type *DBP = |
964 | 263 | llvm::Type::getInt8PtrTy(CGF.getLLVMContext(), DPT->getAddressSpace()); |
965 | 263 | DestPtr = CGF.Builder.CreateBitCast(DestPtr, DBP); |
966 | 263 | |
967 | 263 | llvm::PointerType *SPT = SrcPtr.getType(); |
968 | 263 | llvm::Type *SBP = |
969 | 263 | llvm::Type::getInt8PtrTy(CGF.getLLVMContext(), SPT->getAddressSpace()); |
970 | 263 | SrcPtr = CGF.Builder.CreateBitCast(SrcPtr, SBP); |
971 | 263 | |
972 | 263 | CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity()); |
973 | 263 | } |
974 | | |
975 | 438 | void addInitialField(FieldDecl *F) { |
976 | 438 | FirstField = F; |
977 | 438 | LastField = F; |
978 | 438 | FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex()); |
979 | 438 | LastFieldOffset = FirstFieldOffset; |
980 | 438 | LastAddedFieldIndex = F->getFieldIndex(); |
981 | 438 | } |
982 | | |
983 | 791 | void addNextField(FieldDecl *F) { |
984 | 791 | // For the most part, the following invariant will hold: |
985 | 791 | // F->getFieldIndex() == LastAddedFieldIndex + 1 |
986 | 791 | // The one exception is that Sema won't add a copy-initializer for an |
987 | 791 | // unnamed bitfield, which will show up here as a gap in the sequence. |
988 | 791 | assert(F->getFieldIndex() >= LastAddedFieldIndex + 1 && |
989 | 791 | "Cannot aggregate fields out of order."); |
990 | 791 | LastAddedFieldIndex = F->getFieldIndex(); |
991 | 791 | |
992 | 791 | // The 'first' and 'last' fields are chosen by offset, rather than field |
993 | 791 | // index. This allows the code to support bitfields, as well as regular |
994 | 791 | // fields. |
995 | 791 | uint64_t FOffset = RecLayout.getFieldOffset(F->getFieldIndex()); |
996 | 791 | if (FOffset < FirstFieldOffset) { |
997 | 0 | FirstField = F; |
998 | 0 | FirstFieldOffset = FOffset; |
999 | 791 | } else if (FOffset > LastFieldOffset) { |
1000 | 791 | LastField = F; |
1001 | 791 | LastFieldOffset = FOffset; |
1002 | 791 | } |
1003 | 791 | } |
1004 | | |
1005 | | const VarDecl *SrcRec; |
1006 | | const ASTRecordLayout &RecLayout; |
1007 | | FieldDecl *FirstField; |
1008 | | FieldDecl *LastField; |
1009 | | uint64_t FirstFieldOffset, LastFieldOffset; |
1010 | | unsigned LastAddedFieldIndex; |
1011 | | }; |
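 | | // Aggregation sketch (hypothetical): for |
 | | //   struct T { int a; int b; short c; };   // defaulted copy |
 | | // addMemcpyableField() is fed a, b, c in order, and emitMemcpy() then |
 | | // issues one memcpy spanning a's offset through the end of c, with |
 | | // getMemcpySize() rounding a trailing bitfield's width up to whole bytes. |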
1012 | | |
1013 | | class ConstructorMemcpyizer : public FieldMemcpyizer { |
1014 | | private: |
1015 | | /// Get source argument for copy constructor. Returns null if not a copy |
1016 | | /// constructor. |
1017 | | static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF, |
1018 | | const CXXConstructorDecl *CD, |
1019 | 8.87k | FunctionArgList &Args) { |
1020 | 8.87k | if (CD->isCopyOrMoveConstructor() && CD->isDefaulted()) |
1021 | 421 | return Args[CGF.CGM.getCXXABI().getSrcArgforCopyCtor(CD, Args)]; |
1022 | 8.45k | return nullptr; |
1023 | 8.45k | } |
1024 | | |
1025 | | // Returns true if a CXXCtorInitializer represents a member initialization |
1026 | | // that can be rolled into a memcpy. |
1027 | 8.90k | bool isMemberInitMemcpyable(CXXCtorInitializer *MemberInit) const { |
1028 | 8.90k | if (!MemcpyableCtor) |
1029 | 7.64k | return false; |
1030 | 1.25k | FieldDecl *Field = MemberInit->getMember(); |
1031 | 1.25k | assert(Field && "No field for member init."); |
1032 | 1.25k | QualType FieldType = Field->getType(); |
1033 | 1.25k | CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit()); |
1034 | 1.25k | |
1035 | 1.25k | // Bail out on non-memcpyable, not-trivially-copyable members. |
1036 | 1.25k | if (!(CE && isMemcpyEquivalentSpecialMember(CE->getConstructor())) && |
1037 | 1.16k | !(FieldType.isTriviallyCopyableType(CGF.getContext()) || |
1038 | 404 | FieldType->isReferenceType())) |
1039 | 396 | return false; |
1040 | 856 | |
1041 | 856 | // Bail out on volatile fields. |
1042 | 856 | if (!isMemcpyableField(Field)) |
1043 | 6 | return false; |
1044 | 850 | |
1045 | 850 | // Otherwise we're good. |
1046 | 850 | return true; |
1047 | 850 | } |
1048 | | |
1049 | | public: |
1050 | | ConstructorMemcpyizer(CodeGenFunction &CGF, const CXXConstructorDecl *CD, |
1051 | | FunctionArgList &Args) |
1052 | | : FieldMemcpyizer(CGF, CD->getParent(), getTrivialCopySource(CGF, CD, Args)), |
1053 | | ConstructorDecl(CD), |
1054 | | MemcpyableCtor(CD->isDefaulted() && |
1055 | | CD->isCopyOrMoveConstructor() && |
1056 | | CGF.getLangOpts().getGC() == LangOptions::NonGC), |
1057 | 8.87k | Args(Args) { } |
1058 | | |
1059 | 8.90k | void addMemberInitializer(CXXCtorInitializer *MemberInit) { |
1060 | 8.90k | if (isMemberInitMemcpyable(MemberInit)) { |
1061 | 850 | AggregatedInits.push_back(MemberInit); |
1062 | 850 | addMemcpyableField(MemberInit->getMember()); |
1063 | 8.90k | } else { |
1064 | 8.05k | emitAggregatedInits(); |
1065 | 8.05k | EmitMemberInitializer(CGF, ConstructorDecl->getParent(), MemberInit, |
1066 | 8.05k | ConstructorDecl, Args); |
1067 | 8.05k | } |
1068 | 8.90k | } |
1069 | | |
1070 | 16.9k | void emitAggregatedInits() { |
1071 | 16.9k | if (AggregatedInits.size() <= 1) { |
1072 | 16.7k | // This memcpy is too small to be worthwhile. Fall back on default |
1073 | 16.7k | // codegen. |
1074 | 16.7k | if (!AggregatedInits.empty()) { |
1075 | 95 | CopyingValueRepresentation CVR(CGF); |
1076 | 95 | EmitMemberInitializer(CGF, ConstructorDecl->getParent(), |
1077 | 95 | AggregatedInits[0], ConstructorDecl, Args); |
1078 | 95 | AggregatedInits.clear(); |
1079 | 95 | } |
1080 | 16.7k | reset(); |
1081 | 16.7k | return; |
1082 | 16.7k | } |
1083 | 192 | |
1084 | 192 | pushEHDestructors(); |
1085 | 192 | emitMemcpy(); |
1086 | 192 | AggregatedInits.clear(); |
1087 | 192 | } |
1088 | | |
1089 | 192 | void pushEHDestructors() { |
1090 | 192 | Address ThisPtr = CGF.LoadCXXThisAddress(); |
1091 | 192 | QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl); |
1092 | 192 | LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy); |
1093 | 192 | |
1094 | 947 | for (unsigned i = 0; i < AggregatedInits.size(); ++i) { |
1095 | 755 | CXXCtorInitializer *MemberInit = AggregatedInits[i]; |
1096 | 755 | QualType FieldType = MemberInit->getAnyMember()->getType(); |
1097 | 755 | QualType::DestructionKind dtorKind = FieldType.isDestructedType(); |
1098 | 755 | if (!CGF.needsEHCleanup(dtorKind)) |
1099 | 743 | continue; |
1100 | 12 | LValue FieldLHS = LHS; |
1101 | 12 | EmitLValueForAnyFieldInitialization(CGF, MemberInit, FieldLHS); |
1102 | 12 | CGF.pushEHDestroy(dtorKind, FieldLHS.getAddress(), FieldType); |
1103 | 12 | } |
1104 | 192 | } |
1105 | | |
1106 | 8.87k | void finish() { |
1107 | 8.87k | emitAggregatedInits(); |
1108 | 8.87k | } |
1109 | | |
1110 | | private: |
1111 | | const CXXConstructorDecl *ConstructorDecl; |
1112 | | bool MemcpyableCtor; |
1113 | | FunctionArgList &Args; |
1114 | | SmallVector<CXXCtorInitializer*, 16> AggregatedInits; |
1115 | | }; |
1116 | | |
1117 | | class AssignmentMemcpyizer : public FieldMemcpyizer { |
1118 | | private: |
1119 | | // Returns the memcpyable field copied by the given statement, if one |
1120 | | // exists. Otherwise returns null. |
1121 | 1.32k | FieldDecl *getMemcpyableField(Stmt *S) { |
1122 | 1.32k | if (!AssignmentsMemcpyable) |
1123 | 11 | return nullptr; |
1124 | 1.31k | if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) { |
1125 | 348 | // Recognise trivial assignments. |
1126 | 348 | if (BO->getOpcode() != BO_Assign) |
1127 | 0 | return nullptr; |
1128 | 348 | MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS()); |
1129 | 348 | if (!ME) |
1130 | 0 | return nullptr; |
1131 | 348 | FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl()); |
1132 | 348 | if (!Field || !isMemcpyableField(Field)) |
1133 | 8 | return nullptr; |
1134 | 340 | Stmt *RHS = BO->getRHS(); |
1135 | 340 | if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS)) |
1136 | 340 | RHS = EC->getSubExpr(); |
1137 | 340 | if (!RHS) |
1138 | 0 | return nullptr; |
1139 | 340 | if (MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS)) { |
1140 | 338 | if (ME2->getMemberDecl() == Field) |
1141 | 338 | return Field; |
1142 | 2 | } |
1143 | 2 | return nullptr; |
1144 | 964 | } else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) { |
1145 | 353 | CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl()); |
1146 | 353 | if (!(MD && isMemcpyEquivalentSpecialMember(MD))) |
1147 | 192 | return nullptr; |
1148 | 161 | MemberExpr *IOA = dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument()); |
1149 | 161 | if (!IOA) |
1150 | 132 | return nullptr; |
1151 | 29 | FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl()); |
1152 | 29 | if (!Field || !isMemcpyableField(Field)) |
1153 | 0 | return nullptr; |
1154 | 29 | MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0)); |
1155 | 29 | if (!Arg0 || Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl())) |
1156 | 1 | return nullptr; |
1157 | 28 | return Field; |
1158 | 611 | } else if (CallExpr *CE = dyn_cast<CallExpr>(S)) { |
1159 | 13 | FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl()); |
1160 | 13 | if (!FD || FD->getBuiltinID() != Builtin::BI__builtin_memcpy) |
1161 | 0 | return nullptr; |
1162 | 13 | Expr *DstPtr = CE->getArg(0); |
1163 | 13 | if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr)) |
1164 | 13 | DstPtr = DC->getSubExpr(); |
1165 | 13 | UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr); |
1166 | 13 | if (!DUO || DUO->getOpcode() != UO_AddrOf) |
1167 | 0 | return nullptr; |
1168 | 13 | MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr()); |
1169 | 13 | if (!ME) |
1170 | 0 | return nullptr; |
1171 | 13 | FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl()); |
1172 | 13 | if (!Field || !isMemcpyableField(Field)) |
1173 | 0 | return nullptr; |
1174 | 13 | Expr *SrcPtr = CE->getArg(1); |
1175 | 13 | if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr)) |
1176 | 13 | SrcPtr = SC->getSubExpr(); |
1177 | 13 | UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr); |
1178 | 13 | if (!SUO || SUO->getOpcode() != UO_AddrOf) |
1179 | 0 | return nullptr; |
1180 | 13 | MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr()); |
1181 | 13 | if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl())) |
1182 | 0 | return nullptr; |
1183 | 13 | return Field; |
1184 | 13 | } |
1185 | 598 | |
1186 | 598 | return nullptr; |
1187 | 598 | } |
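 | | // The three statement shapes matched above are what a defaulted copy |
 | | // assignment body contains (illustrative): |
 | | //   this->x = other.x;                            // trivial assignment |
 | | //   this->m.operator=(other.m);                   // memcpy-equivalent |
 | | //                                                 // member operator= |
 | | //   __builtin_memcpy(&this->arr, &other.arr, N);  // array member copy |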
1188 | | |
1189 | | bool AssignmentsMemcpyable; |
1190 | | SmallVector<Stmt*, 16> AggregatedStmts; |
1191 | | |
1192 | | public: |
1193 | | AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD, |
1194 | | FunctionArgList &Args) |
1195 | | : FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]), |
1196 | 586 | AssignmentsMemcpyable(CGF.getLangOpts().getGC() == LangOptions::NonGC) { |
1197 | 586 | assert(Args.size() == 2); |
1198 | 586 | } |
1199 | | |
1200 | 1.32k | void emitAssignment(Stmt *S) { |
1201 | 1.32k | FieldDecl *F = getMemcpyableField(S); |
1202 | 1.32k | if (F) {
1203 | 379 | addMemcpyableField(F); |
1204 | 379 | AggregatedStmts.push_back(S); |
1205 | 1.32k | } else { |
1206 | 944 | emitAggregatedStmts(); |
1207 | 944 | CGF.EmitStmt(S); |
1208 | 944 | } |
1209 | 1.32k | } |
1210 | | |
1211 | 1.53k | void emitAggregatedStmts() { |
1212 | 1.53k | if (AggregatedStmts.size() <= 1) {
1213 | 1.45k | if (!AggregatedStmts.empty()) {
1214 | 80 | CopyingValueRepresentation CVR(CGF); |
1215 | 80 | CGF.EmitStmt(AggregatedStmts[0]); |
1216 | 80 | } |
1217 | 1.45k | reset(); |
1218 | 1.45k | } |
1219 | 1.53k | |
1220 | 1.53k | emitMemcpy(); |
1221 | 1.53k | AggregatedStmts.clear(); |
1222 | 1.53k | } |
1223 | | |
1224 | 586 | void finish() { |
1225 | 586 | emitAggregatedStmts(); |
1226 | 586 | } |
1227 | | }; |
1228 | | } // end anonymous namespace |
1229 | | |
1230 | 14 | static bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) { |
1231 | 14 | const Type *BaseType = BaseInit->getBaseClass(); |
1232 | 14 | const auto *BaseClassDecl = |
1233 | 14 | cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl()); |
1234 | 14 | return BaseClassDecl->isDynamicClass(); |
1235 | 14 | } |
1236 | | |
1237 | | /// EmitCtorPrologue - This routine generates necessary code to initialize |
1238 | | /// base classes and non-static data members belonging to this constructor. |
1239 | | void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD, |
1240 | | CXXCtorType CtorType, |
1241 | 8.88k | FunctionArgList &Args) { |
1242 | 8.88k | if (CD->isDelegatingConstructor()) |
1243 | 10 | return EmitDelegatingCXXConstructorCall(CD, Args); |
1244 | 8.87k | |
1245 | 8.87k | const CXXRecordDecl *ClassDecl = CD->getParent(); |
1246 | 8.87k | |
1247 | 8.87k | CXXConstructorDecl::init_const_iterator B = CD->init_begin(), |
1248 | 8.87k | E = CD->init_end(); |
1249 | 8.87k | |
1250 | 8.87k | llvm::BasicBlock *BaseCtorContinueBB = nullptr; |
1251 | 8.87k | if (ClassDecl->getNumVBases() && |
1252 | 8.87k | !CGM.getTarget().getCXXABI().hasConstructorVariants()) {
1253 | 366 | // The ABIs that don't have constructor variants need to put a branch |
1254 | 366 | // before the virtual base initialization code. |
1255 | 366 | BaseCtorContinueBB = |
1256 | 366 | CGM.getCXXABI().EmitCtorCompleteObjectHandler(*this, ClassDecl); |
1257 | 366 | assert(BaseCtorContinueBB); |
1258 | 366 | } |
1262 | 9.65k | for (; B != E && (*B)->isBaseInitializer() && (*B)->isBaseVirtual(); B++) {
1260 | 8.87k | llvm::Value *const OldThis = CXXThisValue; |
1261 | 8.87k | // Virtual base initializers first. |
1262 | 9.65k | for (; B != E && 9.65k (*B)->isBaseInitializer()6.66k && (*B)->isBaseVirtual()3.56k ; B++782 ) { |
1263 | 782 | if (CGM.getCodeGenOpts().StrictVTablePointers && |
1264 | 4 | CGM.getCodeGenOpts().OptimizationLevel > 0 && |
1265 | 4 | isInitializerOfDynamicClass(*B)) |
1266 | 0 | CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis()); |
1267 | 782 | EmitBaseInitializer(*this, ClassDecl, *B, CtorType); |
1268 | 782 | } |
1269 | 8.87k | |
1270 | 8.87k | if (BaseCtorContinueBB8.87k ) { |
1271 | 366 | // Complete object handler should continue to the remaining initializers. |
1272 | 366 | Builder.CreateBr(BaseCtorContinueBB); |
1273 | 366 | EmitBlock(BaseCtorContinueBB); |
1274 | 366 | } |
1275 | 8.87k | |
1276 | 8.87k | // Then, non-virtual base initializers. |
1277 | 12.3k | for (; B != E && 12.3k (*B)->isBaseInitializer()7.34k ; B++3.49k ) { |
1278 | 3.49k | assert(!(*B)->isBaseVirtual()); |
1279 | 3.49k | |
1280 | 3.49k | if (CGM.getCodeGenOpts().StrictVTablePointers && |
1281 | 10 | CGM.getCodeGenOpts().OptimizationLevel > 0 && |
1282 | 10 | isInitializerOfDynamicClass(*B)) |
1283 | 9 | CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis()); |
1284 | 3.49k | EmitBaseInitializer(*this, ClassDecl, *B, CtorType); |
1285 | 3.49k | } |
1286 | 8.87k | |
1287 | 8.87k | CXXThisValue = OldThis; |
1288 | 8.87k | |
1289 | 8.87k | InitializeVTablePointers(ClassDecl); |
1290 | 8.87k | |
1294 | 17.7k | for (; B != E; B++) {
1292 | 8.87k | FieldConstructionScope FCS(*this, LoadCXXThisAddress()); |
1293 | 8.87k | ConstructorMemcpyizer CM(*this, CD, Args); |
1294 | 17.7k | for (; B != E17.7k ; B++8.90k ) { |
1295 | 8.90k | CXXCtorInitializer *Member = (*B); |
1296 | 8.90k | assert(!Member->isBaseInitializer()); |
1297 | 8.90k | assert(Member->isAnyMemberInitializer() && |
1298 | 8.90k | "Delegating initializer on non-delegating constructor"); |
1299 | 8.90k | CM.addMemberInitializer(Member); |
1300 | 8.90k | } |
1301 | 8.88k | CM.finish(); |
1302 | 8.88k | } |
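 | | // Illustrative ordering, assuming a hypothetical
 | | //   struct D : virtual V, B { M m; D(); };
 | | // the prologue emits V's initializer, then B's, then stores the vptrs,
 | | // and finally constructs m (possibly coalesced by the ConstructorMemcpyizer).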
1303 | | |
1304 | | static bool |
1305 | | FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field); |
1306 | | |
1307 | | static bool |
1308 | | HasTrivialDestructorBody(ASTContext &Context, |
1309 | | const CXXRecordDecl *BaseClassDecl, |
1310 | | const CXXRecordDecl *MostDerivedClassDecl) |
1311 | 244 | { |
1312 | 244 | // If the destructor is trivial we don't have to check anything else. |
1313 | 244 | if (BaseClassDecl->hasTrivialDestructor()) |
1314 | 13 | return true; |
1315 | 231 | |
1316 | 231 | if (!BaseClassDecl->getDestructor()->hasTrivialBody())
1317 | 182 | return false; |
1318 | 49 | |
1319 | 49 | // Check fields. |
1320 | 49 | for (const auto *Field : BaseClassDecl->fields()) |
1321 | 48 | if (!FieldHasTrivialDestructorBody(Context, Field))
1322 | 15 | return false; |
1323 | 34 | |
1324 | 34 | // Check non-virtual bases. |
1325 | 34 | for (const auto &I : BaseClassDecl->bases()) {
1326 | 28 | if (I.isVirtual()) |
1327 | 1 | continue; |
1328 | 27 | |
1329 | 27 | const CXXRecordDecl *NonVirtualBase = |
1330 | 27 | cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl()); |
1331 | 27 | if (!HasTrivialDestructorBody(Context, NonVirtualBase, |
1332 | 27 | MostDerivedClassDecl)) |
1333 | 27 | return false; |
1334 | 7 | } |
1335 | 7 | |
1336 | 7 | if (BaseClassDecl == MostDerivedClassDecl) {
1337 | 7 | // Check virtual bases. |
1338 | 1 | for (const auto &I : BaseClassDecl->vbases()) { |
1339 | 1 | const CXXRecordDecl *VirtualBase = |
1340 | 1 | cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl()); |
1341 | 1 | if (!HasTrivialDestructorBody(Context, VirtualBase, |
1342 | 1 | MostDerivedClassDecl)) |
1343 | 1 | return false; |
1344 | 6 | } |
1345 | 7 | } |
1346 | 6 | |
1347 | 6 | return true; |
1348 | 6 | } |
1349 | | |
1350 | | static bool |
1351 | | FieldHasTrivialDestructorBody(ASTContext &Context, |
1352 | | const FieldDecl *Field) |
1353 | 570 | { |
1354 | 570 | QualType FieldBaseElementType = Context.getBaseElementType(Field->getType()); |
1355 | 570 | |
1356 | 570 | const RecordType *RT = FieldBaseElementType->getAs<RecordType>(); |
1357 | 570 | if (!RT) |
1358 | 352 | return true; |
1359 | 218 | |
1360 | 218 | CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl()); |
1361 | 218 | |
1362 | 218 | // The destructor for an implicit anonymous union member is never invoked. |
1363 | 218 | if (FieldClassDecl->isUnion() && FieldClassDecl->isAnonymousStructOrUnion())
1364 | 2 | return false; |
1365 | 216 | |
1366 | 216 | return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl); |
1367 | 216 | } |
1368 | | |
1369 | | /// CanSkipVTablePointerInitialization - Check whether we need to initialize |
1370 | | /// any vtable pointers before calling this destructor. |
1371 | | static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF, |
1372 | 5.19k | const CXXDestructorDecl *Dtor) { |
1373 | 5.19k | const CXXRecordDecl *ClassDecl = Dtor->getParent(); |
1374 | 5.19k | if (!ClassDecl->isDynamicClass()) |
1375 | 3.05k | return true; |
1376 | 2.13k | |
1377 | 2.13k | if (!Dtor->hasTrivialBody())
1378 | 405 | return false; |
1379 | 1.73k | |
1380 | 1.73k | // Check the fields. |
1381 | 1.73k | for (const auto *Field : ClassDecl->fields()) |
1382 | 454 | if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field))
1383 | 178 | return false; |
1384 | 1.55k | |
1385 | 1.55k | return true; |
1386 | 1.55k | } |
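 | | // Sketch of the case this enables, assuming a hypothetical
 | | //   struct A { virtual void f(); ~A() {} };
 | | // the empty destructor body and trivially destructible fields mean no
 | | // virtual call can observe the vptr, so re-storing it can be skipped.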
1387 | | |
1388 | | /// EmitDestructorBody - Emits the body of the current destructor. |
1389 | 11.5k | void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) { |
1390 | 11.5k | const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl()); |
1391 | 11.5k | CXXDtorType DtorType = CurGD.getDtorType(); |
1392 | 11.5k | |
1393 | 11.5k | // For an abstract class, non-base destructors are never used (and can't |
1394 | 11.5k | // be emitted in general, because vbase dtors may not have been validated |
1395 | 11.5k | // by Sema), but the Itanium ABI doesn't make them optional and Clang may |
1396 | 11.5k | // in fact emit references to them from other compilations, so emit them |
1397 | 11.5k | // as functions containing a trap instruction. |
1398 | 11.5k | if (DtorType != Dtor_Base && Dtor->getParent()->isAbstract()) {
1399 | 1.15k | llvm::CallInst *TrapCall = EmitTrapCall(llvm::Intrinsic::trap); |
1400 | 1.15k | TrapCall->setDoesNotReturn(); |
1401 | 1.15k | TrapCall->setDoesNotThrow(); |
1402 | 1.15k | Builder.CreateUnreachable(); |
1403 | 1.15k | Builder.ClearInsertionPoint(); |
1404 | 1.15k | return; |
1405 | 1.15k | } |
1406 | 10.4k | |
1407 | 10.4k | Stmt *Body = Dtor->getBody(); |
1408 | 10.4k | if (Body) |
1409 | 10.2k | incrementProfileCounter(Body); |
1410 | 10.4k | |
1411 | 10.4k | // The call to operator delete in a deleting destructor happens |
1412 | 10.4k | // outside of the function-try-block, which means it's always |
1413 | 10.4k | // possible to delegate the destructor body to the complete |
1414 | 10.4k | // destructor. Do so. |
1415 | 10.4k | if (DtorType == Dtor_Deleting) {
1416 | 1.18k | EnterDtorCleanups(Dtor, Dtor_Deleting); |
1417 | 1.18k | EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false, |
1418 | 1.18k | /*Delegating=*/false, LoadCXXThisAddress()); |
1419 | 1.18k | PopCleanupBlock(); |
1420 | 1.18k | return; |
1421 | 1.18k | } |
1422 | 9.23k | |
1423 | 9.23k | // If the body is a function-try-block, enter the try before |
1424 | 9.23k | // anything else. |
1425 | 9.23k | bool isTryBody = (Body && isa<CXXTryStmt>(Body));
1426 | 9.23k | if (isTryBody) |
1427 | 12 | EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true); |
1428 | 9.23k | EmitAsanPrologueOrEpilogue(false); |
1429 | 9.23k | |
1430 | 9.23k | // Enter the epilogue cleanups. |
1431 | 9.23k | RunCleanupsScope DtorEpilogue(*this); |
1432 | 9.23k | |
1433 | 9.23k | // If this is the complete variant, just invoke the base variant; |
1434 | 9.23k | // the epilogue will destruct the virtual bases. But we can't do |
1435 | 9.23k | // this optimization if the body is a function-try-block, because |
1436 | 9.23k | // we'd introduce *two* handler blocks. In the Microsoft ABI, we |
1437 | 9.23k | // always delegate because we might not have a definition in this TU. |
1438 | 9.23k | switch (DtorType) { |
1439 | 0 | case Dtor_Comdat: llvm_unreachable("not expecting a COMDAT");
1440 | 0 | case Dtor_Deleting: llvm_unreachable("already handled deleting case");
1441 | 9.23k | |
1442 | 4.05k | case Dtor_Complete: |
1443 | 4.05k | assert((Body || getTarget().getCXXABI().isMicrosoft()) && |
1444 | 4.05k | "can't emit a dtor without a body for non-Microsoft ABIs"); |
1445 | 4.05k | |
1446 | 4.05k | // Enter the cleanup scopes for virtual bases. |
1447 | 4.05k | EnterDtorCleanups(Dtor, Dtor_Complete); |
1448 | 4.05k | |
1449 | 4.05k | if (!isTryBody) {
1450 | 4.04k | EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false, |
1451 | 4.04k | /*Delegating=*/false, LoadCXXThisAddress()); |
1452 | 4.04k | break; |
1453 | 4.04k | } |
1454 | 6 | |
1455 | 6 | // Fallthrough: act like we're in the base variant. |
1456 | 6 | LLVM_FALLTHROUGH;
1457 | 6 | |
1458 | 5.19k | case Dtor_Base: |
1459 | 5.19k | assert(Body); |
1460 | 5.19k | |
1461 | 5.19k | // Enter the cleanup scopes for fields and non-virtual bases. |
1462 | 5.19k | EnterDtorCleanups(Dtor, Dtor_Base); |
1463 | 5.19k | |
1464 | 5.19k | // Initialize the vtable pointers before entering the body. |
1465 | 5.19k | if (!CanSkipVTablePointerInitialization(*this, Dtor)) {
1466 | 583 | // Insert the llvm.invariant.group.barrier intrinsic before initializing |
1467 | 583 | // the vptrs to cancel any previous assumptions we might have made. |
1468 | 583 | if (CGM.getCodeGenOpts().StrictVTablePointers && |
1469 | 2 | CGM.getCodeGenOpts().OptimizationLevel > 0) |
1470 | 2 | CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis()); |
1471 | 583 | InitializeVTablePointers(Dtor->getParent()); |
1472 | 583 | } |
1473 | 5.19k | |
1474 | 5.19k | if (isTryBody) |
1475 | 12 | EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock()); |
1476 | 5.17k | else if (Body)
1477 | 5.17k | EmitStmt(Body); |
1478 | 0 | else { |
1479 | 0 | assert(Dtor->isImplicit() && "bodyless dtor not implicit"); |
1480 | 0 | // nothing to do besides what's in the epilogue |
1481 | 0 | } |
1482 | 5.19k | // -fapple-kext must inline any call to this dtor into |
1483 | 5.19k | // the caller's body. |
1484 | 5.19k | if (getLangOpts().AppleKext) |
1485 | 5 | CurFn->addFnAttr(llvm::Attribute::AlwaysInline); |
1486 | 4.05k | |
1487 | 4.05k | break; |
1488 | 9.23k | } |
1489 | 9.23k | |
1490 | 9.23k | // Jump out through the epilogue cleanups. |
1491 | 9.23k | DtorEpilogue.ForceCleanup(); |
1492 | 9.23k | |
1493 | 9.23k | // Exit the try if applicable. |
1494 | 9.23k | if (isTryBody) |
1495 | 12 | ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true); |
1496 | 11.5k | } |
1497 | | |
1498 | 586 | void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) { |
1499 | 586 | const CXXMethodDecl *AssignOp = cast<CXXMethodDecl>(CurGD.getDecl()); |
1500 | 586 | const Stmt *RootS = AssignOp->getBody(); |
1501 | 586 | assert(isa<CompoundStmt>(RootS) && |
1502 | 586 | "Body of an implicit assignment operator should be compound stmt."); |
1503 | 586 | const CompoundStmt *RootCS = cast<CompoundStmt>(RootS); |
1504 | 586 | |
1505 | 586 | LexicalScope Scope(*this, RootCS->getSourceRange()); |
1506 | 586 | |
1507 | 586 | incrementProfileCounter(RootCS); |
1508 | 586 | AssignmentMemcpyizer AM(*this, AssignOp, Args); |
1509 | 586 | for (auto *I : RootCS->body()) |
1510 | 1.32k | AM.emitAssignment(I); |
1511 | 586 | AM.finish(); |
1512 | 586 | } |
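 | | // Illustrative effect, assuming a hypothetical
 | | //   struct P { int a, b, c; NonTrivial n; };
 | | // the run of assignments to a, b and c is coalesced into a single memcpy,
 | | // while the assignment to n is emitted as an ordinary operator= call.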
1513 | | |
1514 | | namespace { |
1515 | | /// Call the operator delete associated with the current destructor. |
1516 | | struct CallDtorDelete final : EHScopeStack::Cleanup { |
1517 | 1.00k | CallDtorDelete() {} |
1518 | | |
1519 | 1.82k | void Emit(CodeGenFunction &CGF, Flags flags) override { |
1520 | 1.82k | const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl); |
1521 | 1.82k | const CXXRecordDecl *ClassDecl = Dtor->getParent(); |
1522 | 1.82k | CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(), |
1523 | 1.82k | CGF.getContext().getTagDeclType(ClassDecl)); |
1524 | 1.82k | } |
1525 | | }; |
1526 | | |
1527 | | struct CallDtorDeleteConditional final : EHScopeStack::Cleanup { |
1528 | | llvm::Value *ShouldDeleteCondition; |
1529 | | |
1530 | | public: |
1531 | | CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition) |
1532 | 179 | : ShouldDeleteCondition(ShouldDeleteCondition) { |
1533 | 179 | assert(ShouldDeleteCondition != nullptr); |
1534 | 179 | } |
1535 | | |
1536 | 180 | void Emit(CodeGenFunction &CGF, Flags flags) override { |
1537 | 180 | llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete"); |
1538 | 180 | llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue"); |
1539 | 180 | llvm::Value *ShouldCallDelete |
1540 | 180 | = CGF.Builder.CreateIsNull(ShouldDeleteCondition); |
1541 | 180 | CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB); |
1542 | 180 | |
1543 | 180 | CGF.EmitBlock(callDeleteBB); |
1544 | 180 | const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl); |
1545 | 180 | const CXXRecordDecl *ClassDecl = Dtor->getParent(); |
1546 | 180 | CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(), |
1547 | 180 | CGF.getContext().getTagDeclType(ClassDecl)); |
1548 | 180 | CGF.Builder.CreateBr(continueBB); |
1549 | 180 | |
1550 | 180 | CGF.EmitBlock(continueBB); |
1551 | 180 | } |
1552 | | }; |
1553 | | |
1554 | | class DestroyField final : public EHScopeStack::Cleanup { |
1555 | | const FieldDecl *field; |
1556 | | CodeGenFunction::Destroyer *destroyer; |
1557 | | bool useEHCleanupForArray; |
1558 | | |
1559 | | public: |
1560 | | DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer, |
1561 | | bool useEHCleanupForArray) |
1562 | | : field(field), destroyer(destroyer), |
1563 | 1.68k | useEHCleanupForArray(useEHCleanupForArray) {} |
1564 | | |
1565 | 2.62k | void Emit(CodeGenFunction &CGF, Flags flags) override { |
1566 | 2.62k | // Find the address of the field. |
1567 | 2.62k | Address thisValue = CGF.LoadCXXThisAddress(); |
1568 | 2.62k | QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent()); |
1569 | 2.62k | LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy); |
1570 | 2.62k | LValue LV = CGF.EmitLValueForField(ThisLV, field); |
1571 | 2.62k | assert(LV.isSimple()); |
1572 | 2.62k | |
1573 | 2.62k | CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer, |
1574 | 1.68k | flags.isForNormalCleanup() && useEHCleanupForArray); |
1575 | 2.62k | } |
1576 | | }; |
1577 | | |
1578 | | static void EmitSanitizerDtorCallback(CodeGenFunction &CGF, llvm::Value *Ptr, |
1579 | 56 | CharUnits::QuantityType PoisonSize) { |
1580 | 56 | CodeGenFunction::SanitizerScope SanScope(&CGF); |
1581 | 56 | // Pass in void pointer and size of region as arguments to runtime |
1582 | 56 | // function |
1583 | 56 | llvm::Value *Args[] = {CGF.Builder.CreateBitCast(Ptr, CGF.VoidPtrTy), |
1584 | 56 | llvm::ConstantInt::get(CGF.SizeTy, PoisonSize)}; |
1585 | 56 | |
1586 | 56 | llvm::Type *ArgTypes[] = {CGF.VoidPtrTy, CGF.SizeTy}; |
1587 | 56 | |
1588 | 56 | llvm::FunctionType *FnType = |
1589 | 56 | llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false); |
1590 | 56 | llvm::Value *Fn = |
1591 | 56 | CGF.CGM.CreateRuntimeFunction(FnType, "__sanitizer_dtor_callback"); |
1592 | 56 | CGF.EmitNounwindRuntimeCall(Fn, Args); |
1593 | 56 | } |
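 | | // As built above, the runtime entry point has the C signature
 | | //   void __sanitizer_dtor_callback(void *ptr, size_t size);
 | | // it is provided by the MemorySanitizer runtime and poisons the region.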
1594 | | |
1595 | | class SanitizeDtorMembers final : public EHScopeStack::Cleanup { |
1596 | | const CXXDestructorDecl *Dtor; |
1597 | | |
1598 | | public: |
1599 | 38 | SanitizeDtorMembers(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {} |
1600 | | |
1601 | | // Generate function call for handling object poisoning. |
1602 | | // Disables tail call elimination, to prevent the current stack frame |
1603 | | // from disappearing from the stack trace. |
1604 | 38 | void Emit(CodeGenFunction &CGF, Flags flags) override { |
1605 | 38 | const ASTRecordLayout &Layout = |
1606 | 38 | CGF.getContext().getASTRecordLayout(Dtor->getParent()); |
1607 | 38 | |
1608 | 38 | // Nothing to poison. |
1609 | 38 | if (Layout.getFieldCount() == 0) |
1610 | 0 | return; |
1611 | 38 | |
1612 | 38 | // Prevent the current stack frame from disappearing from the stack trace. |
1613 | 38 | CGF.CurFn->addFnAttr("disable-tail-calls", "true"); |
1614 | 38 | |
1615 | 38 | // Construct pointer to region to begin poisoning, and calculate poison |
1616 | 38 | // size, so that only members declared in this class are poisoned. |
1617 | 38 | ASTContext &Context = CGF.getContext(); |
1618 | 38 | unsigned fieldIndex = 0; |
1619 | 38 | int startIndex = -1; |
1620 | 38 | // RecordDecl::field_iterator Field; |
1621 | 68 | for (const FieldDecl *Field : Dtor->getParent()->fields()) { |
1622 | 68 | // Poison field if it is trivial |
1623 | 68 | if (FieldHasTrivialDestructorBody(Context, Field)) {
1624 | 62 | // Start sanitizing at this field |
1625 | 62 | if (startIndex < 0) |
1626 | 40 | startIndex = fieldIndex; |
1627 | 62 | |
1628 | 62 | // Currently on the last field, and it must be poisoned with the |
1629 | 62 | // current block. |
1630 | 62 | if (fieldIndex == Layout.getFieldCount() - 1) {
1631 | 36 | PoisonMembers(CGF, startIndex, Layout.getFieldCount()); |
1632 | 36 | } |
1633 | 68 | } else if (startIndex >= 0) {
1634 | 4 | // No longer within a block of memory to poison, so poison the block |
1635 | 4 | PoisonMembers(CGF, startIndex, fieldIndex); |
1636 | 4 | // Re-set the start index |
1637 | 4 | startIndex = -1; |
1638 | 4 | } |
1639 | 68 | fieldIndex += 1; |
1640 | 68 | } |
1641 | 38 | } |
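 | | // Illustrative grouping, assuming hypothetical fields
 | | //   int a; NonTrivial b; int c;
 | | // the loop poisons [a] as one block, skips b (whose own destructor
 | | // poisons it), then poisons [c] through the end of the non-virtual size.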
1642 | | |
1643 | | private: |
1644 | | /// \param layoutStartOffset index of the ASTRecordLayout field to |
1645 | | /// start poisoning (inclusive) |
1646 | | /// \param layoutEndOffset index of the ASTRecordLayout field to |
1647 | | /// end poisoning (exclusive) |
1648 | | void PoisonMembers(CodeGenFunction &CGF, unsigned layoutStartOffset, |
1649 | 40 | unsigned layoutEndOffset) { |
1650 | 40 | ASTContext &Context = CGF.getContext(); |
1651 | 40 | const ASTRecordLayout &Layout = |
1652 | 40 | Context.getASTRecordLayout(Dtor->getParent()); |
1653 | 40 | |
1654 | 40 | llvm::ConstantInt *OffsetSizePtr = llvm::ConstantInt::get( |
1655 | 40 | CGF.SizeTy, |
1656 | 40 | Context.toCharUnitsFromBits(Layout.getFieldOffset(layoutStartOffset)) |
1657 | 40 | .getQuantity()); |
1658 | 40 | |
1659 | 40 | llvm::Value *OffsetPtr = CGF.Builder.CreateGEP( |
1660 | 40 | CGF.Builder.CreateBitCast(CGF.LoadCXXThis(), CGF.Int8PtrTy), |
1661 | 40 | OffsetSizePtr); |
1662 | 40 | |
1663 | 40 | CharUnits::QuantityType PoisonSize; |
1664 | 40 | if (layoutEndOffset >= Layout.getFieldCount()) {
1665 | 36 | PoisonSize = Layout.getNonVirtualSize().getQuantity() - |
1666 | 36 | Context.toCharUnitsFromBits( |
1667 | 36 | Layout.getFieldOffset(layoutStartOffset)) |
1668 | 36 | .getQuantity(); |
1669 | 40 | } else { |
1670 | 4 | PoisonSize = Context.toCharUnitsFromBits( |
1671 | 4 | Layout.getFieldOffset(layoutEndOffset) - |
1672 | 4 | Layout.getFieldOffset(layoutStartOffset)) |
1673 | 4 | .getQuantity(); |
1674 | 4 | } |
1675 | 40 | |
1676 | 40 | if (PoisonSize == 0) |
1677 | 2 | return; |
1678 | 38 | |
1679 | 38 | EmitSanitizerDtorCallback(CGF, OffsetPtr, PoisonSize); |
1680 | 38 | } |
1681 | | }; |
1682 | | |
1683 | | class SanitizeDtorVTable final : public EHScopeStack::Cleanup { |
1684 | | const CXXDestructorDecl *Dtor; |
1685 | | |
1686 | | public: |
1687 | 18 | SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {} |
1688 | | |
1689 | | // Generate function call for handling vtable pointer poisoning. |
1690 | 18 | void Emit(CodeGenFunction &CGF, Flags flags) override { |
1691 | 18 | assert(Dtor->getParent()->isDynamicClass()); |
1692 | 18 | (void)Dtor; |
1693 | 18 | ASTContext &Context = CGF.getContext(); |
1694 | 18 | // Poison vtable and vtable ptr if they exist for this class. |
1695 | 18 | llvm::Value *VTablePtr = CGF.LoadCXXThis(); |
1696 | 18 | |
1697 | 18 | CharUnits::QuantityType PoisonSize = |
1698 | 18 | Context.toCharUnitsFromBits(CGF.PointerWidthInBits).getQuantity(); |
1699 | 18 | // Pass in void pointer and size of region as arguments to runtime |
1700 | 18 | // function |
1701 | 18 | EmitSanitizerDtorCallback(CGF, VTablePtr, PoisonSize); |
1702 | 18 | } |
1703 | | }; |
1704 | | } // end anonymous namespace |
1705 | | |
1706 | | /// \brief Emit all code that comes at the end of the class's
1707 | | /// destructor. This calls the destructors of members and base classes
1708 | | /// in reverse order of their construction.
1709 | | void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD, |
1710 | 10.4k | CXXDtorType DtorType) { |
1711 | 10.4k | assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) && |
1712 | 10.4k | "Should not emit dtor epilogue for non-exported trivial dtor!"); |
1713 | 10.4k | |
1714 | 10.4k | // The deleting-destructor phase just needs to call the appropriate |
1715 | 10.4k | // operator delete that Sema picked up. |
1716 | 10.4k | if (DtorType == Dtor_Deleting) {
1717 | 1.18k | assert(DD->getOperatorDelete() && |
1718 | 1.18k | "operator delete missing - EnterDtorCleanups"); |
1719 | 1.18k | if (CXXStructorImplicitParamValue) {
1720 | 179 | // If there is an implicit param to the deleting dtor, it's a boolean |
1721 | 179 | // telling whether we should call delete at the end of the dtor. |
1722 | 179 | EHStack.pushCleanup<CallDtorDeleteConditional>( |
1723 | 179 | NormalAndEHCleanup, CXXStructorImplicitParamValue); |
1724 | 1.18k | } else { |
1725 | 1.00k | EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup); |
1726 | 1.00k | } |
1727 | 1.18k | return; |
1728 | 1.18k | } |
1729 | 9.24k | |
1730 | 9.24k | const CXXRecordDecl *ClassDecl = DD->getParent(); |
1731 | 9.24k | |
1732 | 9.24k | // Unions have no bases and do not call field destructors. |
1733 | 9.24k | if (ClassDecl->isUnion()) |
1734 | 4 | return; |
1735 | 9.23k | |
1736 | 9.23k | // The complete-destructor phase just destructs all the virtual bases. |
1737 | 9.23k | if (DtorType == Dtor_Complete) {
1738 | 4.05k | // Poison the vtable pointer so that any access after the base
1739 | 4.05k | // and member destructors have run is invalid.
1740 | 4.05k | if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && |
1741 | 4.05k | SanOpts.has(SanitizerKind::Memory) && ClassDecl->getNumVBases() &&
1742 | 4 | ClassDecl->isPolymorphic()) |
1743 | 4 | EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD); |
1744 | 4.05k | |
1745 | 4.05k | // We push them in the forward order so that they'll be popped in |
1746 | 4.05k | // the reverse order. |
1747 | 166 | for (const auto &Base : ClassDecl->vbases()) { |
1748 | 166 | CXXRecordDecl *BaseClassDecl |
1749 | 166 | = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl()); |
1750 | 166 | |
1751 | 166 | // Ignore trivial destructors. |
1752 | 166 | if (BaseClassDecl->hasTrivialDestructor()) |
1753 | 44 | continue; |
1754 | 122 | |
1755 | 122 | EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, |
1756 | 122 | BaseClassDecl, |
1757 | 122 | /*BaseIsVirtual*/ true); |
1758 | 122 | } |
1759 | 4.05k | |
1760 | 4.05k | return; |
1761 | 4.05k | } |
1762 | 5.18k | |
1763 | 9.23k | assert(DtorType == Dtor_Base); |
1764 | 5.18k | // Poison the vtable pointer if the class has no virtual bases but
1765 | 5.18k | // inherits virtual functions.
1766 | 5.18k | if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && |
1767 | 5.18k | SanOpts.has(SanitizerKind::Memory) && !ClassDecl->getNumVBases() &&
1768 | 34 | ClassDecl->isPolymorphic()) |
1769 | 14 | EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD); |
1770 | 5.18k | |
1771 | 5.18k | // Destroy non-virtual bases. |
1772 | 2.75k | for (const auto &Base : ClassDecl->bases()) { |
1773 | 2.75k | // Ignore virtual bases. |
1774 | 2.75k | if (Base.isVirtual()) |
1775 | 81 | continue; |
1776 | 2.66k | |
1777 | 2.66k | CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl(); |
1778 | 2.66k | |
1779 | 2.66k | // Ignore trivial destructors. |
1780 | 2.66k | if (BaseClassDecl->hasTrivialDestructor()) |
1781 | 423 | continue; |
1782 | 2.24k | |
1783 | 2.24k | EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, |
1784 | 2.24k | BaseClassDecl, |
1785 | 2.24k | /*BaseIsVirtual*/ false); |
1786 | 2.24k | } |
1787 | 5.18k | |
1788 | 5.18k | // Poison fields so that any access after their destructors have run,
1789 | 5.18k | // and before the base class destructor runs, is invalid.
1790 | 5.18k | if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && |
1791 | 39 | SanOpts.has(SanitizerKind::Memory)) |
1792 | 38 | EHStack.pushCleanup<SanitizeDtorMembers>(NormalAndEHCleanup, DD); |
1793 | 5.18k | |
1794 | 5.18k | // Destroy direct fields. |
1795 | 8.40k | for (const auto *Field : ClassDecl->fields()) { |
1796 | 8.40k | QualType type = Field->getType(); |
1797 | 8.40k | QualType::DestructionKind dtorKind = type.isDestructedType(); |
1798 | 8.40k | if (!dtorKind) continue;
1799 | 1.68k | |
1800 | 1.68k | // Anonymous union members do not have their destructors called. |
1801 | 1.68k | const RecordType *RT = type->getAsUnionType(); |
1802 | 1.68k | if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;
1803 | 1.68k | |
1804 | 1.68k | CleanupKind cleanupKind = getCleanupKind(dtorKind); |
1805 | 1.68k | EHStack.pushCleanup<DestroyField>(cleanupKind, Field, |
1806 | 1.68k | getDestroyer(dtorKind), |
1807 | 1.68k | cleanupKind & EHCleanup); |
1808 | 1.68k | } |
1809 | 10.4k | } |
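 | | // Illustrative order, assuming a hypothetical
 | | //   struct D : B1, B2 { F1 f1; F2 f2; ~D(); };
 | | // the base variant pushes cleanups for B1, B2, f1, f2 in that order, so
 | | // they pop as ~f2, ~f1, ~B2, ~B1 -- the reverse of construction.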
1810 | | |
1811 | | /// EmitCXXAggrConstructorCall - Emit a loop to call a particular |
1812 | | /// constructor for each of several members of an array. |
1813 | | /// |
1814 | | /// \param ctor the constructor to call for each element |
1815 | | /// \param arrayType the type of the array to initialize |
1816 | | /// \param arrayBegin an arrayType* |
1817 | | /// \param zeroInitialize true if each element should be |
1818 | | /// zero-initialized before it is constructed |
1819 | | void CodeGenFunction::EmitCXXAggrConstructorCall( |
1820 | | const CXXConstructorDecl *ctor, const ArrayType *arrayType, |
1821 | 223 | Address arrayBegin, const CXXConstructExpr *E, bool zeroInitialize) { |
1822 | 223 | QualType elementType; |
1823 | 223 | llvm::Value *numElements = |
1824 | 223 | emitArrayLength(arrayType, elementType, arrayBegin); |
1825 | 223 | |
1826 | 223 | EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin, E, zeroInitialize); |
1827 | 223 | } |
1828 | | |
1829 | | /// EmitCXXAggrConstructorCall - Emit a loop to call a particular |
1830 | | /// constructor for each of several members of an array. |
1831 | | /// |
1832 | | /// \param ctor the constructor to call for each element |
1833 | | /// \param numElements the number of elements in the array; |
1834 | | /// may be zero |
1835 | | /// \param arrayBase a T*, where T is the type constructed by ctor |
1836 | | /// \param zeroInitialize true if each element should be |
1837 | | /// zero-initialized before it is constructed |
1838 | | void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor, |
1839 | | llvm::Value *numElements, |
1840 | | Address arrayBase, |
1841 | | const CXXConstructExpr *E, |
1842 | 293 | bool zeroInitialize) { |
1843 | 293 | // It's legal for numElements to be zero. This can happen both |
1844 | 293 | // dynamically, because x can be zero in 'new A[x]', and statically, |
1845 | 293 | // because of GCC extensions that permit zero-length arrays. There |
1846 | 293 | // are probably legitimate places where we could assume that this |
1847 | 293 | // doesn't happen, but it's not clear that it's worth it. |
1848 | 293 | llvm::BranchInst *zeroCheckBranch = nullptr; |
1849 | 293 | |
1850 | 293 | // Optimize for a constant count. |
1851 | 293 | llvm::ConstantInt *constantCount |
1852 | 293 | = dyn_cast<llvm::ConstantInt>(numElements); |
1853 | 293 | if (constantCount) {
1854 | 243 | // Just skip out if the constant count is zero. |
1855 | 243 | if (constantCount->isZero()) return;
1856 | 293 | |
1857 | 293 | // Otherwise, emit the check. |
1858 | 50 | } else { |
1859 | 50 | llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop"); |
1860 | 50 | llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty"); |
1861 | 50 | zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB); |
1862 | 50 | EmitBlock(loopBB); |
1863 | 50 | } |
1864 | 293 | |
1865 | 293 | // Find the end of the array. |
1866 | 293 | llvm::Value *arrayBegin = arrayBase.getPointer(); |
1867 | 293 | llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements, |
1868 | 293 | "arrayctor.end"); |
1869 | 293 | |
1870 | 293 | // Enter the loop, setting up a phi for the current location to initialize. |
1871 | 293 | llvm::BasicBlock *entryBB = Builder.GetInsertBlock(); |
1872 | 293 | llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop"); |
1873 | 293 | EmitBlock(loopBB); |
1874 | 293 | llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2, |
1875 | 293 | "arrayctor.cur"); |
1876 | 293 | cur->addIncoming(arrayBegin, entryBB); |
1877 | 293 | |
1878 | 293 | // Inside the loop body, emit the constructor call on the array element. |
1879 | 293 | |
1880 | 293 | // The alignment of the base, adjusted by the size of a single element, |
1881 | 293 | // provides a conservative estimate of the alignment of every element. |
1882 | 293 | // (This assumes we never start tracking offsetted alignments.) |
1883 | 293 | // |
1884 | 293 | // Note that these are complete objects and so we don't need to |
1885 | 293 | // use the non-virtual size or alignment. |
1886 | 293 | QualType type = getContext().getTypeDeclType(ctor->getParent()); |
1887 | 293 | CharUnits eltAlignment = |
1888 | 293 | arrayBase.getAlignment() |
1889 | 293 | .alignmentOfArrayElement(getContext().getTypeSizeInChars(type)); |
1890 | 293 | Address curAddr = Address(cur, eltAlignment); |
1891 | 293 | |
1892 | 293 | // Zero initialize the storage, if requested. |
1893 | 293 | if (zeroInitialize) |
1894 | 2 | EmitNullInitialization(curAddr, type); |
1895 | 293 | |
1896 | 293 | // C++ [class.temporary]p4: |
1897 | 293 | // There are two contexts in which temporaries are destroyed at a different |
1898 | 293 | // point than the end of the full-expression. The first context is when a |
1899 | 293 | // default constructor is called to initialize an element of an array. |
1900 | 293 | // If the constructor has one or more default arguments, the destruction of |
1901 | 293 | // every temporary created in a default argument expression is sequenced |
1902 | 293 | // before the construction of the next array element, if any. |
1903 | 293 | |
1904 | 293 | { |
1905 | 293 | RunCleanupsScope Scope(*this); |
1906 | 293 | |
1907 | 293 | // Evaluate the constructor and its arguments in a regular |
1908 | 293 | // partial-destroy cleanup. |
1909 | 293 | if (getLangOpts().Exceptions && |
1910 | 293 | !ctor->getParent()->hasTrivialDestructor()) {
1911 | 47 | Destroyer *destroyer = destroyCXXObject; |
1912 | 47 | pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment, |
1913 | 47 | *destroyer); |
1914 | 47 | } |
1915 | 293 | |
1916 | 293 | EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false, |
1917 | 293 | /*Delegating=*/false, curAddr, E); |
1918 | 293 | } |
1919 | 293 | |
1920 | 293 | // Go to the next element. |
1921 | 293 | llvm::Value *next = |
1922 | 293 | Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1), |
1923 | 293 | "arrayctor.next"); |
1924 | 293 | cur->addIncoming(next, Builder.GetInsertBlock()); |
1925 | 293 | |
1926 | 293 | // Check whether that's the end of the loop. |
1927 | 293 | llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done"); |
1928 | 293 | llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont"); |
1929 | 293 | Builder.CreateCondBr(done, contBB, loopBB); |
1930 | 293 | |
1931 | 293 | // Patch the earlier check to skip over the loop. |
1932 | 293 | if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);
1933 | 293 | |
1934 | 293 | EmitBlock(contBB); |
1935 | 293 | } |
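 | | // Rough shape of the emitted loop (sketch; T stands for the element type):
 | | //   arrayctor.loop:
 | | //     %cur = phi T* [ %begin, %entry ], [ %next, %arrayctor.loop ]
 | | //     ...construct *%cur...
 | | //     %next = getelementptr inbounds T, T* %cur, i64 1
 | | //     %done = icmp eq T* %next, %end
 | | //     br i1 %done, label %arrayctor.cont, label %arrayctor.loop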
1936 | | |
1937 | | void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF, |
1938 | | Address addr, |
1939 | 12.2k | QualType type) { |
1940 | 12.2k | const RecordType *rtype = type->castAs<RecordType>(); |
1941 | 12.2k | const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl()); |
1942 | 12.2k | const CXXDestructorDecl *dtor = record->getDestructor(); |
1943 | 12.2k | assert(!dtor->isTrivial()); |
1944 | 12.2k | CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false, |
1945 | 12.2k | /*Delegating=*/false, addr); |
1946 | 12.2k | } |
1947 | | |
1948 | | void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D, |
1949 | | CXXCtorType Type, |
1950 | | bool ForVirtualBase, |
1951 | | bool Delegating, Address This, |
1952 | 17.2k | const CXXConstructExpr *E) { |
1953 | 17.2k | CallArgList Args; |
1954 | 17.2k | |
1955 | 17.2k | // Push the this ptr. |
1956 | 17.2k | Args.add(RValue::get(This.getPointer()), D->getThisType(getContext())); |
1957 | 17.2k | |
1958 | 17.2k | // If this is a trivial constructor, emit a memcpy now before we lose |
1959 | 17.2k | // the alignment information on the argument. |
1960 | 17.2k | // FIXME: It would be better to preserve alignment information into CallArg. |
1961 | 17.2k | if (isMemcpyEquivalentSpecialMember(D)) {
1962 | 1.22k | assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor"); |
1963 | 1.22k | |
1964 | 1.22k | const Expr *Arg = E->getArg(0); |
1965 | 1.22k | QualType SrcTy = Arg->getType(); |
1966 | 1.22k | Address Src = EmitLValue(Arg).getAddress(); |
1967 | 1.22k | QualType DestTy = getContext().getTypeDeclType(D->getParent()); |
1968 | 1.22k | EmitAggregateCopyCtor(This, Src, DestTy, SrcTy); |
1969 | 1.22k | return; |
1970 | 1.22k | } |
1971 | 16.0k | |
1972 | 16.0k | // Add the rest of the user-supplied arguments. |
1973 | 16.0k | const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>(); |
1974 | 16.0k | EvaluationOrder Order = E->isListInitialization() |
1975 | 135 | ? EvaluationOrder::ForceLeftToRight |
1976 | 15.9k | : EvaluationOrder::Default; |
1977 | 17.2k | EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor(), |
1978 | 17.2k | /*ParamsToSkip*/ 0, Order); |
1979 | 17.2k | |
1980 | 17.2k | EmitCXXConstructorCall(D, Type, ForVirtualBase, Delegating, This, Args); |
1981 | 17.2k | } |
1982 | | |
1983 | | static bool canEmitDelegateCallArgs(CodeGenFunction &CGF, |
1984 | | const CXXConstructorDecl *Ctor, |
1985 | 119 | CXXCtorType Type, CallArgList &Args) { |
1986 | 119 | // We can't forward a variadic call. |
1987 | 119 | if (Ctor->isVariadic()) |
1988 | 28 | return false; |
1989 | 91 | |
1990 | 91 | if (CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
1991 | 32 | // If the parameters are callee-cleanup, it's not safe to forward. |
1992 | 32 | for (auto *P : Ctor->parameters()) |
1993 | 56 | if (P->getType().isDestructedType())
1994 | 16 | return false; |
1995 | 16 | |
1996 | 16 | // Likewise if they're inalloca. |
1997 | 16 | const CGFunctionInfo &Info = |
1998 | 16 | CGF.CGM.getTypes().arrangeCXXConstructorCall(Args, Ctor, Type, 0, 0); |
1999 | 16 | if (Info.usesInAlloca()) |
2000 | 0 | return false; |
2001 | 75 | } |
2002 | 75 | |
2003 | 75 | // Anything else should be OK. |
2004 | 75 | return true; |
2005 | 75 | } |
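 | | // E.g., assuming a hypothetical
 | | //   struct B { B(const char *fmt, ...); }; struct D : B { using B::B; };
 | | // the variadic inherited constructor cannot be forwarded, so the caller
 | | // falls back to EmitInlinedInheritingCXXConstructorCall instead.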
2006 | | |
2007 | | void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D, |
2008 | | CXXCtorType Type, |
2009 | | bool ForVirtualBase, |
2010 | | bool Delegating, |
2011 | | Address This, |
2012 | 22.3k | CallArgList &Args) { |
2013 | 22.3k | const CXXRecordDecl *ClassDecl = D->getParent(); |
2014 | 22.3k | |
2015 | 22.3k | // C++11 [class.mfct.non-static]p2: |
2016 | 22.3k | // If a non-static member function of a class X is called for an object that |
2017 | 22.3k | // is not of type X, or of a type derived from X, the behavior is undefined. |
2018 | 22.3k | // FIXME: Provide a source location here. |
2019 | 22.3k | EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, SourceLocation(), |
2020 | 22.3k | This.getPointer(), getContext().getRecordType(ClassDecl)); |
2021 | 22.3k | |
2022 | 22.3k | if (D->isTrivial() && D->isDefaultConstructor()) {
2023 | 10 | assert(Args.size() == 1 && "trivial default ctor with args"); |
2024 | 10 | return; |
2025 | 10 | } |
2026 | 22.2k | |
2027 | 22.2k | // If this is a trivial constructor, just emit what's needed. If this is a |
2028 | 22.2k | // union copy constructor, we must emit a memcpy, because the AST does not |
2029 | 22.2k | // model that copy. |
2030 | 22.2k | if (isMemcpyEquivalentSpecialMember(D)) {
2031 | 10 | assert(Args.size() == 2 && "unexpected argcount for trivial ctor"); |
2032 | 10 | |
2033 | 10 | QualType SrcTy = D->getParamDecl(0)->getType().getNonReferenceType(); |
2034 | 10 | Address Src(Args[1].RV.getScalarVal(), getNaturalTypeAlignment(SrcTy)); |
2035 | 10 | QualType DestTy = getContext().getTypeDeclType(ClassDecl); |
2036 | 10 | EmitAggregateCopyCtor(This, Src, DestTy, SrcTy); |
2037 | 10 | return; |
2038 | 10 | } |
2039 | 22.2k | |
2040 | 22.2k | bool PassPrototypeArgs = true; |
2041 | 22.2k | // Check whether we can actually emit the constructor before trying to do so. |
2042 | 22.2k | if (auto Inherited = D->getInheritedConstructor()) {
2043 | 129 | PassPrototypeArgs = getTypes().inheritingCtorHasParams(Inherited, Type); |
2044 | 129 | if (PassPrototypeArgs && !canEmitDelegateCallArgs(*this, D, Type, Args)) {
2045 | 44 | EmitInlinedInheritingCXXConstructorCall(D, Type, ForVirtualBase, |
2046 | 44 | Delegating, Args); |
2047 | 44 | return; |
2048 | 44 | } |
2049 | 22.2k | } |
2050 | 22.2k | |
2051 | 22.2k | // Insert any ABI-specific implicit constructor arguments. |
2052 | 22.2k | CGCXXABI::AddedStructorArgs ExtraArgs = |
2053 | 22.2k | CGM.getCXXABI().addImplicitConstructorArgs(*this, D, Type, ForVirtualBase, |
2054 | 22.2k | Delegating, Args); |
2055 | 22.2k | |
2056 | 22.2k | // Emit the call. |
2057 | 22.2k | llvm::Constant *CalleePtr = |
2058 | 22.2k | CGM.getAddrOfCXXStructor(D, getFromCtorType(Type)); |
2059 | 22.2k | const CGFunctionInfo &Info = CGM.getTypes().arrangeCXXConstructorCall( |
2060 | 22.2k | Args, D, Type, ExtraArgs.Prefix, ExtraArgs.Suffix, PassPrototypeArgs); |
2061 | 22.2k | CGCallee Callee = CGCallee::forDirect(CalleePtr, D); |
2062 | 22.2k | EmitCall(Info, Callee, ReturnValueSlot(), Args); |
2063 | 22.2k | |
2064 | 22.2k | // Generate vtable assumptions if we're constructing a complete object |
2065 | 22.2k | // with a vtable. We don't do this for base subobjects for two reasons: |
2066 | 22.2k | // first, it's incorrect for classes with virtual bases, and second, we're |
2067 | 22.2k | // about to overwrite the vptrs anyway. |
2068 | 22.2k | // We also have to make sure if we can refer to vtable: |
2069 | 22.2k | // - Otherwise we can refer to vtable if it's safe to speculatively emit. |
2070 | 22.2k | // FIXME: If vtable is used by ctor/dtor, or if vtable is external and we are |
2071 | 22.2k | // sure that definition of vtable is not hidden, |
2072 | 22.2k | // then we are always safe to refer to it. |
2073 | 22.2k | // FIXME: It looks like InstCombine is very inefficient on dealing with |
2074 | 22.2k | // assumes. Make assumption loads require -fstrict-vtable-pointers temporarily. |
2075 | 22.2k | if (CGM.getCodeGenOpts().OptimizationLevel > 0 && |
2076 | 22.2k | ClassDecl->isDynamicClass() && Type != Ctor_Base &&
2077 | 1.84k | CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl) && |
2078 | 204 | CGM.getCodeGenOpts().StrictVTablePointers) |
2079 | 25 | EmitVTableAssumptionLoads(ClassDecl, This); |
2080 | 22.3k | } |
2081 | | |
2082 | | void CodeGenFunction::EmitInheritedCXXConstructorCall( |
2083 | | const CXXConstructorDecl *D, bool ForVirtualBase, Address This, |
2084 | 101 | bool InheritedFromVBase, const CXXInheritedCtorInitExpr *E) { |
2085 | 101 | CallArgList Args; |
2086 | 101 | CallArg ThisArg(RValue::get(This.getPointer()), D->getThisType(getContext()), |
2087 | 101 | /*NeedsCopy=*/false); |
2088 | 101 | |
2089 | 101 | // Forward the parameters. |
2090 | 101 | if (InheritedFromVBase && |
2091 | 101 | CGM.getTarget().getCXXABI().hasConstructorVariants()) {
2092 | 10 | // Nothing to do; this construction is not responsible for constructing |
2093 | 10 | // the base class containing the inherited constructor. |
2094 | 10 | // FIXME: Can we just pass undef's for the remaining arguments if we don't |
2095 | 10 | // have constructor variants? |
2096 | 10 | Args.push_back(ThisArg); |
2097 | 101 | } else if (!CXXInheritedCtorInitExprArgs.empty()) {
2098 | 44 | // The inheriting constructor was inlined; just inject its arguments. |
2099 | 44 | assert(CXXInheritedCtorInitExprArgs.size() >= D->getNumParams() && |
2100 | 44 | "wrong number of parameters for inherited constructor call"); |
2101 | 44 | Args = CXXInheritedCtorInitExprArgs; |
2102 | 44 | Args[0] = ThisArg; |
2103 | 91 | } else { |
2104 | 47 | // The inheriting constructor was not inlined. Emit delegating arguments. |
2105 | 47 | Args.push_back(ThisArg); |
2106 | 47 | const auto *OuterCtor = cast<CXXConstructorDecl>(CurCodeDecl); |
2107 | 47 | assert(OuterCtor->getNumParams() == D->getNumParams()); |
2108 | 47 | assert(!OuterCtor->isVariadic() && "should have been inlined"); |
2109 | 47 | |
2110 | 128 | for (const auto *Param : OuterCtor->parameters()) { |
2111 | 128 | assert(getContext().hasSameUnqualifiedType( |
2112 | 128 | OuterCtor->getParamDecl(Param->getFunctionScopeIndex())->getType(), |
2113 | 128 | Param->getType())); |
2114 | 128 | EmitDelegateCallArg(Args, Param, E->getLocation()); |
2115 | 128 | |
2116 | 128 | // Forward __attribute__(pass_object_size). |
2117 | 128 | if (Param->hasAttr<PassObjectSizeAttr>()) {
2118 | 22 | auto *POSParam = SizeArguments[Param]; |
2119 | 22 | assert(POSParam && "missing pass_object_size value for forwarding"); |
2120 | 22 | EmitDelegateCallArg(Args, POSParam, E->getLocation()); |
2121 | 22 | } |
2122 | 128 | } |
2123 | 91 | } |
2124 | 101 | |
2125 | 101 | EmitCXXConstructorCall(D, Ctor_Base, ForVirtualBase, /*Delegating*/false, |
2126 | 101 | This, Args); |
2127 | 101 | } |
2128 | | |
2129 | | void CodeGenFunction::EmitInlinedInheritingCXXConstructorCall( |
2130 | | const CXXConstructorDecl *Ctor, CXXCtorType CtorType, bool ForVirtualBase, |
2131 | 44 | bool Delegating, CallArgList &Args) { |
2132 | 44 | GlobalDecl GD(Ctor, CtorType); |
2133 | 44 | InlinedInheritingConstructorScope Scope(*this, GD); |
2134 | 44 | ApplyInlineDebugLocation DebugScope(*this, GD); |
2135 | 44 | |
2136 | 44 | // Save the arguments to be passed to the inherited constructor. |
2137 | 44 | CXXInheritedCtorInitExprArgs = Args; |
2138 | 44 | |
2139 | 44 | FunctionArgList Params; |
2140 | 44 | QualType RetType = BuildFunctionArgList(CurGD, Params); |
2141 | 44 | FnRetTy = RetType; |
2142 | 44 | |
2143 | 44 | // Insert any ABI-specific implicit constructor arguments. |
2144 | 44 | CGM.getCXXABI().addImplicitConstructorArgs(*this, Ctor, CtorType, |
2145 | 44 | ForVirtualBase, Delegating, Args); |
2146 | 44 | |
2147 | 44 | // Emit a simplified prolog. We only need to emit the implicit params. |
2148 | 44 | assert(Args.size() >= Params.size() && "too few arguments for call"); |
2149 | 319 | for (unsigned I = 0, N = Args.size(); I != N; ++I) {
2150 | 275 | if (I < Params.size() && isa<ImplicitParamDecl>(Params[I])) {
2151 | 58 | const RValue &RV = Args[I].RV; |
2152 | 58 | assert(!RV.isComplex() && "complex indirect params not supported"); |
2153 | 58 | ParamValue Val = RV.isScalar() |
2154 | 58 | ? ParamValue::forDirect(RV.getScalarVal()) |
2155 | 0 | : ParamValue::forIndirect(RV.getAggregateAddress()); |
2156 | 58 | EmitParmDecl(*Params[I], Val, I + 1); |
2157 | 58 | } |
2158 | 275 | } |
2159 | 44 | |
2160 | 44 | // Create a return value slot if the ABI implementation wants one. |
2161 | 44 | // FIXME: This is dumb, we should ask the ABI not to try to set the return |
2162 | 44 | // value instead. |
2163 | 44 | if (!RetType->isVoidType()) |
2164 | 28 | ReturnValue = CreateIRTemp(RetType, "retval.inhctor"); |
2165 | 44 | |
2166 | 44 | CGM.getCXXABI().EmitInstanceFunctionProlog(*this); |
2167 | 44 | CXXThisValue = CXXABIThisValue; |
2168 | 44 | |
2169 | 44 | // Directly emit the constructor initializers. |
2170 | 44 | EmitCtorPrologue(Ctor, CtorType, Params); |
2171 | 44 | } |
2172 | | |
2173 | 29 | void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) { |
2174 | 29 | llvm::Value *VTableGlobal = |
2175 | 29 | CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass); |
2176 | 29 | if (!VTableGlobal) |
2177 | 0 | return; |
2178 | 29 | |
2179 | 29 | // We can just use the base offset in the complete class. |
2180 | 29 | CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset(); |
2181 | 29 | |
2182 | 29 | if (!NonVirtualOffset.isZero()) |
2183 | 3 | This = |
2184 | 3 | ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr, |
2185 | 3 | Vptr.VTableClass, Vptr.NearestVBase); |
2186 | 29 | |
2187 | 29 | llvm::Value *VPtrValue = |
2188 | 29 | GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass); |
2189 | 29 | llvm::Value *Cmp = |
2190 | 29 | Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables"); |
2191 | 29 | Builder.CreateAssumption(Cmp); |
2192 | 29 | } |
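 | | // Rough shape of what this emits (sketch):
 | | //   %vtable = load vptr from This
 | | //   %cmp.vtables = icmp eq %vtable, <address point of the vtable global>
 | | //   call void @llvm.assume(i1 %cmp.vtables)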
2193 | | |
2194 | | void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl, |
2195 | 25 | Address This) { |
2196 | 25 | if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl)) |
2197 | 25 | for (const VPtr &Vptr : getVTablePointers(ClassDecl)) |
2198 | 29 | EmitVTableAssumptionLoad(Vptr, This); |
2199 | 25 | } |
2200 | | |
2201 | | void |
2202 | | CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D, |
2203 | | Address This, Address Src, |
2204 | 66 | const CXXConstructExpr *E) { |
2205 | 66 | const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>(); |
2206 | 66 | |
2207 | 66 | CallArgList Args; |
2208 | 66 | |
2209 | 66 | // Push the this ptr. |
2210 | 66 | Args.add(RValue::get(This.getPointer()), D->getThisType(getContext())); |
2211 | 66 | |
2212 | 66 | // Push the src ptr. |
2213 | 66 | QualType QT = *(FPT->param_type_begin()); |
2214 | 66 | llvm::Type *t = CGM.getTypes().ConvertType(QT); |
2215 | 66 | Src = Builder.CreateBitCast(Src, t); |
2216 | 66 | Args.add(RValue::get(Src.getPointer()), QT); |
2217 | 66 | |
2218 | 66 | // Skip over first argument (Src). |
2219 | 66 | EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(), |
2220 | 66 | /*ParamsToSkip*/ 1); |
2221 | 66 | |
2222 | 66 | EmitCXXConstructorCall(D, Ctor_Complete, false, false, This, Args); |
2223 | 66 | } |
2224 | | |
2225 | | void |
2226 | | CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor, |
2227 | | CXXCtorType CtorType, |
2228 | | const FunctionArgList &Args, |
2229 | 6.06k | SourceLocation Loc) { |
2230 | 6.06k | CallArgList DelegateArgs; |
2231 | 6.06k | |
2232 | 6.06k | FunctionArgList::const_iterator I = Args.begin(), E = Args.end(); |
2233 | 6.06k | assert(I != E && "no parameters to constructor"); |
2234 | 6.06k | |
2235 | 6.06k | // this |
2236 | 6.06k | Address This = LoadCXXThisAddress(); |
2237 | 6.06k | DelegateArgs.add(RValue::get(This.getPointer()), (*I)->getType()); |
2238 | 6.06k | ++I; |
2239 | 6.06k | |
2240 | 6.06k | // FIXME: The location of the VTT parameter in the parameter list is |
2241 | 6.06k | // specific to the Itanium ABI and shouldn't be hardcoded here. |
2242 | 6.06k | if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
2243 | 0 | assert(I != E && "cannot skip vtt parameter, already done with args"); |
2244 | 0 | assert((*I)->getType()->isPointerType() && |
2245 | 0 | "skipping parameter not of vtt type"); |
2246 | 0 | ++I; |
2247 | 0 | } |
2248 | 6.06k | |
2249 | 6.06k | // Explicit arguments. |
2250 | 9.87k | for (; I != E; ++I) {
2251 | 3.80k | const VarDecl *param = *I; |
2252 | 3.80k | // FIXME: per-argument source location |
2253 | 3.80k | EmitDelegateCallArg(DelegateArgs, param, Loc); |
2254 | 3.80k | } |
2255 | 6.06k | |
2256 | 6.06k | EmitCXXConstructorCall(Ctor, CtorType, /*ForVirtualBase=*/false, |
2257 | 6.06k | /*Delegating=*/true, This, DelegateArgs); |
2258 | 6.06k | } |
2259 | | |
2260 | | namespace { |
2261 | | struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup { |
2262 | | const CXXDestructorDecl *Dtor; |
2263 | | Address Addr; |
2264 | | CXXDtorType Type; |
2265 | | |
2266 | | CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr, |
2267 | | CXXDtorType Type) |
2268 | 6 | : Dtor(D), Addr(Addr), Type(Type) {} |
2269 | | |
2270 | 4 | void Emit(CodeGenFunction &CGF, Flags flags) override { |
2271 | 4 | CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false, |
2272 | 4 | /*Delegating=*/true, Addr); |
2273 | 4 | } |
2274 | | }; |
2275 | | } // end anonymous namespace |
2276 | | |
2277 | | void |
2278 | | CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor, |
2279 | 10 | const FunctionArgList &Args) { |
2280 | 10 | assert(Ctor->isDelegatingConstructor()); |
2281 | 10 | |
2282 | 10 | Address ThisPtr = LoadCXXThisAddress(); |
2283 | 10 | |
2284 | 10 | AggValueSlot AggSlot = |
2285 | 10 | AggValueSlot::forAddr(ThisPtr, Qualifiers(), |
2286 | 10 | AggValueSlot::IsDestructed, |
2287 | 10 | AggValueSlot::DoesNotNeedGCBarriers, |
2288 | 10 | AggValueSlot::IsNotAliased); |
2289 | 10 | |
2290 | 10 | EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot); |
2291 | 10 | |
2292 | 10 | const CXXRecordDecl *ClassDecl = Ctor->getParent(); |
2293 | 10 | if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
2294 | 6 | CXXDtorType Type = |
2295 | 6 | CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;
2296 | 6 | |
2297 | 6 | EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup, |
2298 | 6 | ClassDecl->getDestructor(), |
2299 | 6 | ThisPtr, Type); |
2300 | 6 | } |
2301 | 10 | } |
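 | | // Illustrative source pattern handled here, for a hypothetical
 | | //   struct S { S(); S(int); };  with  S::S() : S(42) {}
 | | // the target constructor initializes the whole object; if the class has a
 | | // non-trivial destructor and exceptions are enabled, an EH cleanup destroys
 | | // the already-constructed object should the delegating body later throw.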
2302 | | |
2303 | | void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD, |
2304 | | CXXDtorType Type, |
2305 | | bool ForVirtualBase, |
2306 | | bool Delegating, |
2307 | 21.7k | Address This) { |
2308 | 21.7k | CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase, |
2309 | 21.7k | Delegating, This); |
2310 | 21.7k | } |
2311 | | |
2312 | | namespace { |
2313 | | struct CallLocalDtor final : EHScopeStack::Cleanup { |
2314 | | const CXXDestructorDecl *Dtor; |
2315 | | Address Addr; |
2316 | | |
2317 | | CallLocalDtor(const CXXDestructorDecl *D, Address Addr) |
2318 | 37 | : Dtor(D), Addr(Addr) {} |
2319 | | |
2320 | 46 | void Emit(CodeGenFunction &CGF, Flags flags) override { |
2321 | 46 | CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, |
2322 | 46 | /*ForVirtualBase=*/false, |
2323 | 46 | /*Delegating=*/false, Addr); |
2324 | 46 | } |
2325 | | }; |
2326 | | } // end anonymous namespace |
2327 | | |
2328 | | void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D, |
2329 | 37 | Address Addr) { |
2330 | 37 | EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr); |
2331 | 37 | } |
2332 | | |
2333 | 12 | void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) { |
2334 | 12 | CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl(); |
2335 | 12 | if (!ClassDecl) return;
2336 | 12 | if (ClassDecl->hasTrivialDestructor()) return;
2337 | 9 | |
2338 | 9 | const CXXDestructorDecl *D = ClassDecl->getDestructor(); |
2339 | 9 | assert(D && D->isUsed() && "destructor not marked as used!"); |
2340 | 9 | PushDestructorCleanup(D, Addr); |
2341 | 9 | } |
2342 | | |
2343 | 5.49k | void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) { |
2344 | 5.49k | // Compute the address point. |
2345 | 5.49k | llvm::Value *VTableAddressPoint = |
2346 | 5.49k | CGM.getCXXABI().getVTableAddressPointInStructor( |
2347 | 5.49k | *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase); |
2348 | 5.49k | |
2349 | 5.49k | if (!VTableAddressPoint) |
2350 | 685 | return; |
2351 | 4.81k | |
2352 | 4.81k | // Compute where to store the address point. |
2353 | 4.81k | llvm::Value *VirtualOffset = nullptr; |
2354 | 4.81k | CharUnits NonVirtualOffset = CharUnits::Zero(); |
2355 | 4.81k | |
2356 | 4.81k | if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) {
2357 | 300 | // We need to use the virtual base offset offset because the virtual base |
2358 | 300 | // might have a different offset in the most derived class. |
2359 | 300 | |
2360 | 300 | VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset( |
2361 | 300 | *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase); |
2362 | 300 | NonVirtualOffset = Vptr.OffsetFromNearestVBase; |
2363 | 4.81k | } else { |
2364 | 4.51k | // We can just use the base offset in the complete class. |
2365 | 4.51k | NonVirtualOffset = Vptr.Base.getBaseOffset(); |
2366 | 4.51k | } |
2367 | 4.81k | |
2368 | 4.81k | // Apply the offsets. |
2369 | 4.81k | Address VTableField = LoadCXXThisAddress(); |
2370 | 4.81k | |
2371 | 4.81k | if (!NonVirtualOffset.isZero() || VirtualOffset)
2372 | 901 | VTableField = ApplyNonVirtualAndVirtualOffset( |
2373 | 901 | *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass, |
2374 | 901 | Vptr.NearestVBase); |
2375 | 4.81k | |
2376 | 4.81k | // Finally, store the address point. Use the same LLVM types as the field to |
2377 | 4.81k | // support optimization. |
2378 | 4.81k | llvm::Type *VTablePtrTy = |
2379 | 4.81k | llvm::FunctionType::get(CGM.Int32Ty, /*isVarArg=*/true) |
2380 | 4.81k | ->getPointerTo() |
2381 | 4.81k | ->getPointerTo(); |
2382 | 4.81k | VTableField = Builder.CreateBitCast(VTableField, VTablePtrTy->getPointerTo()); |
2383 | 4.81k | VTableAddressPoint = Builder.CreateBitCast(VTableAddressPoint, VTablePtrTy); |
2384 | 4.81k | |
2385 | 4.81k | llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField); |
2386 | 4.81k | CGM.DecorateInstructionWithTBAA(Store, CGM.getTBAAInfoForVTablePtr()); |
2387 | 4.81k | if (CGM.getCodeGenOpts().OptimizationLevel > 0 && |
2388 | 3.31k | CGM.getCodeGenOpts().StrictVTablePointers) |
2389 | 20 | CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass); |
2390 | 5.49k | } |
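The VirtualOffset branch above exists because a virtual base can sit at different offsets depending on the most-derived object, so a constructor for an intermediate class cannot hard-code where the vbase's vptr field lives. An invented hierarchy that illustrates this (class names are hypothetical):

    struct VB  { virtual ~VB() {} };
    struct Mid : virtual VB { virtual void f() {} };
    struct Big : Mid { long pad[8]; };  // moves VB relative to the Mid subobject

    Mid standalone;  // Mid's ctor finds VB at one offset here...
    Big combined;    // ...and at another when run as Big's base, hence the
                     // runtime GetVirtualBaseClassOffset lookup.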
2391 | | |
2392 | | CodeGenFunction::VPtrsVector |
2393 | 4.21k | CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) { |
2394 | 4.21k | CodeGenFunction::VPtrsVector VPtrsResult; |
2395 | 4.21k | VisitedVirtualBasesSetTy VBases; |
2396 | 4.21k | getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()), |
2397 | 4.21k | /*NearestVBase=*/nullptr, |
2398 | 4.21k | /*OffsetFromNearestVBase=*/CharUnits::Zero(), |
2399 | 4.21k | /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases, |
2400 | 4.21k | VPtrsResult); |
2401 | 4.21k | return VPtrsResult; |
2402 | 4.21k | } |
2403 | | |
2404 | | void CodeGenFunction::getVTablePointers(BaseSubobject Base, |
2405 | | const CXXRecordDecl *NearestVBase, |
2406 | | CharUnits OffsetFromNearestVBase, |
2407 | | bool BaseIsNonVirtualPrimaryBase, |
2408 | | const CXXRecordDecl *VTableClass, |
2409 | | VisitedVirtualBasesSetTy &VBases, |
2410 | 9.67k | VPtrsVector &Vptrs) { |
2411 | 9.67k | // If this base is a non-virtual primary base the address point has already |
2412 | 9.67k | // been set. |
2413 | 9.67k | if (!BaseIsNonVirtualPrimaryBase) { |
2414 | 5.52k | // Initialize the vtable pointer for this base. |
2415 | 5.52k | VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass}; |
2416 | 5.52k | Vptrs.push_back(Vptr); |
2417 | 5.52k | } |
2418 | 9.67k | |
2419 | 9.67k | const CXXRecordDecl *RD = Base.getBase(); |
2420 | 9.67k | |
2421 | 9.67k | // Traverse bases. |
2422 | 6.59k | for (const auto &I : RD->bases()) { |
2423 | 6.59k | CXXRecordDecl *BaseDecl |
2424 | 6.59k | = cast<CXXRecordDecl>(I.getType()->getAs<RecordType>()->getDecl()); |
2425 | 6.59k | |
2426 | 6.59k | // Ignore classes without a vtable. |
2427 | 6.59k | if (!BaseDecl->isDynamicClass()) |
2428 | 1.06k | continue; |
2429 | 5.53k | |
2430 | 5.53k | CharUnits BaseOffset; |
2431 | 5.53k | CharUnits BaseOffsetFromNearestVBase; |
2432 | 5.53k | bool BaseDeclIsNonVirtualPrimaryBase; |
2433 | 5.53k | |
2434 | 5.53k | if (I.isVirtual()) { |
2435 | 545 | // Check if we've visited this virtual base before. |
2436 | 545 | if (!VBases.insert(BaseDecl).second) |
2437 | 64 | continue; |
2438 | 481 | |
2439 | 481 | const ASTRecordLayout &Layout = |
2440 | 481 | getContext().getASTRecordLayout(VTableClass); |
2441 | 481 | |
2442 | 481 | BaseOffset = Layout.getVBaseClassOffset(BaseDecl); |
2443 | 481 | BaseOffsetFromNearestVBase = CharUnits::Zero(); |
2444 | 481 | BaseDeclIsNonVirtualPrimaryBase = false; |
2445 | 5.53k | } else { |
2446 | 4.98k | const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD); |
2447 | 4.98k | |
2448 | 4.98k | BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl); |
2449 | 4.98k | BaseOffsetFromNearestVBase = |
2450 | 4.98k | OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl); |
2451 | 4.98k | BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl; |
2452 | 4.98k | } |
2453 | 5.53k | |
2454 | 5.46k | getVTablePointers( |
2455 | 5.46k | BaseSubobject(BaseDecl, BaseOffset), |
2456 | 5.46k | I.isVirtual() ? BaseDecl : NearestVBase, BaseOffsetFromNearestVBase, |
2457 | 6.59k | BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs); |
2458 | 6.59k | } |
2459 | 9.67k | } |
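To make the BaseIsNonVirtualPrimaryBase case concrete, a small invented hierarchy: in the Itanium-style layout Clang uses, A below is D's primary base and shares D's vptr, so it must not get a second address-point store, while the non-primary base B contributes its own VPtr entry:

    struct A { virtual void fa() {} };
    struct B { virtual void fb() {} };
    struct D : A, B { void fa() override {} };
    // getVTablePointers(D) should produce two entries: one for the shared
    // D/A vptr at offset zero and one for B's vptr at a non-zero offset.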
2460 | | |
2461 | 9.45k | void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) { |
2462 | 9.45k | // Ignore classes without a vtable. |
2463 | 9.45k | if (!RD->isDynamicClass()) |
2464 | 5.26k | return; |
2465 | 4.19k | |
2466 | 4.19k | // Initialize the vtable pointers for this class and all of its bases. |
2467 | 4.19k | if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD)) |
2468 | 4.18k | for (const VPtr &Vptr : getVTablePointers(RD)) |
2469 | 5.49k | InitializeVTablePointer(Vptr); |
2470 | 4.19k | |
2471 | 4.19k | if (RD->getNumVBases()) |
2472 | 634 | CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD); |
2473 | 9.45k | } |
2474 | | |
2475 | | llvm::Value *CodeGenFunction::GetVTablePtr(Address This, |
2476 | | llvm::Type *VTableTy, |
2477 | 5.17k | const CXXRecordDecl *RD) { |
2478 | 5.17k | Address VTablePtrSrc = Builder.CreateElementBitCast(This, VTableTy); |
2479 | 5.17k | llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable"); |
2480 | 5.17k | CGM.DecorateInstructionWithTBAA(VTable, CGM.getTBAAInfoForVTablePtr()); |
2481 | 5.17k | |
2482 | 5.17k | if (CGM.getCodeGenOpts().OptimizationLevel > 0 && |
2483 | 4.60k | CGM.getCodeGenOpts().StrictVTablePointers) |
2484 | 61 | CGM.DecorateInstructionWithInvariantGroup(VTable, RD); |
2485 | 5.17k | |
2486 | 5.17k | return VTable; |
2487 | 5.17k | } |
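The invariant.group decoration in the StrictVTablePointers branch lets the optimizer treat the vptr as immutable for the object's lifetime. A hedged, invented example of what that enables under Clang's experimental -fstrict-vtable-pointers flag:

    struct A { virtual void f() {} };
    void opaque();  // external, deliberately opaque to the optimizer

    void twice(A *a) {
      a->f();    // vtable load tagged !invariant.group
      opaque();  // cannot change *a's dynamic type (absent placement-new games)
      a->f();    // second vtable load can be folded into the first
    }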
2488 | | |
2489 | | // If a class has a single non-virtual base and does not introduce or override |
2490 | | // virtual member functions or fields, it will have the same layout as its base. |
2491 | | // This function returns the least derived such class. |
2492 | | // |
2493 | | // Casting an instance of a base class to such a derived class is technically |
2494 | | // undefined behavior, but it is a relatively common hack for introducing member |
2495 | | // functions on class instances with specific properties (e.g. llvm::Operator) |
2496 | | // that works under most compilers and should not have security implications, so |
2497 | | // we allow it by default. It can be disabled with -fsanitize=cfi-cast-strict. |
2498 | | static const CXXRecordDecl * |
2499 | 56 | LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) { |
2500 | 56 | if (!RD->field_empty()) |
2501 | 1 | return RD; |
2502 | 55 | |
2503 | 55 | if (RD->getNumVBases() != 0) |
2504 | 11 | return RD; |
2505 | 44 | |
2506 | 44 | if (RD->getNumBases() != 1) |
2507 | 29 | return RD; |
2508 | 15 | |
2509 | 15 | for (const CXXMethodDecl *MD : RD->methods()) { |
2510 | 25 | if (MD->isVirtual()) { |
2511 | 9 | // Virtual member functions are only ok if they are implicit destructors |
2512 | 9 | // because the implicit destructor will have the same semantics as the |
2513 | 9 | // base class's destructor if no fields are added. |
2514 | 9 | if (isa<CXXDestructorDecl>(MD) && MD->isImplicit()) |
2515 | 0 | continue; |
2516 | 9 | return RD; |
2517 | 9 | } |
2518 | 25 | } |
2519 | 6 | |
2520 | 6 | return LeastDerivedClassWithSameLayout( |
2521 | 6 | RD->bases_begin()->getType()->getAsCXXRecordDecl()); |
2522 | 6 | } |
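An invented instance of the layout-compatible pattern the comment above describes (llvm::Operator is the in-tree motivating case): Wrapper adds only non-virtual member functions, so the walk lands on Base and the CFI checks accept a Base object accessed through a Wrapper pointer:

    struct Base { virtual ~Base() {} int data; };
    struct Wrapper : Base {                  // no fields, one base, no vbases,
      int getData() const { return data; }   // nothing virtual introduced
    };
    // Expected: LeastDerivedClassWithSameLayout(Wrapper) == Base.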
2523 | | |
2524 | | void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD, |
2525 | | llvm::Value *VTable, |
2526 | 4.73k | SourceLocation Loc) { |
2527 | 4.73k | if (SanOpts.has(SanitizerKind::CFIVCall)) |
2528 | 32 | EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc); |
2529 | 4.70k | else if (CGM.getCodeGenOpts().WholeProgramVTables && |
2530 | 4.70k | CGM.HasHiddenLTOVisibility(RD)) { |
2531 | 38 | llvm::Metadata *MD = |
2532 | 38 | CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0)); |
2533 | 38 | llvm::Value *TypeId = |
2534 | 38 | llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD); |
2535 | 38 | |
2536 | 38 | llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy); |
2537 | 38 | llvm::Value *TypeTest = |
2538 | 38 | Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test), |
2539 | 38 | {CastedVTable, TypeId}); |
2540 | 38 | Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest); |
2541 | 38 | } |
2542 | 4.73k | } |
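A hedged usage sketch for the WholeProgramVTables branch; the type and function are invented, the flags are real Clang options. Compiled with something like `clang++ -flto -fvisibility=hidden -fwhole-program-vtables`, the loaded vtable below gets an llvm.assume(llvm.type.test(...)) marker that feeds LTO's whole-program devirtualization:

    struct Shape { virtual double area() const; };     // defined elsewhere in the program
    double measure(const Shape &s) { return s.area(); }  // marked vtable load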
2543 | | |
2544 | | void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD, |
2545 | | llvm::Value *VTable, |
2546 | | CFITypeCheckKind TCK, |
2547 | 38 | SourceLocation Loc) { |
2548 | 38 | if (!SanOpts.has(SanitizerKind::CFICastStrict)) |
2549 | 36 | RD = LeastDerivedClassWithSameLayout(RD); |
2550 | 38 | |
2551 | 38 | EmitVTablePtrCheck(RD, VTable, TCK, Loc); |
2552 | 38 | } |
2553 | | |
2554 | | void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T, |
2555 | | llvm::Value *Derived, |
2556 | | bool MayBeNull, |
2557 | | CFITypeCheckKind TCK, |
2558 | 20 | SourceLocation Loc) { |
2559 | 20 | if (!getLangOpts().CPlusPlus) |
2560 | 0 | return; |
2561 | 20 | |
2562 | 20 | auto *ClassTy = T->getAs<RecordType>(); |
2563 | 20 | if (!ClassTy) |
2564 | 0 | return; |
2565 | 20 | |
2566 | 20 | const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(ClassTy->getDecl()); |
2567 | 20 | |
2568 | 20 | if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass()) |
2569 | 0 | return; |
2570 | 20 | |
2571 | 20 | if (!SanOpts.has(SanitizerKind::CFICastStrict)) |
2572 | 14 | ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl); |
2573 | 20 | |
2574 | 20 | llvm::BasicBlock *ContBlock = nullptr; |
2575 | 20 | |
2576 | 20 | if (MayBeNull) { |
2577 | 14 | llvm::Value *DerivedNotNull = |
2578 | 14 | Builder.CreateIsNotNull(Derived, "cast.nonnull"); |
2579 | 14 | |
2580 | 14 | llvm::BasicBlock *CheckBlock = createBasicBlock("cast.check"); |
2581 | 14 | ContBlock = createBasicBlock("cast.cont"); |
2582 | 14 | |
2583 | 14 | Builder.CreateCondBr(DerivedNotNull, CheckBlock, ContBlock); |
2584 | 14 | |
2585 | 14 | EmitBlock(CheckBlock); |
2586 | 14 | } |
2587 | 20 | |
2588 | 20 | llvm::Value *VTable = |
2589 | 20 | GetVTablePtr(Address(Derived, getPointerAlign()), Int8PtrTy, ClassDecl); |
2590 | 20 | |
2591 | 20 | EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc); |
2592 | 20 | |
2593 | 20 | if (MayBeNull) { |
2594 | 14 | Builder.CreateBr(ContBlock); |
2595 | 14 | EmitBlock(ContBlock); |
2596 | 14 | } |
2597 | 20 | } |
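An invented example of the MayBeNull control flow: under -fsanitize=cfi-derived-cast (a real sanitizer group) the downcast below is checked, but since a null operand is a valid input, codegen branches on cast.nonnull, performs the vtable check only on the non-null path, and rejoins at cast.cont:

    struct Base    { virtual ~Base() {} };
    struct Derived : Base { int extra; };

    Derived *down(Base *b) {
      return static_cast<Derived *>(b);  // null must pass through unchecked
    }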
2598 | | |
2599 | | void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD, |
2600 | | llvm::Value *VTable, |
2601 | | CFITypeCheckKind TCK, |
2602 | 58 | SourceLocation Loc) { |
2603 | 58 | if (!CGM.getCodeGenOpts().SanitizeCfiCrossDso && |
2604 | 56 | !CGM.HasHiddenLTOVisibility(RD)) |
2605 | 0 | return; |
2606 | 58 | |
2607 | 58 | SanitizerMask M; |
2608 | 58 | llvm::SanitizerStatKind SSK; |
2609 | 58 | switch (TCK) { |
2610 | 32 | case CFITCK_VCall: |
2611 | 32 | M = SanitizerKind::CFIVCall; |
2612 | 32 | SSK = llvm::SanStat_CFI_VCall; |
2613 | 32 | break; |
2614 | 6 | case CFITCK_NVCall: |
2615 | 6 | M = SanitizerKind::CFINVCall; |
2616 | 6 | SSK = llvm::SanStat_CFI_NVCall; |
2617 | 6 | break; |
2618 | 5 | case CFITCK_DerivedCast: |
2619 | 5 | M = SanitizerKind::CFIDerivedCast; |
2620 | 5 | SSK = llvm::SanStat_CFI_DerivedCast; |
2621 | 5 | break; |
2622 | 15 | case CFITCK_UnrelatedCast: |
2623 | 15 | M = SanitizerKind::CFIUnrelatedCast; |
2624 | 15 | SSK = llvm::SanStat_CFI_UnrelatedCast; |
2625 | 15 | break; |
2626 | 0 | case CFITCK_ICall: |
2627 | 0 | llvm_unreachable("not expecting CFITCK_ICall"); |
2628 | 58 | } |
2629 | 58 | |
2630 | 58 | std::string TypeName = RD->getQualifiedNameAsString(); |
2631 | 58 | if (getContext().getSanitizerBlacklist().isBlacklistedType(M, TypeName)) |
2632 | 2 | return; |
2633 | 56 | |
2634 | 56 | SanitizerScope SanScope(this); |
2635 | 56 | EmitSanitizerStatReport(SSK); |
2636 | 56 | |
2637 | 56 | llvm::Metadata *MD = |
2638 | 56 | CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0)); |
2639 | 56 | llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD); |
2640 | 56 | |
2641 | 56 | llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy); |
2642 | 56 | llvm::Value *TypeTest = Builder.CreateCall( |
2643 | 56 | CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, TypeId}); |
2644 | 56 | |
2645 | 56 | llvm::Constant *StaticData[] = { |
2646 | 56 | llvm::ConstantInt::get(Int8Ty, TCK), |
2647 | 56 | EmitCheckSourceLocation(Loc), |
2648 | 56 | EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)), |
2649 | 56 | }; |
2650 | 56 | |
2651 | 56 | auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD); |
2652 | 56 | if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && CrossDsoTypeId) { |
2653 | 2 | EmitCfiSlowPathCheck(M, TypeTest, CrossDsoTypeId, CastedVTable, StaticData); |
2654 | 2 | return; |
2655 | 2 | } |
2656 | 54 | |
2654 | 54 | if (CGM.getCodeGenOpts().SanitizeTrap.has(M)) { |
2658 | 25 | EmitTrapCheck(TypeTest); |
2659 | 25 | return; |
2660 | 25 | } |
2661 | 29 | |
2662 | 29 | llvm::Value *AllVtables = llvm::MetadataAsValue::get( |
2663 | 29 | CGM.getLLVMContext(), |
2664 | 29 | llvm::MDString::get(CGM.getLLVMContext(), "all-vtables")); |
2665 | 29 | llvm::Value *ValidVtable = Builder.CreateCall( |
2666 | 29 | CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, AllVtables}); |
2667 | 29 | EmitCheck(std::make_pair(TypeTest, M), SanitizerHandler::CFICheckFail, |
2668 | 29 | StaticData, {CastedVTable, ValidVtable}); |
2669 | 29 | } |
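For orientation, invented triggers that roughly correspond to the four check kinds in the switch above; the sanitizer group spellings (cfi-vcall, cfi-nvcall, cfi-derived-cast, cfi-unrelated-cast) are real, the code is illustrative:

    struct Base    { virtual void f() {} void g() {} };
    struct Derived : Base { int extra; };

    void kinds(Base *p, void *raw) {
      p->f();                           // CFITCK_VCall
      p->g();                           // CFITCK_NVCall (non-virtual member call)
      (void)static_cast<Derived *>(p);  // CFITCK_DerivedCast
      (void)static_cast<Base *>(raw);   // CFITCK_UnrelatedCast (cast from void*)
    }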
2670 | | |
2671 | 4.87k | bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) { |
2672 | 4.87k | if (!CGM.getCodeGenOpts().WholeProgramVTables || |
2673 | 64 | !SanOpts.has(SanitizerKind::CFIVCall) || |
2674 | 12 | !CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIVCall) || |
2675 | 11 | !CGM.HasHiddenLTOVisibility(RD)) |
2676 | 4.86k | return false; |
2677 | 11 | |
2678 | 11 | std::string TypeName = RD->getQualifiedNameAsString(); |
2679 | 11 | return !getContext().getSanitizerBlacklist().isBlacklistedType( |
2680 | 11 | SanitizerKind::CFIVCall, TypeName); |
2681 | 11 | } |
2682 | | |
2683 | | llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad( |
2684 | 11 | const CXXRecordDecl *RD, llvm::Value *VTable, uint64_t VTableByteOffset) { |
2685 | 11 | SanitizerScope SanScope(this); |
2686 | 11 | |
2687 | 11 | EmitSanitizerStatReport(llvm::SanStat_CFI_VCall); |
2688 | 11 | |
2689 | 11 | llvm::Metadata *MD = |
2690 | 11 | CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0)); |
2691 | 11 | llvm::Value *TypeId = llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD); |
2692 | 11 | |
2693 | 11 | llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy); |
2694 | 11 | llvm::Value *CheckedLoad = Builder.CreateCall( |
2695 | 11 | CGM.getIntrinsic(llvm::Intrinsic::type_checked_load), |
2696 | 11 | {CastedVTable, llvm::ConstantInt::get(Int32Ty, VTableByteOffset), |
2697 | 11 | TypeId}); |
2698 | 11 | llvm::Value *CheckResult = Builder.CreateExtractValue(CheckedLoad, 1); |
2699 | 11 | |
2700 | 11 | EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIVCall), |
2701 | 11 | SanitizerHandler::CFICheckFail, nullptr, nullptr); |
2702 | 11 | |
2703 | 11 | return Builder.CreateBitCast( |
2704 | 11 | Builder.CreateExtractValue(CheckedLoad, 0), |
2705 | 11 | cast<llvm::PointerType>(VTable->getType())->getElementType()); |
2706 | 11 | } |
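A hedged build sketch tying the two functions above together. Under roughly `-flto -fvisibility=hidden -fwhole-program-vtables -fsanitize=cfi-vcall -fsanitize-trap=cfi-vcall` (the conjunction ShouldEmitVTableTypeCheckedLoad tests for), the virtual call below is lowered through llvm.type.checked.load, letting LTO's whole-program devirtualization resolve the check and the call together:

    struct Widget { virtual int id() const; };       // defined elsewhere under LTO
    int probe(const Widget *w) { return w->id(); }   // fused vtable load + CFI check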
2707 | | |
2708 | | void CodeGenFunction::EmitForwardingCallToLambda( |
2709 | | const CXXMethodDecl *callOperator, |
2710 | 19 | CallArgList &callArgs) { |
2711 | 19 | // Get the address of the call operator. |
2712 | 19 | const CGFunctionInfo &calleeFnInfo = |
2713 | 19 | CGM.getTypes().arrangeCXXMethodDeclaration(callOperator); |
2714 | 19 | llvm::Constant *calleePtr = |
2715 | 19 | CGM.GetAddrOfFunction(GlobalDecl(callOperator), |
2716 | 19 | CGM.getTypes().GetFunctionType(calleeFnInfo)); |
2717 | 19 | |
2718 | 19 | // Prepare the return slot. |
2719 | 19 | const FunctionProtoType *FPT = |
2720 | 19 | callOperator->getType()->castAs<FunctionProtoType>(); |
2721 | 19 | QualType resultType = FPT->getReturnType(); |
2722 | 19 | ReturnValueSlot returnSlot; |
2723 | 19 | if (!resultType->isVoidType() && |
2724 | 9 | calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect && |
2725 | 1 | !hasScalarEvaluationKind(calleeFnInfo.getReturnType())) |
2726 | 1 | returnSlot = ReturnValueSlot(ReturnValue, resultType.isVolatileQualified()); |
2727 | 19 | |
2728 | 19 | // We don't need to separately arrange the call arguments because |
2729 | 19 | // the call can't be variadic anyway --- it's impossible to forward |
2730 | 19 | // variadic arguments. |
2731 | 19 | |
2732 | 19 | // Now emit our call. |
2733 | 19 | auto callee = CGCallee::forDirect(calleePtr, callOperator); |
2734 | 19 | RValue RV = EmitCall(calleeFnInfo, callee, returnSlot, callArgs); |
2735 | 19 | |
2736 | 19 | // If necessary, copy the returned value into the slot. |
2737 | 19 | if (!resultType->isVoidType() && returnSlot.isNull()) |
2738 | 8 | EmitReturnOfRValue(RV, resultType); |
2739 | 19 | else |
2740 | 11 | EmitBranchThroughCleanup(ReturnBlock); |
2741 | 19 | } |
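An invented example of code that reaches this forwarding path: converting a captureless lambda to a function pointer synthesizes a static invoker whose body is exactly such a forwarding call. The non-variadic observation in the comment holds because the conversion fixes the signature, so every argument forwards one-to-one:

    int (*fp)(int) = [](int x) { return x * 2; };  // invoker forwards to operator()
    int six = fp(3);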
2742 | | |
2743 | 7 | void CodeGenFunction::EmitLambdaBlockInvokeBody() { |
2744 | 7 | const BlockDecl *BD = BlockInfo->getBlockDecl(); |
2745 | 7 | const VarDecl *variable = BD->capture_begin()->getVariable(); |
2746 | 7 | const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl(); |
2747 | 7 | const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator(); |
2748 | 7 | |
2746 | 7 | if (CallOp->isVariadic()) { |
2750 | 0 | // FIXME: Making this work correctly is nasty because it requires either |
2751 | 0 | // cloning the body of the call operator or making the call operator |
2752 | 0 | // forward. |
2753 | 0 | CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function"); |
2754 | 0 | return; |
2755 | 0 | } |
2756 | 7 | |
2757 | 7 | // Start building arguments for forwarding call |
2758 | 7 | CallArgList CallArgs; |
2759 | 7 | |
2760 | 7 | QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda)); |
2761 | 7 | Address ThisPtr = GetAddrOfBlockDecl(variable, false); |
2762 | 7 | CallArgs.add(RValue::get(ThisPtr.getPointer()), ThisType); |
2763 | 7 | |
2764 | 7 | // Add the rest of the parameters. |
2765 | 7 | for (auto param : BD->parameters()) |
2766 | 0 | EmitDelegateCallArg(CallArgs, param, param->getLocStart()); |
2767 | 7 | |
2768 | 7 | assert(!Lambda->isGenericLambda() && |
2769 | 7 | "generic lambda interconversion to block not implemented"); |
2770 | 7 | EmitForwardingCallToLambda(CallOp, CallArgs); |
2771 | 7 | } |
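The block variant is the Objective-C++ analogue; a hedged, invented sketch (requires Clang's -fblocks extension): converting a lambda to a block yields a block whose invoke function loads the captured lambda, as above, and forwards to its call operator with `this` recovered from the block capture rather than from a parameter:

    void demo() {
      auto lam = [] { /* work */ };
      void (^blk)() = lam;  // block invoke body forwards to lam's operator()
      blk();
    }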
2772 | | |
2773 | 12 | void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) { |
2774 | 12 | const CXXRecordDecl *Lambda = MD->getParent(); |
2775 | 12 | |
2776 | 12 | // Start building arguments for forwarding call |
2777 | 12 | CallArgList CallArgs; |
2778 | 12 | |
2779 | 12 | QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda)); |
2780 | 12 | llvm::Value *ThisPtr = llvm::UndefValue::get(getTypes().ConvertType(ThisType)); |
2781 | 12 | CallArgs.add(RValue::get(ThisPtr), ThisType); |
2782 | 12 | |
2783 | 12 | // Add the rest of the parameters. |
2784 | 12 | for (auto Param : MD->parameters()) |
2785 | 2 | EmitDelegateCallArg(CallArgs, Param, Param->getLocStart()); |
2786 | 12 | |
2787 | 12 | const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator(); |
2788 | 12 | // For a generic lambda, find the corresponding call operator specialization |
2789 | 12 | // to which the call to the static-invoker shall be forwarded. |
2787 | 12 | if (Lambda->isGenericLambda()) { |
2791 | 0 | assert(MD->isFunctionTemplateSpecialization()); |
2792 | 0 | const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs(); |
2793 | 0 | FunctionTemplateDecl *CallOpTemplate = CallOp->getDescribedFunctionTemplate(); |
2794 | 0 | void *InsertPos = nullptr; |
2795 | 0 | FunctionDecl *CorrespondingCallOpSpecialization = |
2796 | 0 | CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos); |
2797 | 0 | assert(CorrespondingCallOpSpecialization); |
2798 | 0 | CallOp = cast<CXXMethodDecl>(CorrespondingCallOpSpecialization); |
2799 | 0 | } |
2800 | 12 | EmitForwardingCallToLambda(CallOp, CallArgs); |
2801 | 12 | } |
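For the generic-lambda branch, an invented example: converting `[](auto x) { ... }` to `int (*)(int)` instantiates `operator()<int>`, and the static invoker must delegate to exactly that specialization, which is what the findSpecialization lookup above locates:

    int (*fp)(int) = [](auto x) { return x + 1; };  // instantiates operator()<int>
    int five = fp(4);  // invoker delegates to the <int> specialization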
2802 | | |
2803 | 12 | void CodeGenFunction::EmitLambdaStaticInvokeBody(const CXXMethodDecl *MD) { |
2804 | 12 | if (MD->isVariadic()) { |
2805 | 0 | // FIXME: Making this work correctly is nasty because it requires either |
2806 | 0 | // cloning the body of the call operator or making the call operator forward. |
2807 | 0 | CGM.ErrorUnsupported(MD, "lambda conversion to variadic function"); |
2808 | 0 | return; |
2809 | 0 | } |
2810 | 12 | |
2811 | 12 | EmitLambdaDelegatingInvokeBody(MD); |
2812 | 12 | } |