/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/AST/ASTContext.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | //===- ASTContext.cpp - Context to hold long-lived AST nodes --------------===// |
2 | | // |
3 | | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | | // See https://llvm.org/LICENSE.txt for license information. |
5 | | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | | // |
7 | | //===----------------------------------------------------------------------===// |
8 | | // |
9 | | // This file implements the ASTContext interface. |
10 | | // |
11 | | //===----------------------------------------------------------------------===// |
12 | | |
13 | | #include "clang/AST/ASTContext.h" |
14 | | #include "CXXABI.h" |
15 | | #include "Interp/Context.h" |
16 | | #include "clang/AST/APValue.h" |
17 | | #include "clang/AST/ASTConcept.h" |
18 | | #include "clang/AST/ASTMutationListener.h" |
19 | | #include "clang/AST/ASTTypeTraits.h" |
20 | | #include "clang/AST/Attr.h" |
21 | | #include "clang/AST/AttrIterator.h" |
22 | | #include "clang/AST/CharUnits.h" |
23 | | #include "clang/AST/Comment.h" |
24 | | #include "clang/AST/Decl.h" |
25 | | #include "clang/AST/DeclBase.h" |
26 | | #include "clang/AST/DeclCXX.h" |
27 | | #include "clang/AST/DeclContextInternals.h" |
28 | | #include "clang/AST/DeclObjC.h" |
29 | | #include "clang/AST/DeclOpenMP.h" |
30 | | #include "clang/AST/DeclTemplate.h" |
31 | | #include "clang/AST/DeclarationName.h" |
32 | | #include "clang/AST/DependenceFlags.h" |
33 | | #include "clang/AST/Expr.h" |
34 | | #include "clang/AST/ExprCXX.h" |
35 | | #include "clang/AST/ExprConcepts.h" |
36 | | #include "clang/AST/ExternalASTSource.h" |
37 | | #include "clang/AST/Mangle.h" |
38 | | #include "clang/AST/MangleNumberingContext.h" |
39 | | #include "clang/AST/NestedNameSpecifier.h" |
40 | | #include "clang/AST/ParentMapContext.h" |
41 | | #include "clang/AST/RawCommentList.h" |
42 | | #include "clang/AST/RecordLayout.h" |
43 | | #include "clang/AST/Stmt.h" |
44 | | #include "clang/AST/TemplateBase.h" |
45 | | #include "clang/AST/TemplateName.h" |
46 | | #include "clang/AST/Type.h" |
47 | | #include "clang/AST/TypeLoc.h" |
48 | | #include "clang/AST/UnresolvedSet.h" |
49 | | #include "clang/AST/VTableBuilder.h" |
50 | | #include "clang/Basic/AddressSpaces.h" |
51 | | #include "clang/Basic/Builtins.h" |
52 | | #include "clang/Basic/CommentOptions.h" |
53 | | #include "clang/Basic/ExceptionSpecificationType.h" |
54 | | #include "clang/Basic/IdentifierTable.h" |
55 | | #include "clang/Basic/LLVM.h" |
56 | | #include "clang/Basic/LangOptions.h" |
57 | | #include "clang/Basic/Linkage.h" |
58 | | #include "clang/Basic/Module.h" |
59 | | #include "clang/Basic/ObjCRuntime.h" |
60 | | #include "clang/Basic/SanitizerBlacklist.h" |
61 | | #include "clang/Basic/SourceLocation.h" |
62 | | #include "clang/Basic/SourceManager.h" |
63 | | #include "clang/Basic/Specifiers.h" |
64 | | #include "clang/Basic/TargetCXXABI.h" |
65 | | #include "clang/Basic/TargetInfo.h" |
66 | | #include "clang/Basic/XRayLists.h" |
67 | | #include "llvm/ADT/APFixedPoint.h" |
68 | | #include "llvm/ADT/APInt.h" |
69 | | #include "llvm/ADT/APSInt.h" |
70 | | #include "llvm/ADT/ArrayRef.h" |
71 | | #include "llvm/ADT/DenseMap.h" |
72 | | #include "llvm/ADT/DenseSet.h" |
73 | | #include "llvm/ADT/FoldingSet.h" |
74 | | #include "llvm/ADT/None.h" |
75 | | #include "llvm/ADT/Optional.h" |
76 | | #include "llvm/ADT/PointerUnion.h" |
77 | | #include "llvm/ADT/STLExtras.h" |
78 | | #include "llvm/ADT/SmallPtrSet.h" |
79 | | #include "llvm/ADT/SmallVector.h" |
80 | | #include "llvm/ADT/StringExtras.h" |
81 | | #include "llvm/ADT/StringRef.h" |
82 | | #include "llvm/ADT/Triple.h" |
83 | | #include "llvm/Support/Capacity.h" |
84 | | #include "llvm/Support/Casting.h" |
85 | | #include "llvm/Support/Compiler.h" |
86 | | #include "llvm/Support/ErrorHandling.h" |
87 | | #include "llvm/Support/MathExtras.h" |
88 | | #include "llvm/Support/raw_ostream.h" |
89 | | #include <algorithm> |
90 | | #include <cassert> |
91 | | #include <cstddef> |
92 | | #include <cstdint> |
93 | | #include <cstdlib> |
94 | | #include <map> |
95 | | #include <memory> |
96 | | #include <string> |
97 | | #include <tuple> |
98 | | #include <utility> |
99 | | |
100 | | using namespace clang; |
101 | | |
102 | | enum FloatingRank { |
103 | | BFloat16Rank, Float16Rank, HalfRank, FloatRank, DoubleRank, LongDoubleRank, Float128Rank |
104 | | }; |
105 | | |
106 | | /// \returns location that is relevant when searching for Doc comments related |
107 | | /// to \p D. |
108 | | static SourceLocation getDeclLocForCommentSearch(const Decl *D, |
109 | 39.7k | SourceManager &SourceMgr) { |
110 | 39.7k | assert(D); |
111 | | |
112 | | // User can not attach documentation to implicit declarations. |
113 | 39.7k | if (D->isImplicit()) |
114 | 9.07k | return {}; |
115 | | |
116 | | // User can not attach documentation to implicit instantiations. |
117 | 30.6k | if (const auto *FD = dyn_cast<FunctionDecl>(D)) { |
118 | 14.3k | if (FD->getTemplateSpecializationKind() == TSK_ImplicitInstantiation) |
119 | 136 | return {}; |
120 | 30.5k | } |
121 | | |
122 | 30.5k | if (const auto *VD = dyn_cast<VarDecl>(D)) { |
123 | 6.74k | if (VD->isStaticDataMember() && |
124 | 50 | VD->getTemplateSpecializationKind() == TSK_ImplicitInstantiation) |
125 | 8 | return {}; |
126 | 30.5k | } |
127 | | |
128 | 30.5k | if (const auto *CRD = dyn_cast<CXXRecordDecl>(D)) { |
129 | 1.82k | if (CRD->getTemplateSpecializationKind() == TSK_ImplicitInstantiation) |
130 | 101 | return {}; |
131 | 30.4k | } |
132 | | |
133 | 30.4k | if (const auto *CTSD = dyn_cast<ClassTemplateSpecializationDecl>(D)) { |
134 | 135 | TemplateSpecializationKind TSK = CTSD->getSpecializationKind(); |
135 | 135 | if (TSK == TSK_ImplicitInstantiation || |
136 | 135 | TSK == TSK_Undeclared) |
137 | 18 | return {}; |
138 | 30.4k | } |
139 | | |
140 | 30.4k | if (const auto *ED = dyn_cast<EnumDecl>(D)) { |
141 | 152 | if (ED->getTemplateSpecializationKind() == TSK_ImplicitInstantiation) |
142 | 4 | return {}; |
143 | 30.4k | } |
144 | 30.4k | if (const auto *TD = dyn_cast<TagDecl>(D)) { |
145 | | // When tag declaration (but not definition!) is part of the |
146 | | // decl-specifier-seq of some other declaration, it doesn't get comment |
147 | 2.12k | if (TD->isEmbeddedInDeclarator() && !TD->isCompleteDefinition()205 ) |
148 | 31 | return {}; |
149 | 30.4k | } |
150 | | // TODO: handle comments for function parameters properly. |
151 | 30.4k | if (isa<ParmVarDecl>(D)) |
152 | 3.08k | return {}; |
153 | | |
154 | | // TODO: we could look up template parameter documentation in the template |
155 | | // documentation. |
156 | 27.3k | if (isa<TemplateTypeParmDecl>(D) || |
157 | 26.6k | isa<NonTypeTemplateParmDecl>(D) || |
158 | 26.4k | isa<TemplateTemplateParmDecl>(D)) |
159 | 890 | return {}; |
160 | | |
161 | | // Find declaration location. |
162 | | // For Objective-C declarations we generally don't expect to have multiple |
163 | | // declarators, thus use declaration starting location as the "declaration |
164 | | // location". |
165 | | // For all other declarations multiple declarators are used quite frequently, |
166 | | // so we use the location of the identifier as the "declaration location". |
167 | 26.4k | if (isa<ObjCMethodDecl>(D) || isa<ObjCContainerDecl>(D)26.0k || |
168 | 25.5k | isa<ObjCPropertyDecl>(D) || |
169 | 25.4k | isa<RedeclarableTemplateDecl>(D) || |
170 | 24.6k | isa<ClassTemplateSpecializationDecl>(D) || |
171 | | // Allow association with Y across {} in `typedef struct X {} Y`. |
172 | 24.4k | isa<TypedefDecl>(D)) |
173 | 2.53k | return D->getBeginLoc(); |
174 | 23.8k | else { |
175 | 23.8k | const SourceLocation DeclLoc = D->getLocation(); |
176 | 23.8k | if (DeclLoc.isMacroID()) { |
177 | 65 | if (isa<TypedefDecl>(D)) { |
178 | | // If location of the typedef name is in a macro, it is because being |
179 | | // declared via a macro. Try using declaration's starting location as |
180 | | // the "declaration location". |
181 | 0 | return D->getBeginLoc(); |
182 | 65 | } else if (const auto *TD = dyn_cast<TagDecl>(D)) { |
183 | | // If location of the tag decl is inside a macro, but the spelling of |
184 | | // the tag name comes from a macro argument, it looks like a special |
185 | | // macro like NS_ENUM is being used to define the tag decl. In that |
186 | | // case, adjust the source location to the expansion loc so that we can |
187 | | // attach the comment to the tag decl. |
188 | 3 | if (SourceMgr.isMacroArgExpansion(DeclLoc) && |
189 | 3 | TD->isCompleteDefinition()) |
190 | 3 | return SourceMgr.getExpansionLoc(DeclLoc); |
191 | 23.8k | } |
192 | 65 | } |
193 | 23.8k | return DeclLoc; |
194 | 23.8k | } |
195 | | |
196 | 0 | return {}; |
197 | 0 | } |
198 | | |
199 | | RawComment *ASTContext::getRawCommentForDeclNoCacheImpl( |
200 | | const Decl *D, const SourceLocation RepresentativeLocForDecl, |
201 | 2.51k | const std::map<unsigned, RawComment *> &CommentsInTheFile) const { |
202 | | // If the declaration doesn't map directly to a location in a file, we |
203 | | // can't find the comment. |
204 | 2.51k | if (RepresentativeLocForDecl.isInvalid() || |
205 | 2.51k | !RepresentativeLocForDecl.isFileID()) |
206 | 0 | return nullptr; |
207 | | |
208 | | // If there are no comments anywhere, we won't find anything. |
209 | 2.51k | if (CommentsInTheFile.empty()) |
210 | 0 | return nullptr; |
211 | | |
212 | | // Decompose the location for the declaration and find the beginning of the |
213 | | // file buffer. |
214 | 2.51k | const std::pair<FileID, unsigned> DeclLocDecomp = |
215 | 2.51k | SourceMgr.getDecomposedLoc(RepresentativeLocForDecl); |
216 | | |
217 | | // Slow path. |
218 | 2.51k | auto OffsetCommentBehindDecl = |
219 | 2.51k | CommentsInTheFile.lower_bound(DeclLocDecomp.second); |
220 | | |
221 | | // First check whether we have a trailing comment. |
222 | 2.51k | if (OffsetCommentBehindDecl != CommentsInTheFile.end()) { |
223 | 1.86k | RawComment *CommentBehindDecl = OffsetCommentBehindDecl->second; |
224 | 1.86k | if ((CommentBehindDecl->isDocumentation() || |
225 | 22 | LangOpts.CommentOpts.ParseAllComments) && |
226 | 1.86k | CommentBehindDecl->isTrailingComment() && |
227 | 187 | (isa<FieldDecl>(D) || isa<EnumConstantDecl>(D)157 || isa<VarDecl>(D)141 || |
228 | 134 | isa<ObjCMethodDecl>(D)77 || isa<ObjCPropertyDecl>(D)65 )) { |
229 | | |
230 | | // Check that Doxygen trailing comment comes after the declaration, starts |
231 | | // on the same line and in the same file as the declaration. |
232 | 134 | if (SourceMgr.getLineNumber(DeclLocDecomp.first, DeclLocDecomp.second) == |
233 | 134 | Comments.getCommentBeginLine(CommentBehindDecl, DeclLocDecomp.first, |
234 | 116 | OffsetCommentBehindDecl->first)) { |
235 | 116 | return CommentBehindDecl; |
236 | 116 | } |
237 | 2.40k | } |
238 | 1.86k | } |
239 | | |
240 | | // The comment just after the declaration was not a trailing comment. |
241 | | // Let's look at the previous comment. |
242 | 2.40k | if (OffsetCommentBehindDecl == CommentsInTheFile.begin()) |
243 | 207 | return nullptr; |
244 | | |
245 | 2.19k | auto OffsetCommentBeforeDecl = --OffsetCommentBehindDecl; |
246 | 2.19k | RawComment *CommentBeforeDecl = OffsetCommentBeforeDecl->second; |
247 | | |
248 | | // Check that we actually have a non-member Doxygen comment. |
249 | 2.19k | if (!(CommentBeforeDecl->isDocumentation() || |
250 | 27 | LangOpts.CommentOpts.ParseAllComments) || |
251 | 2.19k | CommentBeforeDecl->isTrailingComment()) |
252 | 50 | return nullptr; |
253 | | |
254 | | // Decompose the end of the comment. |
255 | 2.14k | const unsigned CommentEndOffset = |
256 | 2.14k | Comments.getCommentEndOffset(CommentBeforeDecl); |
257 | | |
258 | | // Get the corresponding buffer. |
259 | 2.14k | bool Invalid = false; |
260 | 2.14k | const char *Buffer = SourceMgr.getBufferData(DeclLocDecomp.first, |
261 | 2.14k | &Invalid).data(); |
262 | 2.14k | if (Invalid) |
263 | 0 | return nullptr; |
264 | | |
265 | | // Extract text between the comment and declaration. |
266 | 2.14k | StringRef Text(Buffer + CommentEndOffset, |
267 | 2.14k | DeclLocDecomp.second - CommentEndOffset); |
268 | | |
269 | | // There should be no other declarations or preprocessor directives between |
270 | | // comment and declaration. |
271 | 2.14k | if (Text.find_first_of(";{}#@") != StringRef::npos) |
272 | 548 | return nullptr; |
273 | | |
274 | 1.59k | return CommentBeforeDecl; |
275 | 1.59k | } |
276 | | |
277 | 38.7k | RawComment *ASTContext::getRawCommentForDeclNoCache(const Decl *D) const { |
278 | 38.7k | const SourceLocation DeclLoc = getDeclLocForCommentSearch(D, SourceMgr); |
279 | | |
280 | | // If the declaration doesn't map directly to a location in a file, we |
281 | | // can't find the comment. |
282 | 38.7k | if (DeclLoc.isInvalid() || !DeclLoc.isFileID()23.4k ) |
283 | 15.2k | return nullptr; |
284 | | |
285 | 23.4k | if (ExternalSource && !CommentsLoaded12.8k ) { |
286 | 226 | ExternalSource->ReadComments(); |
287 | 226 | CommentsLoaded = true; |
288 | 226 | } |
289 | | |
290 | 23.4k | if (Comments.empty()) |
291 | 21.8k | return nullptr; |
292 | | |
293 | 1.54k | const FileID File = SourceMgr.getDecomposedLoc(DeclLoc).first; |
294 | 1.54k | const auto CommentsInThisFile = Comments.getCommentsInFile(File); |
295 | 1.54k | if (!CommentsInThisFile || CommentsInThisFile->empty()1.54k ) |
296 | 4 | return nullptr; |
297 | | |
298 | 1.54k | return getRawCommentForDeclNoCacheImpl(D, DeclLoc, *CommentsInThisFile); |
299 | 1.54k | } |
300 | | |
301 | 24.5M | void ASTContext::addComment(const RawComment &RC) { |
302 | 24.5M | assert(LangOpts.RetainCommentsFromSystemHeaders || |
303 | 24.5M | !SourceMgr.isInSystemHeader(RC.getSourceRange().getBegin())); |
304 | 24.5M | Comments.addComment(RC, LangOpts.CommentOpts, BumpAlloc); |
305 | 24.5M | } |
306 | | |
307 | | /// If we have a 'templated' declaration for a template, adjust 'D' to |
308 | | /// refer to the actual template. |
309 | | /// If we have an implicit instantiation, adjust 'D' to refer to template. |
310 | 24.3k | static const Decl &adjustDeclToTemplate(const Decl &D) { |
311 | 24.3k | if (const auto *FD = dyn_cast<FunctionDecl>(&D)) { |
312 | | // Is this function declaration part of a function template? |
313 | 5.81k | if (const FunctionTemplateDecl *FTD = FD->getDescribedFunctionTemplate()) |
314 | 3 | return *FTD; |
315 | | |
316 | | // Nothing to do if function is not an implicit instantiation. |
317 | 5.81k | if (FD->getTemplateSpecializationKind() != TSK_ImplicitInstantiation) |
318 | 5.78k | return D; |
319 | | |
320 | | // Function is an implicit instantiation of a function template? |
321 | 28 | if (const FunctionTemplateDecl *FTD = FD->getPrimaryTemplate()) |
322 | 4 | return *FTD; |
323 | | |
324 | | // Function is instantiated from a member definition of a class template? |
325 | 24 | if (const FunctionDecl *MemberDecl = |
326 | 24 | FD->getInstantiatedFromMemberFunction()) |
327 | 24 | return *MemberDecl; |
328 | | |
329 | 0 | return D; |
330 | 0 | } |
331 | 18.5k | if (const auto *VD = dyn_cast<VarDecl>(&D)) { |
332 | | // Static data member is instantiated from a member definition of a class |
333 | | // template? |
334 | 4.37k | if (VD->isStaticDataMember()) |
335 | 82 | if (const VarDecl *MemberDecl = VD->getInstantiatedFromStaticDataMember()) |
336 | 6 | return *MemberDecl; |
337 | | |
338 | 4.37k | return D; |
339 | 4.37k | } |
340 | 14.1k | if (const auto *CRD = dyn_cast<CXXRecordDecl>(&D)) { |
341 | | // Is this class declaration part of a class template? |
342 | 2.30k | if (const ClassTemplateDecl *CTD = CRD->getDescribedClassTemplate()) |
343 | 48 | return *CTD; |
344 | | |
345 | | // Class is an implicit instantiation of a class template or partial |
346 | | // specialization? |
347 | 2.26k | if (const auto *CTSD = dyn_cast<ClassTemplateSpecializationDecl>(CRD)) { |
348 | 202 | if (CTSD->getSpecializationKind() != TSK_ImplicitInstantiation) |
349 | 169 | return D; |
350 | 33 | llvm::PointerUnion<ClassTemplateDecl *, |
351 | 33 | ClassTemplatePartialSpecializationDecl *> |
352 | 33 | PU = CTSD->getSpecializedTemplateOrPartial(); |
353 | 33 | return PU.is<ClassTemplateDecl *>() |
354 | 33 | ? *static_cast<const Decl *>(PU.get<ClassTemplateDecl *>()) |
355 | 0 | : *static_cast<const Decl *>( |
356 | 0 | PU.get<ClassTemplatePartialSpecializationDecl *>()); |
357 | 33 | } |
358 | | |
359 | | // Class is instantiated from a member definition of a class template? |
360 | 2.05k | if (const MemberSpecializationInfo *Info = |
361 | 4 | CRD->getMemberSpecializationInfo()) |
362 | 4 | return *Info->getInstantiatedFrom(); |
363 | | |
364 | 2.05k | return D; |
365 | 2.05k | } |
366 | 11.8k | if (const auto *ED = dyn_cast<EnumDecl>(&D)) { |
367 | | // Enum is instantiated from a member definition of a class template? |
368 | 500 | if (const EnumDecl *MemberDecl = ED->getInstantiatedFromMemberEnum()) |
369 | 4 | return *MemberDecl; |
370 | | |
371 | 496 | return D; |
372 | 496 | } |
373 | | // FIXME: Adjust alias templates? |
374 | 11.3k | return D; |
375 | 11.3k | } |
376 | | |
377 | | const RawComment *ASTContext::getRawCommentForAnyRedecl( |
378 | | const Decl *D, |
379 | 16.0k | const Decl **OriginalDecl) const { |
380 | 16.0k | if (!D) { |
381 | 0 | if (OriginalDecl) |
382 | 0 | OriginalDecl = nullptr; |
383 | 0 | return nullptr; |
384 | 0 | } |
385 | | |
386 | 16.0k | D = &adjustDeclToTemplate(*D); |
387 | | |
388 | | // Any comment directly attached to D? |
389 | 16.0k | { |
390 | 16.0k | auto DeclComment = DeclRawComments.find(D); |
391 | 16.0k | if (DeclComment != DeclRawComments.end()) { |
392 | 2.61k | if (OriginalDecl) |
393 | 845 | *OriginalDecl = D; |
394 | 2.61k | return DeclComment->second; |
395 | 2.61k | } |
396 | 13.3k | } |
397 | | |
398 | | // Any comment attached to any redeclaration of D? |
399 | 13.3k | const Decl *CanonicalD = D->getCanonicalDecl(); |
400 | 13.3k | if (!CanonicalD) |
401 | 0 | return nullptr; |
402 | | |
403 | 13.3k | { |
404 | 13.3k | auto RedeclComment = RedeclChainComments.find(CanonicalD); |
405 | 13.3k | if (RedeclComment != RedeclChainComments.end()) { |
406 | 108 | if (OriginalDecl) |
407 | 18 | *OriginalDecl = RedeclComment->second; |
408 | 108 | auto CommentAtRedecl = DeclRawComments.find(RedeclComment->second); |
409 | 108 | assert(CommentAtRedecl != DeclRawComments.end() && |
410 | 108 | "This decl is supposed to have comment attached."); |
411 | 108 | return CommentAtRedecl->second; |
412 | 108 | } |
413 | 13.2k | } |
414 | | |
415 | | // Any redeclarations of D that we haven't checked for comments yet? |
416 | | // We can't use DenseMap::iterator directly since it'd get invalid. |
417 | 13.2k | auto LastCheckedRedecl = [this, CanonicalD]() -> const Decl * { |
418 | 13.2k | auto LookupRes = CommentlessRedeclChains.find(CanonicalD); |
419 | 13.2k | if (LookupRes != CommentlessRedeclChains.end()) |
420 | 8.19k | return LookupRes->second; |
421 | 5.09k | return nullptr; |
422 | 5.09k | }(); |
423 | | |
424 | 34.4k | for (const auto Redecl : D->redecls()) { |
425 | 34.4k | assert(Redecl); |
426 | | // Skip all redeclarations that have been checked previously. |
427 | 34.4k | if (LastCheckedRedecl) { |
428 | 19.0k | if (LastCheckedRedecl == Redecl) { |
429 | 8.18k | LastCheckedRedecl = nullptr; |
430 | 8.18k | } |
431 | 19.0k | continue; |
432 | 19.0k | } |
433 | 15.4k | const RawComment *RedeclComment = getRawCommentForDeclNoCache(Redecl); |
434 | 15.4k | if (RedeclComment) { |
435 | 940 | cacheRawCommentForDecl(*Redecl, *RedeclComment); |
436 | 940 | if (OriginalDecl) |
437 | 2 | *OriginalDecl = Redecl; |
438 | 940 | return RedeclComment; |
439 | 940 | } |
440 | 14.4k | CommentlessRedeclChains[CanonicalD] = Redecl; |
441 | 14.4k | } |
442 | | |
443 | 12.3k | if (OriginalDecl) |
444 | 6.28k | *OriginalDecl = nullptr; |
445 | 12.3k | return nullptr; |
446 | 13.2k | } |
447 | | |
448 | | void ASTContext::cacheRawCommentForDecl(const Decl &OriginalD, |
449 | 1.64k | const RawComment &Comment) const { |
450 | 1.64k | assert(Comment.isDocumentation() || LangOpts.CommentOpts.ParseAllComments); |
451 | 1.64k | DeclRawComments.try_emplace(&OriginalD, &Comment); |
452 | 1.64k | const Decl *const CanonicalDecl = OriginalD.getCanonicalDecl(); |
453 | 1.64k | RedeclChainComments.try_emplace(CanonicalDecl, &OriginalD); |
454 | 1.64k | CommentlessRedeclChains.erase(CanonicalDecl); |
455 | 1.64k | } |
456 | | |
457 | | static void addRedeclaredMethods(const ObjCMethodDecl *ObjCMethod, |
458 | 365 | SmallVectorImpl<const NamedDecl *> &Redeclared) { |
459 | 365 | const DeclContext *DC = ObjCMethod->getDeclContext(); |
460 | 365 | if (const auto *IMD = dyn_cast<ObjCImplDecl>(DC)) { |
461 | 82 | const ObjCInterfaceDecl *ID = IMD->getClassInterface(); |
462 | 82 | if (!ID) |
463 | 0 | return; |
464 | | // Add redeclared method here. |
465 | 82 | for (const auto *Ext : ID->known_extensions()) { |
466 | 10 | if (ObjCMethodDecl *RedeclaredMethod = |
467 | 3 | Ext->getMethod(ObjCMethod->getSelector(), |
468 | 3 | ObjCMethod->isInstanceMethod())) |
469 | 3 | Redeclared.push_back(RedeclaredMethod); |
470 | 10 | } |
471 | 82 | } |
472 | 365 | } |
473 | | |
474 | | void ASTContext::attachCommentsToJustParsedDecls(ArrayRef<Decl *> Decls, |
475 | 1.36k | const Preprocessor *PP) { |
476 | 1.36k | if (Comments.empty() || Decls.empty()1.04k ) |
477 | 325 | return; |
478 | | |
479 | 1.04k | FileID File; |
480 | 1.04k | for (Decl *D : Decls) { |
481 | 1.04k | SourceLocation Loc = D->getLocation(); |
482 | 1.04k | if (Loc.isValid()) { |
483 | | // See if there are any new comments that are not attached to a decl. |
484 | | // The location doesn't have to be precise - we care only about the file. |
485 | 1.04k | File = SourceMgr.getDecomposedLoc(Loc).first; |
486 | 1.04k | break; |
487 | 1.04k | } |
488 | 1.04k | } |
489 | | |
490 | 1.04k | if (File.isInvalid()) |
491 | 0 | return; |
492 | | |
493 | 1.04k | auto CommentsInThisFile = Comments.getCommentsInFile(File); |
494 | 1.04k | if (!CommentsInThisFile || CommentsInThisFile->empty() || |
495 | 1.04k | CommentsInThisFile->rbegin()->second->isAttached()) |
496 | 0 | return; |
497 | | |
498 | | // There is at least one comment not attached to a decl. |
499 | | // Maybe it should be attached to one of Decls? |
500 | | // |
501 | | // Note that this way we pick up not only comments that precede the |
502 | | // declaration, but also comments that *follow* the declaration -- thanks to |
503 | | // the lookahead in the lexer: we've consumed the semicolon and looked |
504 | | // ahead through comments. |
505 | | |
506 | 1.06k | for (const Decl *D : Decls)1.04k { |
507 | 1.06k | assert(D); |
508 | 1.06k | if (D->isInvalidDecl()) |
509 | 7 | continue; |
510 | | |
511 | 1.05k | D = &adjustDeclToTemplate(*D); |
512 | | |
513 | 1.05k | const SourceLocation DeclLoc = getDeclLocForCommentSearch(D, SourceMgr); |
514 | | |
515 | 1.05k | if (DeclLoc.isInvalid() || !DeclLoc.isFileID()1.05k ) |
516 | 1 | continue; |
517 | | |
518 | 1.05k | if (DeclRawComments.count(D) > 0) |
519 | 81 | continue; |
520 | | |
521 | 973 | if (RawComment *const DocComment = |
522 | 703 | getRawCommentForDeclNoCacheImpl(D, DeclLoc, *CommentsInThisFile)) { |
523 | 703 | cacheRawCommentForDecl(*D, *DocComment); |
524 | 703 | comments::FullComment *FC = DocComment->parse(*this, PP, D); |
525 | 703 | ParsedComments[D->getCanonicalDecl()] = FC; |
526 | 703 | } |
527 | 973 | } |
528 | 1.04k | } |
529 | | |
530 | | comments::FullComment *ASTContext::cloneFullComment(comments::FullComment *FC, |
531 | 106 | const Decl *D) const { |
532 | 106 | auto *ThisDeclInfo = new (*this) comments::DeclInfo; |
533 | 106 | ThisDeclInfo->CommentDecl = D; |
534 | 106 | ThisDeclInfo->IsFilled = false; |
535 | 106 | ThisDeclInfo->fill(); |
536 | 106 | ThisDeclInfo->CommentDecl = FC->getDecl(); |
537 | 106 | if (!ThisDeclInfo->TemplateParameters) |
538 | 97 | ThisDeclInfo->TemplateParameters = FC->getDeclInfo()->TemplateParameters; |
539 | 106 | comments::FullComment *CFC = |
540 | 106 | new (*this) comments::FullComment(FC->getBlocks(), |
541 | 106 | ThisDeclInfo); |
542 | 106 | return CFC; |
543 | 106 | } |
544 | | |
545 | 23.2k | comments::FullComment *ASTContext::getLocalCommentForDeclUncached(const Decl *D) const { |
546 | 23.2k | const RawComment *RC = getRawCommentForDeclNoCache(D); |
547 | 23.2k | return RC ? RC->parse(*this, nullptr, D)54 : nullptr; |
548 | 23.2k | } |
549 | | |
550 | | comments::FullComment *ASTContext::getCommentForDecl( |
551 | | const Decl *D, |
552 | 7.35k | const Preprocessor *PP) const { |
553 | 7.35k | if (!D || D->isInvalidDecl()) |
554 | 90 | return nullptr; |
555 | 7.26k | D = &adjustDeclToTemplate(*D); |
556 | | |
557 | 7.26k | const Decl *Canonical = D->getCanonicalDecl(); |
558 | 7.26k | llvm::DenseMap<const Decl *, comments::FullComment *>::iterator Pos = |
559 | 7.26k | ParsedComments.find(Canonical); |
560 | | |
561 | 7.26k | if (Pos != ParsedComments.end()) { |
562 | 114 | if (Canonical != D) { |
563 | 32 | comments::FullComment *FC = Pos->second; |
564 | 32 | comments::FullComment *CFC = cloneFullComment(FC, D); |
565 | 32 | return CFC; |
566 | 32 | } |
567 | 82 | return Pos->second; |
568 | 82 | } |
569 | | |
570 | 7.15k | const Decl *OriginalDecl = nullptr; |
571 | | |
572 | 7.15k | const RawComment *RC = getRawCommentForAnyRedecl(D, &OriginalDecl); |
573 | 7.15k | if (!RC) { |
574 | 6.28k | if (isa<ObjCMethodDecl>(D) || isa<FunctionDecl>(D)5.87k ) { |
575 | 1.52k | SmallVector<const NamedDecl*, 8> Overridden; |
576 | 1.52k | const auto *OMD = dyn_cast<ObjCMethodDecl>(D); |
577 | 1.52k | if (OMD && OMD->isPropertyAccessor()407 ) |
578 | 116 | if (const ObjCPropertyDecl *PDecl = OMD->findPropertyDecl()) |
579 | 116 | if (comments::FullComment *FC = getCommentForDecl(PDecl, PP)) |
580 | 42 | return cloneFullComment(FC, D); |
581 | 1.48k | if (OMD) |
582 | 365 | addRedeclaredMethods(OMD, Overridden); |
583 | 1.48k | getOverriddenMethods(dyn_cast<NamedDecl>(D), Overridden); |
584 | 1.54k | for (unsigned i = 0, e = Overridden.size(); i < e; i++62 ) |
585 | 66 | if (comments::FullComment *FC = getCommentForDecl(Overridden[i], PP)) |
586 | 4 | return cloneFullComment(FC, D); |
587 | 1.48k | } |
588 | 4.76k | else if (const auto *TD = dyn_cast<TypedefNameDecl>(D)) { |
589 | | // Attach any tag type's documentation to its typedef if latter |
590 | | // does not have one of its own. |
591 | 379 | QualType QT = TD->getUnderlyingType(); |
592 | 379 | if (const auto *TT = QT->getAs<TagType>()) |
593 | 61 | if (const Decl *TD = TT->getDecl()) |
594 | 61 | if (comments::FullComment *FC = getCommentForDecl(TD, PP)) |
595 | 4 | return cloneFullComment(FC, D); |
596 | 4.38k | } |
597 | 4.38k | else if (const auto *IC = dyn_cast<ObjCInterfaceDecl>(D)) { |
598 | 480 | while (IC->getSuperClass()) { |
599 | 98 | IC = IC->getSuperClass(); |
600 | 98 | if (comments::FullComment *FC = getCommentForDecl(IC, PP)) |
601 | 5 | return cloneFullComment(FC, D); |
602 | 98 | } |
603 | 387 | } |
604 | 3.99k | else if (const auto *CD = dyn_cast<ObjCCategoryDecl>(D)) { |
605 | 56 | if (const ObjCInterfaceDecl *IC = CD->getClassInterface()) |
606 | 56 | if (comments::FullComment *FC = getCommentForDecl(IC, PP)) |
607 | 1 | return cloneFullComment(FC, D); |
608 | 3.93k | } |
609 | 3.93k | else if (const auto *RD = dyn_cast<CXXRecordDecl>(D)) { |
610 | 674 | if (!(RD = RD->getDefinition())) |
611 | 19 | return nullptr; |
612 | | // Check non-virtual bases. |
613 | 655 | for (const auto &I : RD->bases()) { |
614 | 217 | if (I.isVirtual() || (I.getAccessSpecifier() != AS_public)147 ) |
615 | 76 | continue; |
616 | 141 | QualType Ty = I.getType(); |
617 | 141 | if (Ty.isNull()) |
618 | 0 | continue; |
619 | 141 | if (const CXXRecordDecl *NonVirtualBase = Ty->getAsCXXRecordDecl()) { |
620 | 141 | if (!(NonVirtualBase= NonVirtualBase->getDefinition())) |
621 | 0 | continue; |
622 | | |
623 | 141 | if (comments::FullComment *FC = getCommentForDecl((NonVirtualBase), PP)) |
624 | 8 | return cloneFullComment(FC, D); |
625 | 141 | } |
626 | 141 | } |
627 | | // Check virtual bases. |
628 | 647 | for (const auto &I : RD->vbases()) { |
629 | 116 | if (I.getAccessSpecifier() != AS_public) |
630 | 21 | continue; |
631 | 95 | QualType Ty = I.getType(); |
632 | 95 | if (Ty.isNull()) |
633 | 0 | continue; |
634 | 95 | if (const CXXRecordDecl *VirtualBase = Ty->getAsCXXRecordDecl()) { |
635 | 95 | if (!(VirtualBase= VirtualBase->getDefinition())) |
636 | 0 | continue; |
637 | 95 | if (comments::FullComment *FC = getCommentForDecl((VirtualBase), PP)) |
638 | 10 | return cloneFullComment(FC, D); |
639 | 95 | } |
640 | 95 | } |
641 | 647 | } |
642 | 6.19k | return nullptr; |
643 | 865 | } |
644 | | |
645 | | // If the RawComment was attached to other redeclaration of this Decl, we |
646 | | // should parse the comment in context of that other Decl. This is important |
647 | | // because comments can contain references to parameter names which can be |
648 | | // different across redeclarations. |
649 | 865 | if (D != OriginalDecl && OriginalDecl18 ) |
650 | 18 | return getCommentForDecl(OriginalDecl, PP); |
651 | | |
652 | 847 | comments::FullComment *FC = RC->parse(*this, PP, D); |
653 | 847 | ParsedComments[Canonical] = FC; |
654 | 847 | return FC; |
655 | 847 | } |
656 | | |
657 | | void |
658 | | ASTContext::CanonicalTemplateTemplateParm::Profile(llvm::FoldingSetNodeID &ID, |
659 | | const ASTContext &C, |
660 | 21.7k | TemplateTemplateParmDecl *Parm) { |
661 | 21.7k | ID.AddInteger(Parm->getDepth()); |
662 | 21.7k | ID.AddInteger(Parm->getPosition()); |
663 | 21.7k | ID.AddBoolean(Parm->isParameterPack()); |
664 | | |
665 | 21.7k | TemplateParameterList *Params = Parm->getTemplateParameters(); |
666 | 21.7k | ID.AddInteger(Params->size()); |
667 | 21.7k | for (TemplateParameterList::const_iterator P = Params->begin(), |
668 | 21.7k | PEnd = Params->end(); |
669 | 55.1k | P != PEnd; ++P33.4k ) { |
670 | 33.4k | if (const auto *TTP = dyn_cast<TemplateTypeParmDecl>(*P)) { |
671 | 31.0k | ID.AddInteger(0); |
672 | 31.0k | ID.AddBoolean(TTP->isParameterPack()); |
673 | 31.0k | const TypeConstraint *TC = TTP->getTypeConstraint(); |
674 | 31.0k | ID.AddBoolean(TC != nullptr); |
675 | 31.0k | if (TC) |
676 | 1 | TC->getImmediatelyDeclaredConstraint()->Profile(ID, C, |
677 | 1 | /*Canonical=*/true); |
678 | 31.0k | if (TTP->isExpandedParameterPack()) { |
679 | 0 | ID.AddBoolean(true); |
680 | 0 | ID.AddInteger(TTP->getNumExpansionParameters()); |
681 | 0 | } else |
682 | 31.0k | ID.AddBoolean(false); |
683 | 31.0k | continue; |
684 | 31.0k | } |
685 | | |
686 | 2.33k | if (const auto *NTTP = dyn_cast<NonTypeTemplateParmDecl>(*P)) { |
687 | 2.31k | ID.AddInteger(1); |
688 | 2.31k | ID.AddBoolean(NTTP->isParameterPack()); |
689 | 2.31k | ID.AddPointer(NTTP->getType().getCanonicalType().getAsOpaquePtr()); |
690 | 2.31k | if (NTTP->isExpandedParameterPack()) { |
691 | 1 | ID.AddBoolean(true); |
692 | 1 | ID.AddInteger(NTTP->getNumExpansionTypes()); |
693 | 4 | for (unsigned I = 0, N = NTTP->getNumExpansionTypes(); I != N; ++I3 ) { |
694 | 3 | QualType T = NTTP->getExpansionType(I); |
695 | 3 | ID.AddPointer(T.getCanonicalType().getAsOpaquePtr()); |
696 | 3 | } |
697 | 1 | } else |
698 | 2.31k | ID.AddBoolean(false); |
699 | 2.31k | continue; |
700 | 2.31k | } |
701 | | |
702 | 22 | auto *TTP = cast<TemplateTemplateParmDecl>(*P); |
703 | 22 | ID.AddInteger(2); |
704 | 22 | Profile(ID, C, TTP); |
705 | 22 | } |
706 | 21.7k | Expr *RequiresClause = Parm->getTemplateParameters()->getRequiresClause(); |
707 | 21.7k | ID.AddBoolean(RequiresClause != nullptr); |
708 | 21.7k | if (RequiresClause) |
709 | 0 | RequiresClause->Profile(ID, C, /*Canonical=*/true); |
710 | 21.7k | } |
711 | | |
712 | | static Expr * |
713 | | canonicalizeImmediatelyDeclaredConstraint(const ASTContext &C, Expr *IDC, |
714 | 1 | QualType ConstrainedType) { |
715 | | // This is a bit ugly - we need to form a new immediately-declared |
716 | | // constraint that references the new parameter; this would ideally |
717 | | // require semantic analysis (e.g. template<C T> struct S {}; - the |
718 | | // converted arguments of C<T> could be an argument pack if C is |
719 | | // declared as template<typename... T> concept C = ...). |
720 | | // We don't have semantic analysis here so we dig deep into the |
721 | | // ready-made constraint expr and change the thing manually. |
722 | 1 | ConceptSpecializationExpr *CSE; |
723 | 1 | if (const auto *Fold = dyn_cast<CXXFoldExpr>(IDC)) |
724 | 0 | CSE = cast<ConceptSpecializationExpr>(Fold->getLHS()); |
725 | 1 | else |
726 | 1 | CSE = cast<ConceptSpecializationExpr>(IDC); |
727 | 1 | ArrayRef<TemplateArgument> OldConverted = CSE->getTemplateArguments(); |
728 | 1 | SmallVector<TemplateArgument, 3> NewConverted; |
729 | 1 | NewConverted.reserve(OldConverted.size()); |
730 | 1 | if (OldConverted.front().getKind() == TemplateArgument::Pack) { |
731 | | // The case: |
732 | | // template<typename... T> concept C = true; |
733 | | // template<C<int> T> struct S; -> constraint is C<{T, int}> |
734 | 0 | NewConverted.push_back(ConstrainedType); |
735 | 0 | for (auto &Arg : OldConverted.front().pack_elements().drop_front(1)) |
736 | 0 | NewConverted.push_back(Arg); |
737 | 0 | TemplateArgument NewPack(NewConverted); |
738 | |
|
739 | 0 | NewConverted.clear(); |
740 | 0 | NewConverted.push_back(NewPack); |
741 | 0 | assert(OldConverted.size() == 1 && |
742 | 0 | "Template parameter pack should be the last parameter"); |
743 | 1 | } else { |
744 | 1 | assert(OldConverted.front().getKind() == TemplateArgument::Type && |
745 | 1 | "Unexpected first argument kind for immediately-declared " |
746 | 1 | "constraint"); |
747 | 1 | NewConverted.push_back(ConstrainedType); |
748 | 1 | for (auto &Arg : OldConverted.drop_front(1)) |
749 | 0 | NewConverted.push_back(Arg); |
750 | 1 | } |
751 | 1 | Expr *NewIDC = ConceptSpecializationExpr::Create( |
752 | 1 | C, CSE->getNamedConcept(), NewConverted, nullptr, |
753 | 1 | CSE->isInstantiationDependent(), CSE->containsUnexpandedParameterPack()); |
754 | | |
755 | 1 | if (auto *OrigFold = dyn_cast<CXXFoldExpr>(IDC)) |
756 | 0 | NewIDC = new (C) CXXFoldExpr( |
757 | 0 | OrigFold->getType(), /*Callee*/nullptr, SourceLocation(), NewIDC, |
758 | 0 | BinaryOperatorKind::BO_LAnd, SourceLocation(), /*RHS=*/nullptr, |
759 | 0 | SourceLocation(), /*NumExpansions=*/None); |
760 | 1 | return NewIDC; |
761 | 1 | } |
762 | | |
763 | | TemplateTemplateParmDecl * |
764 | | ASTContext::getCanonicalTemplateTemplateParmDecl( |
765 | 12.5k | TemplateTemplateParmDecl *TTP) const { |
766 | | // Check if we already have a canonical template template parameter. |
767 | 12.5k | llvm::FoldingSetNodeID ID; |
768 | 12.5k | CanonicalTemplateTemplateParm::Profile(ID, *this, TTP); |
769 | 12.5k | void *InsertPos = nullptr; |
770 | 12.5k | CanonicalTemplateTemplateParm *Canonical |
771 | 12.5k | = CanonTemplateTemplateParms.FindNodeOrInsertPos(ID, InsertPos); |
772 | 12.5k | if (Canonical) |
773 | 8.92k | return Canonical->getParam(); |
774 | | |
775 | | // Build a canonical template parameter list. |
776 | 3.65k | TemplateParameterList *Params = TTP->getTemplateParameters(); |
777 | 3.65k | SmallVector<NamedDecl *, 4> CanonParams; |
778 | 3.65k | CanonParams.reserve(Params->size()); |
779 | 3.65k | for (TemplateParameterList::const_iterator P = Params->begin(), |
780 | 3.65k | PEnd = Params->end(); |
781 | 9.19k | P != PEnd; ++P5.54k ) { |
782 | 5.54k | if (const auto *TTP = dyn_cast<TemplateTypeParmDecl>(*P)) { |
783 | 4.35k | TemplateTypeParmDecl *NewTTP = TemplateTypeParmDecl::Create(*this, |
784 | 4.35k | getTranslationUnitDecl(), SourceLocation(), SourceLocation(), |
785 | 4.35k | TTP->getDepth(), TTP->getIndex(), nullptr, false, |
786 | 4.35k | TTP->isParameterPack(), TTP->hasTypeConstraint(), |
787 | 4.35k | TTP->isExpandedParameterPack() ? |
788 | 4.35k | llvm::Optional<unsigned>(TTP->getNumExpansionParameters())0 : None); |
789 | 4.35k | if (const auto *TC = TTP->getTypeConstraint()) { |
790 | 1 | QualType ParamAsArgument(NewTTP->getTypeForDecl(), 0); |
791 | 1 | Expr *NewIDC = canonicalizeImmediatelyDeclaredConstraint( |
792 | 1 | *this, TC->getImmediatelyDeclaredConstraint(), |
793 | 1 | ParamAsArgument); |
794 | 1 | TemplateArgumentListInfo CanonArgsAsWritten; |
795 | 1 | if (auto *Args = TC->getTemplateArgsAsWritten()) |
796 | 0 | for (const auto &ArgLoc : Args->arguments()) |
797 | 0 | CanonArgsAsWritten.addArgument( |
798 | 0 | TemplateArgumentLoc(ArgLoc.getArgument(), |
799 | 0 | TemplateArgumentLocInfo())); |
800 | 1 | NewTTP->setTypeConstraint( |
801 | 1 | NestedNameSpecifierLoc(), |
802 | 1 | DeclarationNameInfo(TC->getNamedConcept()->getDeclName(), |
803 | 1 | SourceLocation()), /*FoundDecl=*/nullptr, |
804 | | // Actually canonicalizing a TemplateArgumentLoc is difficult so we |
805 | | // simply omit the ArgsAsWritten |
806 | 1 | TC->getNamedConcept(), /*ArgsAsWritten=*/nullptr, NewIDC); |
807 | 1 | } |
808 | 4.35k | CanonParams.push_back(NewTTP); |
809 | 1.18k | } else if (const auto *NTTP = dyn_cast<NonTypeTemplateParmDecl>(*P)) { |
810 | 1.17k | QualType T = getCanonicalType(NTTP->getType()); |
811 | 1.17k | TypeSourceInfo *TInfo = getTrivialTypeSourceInfo(T); |
812 | 1.17k | NonTypeTemplateParmDecl *Param; |
813 | 1.17k | if (NTTP->isExpandedParameterPack()) { |
814 | 1 | SmallVector<QualType, 2> ExpandedTypes; |
815 | 1 | SmallVector<TypeSourceInfo *, 2> ExpandedTInfos; |
816 | 4 | for (unsigned I = 0, N = NTTP->getNumExpansionTypes(); I != N; ++I3 ) { |
817 | 3 | ExpandedTypes.push_back(getCanonicalType(NTTP->getExpansionType(I))); |
818 | 3 | ExpandedTInfos.push_back( |
819 | 3 | getTrivialTypeSourceInfo(ExpandedTypes.back())); |
820 | 3 | } |
821 | | |
822 | 1 | Param = NonTypeTemplateParmDecl::Create(*this, getTranslationUnitDecl(), |
823 | 1 | SourceLocation(), |
824 | 1 | SourceLocation(), |
825 | 1 | NTTP->getDepth(), |
826 | 1 | NTTP->getPosition(), nullptr, |
827 | 1 | T, |
828 | 1 | TInfo, |
829 | 1 | ExpandedTypes, |
830 | 1 | ExpandedTInfos); |
831 | 1.17k | } else { |
832 | 1.17k | Param = NonTypeTemplateParmDecl::Create(*this, getTranslationUnitDecl(), |
833 | 1.17k | SourceLocation(), |
834 | 1.17k | SourceLocation(), |
835 | 1.17k | NTTP->getDepth(), |
836 | 1.17k | NTTP->getPosition(), nullptr, |
837 | 1.17k | T, |
838 | 1.17k | NTTP->isParameterPack(), |
839 | 1.17k | TInfo); |
840 | 1.17k | } |
841 | 1.17k | if (AutoType *AT = T->getContainedAutoType()) { |
842 | 3 | if (AT->isConstrained()) { |
843 | 0 | Param->setPlaceholderTypeConstraint( |
844 | 0 | canonicalizeImmediatelyDeclaredConstraint( |
845 | 0 | *this, NTTP->getPlaceholderTypeConstraint(), T)); |
846 | 0 | } |
847 | 3 | } |
848 | 1.17k | CanonParams.push_back(Param); |
849 | | |
850 | 1.17k | } else |
851 | 4 | CanonParams.push_back(getCanonicalTemplateTemplateParmDecl( |
852 | 4 | cast<TemplateTemplateParmDecl>(*P))); |
853 | 5.54k | } |
854 | | |
855 | 3.65k | Expr *CanonRequiresClause = nullptr; |
856 | 3.65k | if (Expr *RequiresClause = TTP->getTemplateParameters()->getRequiresClause()) |
857 | 0 | CanonRequiresClause = RequiresClause; |
858 | | |
859 | 3.65k | TemplateTemplateParmDecl *CanonTTP |
860 | 3.65k | = TemplateTemplateParmDecl::Create(*this, getTranslationUnitDecl(), |
861 | 3.65k | SourceLocation(), TTP->getDepth(), |
862 | 3.65k | TTP->getPosition(), |
863 | 3.65k | TTP->isParameterPack(), |
864 | 3.65k | nullptr, |
865 | 3.65k | TemplateParameterList::Create(*this, SourceLocation(), |
866 | 3.65k | SourceLocation(), |
867 | 3.65k | CanonParams, |
868 | 3.65k | SourceLocation(), |
869 | 3.65k | CanonRequiresClause)); |
870 | | |
871 | | // Get the new insert position for the node we care about. |
872 | 3.65k | Canonical = CanonTemplateTemplateParms.FindNodeOrInsertPos(ID, InsertPos); |
873 | 3.65k | assert(!Canonical && "Shouldn't be in the map!"); |
874 | 3.65k | (void)Canonical; |
875 | | |
876 | | // Create the canonical template template parameter entry. |
877 | 3.65k | Canonical = new (*this) CanonicalTemplateTemplateParm(CanonTTP); |
878 | 3.65k | CanonTemplateTemplateParms.InsertNode(Canonical, InsertPos); |
879 | 3.65k | return CanonTTP; |
880 | 3.65k | } |
881 | | |
882 | 86.5k | CXXABI *ASTContext::createCXXABI(const TargetInfo &T) { |
883 | 86.5k | if (!LangOpts.CPlusPlus) return nullptr17.9k ; |
884 | | |
885 | 68.6k | switch (T.getCXXABI().getKind()) { |
886 | 31 | case TargetCXXABI::AppleARM64: |
887 | 58 | case TargetCXXABI::Fuchsia: |
888 | 135 | case TargetCXXABI::GenericARM: // Same as Itanium at this level |
889 | 164 | case TargetCXXABI::iOS: |
890 | 171 | case TargetCXXABI::WatchOS: |
891 | 395 | case TargetCXXABI::GenericAArch64: |
892 | 443 | case TargetCXXABI::GenericMIPS: |
893 | 61.6k | case TargetCXXABI::GenericItanium: |
894 | 61.6k | case TargetCXXABI::WebAssembly: |
895 | 61.7k | case TargetCXXABI::XL: |
896 | 61.7k | return CreateItaniumCXXABI(*this); |
897 | 6.90k | case TargetCXXABI::Microsoft: |
898 | 6.90k | return CreateMicrosoftCXXABI(*this); |
899 | 0 | } |
900 | 0 | llvm_unreachable("Invalid CXXABI type!"); |
901 | 0 | } |
902 | | |
903 | 8 | interp::Context &ASTContext::getInterpContext() { |
904 | 8 | if (!InterpContext) { |
905 | 1 | InterpContext.reset(new interp::Context(*this)); |
906 | 1 | } |
907 | 8 | return *InterpContext.get(); |
908 | 8 | } |
909 | | |
910 | 3.15M | ParentMapContext &ASTContext::getParentMapContext() { |
911 | 3.15M | if (!ParentMapCtx) |
912 | 19.8k | ParentMapCtx.reset(new ParentMapContext(*this)); |
913 | 3.15M | return *ParentMapCtx.get(); |
914 | 3.15M | } |
915 | | |
916 | | static const LangASMap *getAddressSpaceMap(const TargetInfo &T, |
917 | 86.5k | const LangOptions &LOpts) { |
918 | 86.5k | if (LOpts.FakeAddressSpaceMap) { |
919 | | // The fake address space map must have a distinct entry for each |
920 | | // language-specific address space. |
921 | 31 | static const unsigned FakeAddrSpaceMap[] = { |
922 | 31 | 0, // Default |
923 | 31 | 1, // opencl_global |
924 | 31 | 3, // opencl_local |
925 | 31 | 2, // opencl_constant |
926 | 31 | 0, // opencl_private |
927 | 31 | 4, // opencl_generic |
928 | 31 | 5, // opencl_global_device |
929 | 31 | 6, // opencl_global_host |
930 | 31 | 7, // cuda_device |
931 | 31 | 8, // cuda_constant |
932 | 31 | 9, // cuda_shared |
933 | 31 | 10, // ptr32_sptr |
934 | 31 | 11, // ptr32_uptr |
935 | 31 | 12 // ptr64 |
936 | 31 | }; |
937 | 31 | return &FakeAddrSpaceMap; |
938 | 86.5k | } else { |
939 | 86.5k | return &T.getAddressSpaceMap(); |
940 | 86.5k | } |
941 | 86.5k | } |
942 | | |
943 | | static bool isAddrSpaceMapManglingEnabled(const TargetInfo &TI, |
944 | 86.5k | const LangOptions &LangOpts) { |
945 | 86.5k | switch (LangOpts.getAddressSpaceMapMangling()) { |
946 | 86.5k | case LangOptions::ASMM_Target: |
947 | 86.5k | return TI.useAddressSpaceMapMangling(); |
948 | 2 | case LangOptions::ASMM_On: |
949 | 2 | return true; |
950 | 3 | case LangOptions::ASMM_Off: |
951 | 3 | return false; |
952 | 0 | } |
953 | 0 | llvm_unreachable("getAddressSpaceMapMangling() doesn't cover anything."); |
954 | 0 | } |
955 | | |
956 | | ASTContext::ASTContext(LangOptions &LOpts, SourceManager &SM, |
957 | | IdentifierTable &idents, SelectorTable &sels, |
958 | | Builtin::Context &builtins) |
959 | | : ConstantArrayTypes(this_()), FunctionProtoTypes(this_()), |
960 | | TemplateSpecializationTypes(this_()), |
961 | | DependentTemplateSpecializationTypes(this_()), AutoTypes(this_()), |
962 | | SubstTemplateTemplateParmPacks(this_()), |
963 | | CanonTemplateTemplateParms(this_()), SourceMgr(SM), LangOpts(LOpts), |
964 | | SanitizerBL(new SanitizerBlacklist(LangOpts.SanitizerBlacklistFiles, SM)), |
965 | | XRayFilter(new XRayFunctionFilter(LangOpts.XRayAlwaysInstrumentFiles, |
966 | | LangOpts.XRayNeverInstrumentFiles, |
967 | | LangOpts.XRayAttrListFiles, SM)), |
968 | | PrintingPolicy(LOpts), Idents(idents), Selectors(sels), |
969 | | BuiltinInfo(builtins), DeclarationNames(*this), Comments(SM), |
970 | | CommentCommandTraits(BumpAlloc, LOpts.CommentOpts), |
971 | 86.5k | CompCategories(this_()), LastSDM(nullptr, 0) { |
972 | 86.5k | TUDecl = TranslationUnitDecl::Create(*this); |
973 | 86.5k | TraversalScope = {TUDecl}; |
974 | 86.5k | } |
975 | | |
976 | 81.9k | ASTContext::~ASTContext() { |
977 | | // Release the DenseMaps associated with DeclContext objects. |
978 | | // FIXME: Is this the ideal solution? |
979 | 81.9k | ReleaseDeclContextMaps(); |
980 | | |
981 | | // Call all of the deallocation functions on all of their targets. |
982 | 81.9k | for (auto &Pair : Deallocations) |
983 | 511k | (Pair.first)(Pair.second); |
984 | | |
985 | | // ASTRecordLayout objects in ASTRecordLayouts must always be destroyed |
986 | | // because they can contain DenseMaps. |
987 | 81.9k | for (llvm::DenseMap<const ObjCContainerDecl*, |
988 | 81.9k | const ASTRecordLayout*>::iterator |
989 | 85.9k | I = ObjCLayouts.begin(), E = ObjCLayouts.end(); I != E; ) |
990 | | // Increment in loop to prevent using deallocated memory. |
991 | 3.99k | if (auto *R = const_cast<ASTRecordLayout *>((I++)->second)) |
992 | 3.26k | R->Destroy(*this); |
993 | | |
994 | 81.9k | for (llvm::DenseMap<const RecordDecl*, const ASTRecordLayout*>::iterator |
995 | 252k | I = ASTRecordLayouts.begin(), E = ASTRecordLayouts.end(); I != E; ) { |
996 | | // Increment in loop to prevent using deallocated memory. |
997 | 170k | if (auto *R = const_cast<ASTRecordLayout *>((I++)->second)) |
998 | 170k | R->Destroy(*this); |
999 | 170k | } |
1000 | | |
1001 | 81.9k | for (llvm::DenseMap<const Decl*, AttrVec*>::iterator A = DeclAttrs.begin(), |
1002 | 81.9k | AEnd = DeclAttrs.end(); |
1003 | 13.3M | A != AEnd; ++A13.2M ) |
1004 | 13.2M | A->second->~AttrVec(); |
1005 | | |
1006 | 81.9k | for (const auto &Value : ModuleInitializers) |
1007 | 640 | Value.second->~PerModuleInitializers(); |
1008 | 81.9k | } |
1009 | | |
1010 | 3 | void ASTContext::setTraversalScope(const std::vector<Decl *> &TopLevelDecls) { |
1011 | 3 | TraversalScope = TopLevelDecls; |
1012 | 3 | getParentMapContext().clear(); |
1013 | 3 | } |
1014 | | |
1015 | 1.25M | void ASTContext::AddDeallocation(void (*Callback)(void *), void *Data) const { |
1016 | 1.25M | Deallocations.push_back({Callback, Data}); |
1017 | 1.25M | } |
1018 | | |
1019 | | void |
1020 | 35.5k | ASTContext::setExternalSource(IntrusiveRefCntPtr<ExternalASTSource> Source) { |
1021 | 35.5k | ExternalSource = std::move(Source); |
1022 | 35.5k | } |
1023 | | |
1024 | 3 | void ASTContext::PrintStats() const { |
1025 | 3 | llvm::errs() << "\n*** AST Context Stats:\n"; |
1026 | 3 | llvm::errs() << " " << Types.size() << " types total.\n"; |
1027 | | |
1028 | 3 | unsigned counts[] = { |
1029 | 156 | #define TYPE(Name, Parent) 0, |
1030 | 3 | #define ABSTRACT_TYPE(Name, Parent) |
1031 | 3 | #include "clang/AST/TypeNodes.inc" |
1032 | 3 | 0 // Extra |
1033 | 3 | }; |
1034 | | |
1035 | 235 | for (unsigned i = 0, e = Types.size(); i != e; ++i232 ) { |
1036 | 232 | Type *T = Types[i]; |
1037 | 232 | counts[(unsigned)T->getTypeClass()]++; |
1038 | 232 | } |
1039 | | |
1040 | 3 | unsigned Idx = 0; |
1041 | 3 | unsigned TotalBytes = 0; |
1042 | 3 | #define TYPE(Name, Parent) \ |
1043 | 156 | if (counts[Idx]) \ |
1044 | 25 | llvm::errs() << " " << counts[Idx] << " " << #Name \ |
1045 | 25 | << " types, " << sizeof(Name##Type) << " each " \ |
1046 | 25 | << "(" << counts[Idx] * sizeof(Name##Type) \ |
1047 | 25 | << " bytes)\n"; \ |
1048 | 156 | TotalBytes += counts[Idx] * sizeof(Name##Type); \ |
1049 | 156 | ++Idx; |
1050 | 3 | #define ABSTRACT_TYPE(Name, Parent) |
1051 | 3 | #include "clang/AST/TypeNodes.inc" |
1052 | | |
1053 | 3 | llvm::errs() << "Total bytes = " << TotalBytes << "\n"; |
1054 | | |
1055 | | // Implicit special member functions. |
1056 | 3 | llvm::errs() << NumImplicitDefaultConstructorsDeclared << "/" |
1057 | 3 | << NumImplicitDefaultConstructors |
1058 | 3 | << " implicit default constructors created\n"; |
1059 | 3 | llvm::errs() << NumImplicitCopyConstructorsDeclared << "/" |
1060 | 3 | << NumImplicitCopyConstructors |
1061 | 3 | << " implicit copy constructors created\n"; |
1062 | 3 | if (getLangOpts().CPlusPlus) |
1063 | 1 | llvm::errs() << NumImplicitMoveConstructorsDeclared << "/" |
1064 | 1 | << NumImplicitMoveConstructors |
1065 | 1 | << " implicit move constructors created\n"; |
1066 | 3 | llvm::errs() << NumImplicitCopyAssignmentOperatorsDeclared << "/" |
1067 | 3 | << NumImplicitCopyAssignmentOperators |
1068 | 3 | << " implicit copy assignment operators created\n"; |
1069 | 3 | if (getLangOpts().CPlusPlus) |
1070 | 1 | llvm::errs() << NumImplicitMoveAssignmentOperatorsDeclared << "/" |
1071 | 1 | << NumImplicitMoveAssignmentOperators |
1072 | 1 | << " implicit move assignment operators created\n"; |
1073 | 3 | llvm::errs() << NumImplicitDestructorsDeclared << "/" |
1074 | 3 | << NumImplicitDestructors |
1075 | 3 | << " implicit destructors created\n"; |
1076 | | |
1077 | 3 | if (ExternalSource) { |
1078 | 3 | llvm::errs() << "\n"; |
1079 | 3 | ExternalSource->PrintStats(); |
1080 | 3 | } |
1081 | | |
1082 | 3 | BumpAlloc.PrintStats(); |
1083 | 3 | } |
1084 | | |
1085 | | void ASTContext::mergeDefinitionIntoModule(NamedDecl *ND, Module *M, |
1086 | 3.27k | bool NotifyListeners) { |
1087 | 3.27k | if (NotifyListeners) |
1088 | 2.99k | if (auto *Listener = getASTMutationListener()) |
1089 | 2.56k | Listener->RedefinedHiddenDefinition(ND, M); |
1090 | | |
1091 | 3.27k | MergedDefModules[cast<NamedDecl>(ND->getCanonicalDecl())].push_back(M); |
1092 | 3.27k | } |
1093 | | |
1094 | 568 | void ASTContext::deduplicateMergedDefinitonsFor(NamedDecl *ND) { |
1095 | 568 | auto It = MergedDefModules.find(cast<NamedDecl>(ND->getCanonicalDecl())); |
1096 | 568 | if (It == MergedDefModules.end()) |
1097 | 0 | return; |
1098 | | |
1099 | 568 | auto &Merged = It->second; |
1100 | 568 | llvm::DenseSet<Module*> Found; |
1101 | 568 | for (Module *&M : Merged) |
1102 | 3.98k | if (!Found.insert(M).second) |
1103 | 0 | M = nullptr; |
1104 | 568 | Merged.erase(std::remove(Merged.begin(), Merged.end(), nullptr), Merged.end()); |
1105 | 568 | } |
1106 | | |
1107 | | ArrayRef<Module *> |
1108 | 35.0k | ASTContext::getModulesWithMergedDefinition(const NamedDecl *Def) { |
1109 | 35.0k | auto MergedIt = |
1110 | 35.0k | MergedDefModules.find(cast<NamedDecl>(Def->getCanonicalDecl())); |
1111 | 35.0k | if (MergedIt == MergedDefModules.end()) |
1112 | 33.9k | return None; |
1113 | 1.10k | return MergedIt->second; |
1114 | 1.10k | } |
1115 | | |
1116 | 284 | void ASTContext::PerModuleInitializers::resolve(ASTContext &Ctx) { |
1117 | 284 | if (LazyInitializers.empty()) |
1118 | 228 | return; |
1119 | | |
1120 | 56 | auto *Source = Ctx.getExternalSource(); |
1121 | 56 | assert(Source && "lazy initializers but no external source"); |
1122 | | |
1123 | 56 | auto LazyInits = std::move(LazyInitializers); |
1124 | 56 | LazyInitializers.clear(); |
1125 | | |
1126 | 56 | for (auto ID : LazyInits) |
1127 | 70 | Initializers.push_back(Source->GetExternalDecl(ID)); |
1128 | | |
1129 | 56 | assert(LazyInitializers.empty() && |
1130 | 56 | "GetExternalDecl for lazy module initializer added more inits"); |
1131 | 56 | } |
1132 | | |
1133 | 63.0k | void ASTContext::addModuleInitializer(Module *M, Decl *D) { |
1134 | | // One special case: if we add a module initializer that imports another |
1135 | | // module, and that module's only initializer is an ImportDecl, simplify. |
1136 | 63.0k | if (const auto *ID = dyn_cast<ImportDecl>(D)) { |
1137 | 62.5k | auto It = ModuleInitializers.find(ID->getImportedModule()); |
1138 | | |
1139 | | // Maybe the ImportDecl does nothing at all. (Common case.) |
1140 | 62.5k | if (It == ModuleInitializers.end()) |
1141 | 62.4k | return; |
1142 | | |
1143 | | // Maybe the ImportDecl only imports another ImportDecl. |
1144 | 100 | auto &Imported = *It->second; |
1145 | 100 | if (Imported.Initializers.size() + Imported.LazyInitializers.size() == 1) { |
1146 | 50 | Imported.resolve(*this); |
1147 | 50 | auto *OnlyDecl = Imported.Initializers.front(); |
1148 | 50 | if (isa<ImportDecl>(OnlyDecl)) |
1149 | 0 | D = OnlyDecl; |
1150 | 50 | } |
1151 | 100 | } |
1152 | | |
1153 | 512 | auto *&Inits = ModuleInitializers[M]; |
1154 | 512 | if (!Inits) |
1155 | 277 | Inits = new (*this) PerModuleInitializers; |
1156 | 512 | Inits->Initializers.push_back(D); |
1157 | 512 | } |
1158 | | |
1159 | 365 | void ASTContext::addLazyModuleInitializers(Module *M, ArrayRef<uint32_t> IDs) { |
1160 | 365 | auto *&Inits = ModuleInitializers[M]; |
1161 | 365 | if (!Inits) |
1162 | 365 | Inits = new (*this) PerModuleInitializers; |
1163 | 365 | Inits->LazyInitializers.insert(Inits->LazyInitializers.end(), |
1164 | 365 | IDs.begin(), IDs.end()); |
1165 | 365 | } |
1166 | | |
1167 | 160k | ArrayRef<Decl *> ASTContext::getModuleInitializers(Module *M) { |
1168 | 160k | auto It = ModuleInitializers.find(M); |
1169 | 160k | if (It == ModuleInitializers.end()) |
1170 | 160k | return None; |
1171 | | |
1172 | 234 | auto *Inits = It->second; |
1173 | 234 | Inits->resolve(*this); |
1174 | 234 | return Inits->Initializers; |
1175 | 234 | } |
1176 | | |
1177 | 14.5M | ExternCContextDecl *ASTContext::getExternCContextDecl() const { |
1178 | 14.5M | if (!ExternCContext) |
1179 | 60.5k | ExternCContext = ExternCContextDecl::Create(*this, getTranslationUnitDecl()); |
1180 | | |
1181 | 14.5M | return ExternCContext; |
1182 | 14.5M | } |
1183 | | |
1184 | | BuiltinTemplateDecl * |
1185 | | ASTContext::buildBuiltinTemplateDecl(BuiltinTemplateKind BTK, |
1186 | 1.04k | const IdentifierInfo *II) const { |
1187 | 1.04k | auto *BuiltinTemplate = BuiltinTemplateDecl::Create(*this, TUDecl, II, BTK); |
1188 | 1.04k | BuiltinTemplate->setImplicit(); |
1189 | 1.04k | TUDecl->addDecl(BuiltinTemplate); |
1190 | | |
1191 | 1.04k | return BuiltinTemplate; |
1192 | 1.04k | } |
1193 | | |
1194 | | BuiltinTemplateDecl * |
1195 | 880 | ASTContext::getMakeIntegerSeqDecl() const { |
1196 | 880 | if (!MakeIntegerSeqDecl) |
1197 | 585 | MakeIntegerSeqDecl = buildBuiltinTemplateDecl(BTK__make_integer_seq, |
1198 | 585 | getMakeIntegerSeqName()); |
1199 | 880 | return MakeIntegerSeqDecl; |
1200 | 880 | } |
1201 | | |
1202 | | BuiltinTemplateDecl * |
1203 | 966 | ASTContext::getTypePackElementDecl() const { |
1204 | 966 | if (!TypePackElementDecl) |
1205 | 457 | TypePackElementDecl = buildBuiltinTemplateDecl(BTK__type_pack_element, |
1206 | 457 | getTypePackElementName()); |
1207 | 966 | return TypePackElementDecl; |
1208 | 966 | } |
1209 | | |
1210 | | RecordDecl *ASTContext::buildImplicitRecord(StringRef Name, |
1211 | 155k | RecordDecl::TagKind TK) const { |
1212 | 155k | SourceLocation Loc; |
1213 | 155k | RecordDecl *NewDecl; |
1214 | 155k | if (getLangOpts().CPlusPlus) |
1215 | 123k | NewDecl = CXXRecordDecl::Create(*this, TK, getTranslationUnitDecl(), Loc, |
1216 | 123k | Loc, &Idents.get(Name)); |
1217 | 32.5k | else |
1218 | 32.5k | NewDecl = RecordDecl::Create(*this, TK, getTranslationUnitDecl(), Loc, Loc, |
1219 | 32.5k | &Idents.get(Name)); |
1220 | 155k | NewDecl->setImplicit(); |
1221 | 155k | NewDecl->addAttr(TypeVisibilityAttr::CreateImplicit( |
1222 | 155k | const_cast<ASTContext &>(*this), TypeVisibilityAttr::Default)); |
1223 | 155k | return NewDecl; |
1224 | 155k | } |
1225 | | |
1226 | | TypedefDecl *ASTContext::buildImplicitTypedef(QualType T, |
1227 | 554k | StringRef Name) const { |
1228 | 554k | TypeSourceInfo *TInfo = getTrivialTypeSourceInfo(T); |
1229 | 554k | TypedefDecl *NewDecl = TypedefDecl::Create( |
1230 | 554k | const_cast<ASTContext &>(*this), getTranslationUnitDecl(), |
1231 | 554k | SourceLocation(), SourceLocation(), &Idents.get(Name), TInfo); |
1232 | 554k | NewDecl->setImplicit(); |
1233 | 554k | return NewDecl; |
1234 | 554k | } |
1235 | | |
1236 | 65.2k | TypedefDecl *ASTContext::getInt128Decl() const { |
1237 | 65.2k | if (!Int128Decl) |
1238 | 63.6k | Int128Decl = buildImplicitTypedef(Int128Ty, "__int128_t"); |
1239 | 65.2k | return Int128Decl; |
1240 | 65.2k | } |
1241 | | |
1242 | 65.2k | TypedefDecl *ASTContext::getUInt128Decl() const { |
1243 | 65.2k | if (!UInt128Decl) |
1244 | 63.6k | UInt128Decl = buildImplicitTypedef(UnsignedInt128Ty, "__uint128_t"); |
1245 | 65.2k | return UInt128Decl; |
1246 | 65.2k | } |
1247 | | |
1248 | 5.42M | void ASTContext::InitBuiltinType(CanQualType &R, BuiltinType::Kind K) { |
1249 | 5.42M | auto *Ty = new (*this, TypeAlignment) BuiltinType(K); |
1250 | 5.42M | R = CanQualType::CreateUnsafe(QualType(Ty, 0)); |
1251 | 5.42M | Types.push_back(Ty); |
1252 | 5.42M | } |
1253 | | |
1254 | | void ASTContext::InitBuiltinTypes(const TargetInfo &Target, |
1255 | 86.5k | const TargetInfo *AuxTarget) { |
1256 | 86.5k | assert((!this->Target || this->Target == &Target) && |
1257 | 86.5k | "Incorrect target reinitialization"); |
1258 | 86.5k | assert(VoidTy.isNull() && "Context reinitialized?"); |
1259 | | |
1260 | 86.5k | this->Target = &Target; |
1261 | 86.5k | this->AuxTarget = AuxTarget; |
1262 | | |
1263 | 86.5k | ABI.reset(createCXXABI(Target)); |
1264 | 86.5k | AddrSpaceMap = getAddressSpaceMap(Target, LangOpts); |
1265 | 86.5k | AddrSpaceMapMangling = isAddrSpaceMapManglingEnabled(Target, LangOpts); |
1266 | | |
1267 | | // C99 6.2.5p19. |
1268 | 86.5k | InitBuiltinType(VoidTy, BuiltinType::Void); |
1269 | | |
1270 | | // C99 6.2.5p2. |
1271 | 86.5k | InitBuiltinType(BoolTy, BuiltinType::Bool); |
1272 | | // C99 6.2.5p3. |
1273 | 86.5k | if (LangOpts.CharIsSigned) |
1274 | 86.2k | InitBuiltinType(CharTy, BuiltinType::Char_S); |
1275 | 305 | else |
1276 | 305 | InitBuiltinType(CharTy, BuiltinType::Char_U); |
1277 | | // C99 6.2.5p4. |
1278 | 86.5k | InitBuiltinType(SignedCharTy, BuiltinType::SChar); |
1279 | 86.5k | InitBuiltinType(ShortTy, BuiltinType::Short); |
1280 | 86.5k | InitBuiltinType(IntTy, BuiltinType::Int); |
1281 | 86.5k | InitBuiltinType(LongTy, BuiltinType::Long); |
1282 | 86.5k | InitBuiltinType(LongLongTy, BuiltinType::LongLong); |
1283 | | |
1284 | | // C99 6.2.5p6. |
1285 | 86.5k | InitBuiltinType(UnsignedCharTy, BuiltinType::UChar); |
1286 | 86.5k | InitBuiltinType(UnsignedShortTy, BuiltinType::UShort); |
1287 | 86.5k | InitBuiltinType(UnsignedIntTy, BuiltinType::UInt); |
1288 | 86.5k | InitBuiltinType(UnsignedLongTy, BuiltinType::ULong); |
1289 | 86.5k | InitBuiltinType(UnsignedLongLongTy, BuiltinType::ULongLong); |
1290 | | |
1291 | | // C99 6.2.5p10. |
1292 | 86.5k | InitBuiltinType(FloatTy, BuiltinType::Float); |
1293 | 86.5k | InitBuiltinType(DoubleTy, BuiltinType::Double); |
1294 | 86.5k | InitBuiltinType(LongDoubleTy, BuiltinType::LongDouble); |
1295 | | |
1296 | | // GNU extension, __float128 for IEEE quadruple precision |
1297 | 86.5k | InitBuiltinType(Float128Ty, BuiltinType::Float128); |
1298 | | |
1299 | | // C11 extension ISO/IEC TS 18661-3 |
1300 | 86.5k | InitBuiltinType(Float16Ty, BuiltinType::Float16); |
1301 | | |
1302 | | // ISO/IEC JTC1 SC22 WG14 N1169 Extension |
1303 | 86.5k | InitBuiltinType(ShortAccumTy, BuiltinType::ShortAccum); |
1304 | 86.5k | InitBuiltinType(AccumTy, BuiltinType::Accum); |
1305 | 86.5k | InitBuiltinType(LongAccumTy, BuiltinType::LongAccum); |
1306 | 86.5k | InitBuiltinType(UnsignedShortAccumTy, BuiltinType::UShortAccum); |
1307 | 86.5k | InitBuiltinType(UnsignedAccumTy, BuiltinType::UAccum); |
1308 | 86.5k | InitBuiltinType(UnsignedLongAccumTy, BuiltinType::ULongAccum); |
1309 | 86.5k | InitBuiltinType(ShortFractTy, BuiltinType::ShortFract); |
1310 | 86.5k | InitBuiltinType(FractTy, BuiltinType::Fract); |
1311 | 86.5k | InitBuiltinType(LongFractTy, BuiltinType::LongFract); |
1312 | 86.5k | InitBuiltinType(UnsignedShortFractTy, BuiltinType::UShortFract); |
1313 | 86.5k | InitBuiltinType(UnsignedFractTy, BuiltinType::UFract); |
1314 | 86.5k | InitBuiltinType(UnsignedLongFractTy, BuiltinType::ULongFract); |
1315 | 86.5k | InitBuiltinType(SatShortAccumTy, BuiltinType::SatShortAccum); |
1316 | 86.5k | InitBuiltinType(SatAccumTy, BuiltinType::SatAccum); |
1317 | 86.5k | InitBuiltinType(SatLongAccumTy, BuiltinType::SatLongAccum); |
1318 | 86.5k | InitBuiltinType(SatUnsignedShortAccumTy, BuiltinType::SatUShortAccum); |
1319 | 86.5k | InitBuiltinType(SatUnsignedAccumTy, BuiltinType::SatUAccum); |
1320 | 86.5k | InitBuiltinType(SatUnsignedLongAccumTy, BuiltinType::SatULongAccum); |
1321 | 86.5k | InitBuiltinType(SatShortFractTy, BuiltinType::SatShortFract); |
1322 | 86.5k | InitBuiltinType(SatFractTy, BuiltinType::SatFract); |
1323 | 86.5k | InitBuiltinType(SatLongFractTy, BuiltinType::SatLongFract); |
1324 | 86.5k | InitBuiltinType(SatUnsignedShortFractTy, BuiltinType::SatUShortFract); |
1325 | 86.5k | InitBuiltinType(SatUnsignedFractTy, BuiltinType::SatUFract); |
1326 | 86.5k | InitBuiltinType(SatUnsignedLongFractTy, BuiltinType::SatULongFract); |
1327 | | |
1328 | | // GNU extension, 128-bit integers. |
1329 | 86.5k | InitBuiltinType(Int128Ty, BuiltinType::Int128); |
1330 | 86.5k | InitBuiltinType(UnsignedInt128Ty, BuiltinType::UInt128); |
1331 | | |
1332 | | // C++ 3.9.1p5 |
1333 | 86.5k | if (TargetInfo::isTypeSigned(Target.getWCharType())) |
1334 | 75.3k | InitBuiltinType(WCharTy, BuiltinType::WChar_S); |
1335 | 11.1k | else // -fshort-wchar makes wchar_t be unsigned. |
1336 | 11.1k | InitBuiltinType(WCharTy, BuiltinType::WChar_U); |
1337 | 86.5k | if (LangOpts.CPlusPlus && LangOpts.WChar68.6k ) |
1338 | 68.5k | WideCharTy = WCharTy; |
1339 | 17.9k | else { |
1340 | | // C99 (or C++ using -fno-wchar). |
1341 | 17.9k | WideCharTy = getFromTargetType(Target.getWCharType()); |
1342 | 17.9k | } |
1343 | | |
1344 | 86.5k | WIntTy = getFromTargetType(Target.getWIntType()); |
1345 | | |
1346 | | // C++20 (proposed) |
1347 | 86.5k | InitBuiltinType(Char8Ty, BuiltinType::Char8); |
1348 | | |
1349 | 86.5k | if (LangOpts.CPlusPlus) // C++0x 3.9.1p5, extension for C++ |
1350 | 68.6k | InitBuiltinType(Char16Ty, BuiltinType::Char16); |
1351 | 17.9k | else // C99 |
1352 | 17.9k | Char16Ty = getFromTargetType(Target.getChar16Type()); |
1353 | | |
1354 | 86.5k | if (LangOpts.CPlusPlus) // C++0x 3.9.1p5, extension for C++ |
1355 | 68.6k | InitBuiltinType(Char32Ty, BuiltinType::Char32); |
1356 | 17.9k | else // C99 |
1357 | 17.9k | Char32Ty = getFromTargetType(Target.getChar32Type()); |
1358 | | |
1359 | | // Placeholder type for type-dependent expressions whose type is |
1360 | | // completely unknown. No code should ever check a type against |
1361 | | // DependentTy and users should never see it; however, it is here to |
1362 | | // help diagnose failures to properly check for type-dependent |
1363 | | // expressions. |
1364 | 86.5k | InitBuiltinType(DependentTy, BuiltinType::Dependent); |
1365 | | |
1366 | | // Placeholder type for functions. |
1367 | 86.5k | InitBuiltinType(OverloadTy, BuiltinType::Overload); |
1368 | | |
1369 | | // Placeholder type for bound members. |
1370 | 86.5k | InitBuiltinType(BoundMemberTy, BuiltinType::BoundMember); |
1371 | | |
1372 | | // Placeholder type for pseudo-objects. |
1373 | 86.5k | InitBuiltinType(PseudoObjectTy, BuiltinType::PseudoObject); |
1374 | | |
1375 | | // "any" type; useful for debugger-like clients. |
1376 | 86.5k | InitBuiltinType(UnknownAnyTy, BuiltinType::UnknownAny); |
1377 | | |
1378 | | // Placeholder type for unbridged ARC casts. |
1379 | 86.5k | InitBuiltinType(ARCUnbridgedCastTy, BuiltinType::ARCUnbridgedCast); |
1380 | | |
1381 | | // Placeholder type for builtin functions. |
1382 | 86.5k | InitBuiltinType(BuiltinFnTy, BuiltinType::BuiltinFn); |
1383 | | |
1384 | | // Placeholder type for OMP array sections. |
1385 | 86.5k | if (LangOpts.OpenMP) { |
1386 | 11.4k | InitBuiltinType(OMPArraySectionTy, BuiltinType::OMPArraySection); |
1387 | 11.4k | InitBuiltinType(OMPArrayShapingTy, BuiltinType::OMPArrayShaping); |
1388 | 11.4k | InitBuiltinType(OMPIteratorTy, BuiltinType::OMPIterator); |
1389 | 11.4k | } |
1390 | 86.5k | if (LangOpts.MatrixTypes) |
1391 | 32 | InitBuiltinType(IncompleteMatrixIdxTy, BuiltinType::IncompleteMatrixIdx); |
1392 | | |
1393 | | // C99 6.2.5p11. |
1394 | 86.5k | FloatComplexTy = getComplexType(FloatTy); |
1395 | 86.5k | DoubleComplexTy = getComplexType(DoubleTy); |
1396 | 86.5k | LongDoubleComplexTy = getComplexType(LongDoubleTy); |
1397 | 86.5k | Float128ComplexTy = getComplexType(Float128Ty); |
1398 | | |
1399 | | // Builtin types for 'id', 'Class', and 'SEL'. |
1400 | 86.5k | InitBuiltinType(ObjCBuiltinIdTy, BuiltinType::ObjCId); |
1401 | 86.5k | InitBuiltinType(ObjCBuiltinClassTy, BuiltinType::ObjCClass); |
1402 | 86.5k | InitBuiltinType(ObjCBuiltinSelTy, BuiltinType::ObjCSel); |
1403 | | |
1404 | 86.5k | if (LangOpts.OpenCL) { |
1405 | 596 | #define IMAGE_TYPE(ImgType, Id, SingletonId, Access, Suffix) \ |
1406 | 21.4k | InitBuiltinType(SingletonId, BuiltinType::Id); |
1407 | 596 | #include "clang/Basic/OpenCLImageTypes.def" |
1408 | | |
1409 | 596 | InitBuiltinType(OCLSamplerTy, BuiltinType::OCLSampler); |
1410 | 596 | InitBuiltinType(OCLEventTy, BuiltinType::OCLEvent); |
1411 | 596 | InitBuiltinType(OCLClkEventTy, BuiltinType::OCLClkEvent); |
1412 | 596 | InitBuiltinType(OCLQueueTy, BuiltinType::OCLQueue); |
1413 | 596 | InitBuiltinType(OCLReserveIDTy, BuiltinType::OCLReserveID); |
1414 | | |
1415 | 596 | #define EXT_OPAQUE_TYPE(ExtType, Id, Ext) \ |
1416 | 7.15k | InitBuiltinType(Id##Ty, BuiltinType::Id); |
1417 | 596 | #include "clang/Basic/OpenCLExtensionTypes.def" |
1418 | 596 | } |
1419 | | |
1420 | 86.5k | if (Target.hasAArch64SVETypes()) { |
1421 | 2.29k | #define SVE_TYPE(Name, Id, SingletonId) \ |
1422 | 112k | InitBuiltinType(SingletonId, BuiltinType::Id); |
1423 | 2.29k | #include "clang/Basic/AArch64SVEACLETypes.def" |
1424 | 2.29k | } |
1425 | | |
1426 | 86.5k | if (Target.getTriple().isPPC64() && |
1427 | 3.64k | Target.hasFeature("paired-vector-memops")) { |
1428 | 26 | if (Target.hasFeature("mma")) { |
1429 | 25 | #define PPC_VECTOR_MMA_TYPE(Name, Id, Size) \ |
1430 | 25 | InitBuiltinType(Id##Ty, BuiltinType::Id); |
1431 | 25 | #include "clang/Basic/PPCTypes.def" |
1432 | 25 | } |
1433 | 26 | #define PPC_VECTOR_VSX_TYPE(Name, Id, Size) \ |
1434 | 26 | InitBuiltinType(Id##Ty, BuiltinType::Id); |
1435 | 26 | #include "clang/Basic/PPCTypes.def" |
1436 | 26 | } |
1437 | | |
1438 | | // Builtin type for __objc_yes and __objc_no |
1439 | 86.5k | ObjCBuiltinBoolTy = (Target.useSignedCharForObjCBool() ? |
1440 | 86.1k | SignedCharTy : BoolTy395 ); |
1441 | | |
1442 | 86.5k | ObjCConstantStringType = QualType(); |
1443 | | |
1444 | 86.5k | ObjCSuperType = QualType(); |
1445 | | |
1446 | | // void * type |
1447 | 86.5k | if (LangOpts.OpenCLVersion >= 200) { |
1448 | 166 | auto Q = VoidTy.getQualifiers(); |
1449 | 166 | Q.setAddressSpace(LangAS::opencl_generic); |
1450 | 166 | VoidPtrTy = getPointerType(getCanonicalType( |
1451 | 166 | getQualifiedType(VoidTy.getUnqualifiedType(), Q))); |
1452 | 86.3k | } else { |
1453 | 86.3k | VoidPtrTy = getPointerType(VoidTy); |
1454 | 86.3k | } |
1455 | | |
1456 | | // nullptr type (C++0x 2.14.7) |
1457 | 86.5k | InitBuiltinType(NullPtrTy, BuiltinType::NullPtr); |
1458 | | |
1459 | | // half type (OpenCL 6.1.1.1) / ARM NEON __fp16 |
1460 | 86.5k | InitBuiltinType(HalfTy, BuiltinType::Half); |
1461 | | |
1462 | 86.5k | InitBuiltinType(BFloat16Ty, BuiltinType::BFloat16); |
1463 | | |
1464 | | // Builtin type used to help define __builtin_va_list. |
1465 | 86.5k | VaListTagDecl = nullptr; |
1466 | | |
1467 | | // MSVC predeclares struct _GUID, and we need it to create MSGuidDecls. |
1468 | 86.5k | if (LangOpts.MicrosoftExt || LangOpts.Borland76.1k ) { |
1469 | 10.4k | MSGuidTagDecl = buildImplicitRecord("_GUID"); |
1470 | 10.4k | TUDecl->addDecl(MSGuidTagDecl); |
1471 | 10.4k | } |
1472 | 86.5k | } |
1473 | | |
1474 | 941k | DiagnosticsEngine &ASTContext::getDiagnostics() const { |
1475 | 941k | return SourceMgr.getDiagnostics(); |
1476 | 941k | } |
1477 | | |
1478 | 1.18G | AttrVec& ASTContext::getDeclAttrs(const Decl *D) { |
1479 | 1.18G | AttrVec *&Result = DeclAttrs[D]; |
1480 | 1.18G | if (!Result) { |
1481 | 20.1M | void *Mem = Allocate(sizeof(AttrVec)); |
1482 | 20.1M | Result = new (Mem) AttrVec; |
1483 | 20.1M | } |
1484 | | |
1485 | 1.18G | return *Result; |
1486 | 1.18G | } |
1487 | | |
1488 | | /// Erase the attributes corresponding to the given declaration. |
1489 | 5.92k | void ASTContext::eraseDeclAttrs(const Decl *D) { |
1490 | 5.92k | llvm::DenseMap<const Decl*, AttrVec*>::iterator Pos = DeclAttrs.find(D); |
1491 | 5.92k | if (Pos != DeclAttrs.end()) { |
1492 | 5.92k | Pos->second->~AttrVec(); |
1493 | 5.92k | DeclAttrs.erase(Pos); |
1494 | 5.92k | } |
1495 | 5.92k | } |
1496 | | |
1497 | | // FIXME: Remove ? |
1498 | | MemberSpecializationInfo * |
1499 | 0 | ASTContext::getInstantiatedFromStaticDataMember(const VarDecl *Var) { |
1500 | 0 | assert(Var->isStaticDataMember() && "Not a static data member"); |
1501 | 0 | return getTemplateOrSpecializationInfo(Var) |
1502 | 0 | .dyn_cast<MemberSpecializationInfo *>(); |
1503 | 0 | } |
1504 | | |
1505 | | ASTContext::TemplateOrSpecializationInfo |
1506 | 73.8M | ASTContext::getTemplateOrSpecializationInfo(const VarDecl *Var) { |
1507 | 73.8M | llvm::DenseMap<const VarDecl *, TemplateOrSpecializationInfo>::iterator Pos = |
1508 | 73.8M | TemplateOrInstantiation.find(Var); |
1509 | 73.8M | if (Pos == TemplateOrInstantiation.end()) |
1510 | 37.2M | return {}; |
1511 | | |
1512 | 36.5M | return Pos->second; |
1513 | 36.5M | } |
1514 | | |
1515 | | void |
1516 | | ASTContext::setInstantiatedFromStaticDataMember(VarDecl *Inst, VarDecl *Tmpl, |
1517 | | TemplateSpecializationKind TSK, |
1518 | 252k | SourceLocation PointOfInstantiation) { |
1519 | 252k | assert(Inst->isStaticDataMember() && "Not a static data member"); |
1520 | 252k | assert(Tmpl->isStaticDataMember() && "Not a static data member"); |
1521 | 252k | setTemplateOrSpecializationInfo(Inst, new (*this) MemberSpecializationInfo( |
1522 | 252k | Tmpl, TSK, PointOfInstantiation)); |
1523 | 252k | } |
1524 | | |
1525 | | void |
1526 | | ASTContext::setTemplateOrSpecializationInfo(VarDecl *Inst, |
1527 | 256k | TemplateOrSpecializationInfo TSI) { |
1528 | 256k | assert(!TemplateOrInstantiation[Inst] && |
1529 | 256k | "Already noted what the variable was instantiated from"); |
1530 | 256k | TemplateOrInstantiation[Inst] = TSI; |
1531 | 256k | } |
1532 | | |
1533 | | NamedDecl * |
1534 | 7.67k | ASTContext::getInstantiatedFromUsingDecl(NamedDecl *UUD) { |
1535 | 7.67k | auto Pos = InstantiatedFromUsingDecl.find(UUD); |
1536 | 7.67k | if (Pos == InstantiatedFromUsingDecl.end()) |
1537 | 7.19k | return nullptr; |
1538 | | |
1539 | 484 | return Pos->second; |
1540 | 484 | } |
1541 | | |
1542 | | void |
1543 | 1.53k | ASTContext::setInstantiatedFromUsingDecl(NamedDecl *Inst, NamedDecl *Pattern) { |
1544 | 1.53k | assert((isa<UsingDecl>(Pattern) || |
1545 | 1.53k | isa<UnresolvedUsingValueDecl>(Pattern) || |
1546 | 1.53k | isa<UnresolvedUsingTypenameDecl>(Pattern)) && |
1547 | 1.53k | "pattern decl is not a using decl"); |
1548 | 1.53k | assert((isa<UsingDecl>(Inst) || |
1549 | 1.53k | isa<UnresolvedUsingValueDecl>(Inst) || |
1550 | 1.53k | isa<UnresolvedUsingTypenameDecl>(Inst)) && |
1551 | 1.53k | "instantiation did not produce a using decl"); |
1552 | 1.53k | assert(!InstantiatedFromUsingDecl[Inst] && "pattern already exists"); |
1553 | 1.53k | InstantiatedFromUsingDecl[Inst] = Pattern; |
1554 | 1.53k | } |
1555 | | |
1556 | | UsingShadowDecl * |
1557 | 24.2k | ASTContext::getInstantiatedFromUsingShadowDecl(UsingShadowDecl *Inst) { |
1558 | 24.2k | llvm::DenseMap<UsingShadowDecl*, UsingShadowDecl*>::const_iterator Pos |
1559 | 24.2k | = InstantiatedFromUsingShadowDecl.find(Inst); |
1560 | 24.2k | if (Pos == InstantiatedFromUsingShadowDecl.end()) |
1561 | 24.1k | return nullptr; |
1562 | | |
1563 | 90 | return Pos->second; |
1564 | 90 | } |
1565 | | |
1566 | | void |
1567 | | ASTContext::setInstantiatedFromUsingShadowDecl(UsingShadowDecl *Inst, |
1568 | 325 | UsingShadowDecl *Pattern) { |
1569 | 325 | assert(!InstantiatedFromUsingShadowDecl[Inst] && "pattern already exists"); |
1570 | 325 | InstantiatedFromUsingShadowDecl[Inst] = Pattern; |
1571 | 325 | } |
1572 | | |
1573 | 43.7k | FieldDecl *ASTContext::getInstantiatedFromUnnamedFieldDecl(FieldDecl *Field) { |
1574 | 43.7k | llvm::DenseMap<FieldDecl *, FieldDecl *>::iterator Pos |
1575 | 43.7k | = InstantiatedFromUnnamedFieldDecl.find(Field); |
1576 | 43.7k | if (Pos == InstantiatedFromUnnamedFieldDecl.end()) |
1577 | 40.0k | return nullptr; |
1578 | | |
1579 | 3.71k | return Pos->second; |
1580 | 3.71k | } |
1581 | | |
1582 | | void ASTContext::setInstantiatedFromUnnamedFieldDecl(FieldDecl *Inst, |
1583 | 1.66k | FieldDecl *Tmpl) { |
1584 | 1.66k | assert(!Inst->getDeclName() && "Instantiated field decl is not unnamed"); |
1585 | 1.66k | assert(!Tmpl->getDeclName() && "Template field decl is not unnamed"); |
1586 | 1.66k | assert(!InstantiatedFromUnnamedFieldDecl[Inst] && |
1587 | 1.66k | "Already noted what unnamed field was instantiated from"); |
1588 | | |
1589 | 1.66k | InstantiatedFromUnnamedFieldDecl[Inst] = Tmpl; |
1590 | 1.66k | } |
1591 | | |
1592 | | ASTContext::overridden_cxx_method_iterator |
1593 | 2.90k | ASTContext::overridden_methods_begin(const CXXMethodDecl *Method) const { |
1594 | 2.90k | return overridden_methods(Method).begin(); |
1595 | 2.90k | } |
1596 | | |
1597 | | ASTContext::overridden_cxx_method_iterator |
1598 | 1.09k | ASTContext::overridden_methods_end(const CXXMethodDecl *Method) const { |
1599 | 1.09k | return overridden_methods(Method).end(); |
1600 | 1.09k | } |
1601 | | |
1602 | | unsigned |
1603 | 8.27M | ASTContext::overridden_methods_size(const CXXMethodDecl *Method) const { |
1604 | 8.27M | auto Range = overridden_methods(Method); |
1605 | 8.27M | return Range.end() - Range.begin(); |
1606 | 8.27M | } |
1607 | | |
1608 | | ASTContext::overridden_method_range |
1609 | 10.5M | ASTContext::overridden_methods(const CXXMethodDecl *Method) const { |
1610 | 10.5M | llvm::DenseMap<const CXXMethodDecl *, CXXMethodVector>::const_iterator Pos = |
1611 | 10.5M | OverriddenMethods.find(Method->getCanonicalDecl()); |
1612 | 10.5M | if (Pos == OverriddenMethods.end()) |
1613 | 10.3M | return overridden_method_range(nullptr, nullptr); |
1614 | 217k | return overridden_method_range(Pos->second.begin(), Pos->second.end()); |
1615 | 217k | } |
1616 | | |
1617 | | void ASTContext::addOverriddenMethod(const CXXMethodDecl *Method, |
1618 | 33.6k | const CXXMethodDecl *Overridden) { |
1619 | 33.6k | assert(Method->isCanonicalDecl() && Overridden->isCanonicalDecl()); |
1620 | 33.6k | OverriddenMethods[Method].push_back(Overridden); |
1621 | 33.6k | } |
1622 | | |
1623 | | void ASTContext::getOverriddenMethods( |
1624 | | const NamedDecl *D, |
1625 | 8.14k | SmallVectorImpl<const NamedDecl *> &Overridden) const { |
1626 | 8.14k | assert(D); |
1627 | | |
1628 | 8.14k | if (const auto *CXXMethod = dyn_cast<CXXMethodDecl>(D)) { |
1629 | 1.09k | Overridden.append(overridden_methods_begin(CXXMethod), |
1630 | 1.09k | overridden_methods_end(CXXMethod)); |
1631 | 1.09k | return; |
1632 | 1.09k | } |
1633 | | |
1634 | 7.04k | const auto *Method = dyn_cast<ObjCMethodDecl>(D); |
1635 | 7.04k | if (!Method) |
1636 | 6.22k | return; |
1637 | | |
1638 | 822 | SmallVector<const ObjCMethodDecl *, 8> OverDecls; |
1639 | 822 | Method->getOverriddenMethods(OverDecls); |
1640 | 822 | Overridden.append(OverDecls.begin(), OverDecls.end()); |
1641 | 822 | } |
1642 | | |
1643 | 79.7k | void ASTContext::addedLocalImportDecl(ImportDecl *Import) { |
1644 | 79.7k | assert(!Import->getNextLocalImport() && |
1645 | 79.7k | "Import declaration already in the chain"); |
1646 | 79.7k | assert(!Import->isFromASTFile() && "Non-local import declaration"); |
1647 | 79.7k | if (!FirstLocalImport) { |
1648 | 3.99k | FirstLocalImport = Import; |
1649 | 3.99k | LastLocalImport = Import; |
1650 | 3.99k | return; |
1651 | 3.99k | } |
1652 | | |
1653 | 75.7k | LastLocalImport->setNextLocalImport(Import); |
1654 | 75.7k | LastLocalImport = Import; |
1655 | 75.7k | } |
1656 | | |
1657 | | //===----------------------------------------------------------------------===// |
1658 | | // Type Sizing and Analysis |
1659 | | //===----------------------------------------------------------------------===// |
1660 | | |
1661 | | /// getFloatTypeSemantics - Return the APFloat 'semantics' for the specified |
1662 | | /// scalar floating point type. |
1663 | 234k | const llvm::fltSemantics &ASTContext::getFloatTypeSemantics(QualType T) const { |
1664 | 234k | switch (T->castAs<BuiltinType>()->getKind()) { |
1665 | 0 | default: |
1666 | 0 | llvm_unreachable("Not a floating point type!"); |
1667 | 197 | case BuiltinType::BFloat16: |
1668 | 197 | return Target->getBFloat16Format(); |
1669 | 480 | case BuiltinType::Float16: |
1670 | 4.54k | case BuiltinType::Half: |
1671 | 4.54k | return Target->getHalfFormat(); |
1672 | 104k | case BuiltinType::Float: return Target->getFloatFormat(); |
1673 | 109k | case BuiltinType::Double: return Target->getDoubleFormat(); |
1674 | 15.1k | case BuiltinType::LongDouble: |
1675 | 15.1k | if (getLangOpts().OpenMP && getLangOpts().OpenMPIsDevice1.55k ) |
1676 | 155 | return AuxTarget->getLongDoubleFormat(); |
1677 | 15.0k | return Target->getLongDoubleFormat(); |
1678 | 1.27k | case BuiltinType::Float128: |
1679 | 1.27k | if (getLangOpts().OpenMP && getLangOpts().OpenMPIsDevice27 ) |
1680 | 12 | return AuxTarget->getFloat128Format(); |
1681 | 1.26k | return Target->getFloat128Format(); |
1682 | 234k | } |
1683 | 234k | } |
1684 | | |
1685 | 1.49M | CharUnits ASTContext::getDeclAlign(const Decl *D, bool ForAlignof) const { |
1686 | 1.49M | unsigned Align = Target->getCharWidth(); |
1687 | | |
1688 | 1.49M | bool UseAlignAttrOnly = false; |
1689 | 1.49M | if (unsigned AlignFromAttr = D->getMaxAlignment()) { |
1690 | 1.96k | Align = AlignFromAttr; |
1691 | | |
1692 | | // __attribute__((aligned)) can increase or decrease alignment |
1693 | | // *except* on a struct or struct member, where it only increases |
1694 | | // alignment unless 'packed' is also specified. |
1695 | | // |
1696 | | // It is an error for alignas to decrease alignment, so we can |
1697 | | // ignore that possibility; Sema should diagnose it. |
1698 | 1.96k | if (isa<FieldDecl>(D)) { |
1699 | 6 | UseAlignAttrOnly = D->hasAttr<PackedAttr>() || |
1700 | 4 | cast<FieldDecl>(D)->getParent()->hasAttr<PackedAttr>(); |
1701 | 1.95k | } else { |
1702 | 1.95k | UseAlignAttrOnly = true; |
1703 | 1.95k | } |
1704 | 1.96k | } |
1705 | 1.48M | else if (isa<FieldDecl>(D)) |
1706 | 74 | UseAlignAttrOnly = |
1707 | 74 | D->hasAttr<PackedAttr>() || |
1708 | 58 | cast<FieldDecl>(D)->getParent()->hasAttr<PackedAttr>(); |
1709 | | |
1710 | | // If we're using the align attribute only, just ignore everything |
1711 | | // else about the declaration and its type. |
1712 | 1.49M | if (UseAlignAttrOnly) { |
1713 | | // do nothing |
1714 | 1.48M | } else if (const auto *VD = dyn_cast<ValueDecl>(D)) { |
1715 | 1.48M | QualType T = VD->getType(); |
1716 | 1.48M | if (const auto *RT = T->getAs<ReferenceType>()) { |
1717 | 92.0k | if (ForAlignof) |
1718 | 2 | T = RT->getPointeeType(); |
1719 | 92.0k | else |
1720 | 92.0k | T = getPointerType(RT->getPointeeType()); |
1721 | 92.0k | } |
1722 | 1.48M | QualType BaseT = getBaseElementType(T); |
1723 | 1.48M | if (T->isFunctionType()) |
1724 | 3.16k | Align = getTypeInfoImpl(T.getTypePtr()).Align; |
1725 | 1.48M | else if (!BaseT->isIncompleteType()) { |
1726 | | // Adjust alignments of declarations with array type by the |
1727 | | // large-array alignment on the target. |
1728 | 1.48M | if (const ArrayType *arrayType = getAsArrayType(T)) { |
1729 | 27.9k | unsigned MinWidth = Target->getLargeArrayMinWidth(); |
1730 | 27.9k | if (!ForAlignof && MinWidth27.8k ) { |
1731 | 7.02k | if (isa<VariableArrayType>(arrayType)) |
1732 | 356 | Align = std::max(Align, Target->getLargeArrayAlign()); |
1733 | 6.67k | else if (isa<ConstantArrayType>(arrayType) && |
1734 | 6.61k | MinWidth <= getTypeSize(cast<ConstantArrayType>(arrayType))) |
1735 | 3.81k | Align = std::max(Align, Target->getLargeArrayAlign()); |
1736 | 7.02k | } |
1737 | 27.9k | } |
1738 | 1.48M | Align = std::max(Align, getPreferredTypeAlign(T.getTypePtr())); |
1739 | 1.48M | if (BaseT.getQualifiers().hasUnaligned()) |
1740 | 50 | Align = Target->getCharWidth(); |
1741 | 1.48M | if (const auto *VD = dyn_cast<VarDecl>(D)) { |
1742 | 1.48M | if (VD->hasGlobalStorage() && !ForAlignof223k ) { |
1743 | 223k | uint64_t TypeSize = getTypeSize(T.getTypePtr()); |
1744 | 223k | Align = std::max(Align, getTargetInfo().getMinGlobalAlign(TypeSize)); |
1745 | 223k | } |
1746 | 1.48M | } |
1747 | 1.48M | } |
1748 | | |
1749 | | // Fields can be subject to extra alignment constraints, like if |
1750 | | // the field is packed, the struct is packed, or the struct has a |
1751 | | // a max-field-alignment constraint (#pragma pack). So calculate |
1752 | | // the actual alignment of the field within the struct, and then |
1753 | | // (as we're expected to) constrain that by the alignment of the type. |
1754 | 1.48M | if (const auto *Field = dyn_cast<FieldDecl>(VD)) { |
1755 | 39 | const RecordDecl *Parent = Field->getParent(); |
1756 | | // We can only produce a sensible answer if the record is valid. |
1757 | 39 | if (!Parent->isInvalidDecl()) { |
1758 | 37 | const ASTRecordLayout &Layout = getASTRecordLayout(Parent); |
1759 | | |
1760 | | // Start with the record's overall alignment. |
1761 | 37 | unsigned FieldAlign = toBits(Layout.getAlignment()); |
1762 | | |
1763 | | // Use the GCD of that and the offset within the record. |
1764 | 37 | uint64_t Offset = Layout.getFieldOffset(Field->getFieldIndex()); |
1765 | 37 | if (Offset > 0) { |
1766 | | // Alignment is always a power of 2, so the GCD will be a power of 2, |
1767 | | // which means we get to do this crazy thing instead of Euclid's. |
1768 | 18 | uint64_t LowBitOfOffset = Offset & (~Offset + 1); |
1769 | 18 | if (LowBitOfOffset < FieldAlign) |
1770 | 0 | FieldAlign = static_cast<unsigned>(LowBitOfOffset); |
1771 | 18 | } |
1772 | | |
1773 | 37 | Align = std::min(Align, FieldAlign); |
1774 | 37 | } |
1775 | 39 | } |
1776 | 1.48M | } |
1777 | | |
1778 | 1.49M | return toCharUnitsFromBits(Align); |
1779 | 1.49M | } |
1780 | | |
1781 | 5.24k | CharUnits ASTContext::getExnObjectAlignment() const { |
1782 | 5.24k | return toCharUnitsFromBits(Target->getExnObjectAlignment()); |
1783 | 5.24k | } |
1784 | | |
1785 | | // getTypeInfoDataSizeInChars - Return the size of a type, in |
1786 | | // chars. If the type is a record, its data size is returned. This is |
1787 | | // the size of the memcpy that's performed when assigning this type |
1788 | | // using a trivial copy/move assignment operator. |
1789 | 11.5k | TypeInfoChars ASTContext::getTypeInfoDataSizeInChars(QualType T) const { |
1790 | 11.5k | TypeInfoChars Info = getTypeInfoInChars(T); |
1791 | | |
1792 | | // In C++, objects can sometimes be allocated into the tail padding |
1793 | | // of a base-class subobject. We decide whether that's possible |
1794 | | // during class layout, so here we can just trust the layout results. |
1795 | 11.5k | if (getLangOpts().CPlusPlus) { |
1796 | 9.97k | if (const auto *RT = T->getAs<RecordType>()) { |
1797 | 8.25k | const ASTRecordLayout &layout = getASTRecordLayout(RT->getDecl()); |
1798 | 8.25k | Info.Width = layout.getDataSize(); |
1799 | 8.25k | } |
1800 | 9.97k | } |
1801 | | |
1802 | 11.5k | return Info; |
1803 | 11.5k | } |
1804 | | |
1805 | | /// getConstantArrayInfoInChars - Performing the computation in CharUnits |
1806 | | /// instead of in bits prevents overflowing the uint64_t for some large arrays. |
1807 | | TypeInfoChars |
1808 | | static getConstantArrayInfoInChars(const ASTContext &Context, |
1809 | 129k | const ConstantArrayType *CAT) { |
1810 | 129k | TypeInfoChars EltInfo = Context.getTypeInfoInChars(CAT->getElementType()); |
1811 | 129k | uint64_t Size = CAT->getSize().getZExtValue(); |
1812 | 129k | assert((Size == 0 || static_cast<uint64_t>(EltInfo.Width.getQuantity()) <= |
1813 | 129k | (uint64_t)(-1)/Size) && |
1814 | 129k | "Overflow in array type char size evaluation"); |
1815 | 129k | uint64_t Width = EltInfo.Width.getQuantity() * Size; |
1816 | 129k | unsigned Align = EltInfo.Align.getQuantity(); |
1817 | 129k | if (!Context.getTargetInfo().getCXXABI().isMicrosoft() || |
1818 | 317 | Context.getTargetInfo().getPointerWidth(0) == 64) |
1819 | 129k | Width = llvm::alignTo(Width, Align); |
1820 | 129k | return TypeInfoChars(CharUnits::fromQuantity(Width), |
1821 | 129k | CharUnits::fromQuantity(Align), |
1822 | 129k | EltInfo.AlignIsRequired); |
1823 | 129k | } |
1824 | | |
1825 | 3.82M | TypeInfoChars ASTContext::getTypeInfoInChars(const Type *T) const { |
1826 | 3.82M | if (const auto *CAT = dyn_cast<ConstantArrayType>(T)) |
1827 | 129k | return getConstantArrayInfoInChars(*this, CAT); |
1828 | 3.69M | TypeInfo Info = getTypeInfo(T); |
1829 | 3.69M | return TypeInfoChars(toCharUnitsFromBits(Info.Width), |
1830 | 3.69M | toCharUnitsFromBits(Info.Align), |
1831 | 3.69M | Info.AlignIsRequired); |
1832 | 3.69M | } |
1833 | | |
1834 | 3.80M | TypeInfoChars ASTContext::getTypeInfoInChars(QualType T) const { |
1835 | 3.80M | return getTypeInfoInChars(T.getTypePtr()); |
1836 | 3.80M | } |
1837 | | |
1838 | 2.86k | bool ASTContext::isAlignmentRequired(const Type *T) const { |
1839 | 2.86k | return getTypeInfo(T).AlignIsRequired; |
1840 | 2.86k | } |
1841 | | |
1842 | 2.86k | bool ASTContext::isAlignmentRequired(QualType T) const { |
1843 | 2.86k | return isAlignmentRequired(T.getTypePtr()); |
1844 | 2.86k | } |
1845 | | |
1846 | | unsigned ASTContext::getTypeAlignIfKnown(QualType T, |
1847 | 954 | bool NeedsPreferredAlignment) const { |
1848 | | // An alignment on a typedef overrides anything else. |
1849 | 954 | if (const auto *TT = T->getAs<TypedefType>()) |
1850 | 8 | if (unsigned Align = TT->getDecl()->getMaxAlignment()) |
1851 | 0 | return Align; |
1852 | | |
1853 | | // If we have an (array of) complete type, we're done. |
1854 | 954 | T = getBaseElementType(T); |
1855 | 954 | if (!T->isIncompleteType()) |
1856 | 951 | return NeedsPreferredAlignment ? getPreferredTypeAlign(T)38 : getTypeAlign(T)913 ; |
1857 | | |
1858 | | // If we had an array type, its element type might be a typedef |
1859 | | // type with an alignment attribute. |
1860 | 3 | if (const auto *TT = T->getAs<TypedefType>()) |
1861 | 0 | if (unsigned Align = TT->getDecl()->getMaxAlignment()) |
1862 | 0 | return Align; |
1863 | | |
1864 | | // Otherwise, see if the declaration of the type had an attribute. |
1865 | 3 | if (const auto *TT = T->getAs<TagType>()) |
1866 | 0 | return TT->getDecl()->getMaxAlignment(); |
1867 | | |
1868 | 3 | return 0; |
1869 | 3 | } |
1870 | | |
1871 | 102M | TypeInfo ASTContext::getTypeInfo(const Type *T) const { |
1872 | 102M | TypeInfoMap::iterator I = MemoizedTypeInfo.find(T); |
1873 | 102M | if (I != MemoizedTypeInfo.end()) |
1874 | 100M | return I->second; |
1875 | | |
1876 | | // This call can invalidate MemoizedTypeInfo[T], so we need a second lookup. |
1877 | 1.74M | TypeInfo TI = getTypeInfoImpl(T); |
1878 | 1.74M | MemoizedTypeInfo[T] = TI; |
1879 | 1.74M | return TI; |
1880 | 1.74M | } |
1881 | | |
1882 | | /// getTypeInfoImpl - Return the size of the specified type, in bits. This |
1883 | | /// method does not work on incomplete types. |
1884 | | /// |
1885 | | /// FIXME: Pointers into different addr spaces could have different sizes and |
1886 | | /// alignment requirements: getPointerInfo should take an AddrSpace, this |
1887 | | /// should take a QualType, &c. |
1888 | 1.74M | TypeInfo ASTContext::getTypeInfoImpl(const Type *T) const { |
1889 | 1.74M | uint64_t Width = 0; |
1890 | 1.74M | unsigned Align = 8; |
1891 | 1.74M | bool AlignIsRequired = false; |
1892 | 1.74M | unsigned AS = 0; |
1893 | 1.74M | switch (T->getTypeClass()) { |
1894 | 0 | #define TYPE(Class, Base) |
1895 | 0 | #define ABSTRACT_TYPE(Class, Base) |
1896 | 0 | #define NON_CANONICAL_TYPE(Class, Base) |
1897 | 0 | #define DEPENDENT_TYPE(Class, Base) case Type::Class: |
1898 | 0 | #define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) \ |
1899 | 34.2k | case Type::Class: \ |
1900 | 34.2k | assert(!T->isDependentType() && "should not see dependent types here"); \ |
1901 | 34.2k | return getTypeInfo(cast<Class##Type>(T)->desugar().getTypePtr()); |
1902 | 0 | #include "clang/AST/TypeNodes.inc" |
1903 | 0 | llvm_unreachable("Should not see dependent types"); |
1904 | |
|
1905 | 31 | case Type::FunctionNoProto: |
1906 | 3.58k | case Type::FunctionProto: |
1907 | | // GCC extension: alignof(function) = 32 bits |
1908 | 3.58k | Width = 0; |
1909 | 3.58k | Align = 32; |
1910 | 3.58k | break; |
1911 | | |
1912 | 165 | case Type::IncompleteArray: |
1913 | 3.54k | case Type::VariableArray: |
1914 | 57.2k | case Type::ConstantArray: { |
1915 | | // Model non-constant sized arrays as size zero, but track the alignment. |
1916 | 57.2k | uint64_t Size = 0; |
1917 | 57.2k | if (const auto *CAT = dyn_cast<ConstantArrayType>(T)) |
1918 | 53.7k | Size = CAT->getSize().getZExtValue(); |
1919 | | |
1920 | 57.2k | TypeInfo EltInfo = getTypeInfo(cast<ArrayType>(T)->getElementType()); |
1921 | 57.2k | assert((Size == 0 || EltInfo.Width <= (uint64_t)(-1) / Size) && |
1922 | 57.2k | "Overflow in array type bit size evaluation"); |
1923 | 57.2k | Width = EltInfo.Width * Size; |
1924 | 57.2k | Align = EltInfo.Align; |
1925 | 57.2k | AlignIsRequired = EltInfo.AlignIsRequired; |
1926 | 57.2k | if (!getTargetInfo().getCXXABI().isMicrosoft() || |
1927 | 270 | getTargetInfo().getPointerWidth(0) == 64) |
1928 | 57.1k | Width = llvm::alignTo(Width, Align); |
1929 | 57.2k | break; |
1930 | 3.54k | } |
1931 | | |
1932 | 634 | case Type::ExtVector: |
1933 | 24.6k | case Type::Vector: { |
1934 | 24.6k | const auto *VT = cast<VectorType>(T); |
1935 | 24.6k | TypeInfo EltInfo = getTypeInfo(VT->getElementType()); |
1936 | 24.6k | Width = EltInfo.Width * VT->getNumElements(); |
1937 | 24.6k | Align = Width; |
1938 | | // If the alignment is not a power of 2, round up to the next power of 2. |
1939 | | // This happens for non-power-of-2 length vectors. |
1940 | 24.6k | if (Align & (Align-1)) { |
1941 | 1.02k | Align = llvm::NextPowerOf2(Align); |
1942 | 1.02k | Width = llvm::alignTo(Width, Align); |
1943 | 1.02k | } |
1944 | | // Adjust the alignment based on the target max. |
1945 | 24.6k | uint64_t TargetVectorAlign = Target->getMaxVectorAlign(); |
1946 | 24.6k | if (TargetVectorAlign && TargetVectorAlign < Align19.4k ) |
1947 | 6.04k | Align = TargetVectorAlign; |
1948 | 24.6k | if (VT->getVectorKind() == VectorType::SveFixedLengthDataVector) |
1949 | | // Adjust the alignment for fixed-length SVE vectors. This is important |
1950 | | // for non-power-of-2 vector lengths. |
1951 | 305 | Align = 128; |
1952 | 24.3k | else if (VT->getVectorKind() == VectorType::SveFixedLengthPredicateVector) |
1953 | | // Adjust the alignment for fixed-length SVE predicates. |
1954 | 29 | Align = 16; |
1955 | 24.6k | break; |
1956 | 634 | } |
1957 | | |
1958 | 106 | case Type::ConstantMatrix: { |
1959 | 106 | const auto *MT = cast<ConstantMatrixType>(T); |
1960 | 106 | TypeInfo ElementInfo = getTypeInfo(MT->getElementType()); |
1961 | | // The internal layout of a matrix value is implementation defined. |
1962 | | // Initially be ABI compatible with arrays with respect to alignment and |
1963 | | // size. |
1964 | 106 | Width = ElementInfo.Width * MT->getNumRows() * MT->getNumColumns(); |
1965 | 106 | Align = ElementInfo.Align; |
1966 | 106 | break; |
1967 | 634 | } |
1968 | | |
1969 | 298k | case Type::Builtin: |
1970 | 298k | switch (cast<BuiltinType>(T)->getKind()) { |
1971 | 0 | default: llvm_unreachable("Unknown builtin type!"); |
1972 | 93 | case BuiltinType::Void: |
1973 | | // GCC extension: alignof(void) = 8 bits. |
1974 | 93 | Width = 0; |
1975 | 93 | Align = 8; |
1976 | 93 | break; |
1977 | 6.09k | case BuiltinType::Bool: |
1978 | 6.09k | Width = Target->getBoolWidth(); |
1979 | 6.09k | Align = Target->getBoolAlign(); |
1980 | 6.09k | break; |
1981 | 44.0k | case BuiltinType::Char_S: |
1982 | 44.2k | case BuiltinType::Char_U: |
1983 | 54.2k | case BuiltinType::UChar: |
1984 | 58.6k | case BuiltinType::SChar: |
1985 | 58.6k | case BuiltinType::Char8: |
1986 | 58.6k | Width = Target->getCharWidth(); |
1987 | 58.6k | Align = Target->getCharAlign(); |
1988 | 58.6k | break; |
1989 | 22.0k | case BuiltinType::WChar_S: |
1990 | 22.7k | case BuiltinType::WChar_U: |
1991 | 22.7k | Width = Target->getWCharWidth(); |
1992 | 22.7k | Align = Target->getWCharAlign(); |
1993 | 22.7k | break; |
1994 | 490 | case BuiltinType::Char16: |
1995 | 490 | Width = Target->getChar16Width(); |
1996 | 490 | Align = Target->getChar16Align(); |
1997 | 490 | break; |
1998 | 490 | case BuiltinType::Char32: |
1999 | 490 | Width = Target->getChar32Width(); |
2000 | 490 | Align = Target->getChar32Align(); |
2001 | 490 | break; |
2002 | 7.00k | case BuiltinType::UShort: |
2003 | 25.7k | case BuiltinType::Short: |
2004 | 25.7k | Width = Target->getShortWidth(); |
2005 | 25.7k | Align = Target->getShortAlign(); |
2006 | 25.7k | break; |
2007 | 17.8k | case BuiltinType::UInt: |
2008 | 64.5k | case BuiltinType::Int: |
2009 | 64.5k | Width = Target->getIntWidth(); |
2010 | 64.5k | Align = Target->getIntAlign(); |
2011 | 64.5k | break; |
2012 | 55.7k | case BuiltinType::ULong: |
2013 | 78.4k | case BuiltinType::Long: |
2014 | 78.4k | Width = Target->getLongWidth(); |
2015 | 78.4k | Align = Target->getLongAlign(); |
2016 | 78.4k | break; |
2017 | 4.81k | case BuiltinType::ULongLong: |
2018 | 10.9k | case BuiltinType::LongLong: |
2019 | 10.9k | Width = Target->getLongLongWidth(); |
2020 | 10.9k | Align = Target->getLongLongAlign(); |
2021 | 10.9k | break; |
2022 | 863 | case BuiltinType::Int128: |
2023 | 1.74k | case BuiltinType::UInt128: |
2024 | 1.74k | Width = 128; |
2025 | 1.74k | Align = 128; // int128_t is 128-bit aligned on all targets. |
2026 | 1.74k | break; |
2027 | 37 | case BuiltinType::ShortAccum: |
2028 | 78 | case BuiltinType::UShortAccum: |
2029 | 113 | case BuiltinType::SatShortAccum: |
2030 | 148 | case BuiltinType::SatUShortAccum: |
2031 | 148 | Width = Target->getShortAccumWidth(); |
2032 | 148 | Align = Target->getShortAccumAlign(); |
2033 | 148 | break; |
2034 | 48 | case BuiltinType::Accum: |
2035 | 85 | case BuiltinType::UAccum: |
2036 | 109 | case BuiltinType::SatAccum: |
2037 | 131 | case BuiltinType::SatUAccum: |
2038 | 131 | Width = Target->getAccumWidth(); |
2039 | 131 | Align = Target->getAccumAlign(); |
2040 | 131 | break; |
2041 | 36 | case BuiltinType::LongAccum: |
2042 | 65 | case BuiltinType::ULongAccum: |
2043 | 87 | case BuiltinType::SatLongAccum: |
2044 | 107 | case BuiltinType::SatULongAccum: |
2045 | 107 | Width = Target->getLongAccumWidth(); |
2046 | 107 | Align = Target->getLongAccumAlign(); |
2047 | 107 | break; |
2048 | 38 | case BuiltinType::ShortFract: |
2049 | 63 | case BuiltinType::UShortFract: |
2050 | 80 | case BuiltinType::SatShortFract: |
2051 | 88 | case BuiltinType::SatUShortFract: |
2052 | 88 | Width = Target->getShortFractWidth(); |
2053 | 88 | Align = Target->getShortFractAlign(); |
2054 | 88 | break; |
2055 | 51 | case BuiltinType::Fract: |
2056 | 86 | case BuiltinType::UFract: |
2057 | 100 | case BuiltinType::SatFract: |
2058 | 124 | case BuiltinType::SatUFract: |
2059 | 124 | Width = Target->getFractWidth(); |
2060 | 124 | Align = Target->getFractAlign(); |
2061 | 124 | break; |
2062 | 33 | case BuiltinType::LongFract: |
2063 | 58 | case BuiltinType::ULongFract: |
2064 | 70 | case BuiltinType::SatLongFract: |
2065 | 76 | case BuiltinType::SatULongFract: |
2066 | 76 | Width = Target->getLongFractWidth(); |
2067 | 76 | Align = Target->getLongFractAlign(); |
2068 | 76 | break; |
2069 | 205 | case BuiltinType::BFloat16: |
2070 | 205 | Width = Target->getBFloat16Width(); |
2071 | 205 | Align = Target->getBFloat16Align(); |
2072 | 205 | break; |
2073 | 20 | case BuiltinType::Float16: |
2074 | 2.01k | case BuiltinType::Half: |
2075 | 2.01k | if (Target->hasFloat16Type() || !getLangOpts().OpenMP36 || |
2076 | 2.01k | !getLangOpts().OpenMPIsDevice0 ) { |
2077 | 2.01k | Width = Target->getHalfWidth(); |
2078 | 2.01k | Align = Target->getHalfAlign(); |
2079 | 0 | } else { |
2080 | 0 | assert(getLangOpts().OpenMP && getLangOpts().OpenMPIsDevice && |
2081 | 0 | "Expected OpenMP device compilation."); |
2082 | 0 | Width = AuxTarget->getHalfWidth(); |
2083 | 0 | Align = AuxTarget->getHalfAlign(); |
2084 | 0 | } |
2085 | 2.01k | break; |
2086 | 8.63k | case BuiltinType::Float: |
2087 | 8.63k | Width = Target->getFloatWidth(); |
2088 | 8.63k | Align = Target->getFloatAlign(); |
2089 | 8.63k | break; |
2090 | 8.50k | case BuiltinType::Double: |
2091 | 8.50k | Width = Target->getDoubleWidth(); |
2092 | 8.50k | Align = Target->getDoubleAlign(); |
2093 | 8.50k | break; |
2094 | 1.49k | case BuiltinType::LongDouble: |
2095 | 1.49k | if (getLangOpts().OpenMP && getLangOpts().OpenMPIsDevice59 && |
2096 | 18 | (Target->getLongDoubleWidth() != AuxTarget->getLongDoubleWidth() || |
2097 | 17 | Target->getLongDoubleAlign() != AuxTarget->getLongDoubleAlign()1 )) { |
2098 | 17 | Width = AuxTarget->getLongDoubleWidth(); |
2099 | 17 | Align = AuxTarget->getLongDoubleAlign(); |
2100 | 1.48k | } else { |
2101 | 1.48k | Width = Target->getLongDoubleWidth(); |
2102 | 1.48k | Align = Target->getLongDoubleAlign(); |
2103 | 1.48k | } |
2104 | 1.49k | break; |
2105 | 26 | case BuiltinType::Float128: |
2106 | 26 | if (Target->hasFloat128Type() || !getLangOpts().OpenMP4 || |
2107 | 24 | !getLangOpts().OpenMPIsDevice2 ) { |
2108 | 24 | Width = Target->getFloat128Width(); |
2109 | 24 | Align = Target->getFloat128Align(); |
2110 | 2 | } else { |
2111 | 2 | assert(getLangOpts().OpenMP && getLangOpts().OpenMPIsDevice && |
2112 | 2 | "Expected OpenMP device compilation."); |
2113 | 2 | Width = AuxTarget->getFloat128Width(); |
2114 | 2 | Align = AuxTarget->getFloat128Align(); |
2115 | 2 | } |
2116 | 26 | break; |
2117 | 245 | case BuiltinType::NullPtr: |
2118 | 245 | Width = Target->getPointerWidth(0); // C++ 3.9.1p11: sizeof(nullptr_t) |
2119 | 245 | Align = Target->getPointerAlign(0); // == sizeof(void*) |
2120 | 245 | break; |
2121 | 24 | case BuiltinType::ObjCId: |
2122 | 45 | case BuiltinType::ObjCClass: |
2123 | 49 | case BuiltinType::ObjCSel: |
2124 | 49 | Width = Target->getPointerWidth(0); |
2125 | 49 | Align = Target->getPointerAlign(0); |
2126 | 49 | break; |
2127 | 11 | case BuiltinType::OCLSampler: |
2128 | 15 | case BuiltinType::OCLEvent: |
2129 | 22 | case BuiltinType::OCLClkEvent: |
2130 | 34 | case BuiltinType::OCLQueue: |
2131 | 38 | case BuiltinType::OCLReserveID: |
2132 | 38 | #define IMAGE_TYPE(ImgType, Id, SingletonId, Access, Suffix) \ |
2133 | 2.36k | case BuiltinType::Id: |
2134 | 2.36k | #include "clang/Basic/OpenCLImageTypes.def"38 |
2135 | 2.36k | #define EXT_OPAQUE_TYPE(ExtType, Id, Ext) \ |
2136 | 966 | case BuiltinType::Id: |
2137 | 86 | #include "clang/Basic/OpenCLExtensionTypes.def"74 |
2138 | 86 | AS = getTargetAddressSpace( |
2139 | 86 | Target->getOpenCLTypeAddrSpace(getOpenCLTypeKind(T))); |
2140 | 86 | Width = Target->getPointerWidth(AS); |
2141 | 86 | Align = Target->getPointerAlign(AS); |
2142 | 86 | break; |
2143 | | // The SVE types are effectively target-specific. The length of an |
2144 | | // SVE_VECTOR_TYPE is only known at runtime, but it is always a multiple |
2145 | | // of 128 bits. There is one predicate bit for each vector byte, so the |
2146 | | // length of an SVE_PREDICATE_TYPE is always a multiple of 16 bits. |
2147 | | // |
2148 | | // Because the length is only known at runtime, we use a dummy value |
2149 | | // of 0 for the static length. The alignment values are those defined |
2150 | | // by the Procedure Call Standard for the Arm Architecture. |
2151 | 880 | #define SVE_VECTOR_TYPE(Name, MangledName, Id, SingletonId, NumEls, ElBits, \ |
2152 | 880 | IsSigned, IsFP, IsBF) \ |
2153 | 6.00k | case BuiltinType::Id: \ |
2154 | 6.00k | Width = 0; \ |
2155 | 6.00k | Align = 128; \ |
2156 | 6.00k | break; |
2157 | 880 | #define SVE_PREDICATE_TYPE(Name, MangledName, Id, SingletonId, NumEls) \ |
2158 | 612 | case BuiltinType::Id: \ |
2159 | 612 | Width = 0; \ |
2160 | 612 | Align = 16; \ |
2161 | 612 | break; |
2162 | 880 | #include "clang/Basic/AArch64SVEACLETypes.def"86 |
2163 | 880 | #define PPC_VECTOR_TYPE(Name, Id, Size) \ |
2164 | 8 | case BuiltinType::Id: \ |
2165 | 8 | Width = Size; \ |
2166 | 8 | Align = Size; \ |
2167 | 8 | break; |
2168 | 85 | #include "clang/Basic/PPCTypes.def" |
2169 | 298k | } |
2170 | 298k | break; |
2171 | 36.0k | case Type::ObjCObjectPointer: |
2172 | 36.0k | Width = Target->getPointerWidth(0); |
2173 | 36.0k | Align = Target->getPointerAlign(0); |
2174 | 36.0k | break; |
2175 | 1.56k | case Type::BlockPointer: |
2176 | 1.56k | AS = getTargetAddressSpace(cast<BlockPointerType>(T)->getPointeeType()); |
2177 | 1.56k | Width = Target->getPointerWidth(AS); |
2178 | 1.56k | Align = Target->getPointerAlign(AS); |
2179 | 1.56k | break; |
2180 | 101k | case Type::LValueReference: |
2181 | 120k | case Type::RValueReference: |
2182 | | // alignof and sizeof should never enter this code path here, so we go |
2183 | | // the pointer route. |
2184 | 120k | AS = getTargetAddressSpace(cast<ReferenceType>(T)->getPointeeType()); |
2185 | 120k | Width = Target->getPointerWidth(AS); |
2186 | 120k | Align = Target->getPointerAlign(AS); |
2187 | 120k | break; |
2188 | 322k | case Type::Pointer: |
2189 | 322k | AS = getTargetAddressSpace(cast<PointerType>(T)->getPointeeType()); |
2190 | 322k | Width = Target->getPointerWidth(AS); |
2191 | 322k | Align = Target->getPointerAlign(AS); |
2192 | 322k | break; |
2193 | 1.61k | case Type::MemberPointer: { |
2194 | 1.61k | const auto *MPT = cast<MemberPointerType>(T); |
2195 | 1.61k | CXXABI::MemberPointerInfo MPI = ABI->getMemberPointerInfo(MPT); |
2196 | 1.61k | Width = MPI.Width; |
2197 | 1.61k | Align = MPI.Align; |
2198 | 1.61k | break; |
2199 | 101k | } |
2200 | 761 | case Type::Complex: { |
2201 | | // Complex types have the same alignment as their elements, but twice the |
2202 | | // size. |
2203 | 761 | TypeInfo EltInfo = getTypeInfo(cast<ComplexType>(T)->getElementType()); |
2204 | 761 | Width = EltInfo.Width * 2; |
2205 | 761 | Align = EltInfo.Align; |
2206 | 761 | break; |
2207 | 101k | } |
2208 | 10 | case Type::ObjCObject: |
2209 | 10 | return getTypeInfo(cast<ObjCObjectType>(T)->getBaseType().getTypePtr()); |
2210 | 0 | case Type::Adjusted: |
2211 | 1.73k | case Type::Decayed: |
2212 | 1.73k | return getTypeInfo(cast<AdjustedType>(T)->getAdjustedType().getTypePtr()); |
2213 | 4.00k | case Type::ObjCInterface: { |
2214 | 4.00k | const auto *ObjCI = cast<ObjCInterfaceType>(T); |
2215 | 4.00k | if (ObjCI->getDecl()->isInvalidDecl()) { |
2216 | 1 | Width = 8; |
2217 | 1 | Align = 8; |
2218 | 1 | break; |
2219 | 1 | } |
2220 | 3.99k | const ASTRecordLayout &Layout = getASTObjCInterfaceLayout(ObjCI->getDecl()); |
2221 | 3.99k | Width = toBits(Layout.getSize()); |
2222 | 3.99k | Align = toBits(Layout.getAlignment()); |
2223 | 3.99k | break; |
2224 | 3.99k | } |
2225 | 418 | case Type::ExtInt: { |
2226 | 418 | const auto *EIT = cast<ExtIntType>(T); |
2227 | 418 | Align = |
2228 | 418 | std::min(static_cast<unsigned>(std::max( |
2229 | 418 | getCharWidth(), llvm::PowerOf2Ceil(EIT->getNumBits()))), |
2230 | 418 | Target->getLongLongAlign()); |
2231 | 418 | Width = llvm::alignTo(EIT->getNumBits(), Align); |
2232 | 418 | break; |
2233 | 3.99k | } |
2234 | 241k | case Type::Record: |
2235 | 263k | case Type::Enum: { |
2236 | 263k | const auto *TT = cast<TagType>(T); |
2237 | | |
2238 | 263k | if (TT->getDecl()->isInvalidDecl()) { |
2239 | 61 | Width = 8; |
2240 | 61 | Align = 8; |
2241 | 61 | break; |
2242 | 61 | } |
2243 | | |
2244 | 263k | if (const auto *ET = dyn_cast<EnumType>(TT)) { |
2245 | 21.9k | const EnumDecl *ED = ET->getDecl(); |
2246 | 21.9k | TypeInfo Info = |
2247 | 21.9k | getTypeInfo(ED->getIntegerType()->getUnqualifiedDesugaredType()); |
2248 | 21.9k | if (unsigned AttrAlign = ED->getMaxAlignment()) { |
2249 | 1 | Info.Align = AttrAlign; |
2250 | 1 | Info.AlignIsRequired = true; |
2251 | 1 | } |
2252 | 21.9k | return Info; |
2253 | 21.9k | } |
2254 | | |
2255 | 241k | const auto *RT = cast<RecordType>(TT); |
2256 | 241k | const RecordDecl *RD = RT->getDecl(); |
2257 | 241k | const ASTRecordLayout &Layout = getASTRecordLayout(RD); |
2258 | 241k | Width = toBits(Layout.getSize()); |
2259 | 241k | Align = toBits(Layout.getAlignment()); |
2260 | 241k | AlignIsRequired = RD->hasAttr<AlignedAttr>(); |
2261 | 241k | break; |
2262 | 241k | } |
2263 | | |
2264 | 62.4k | case Type::SubstTemplateTypeParm: |
2265 | 62.4k | return getTypeInfo(cast<SubstTemplateTypeParmType>(T)-> |
2266 | 62.4k | getReplacementType().getTypePtr()); |
2267 | | |
2268 | 971 | case Type::Auto: |
2269 | 994 | case Type::DeducedTemplateSpecialization: { |
2270 | 994 | const auto *A = cast<DeducedType>(T); |
2271 | 994 | assert(!A->getDeducedType().isNull() && |
2272 | 994 | "cannot request the size of an undeduced or dependent auto type"); |
2273 | 994 | return getTypeInfo(A->getDeducedType().getTypePtr()); |
2274 | 971 | } |
2275 | | |
2276 | 178 | case Type::Paren: |
2277 | 178 | return getTypeInfo(cast<ParenType>(T)->getInnerType().getTypePtr()); |
2278 | | |
2279 | 304 | case Type::MacroQualified: |
2280 | 304 | return getTypeInfo( |
2281 | 304 | cast<MacroQualifiedType>(T)->getUnderlyingType().getTypePtr()); |
2282 | | |
2283 | 28 | case Type::ObjCTypeParam: |
2284 | 28 | return getTypeInfo(cast<ObjCTypeParamType>(T)->desugar().getTypePtr()); |
2285 | | |
2286 | 345k | case Type::Typedef: { |
2287 | 345k | const TypedefNameDecl *Typedef = cast<TypedefType>(T)->getDecl(); |
2288 | 345k | TypeInfo Info = getTypeInfo(Typedef->getUnderlyingType().getTypePtr()); |
2289 | | // If the typedef has an aligned attribute on it, it overrides any computed |
2290 | | // alignment we have. This violates the GCC documentation (which says that |
2291 | | // attribute(aligned) can only round up) but matches its implementation. |
2292 | 345k | if (unsigned AttrAlign = Typedef->getMaxAlignment()) { |
2293 | 4.73k | Align = AttrAlign; |
2294 | 4.73k | AlignIsRequired = true; |
2295 | 340k | } else { |
2296 | 340k | Align = Info.Align; |
2297 | 340k | AlignIsRequired = Info.AlignIsRequired; |
2298 | 340k | } |
2299 | 345k | Width = Info.Width; |
2300 | 345k | break; |
2301 | 971 | } |
2302 | | |
2303 | 154k | case Type::Elaborated: |
2304 | 154k | return getTypeInfo(cast<ElaboratedType>(T)->getNamedType().getTypePtr()); |
2305 | | |
2306 | 8.15k | case Type::Attributed: |
2307 | 8.15k | return getTypeInfo( |
2308 | 8.15k | cast<AttributedType>(T)->getEquivalentType().getTypePtr()); |
2309 | | |
2310 | 1.25k | case Type::Atomic: { |
2311 | | // Start with the base type information. |
2312 | 1.25k | TypeInfo Info = getTypeInfo(cast<AtomicType>(T)->getValueType()); |
2313 | 1.25k | Width = Info.Width; |
2314 | 1.25k | Align = Info.Align; |
2315 | | |
2316 | 1.25k | if (!Width) { |
2317 | | // An otherwise zero-sized type should still generate an |
2318 | | // atomic operation. |
2319 | 4 | Width = Target->getCharWidth(); |
2320 | 4 | assert(Align); |
2321 | 1.25k | } else if (Width <= Target->getMaxAtomicPromoteWidth()) { |
2322 | | // If the size of the type doesn't exceed the platform's max |
2323 | | // atomic promotion width, make the size and alignment more |
2324 | | // favorable to atomic operations: |
2325 | | |
2326 | | // Round the size up to a power of 2. |
2327 | 1.12k | if (!llvm::isPowerOf2_64(Width)) |
2328 | 16 | Width = llvm::NextPowerOf2(Width); |
2329 | | |
2330 | | // Set the alignment equal to the size. |
2331 | 1.12k | Align = static_cast<unsigned>(Width); |
2332 | 1.12k | } |
2333 | 1.25k | } |
2334 | 1.25k | break; |
2335 | | |
2336 | 48 | case Type::Pipe: |
2337 | 48 | Width = Target->getPointerWidth(getTargetAddressSpace(LangAS::opencl_global)); |
2338 | 48 | Align = Target->getPointerAlign(getTargetAddressSpace(LangAS::opencl_global)); |
2339 | 48 | break; |
2340 | 1.45M | } |
2341 | | |
2342 | 1.45M | assert(llvm::isPowerOf2_32(Align) && "Alignment must be power of 2"); |
2343 | 1.45M | return TypeInfo(Width, Align, AlignIsRequired); |
2344 | 1.45M | } |
2345 | | |
2346 | 448 | unsigned ASTContext::getTypeUnadjustedAlign(const Type *T) const { |
2347 | 448 | UnadjustedAlignMap::iterator I = MemoizedUnadjustedAlign.find(T); |
2348 | 448 | if (I != MemoizedUnadjustedAlign.end()) |
2349 | 95 | return I->second; |
2350 | | |
2351 | 353 | unsigned UnadjustedAlign; |
2352 | 353 | if (const auto *RT = T->getAs<RecordType>()) { |
2353 | 275 | const RecordDecl *RD = RT->getDecl(); |
2354 | 275 | const ASTRecordLayout &Layout = getASTRecordLayout(RD); |
2355 | 275 | UnadjustedAlign = toBits(Layout.getUnadjustedAlignment()); |
2356 | 78 | } else if (const auto *ObjCI = T->getAs<ObjCInterfaceType>()) { |
2357 | 0 | const ASTRecordLayout &Layout = getASTObjCInterfaceLayout(ObjCI->getDecl()); |
2358 | 0 | UnadjustedAlign = toBits(Layout.getUnadjustedAlignment()); |
2359 | 78 | } else { |
2360 | 78 | UnadjustedAlign = getTypeAlign(T->getUnqualifiedDesugaredType()); |
2361 | 78 | } |
2362 | | |
2363 | 353 | MemoizedUnadjustedAlign[T] = UnadjustedAlign; |
2364 | 353 | return UnadjustedAlign; |
2365 | 353 | } |
2366 | | |
2367 | 329 | unsigned ASTContext::getOpenMPDefaultSimdAlign(QualType T) const { |
2368 | 329 | unsigned SimdAlign = getTargetInfo().getSimdDefaultAlign(); |
2369 | 329 | return SimdAlign; |
2370 | 329 | } |
2371 | | |
2372 | | /// toCharUnitsFromBits - Convert a size in bits to a size in characters. |
2373 | 13.4M | CharUnits ASTContext::toCharUnitsFromBits(int64_t BitSize) const { |
2374 | 13.4M | return CharUnits::fromQuantity(BitSize / getCharWidth()); |
2375 | 13.4M | } |
2376 | | |
2377 | | /// toBits - Convert a size in characters to a size in characters. |
2378 | 6.60M | int64_t ASTContext::toBits(CharUnits CharSize) const { |
2379 | 6.60M | return CharSize.getQuantity() * getCharWidth(); |
2380 | 6.60M | } |
2381 | | |
2382 | | /// getTypeSizeInChars - Return the size of the specified type, in characters. |
2383 | | /// This method does not work on incomplete types. |
2384 | 2.87M | CharUnits ASTContext::getTypeSizeInChars(QualType T) const { |
2385 | 2.87M | return getTypeInfoInChars(T).Width; |
2386 | 2.87M | } |
2387 | 16.3k | CharUnits ASTContext::getTypeSizeInChars(const Type *T) const { |
2388 | 16.3k | return getTypeInfoInChars(T).Width; |
2389 | 16.3k | } |
2390 | | |
2391 | | /// getTypeAlignInChars - Return the ABI-specified alignment of a type, in |
2392 | | /// characters. This method does not work on incomplete types. |
2393 | 1.14M | CharUnits ASTContext::getTypeAlignInChars(QualType T) const { |
2394 | 1.14M | return toCharUnitsFromBits(getTypeAlign(T)); |
2395 | 1.14M | } |
2396 | 2.21k | CharUnits ASTContext::getTypeAlignInChars(const Type *T) const { |
2397 | 2.21k | return toCharUnitsFromBits(getTypeAlign(T)); |
2398 | 2.21k | } |
2399 | | |
2400 | | /// getTypeUnadjustedAlignInChars - Return the ABI-specified alignment of a |
2401 | | /// type, in characters, before alignment adustments. This method does |
2402 | | /// not work on incomplete types. |
2403 | 375 | CharUnits ASTContext::getTypeUnadjustedAlignInChars(QualType T) const { |
2404 | 375 | return toCharUnitsFromBits(getTypeUnadjustedAlign(T)); |
2405 | 375 | } |
2406 | 0 | CharUnits ASTContext::getTypeUnadjustedAlignInChars(const Type *T) const { |
2407 | 0 | return toCharUnitsFromBits(getTypeUnadjustedAlign(T)); |
2408 | 0 | } |
2409 | | |
2410 | | /// getPreferredTypeAlign - Return the "preferred" alignment of the specified |
2411 | | /// type for the current target in bits. This can be different than the ABI |
2412 | | /// alignment in cases where it is beneficial for performance or backwards |
2413 | | /// compatibility preserving to overalign a data type. (Note: despite the name, |
2414 | | /// the preferred alignment is ABI-impacting, and not an optimization.) |
2415 | 1.56M | unsigned ASTContext::getPreferredTypeAlign(const Type *T) const { |
2416 | 1.56M | TypeInfo TI = getTypeInfo(T); |
2417 | 1.56M | unsigned ABIAlign = TI.Align; |
2418 | | |
2419 | 1.56M | T = T->getBaseElementTypeUnsafe(); |
2420 | | |
2421 | | // The preferred alignment of member pointers is that of a pointer. |
2422 | 1.56M | if (T->isMemberPointerType()) |
2423 | 1.25k | return getPreferredTypeAlign(getPointerDiffType().getTypePtr()); |
2424 | | |
2425 | 1.56M | if (!Target->allowsLargerPreferedTypeAlignment()) |
2426 | 312 | return ABIAlign; |
2427 | | |
2428 | 1.56M | if (const auto *RT = T->getAs<RecordType>()) { |
2429 | 72.6k | if (TI.AlignIsRequired || RT->getDecl()->isInvalidDecl()72.1k ) |
2430 | 529 | return ABIAlign; |
2431 | | |
2432 | 72.1k | unsigned PreferredAlign = static_cast<unsigned>( |
2433 | 72.1k | toBits(getASTRecordLayout(RT->getDecl()).PreferredAlignment)); |
2434 | 72.1k | assert(PreferredAlign >= ABIAlign && |
2435 | 72.1k | "PreferredAlign should be at least as large as ABIAlign."); |
2436 | 72.1k | return PreferredAlign; |
2437 | 72.1k | } |
2438 | | |
2439 | | // Double (and, for targets supporting AIX `power` alignment, long double) and |
2440 | | // long long should be naturally aligned (despite requiring less alignment) if |
2441 | | // possible. |
2442 | 1.49M | if (const auto *CT = T->getAs<ComplexType>()) |
2443 | 1.51k | T = CT->getElementType().getTypePtr(); |
2444 | 1.49M | if (const auto *ET = T->getAs<EnumType>()) |
2445 | 1.66k | T = ET->getDecl()->getIntegerType().getTypePtr(); |
2446 | 1.49M | if (T->isSpecificBuiltinType(BuiltinType::Double) || |
2447 | 1.47M | T->isSpecificBuiltinType(BuiltinType::LongLong) || |
2448 | 1.47M | T->isSpecificBuiltinType(BuiltinType::ULongLong) || |
2449 | 1.46M | (T->isSpecificBuiltinType(BuiltinType::LongDouble) && |
2450 | 1.95k | Target->defaultsToAIXPowerAlignment())) |
2451 | | // Don't increase the alignment if an alignment attribute was specified on a |
2452 | | // typedef declaration. |
2453 | 24.4k | if (!TI.AlignIsRequired) |
2454 | 21.8k | return std::max(ABIAlign, (unsigned)getTypeSize(T)); |
2455 | | |
2456 | 1.47M | return ABIAlign; |
2457 | 1.47M | } |
2458 | | |
2459 | | /// getTargetDefaultAlignForAttributeAligned - Return the default alignment |
2460 | | /// for __attribute__((aligned)) on this target, to be used if no alignment |
2461 | | /// value is specified. |
2462 | 59 | unsigned ASTContext::getTargetDefaultAlignForAttributeAligned() const { |
2463 | 59 | return getTargetInfo().getDefaultAlignForAttributeAligned(); |
2464 | 59 | } |
2465 | | |
2466 | | /// getAlignOfGlobalVar - Return the alignment in bits that should be given |
2467 | | /// to a global variable of the specified type. |
2468 | 66.8k | unsigned ASTContext::getAlignOfGlobalVar(QualType T) const { |
2469 | 66.8k | uint64_t TypeSize = getTypeSize(T.getTypePtr()); |
2470 | 66.8k | return std::max(getPreferredTypeAlign(T), |
2471 | 66.8k | getTargetInfo().getMinGlobalAlign(TypeSize)); |
2472 | 66.8k | } |
2473 | | |
2474 | | /// getAlignOfGlobalVarInChars - Return the alignment in characters that |
2475 | | /// should be given to a global variable of the specified type. |
2476 | 66.8k | CharUnits ASTContext::getAlignOfGlobalVarInChars(QualType T) const { |
2477 | 66.8k | return toCharUnitsFromBits(getAlignOfGlobalVar(T)); |
2478 | 66.8k | } |
2479 | | |
2480 | 24 | CharUnits ASTContext::getOffsetOfBaseWithVBPtr(const CXXRecordDecl *RD) const { |
2481 | 24 | CharUnits Offset = CharUnits::Zero(); |
2482 | 24 | const ASTRecordLayout *Layout = &getASTRecordLayout(RD); |
2483 | 30 | while (const CXXRecordDecl *Base = Layout->getBaseSharingVBPtr()) { |
2484 | 6 | Offset += Layout->getBaseClassOffset(Base); |
2485 | 6 | Layout = &getASTRecordLayout(Base); |
2486 | 6 | } |
2487 | 24 | return Offset; |
2488 | 24 | } |
2489 | | |
2490 | 221 | CharUnits ASTContext::getMemberPointerPathAdjustment(const APValue &MP) const { |
2491 | 221 | const ValueDecl *MPD = MP.getMemberPointerDecl(); |
2492 | 221 | CharUnits ThisAdjustment = CharUnits::Zero(); |
2493 | 221 | ArrayRef<const CXXRecordDecl*> Path = MP.getMemberPointerPath(); |
2494 | 221 | bool DerivedMember = MP.isMemberPointerToDerivedMember(); |
2495 | 221 | const CXXRecordDecl *RD = cast<CXXRecordDecl>(MPD->getDeclContext()); |
2496 | 325 | for (unsigned I = 0, N = Path.size(); I != N; ++I104 ) { |
2497 | 104 | const CXXRecordDecl *Base = RD; |
2498 | 104 | const CXXRecordDecl *Derived = Path[I]; |
2499 | 104 | if (DerivedMember) |
2500 | 22 | std::swap(Base, Derived); |
2501 | 104 | ThisAdjustment += getASTRecordLayout(Derived).getBaseClassOffset(Base); |
2502 | 104 | RD = Path[I]; |
2503 | 104 | } |
2504 | 221 | if (DerivedMember) |
2505 | 19 | ThisAdjustment = -ThisAdjustment; |
2506 | 221 | return ThisAdjustment; |
2507 | 221 | } |
2508 | | |
2509 | | /// DeepCollectObjCIvars - |
2510 | | /// This routine first collects all declared, but not synthesized, ivars in |
2511 | | /// super class and then collects all ivars, including those synthesized for |
2512 | | /// current class. This routine is used for implementation of current class |
2513 | | /// when all ivars, declared and synthesized are known. |
2514 | | void ASTContext::DeepCollectObjCIvars(const ObjCInterfaceDecl *OI, |
2515 | | bool leafClass, |
2516 | 194 | SmallVectorImpl<const ObjCIvarDecl*> &Ivars) const { |
2517 | 194 | if (const ObjCInterfaceDecl *SuperClass = OI->getSuperClass()) |
2518 | 38 | DeepCollectObjCIvars(SuperClass, false, Ivars); |
2519 | 194 | if (!leafClass) { |
2520 | 38 | for (const auto *I : OI->ivars()) |
2521 | 123 | Ivars.push_back(I); |
2522 | 156 | } else { |
2523 | 156 | auto *IDecl = const_cast<ObjCInterfaceDecl *>(OI); |
2524 | 479 | for (const ObjCIvarDecl *Iv = IDecl->all_declared_ivar_begin(); Iv; |
2525 | 323 | Iv= Iv->getNextIvar()) |
2526 | 323 | Ivars.push_back(Iv); |
2527 | 156 | } |
2528 | 194 | } |
2529 | | |
2530 | | /// CollectInheritedProtocols - Collect all protocols in current class and |
2531 | | /// those inherited by it. |
2532 | | void ASTContext::CollectInheritedProtocols(const Decl *CDecl, |
2533 | 1.35k | llvm::SmallPtrSet<ObjCProtocolDecl*, 8> &Protocols) { |
2534 | 1.35k | if (const auto *OI = dyn_cast<ObjCInterfaceDecl>(CDecl)) { |
2535 | | // We can use protocol_iterator here instead of |
2536 | | // all_referenced_protocol_iterator since we are walking all categories. |
2537 | 547 | for (auto *Proto : OI->all_referenced_protocols()) { |
2538 | 547 | CollectInheritedProtocols(Proto, Protocols); |
2539 | 547 | } |
2540 | | |
2541 | | // Categories of this Interface. |
2542 | 720 | for (const auto *Cat : OI->visible_categories()) |
2543 | 15 | CollectInheritedProtocols(Cat, Protocols); |
2544 | | |
2545 | 720 | if (ObjCInterfaceDecl *SD = OI->getSuperClass()) |
2546 | 671 | while (290 SD) { |
2547 | 381 | CollectInheritedProtocols(SD, Protocols); |
2548 | 381 | SD = SD->getSuperClass(); |
2549 | 381 | } |
2550 | 638 | } else if (const auto *OC = dyn_cast<ObjCCategoryDecl>(CDecl)) { |
2551 | 15 | for (auto *Proto : OC->protocols()) { |
2552 | 15 | CollectInheritedProtocols(Proto, Protocols); |
2553 | 15 | } |
2554 | 623 | } else if (const auto *OP = dyn_cast<ObjCProtocolDecl>(CDecl)) { |
2555 | | // Insert the protocol. |
2556 | 623 | if (!Protocols.insert( |
2557 | 623 | const_cast<ObjCProtocolDecl *>(OP->getCanonicalDecl())).second) |
2558 | 107 | return; |
2559 | | |
2560 | 516 | for (auto *Proto : OP->protocols()) |
2561 | 17 | CollectInheritedProtocols(Proto, Protocols); |
2562 | 516 | } |
2563 | 1.35k | } |
2564 | | |
2565 | | static bool unionHasUniqueObjectRepresentations(const ASTContext &Context, |
2566 | 12 | const RecordDecl *RD) { |
2567 | 12 | assert(RD->isUnion() && "Must be union type"); |
2568 | 12 | CharUnits UnionSize = Context.getTypeSizeInChars(RD->getTypeForDecl()); |
2569 | | |
2570 | 12 | for (const auto *Field : RD->fields()) { |
2571 | 12 | if (!Context.hasUniqueObjectRepresentations(Field->getType())) |
2572 | 0 | return false; |
2573 | 12 | CharUnits FieldSize = Context.getTypeSizeInChars(Field->getType()); |
2574 | 12 | if (FieldSize != UnionSize) |
2575 | 6 | return false; |
2576 | 12 | } |
2577 | 6 | return !RD->field_empty(); |
2578 | 12 | } |
2579 | | |
2580 | 21 | static bool isStructEmpty(QualType Ty) { |
2581 | 21 | const RecordDecl *RD = Ty->castAs<RecordType>()->getDecl(); |
2582 | | |
2583 | 21 | if (!RD->field_empty()) |
2584 | 15 | return false; |
2585 | | |
2586 | 6 | if (const auto *ClassDecl = dyn_cast<CXXRecordDecl>(RD)) |
2587 | 6 | return ClassDecl->isEmpty(); |
2588 | | |
2589 | 0 | return true; |
2590 | 0 | } |
2591 | | |
2592 | | static llvm::Optional<int64_t> |
2593 | | structHasUniqueObjectRepresentations(const ASTContext &Context, |
2594 | 92 | const RecordDecl *RD) { |
2595 | 92 | assert(!RD->isUnion() && "Must be struct/class type"); |
2596 | 92 | const auto &Layout = Context.getASTRecordLayout(RD); |
2597 | | |
2598 | 92 | int64_t CurOffsetInBits = 0; |
2599 | 92 | if (const auto *ClassDecl = dyn_cast<CXXRecordDecl>(RD)) { |
2600 | 92 | if (ClassDecl->isDynamicClass()) |
2601 | 0 | return llvm::None; |
2602 | | |
2603 | 92 | SmallVector<std::pair<QualType, int64_t>, 4> Bases; |
2604 | 21 | for (const auto &Base : ClassDecl->bases()) { |
2605 | | // Empty types can be inherited from, and non-empty types can potentially |
2606 | | // have tail padding, so just make sure there isn't an error. |
2607 | 21 | if (!isStructEmpty(Base.getType())) { |
2608 | 15 | llvm::Optional<int64_t> Size = structHasUniqueObjectRepresentations( |
2609 | 15 | Context, Base.getType()->castAs<RecordType>()->getDecl()); |
2610 | 15 | if (!Size) |
2611 | 3 | return llvm::None; |
2612 | 12 | Bases.emplace_back(Base.getType(), Size.getValue()); |
2613 | 12 | } |
2614 | 21 | } |
2615 | | |
2616 | 89 | llvm::sort(Bases, [&](const std::pair<QualType, int64_t> &L, |
2617 | 0 | const std::pair<QualType, int64_t> &R) { |
2618 | 0 | return Layout.getBaseClassOffset(L.first->getAsCXXRecordDecl()) < |
2619 | 0 | Layout.getBaseClassOffset(R.first->getAsCXXRecordDecl()); |
2620 | 0 | }); |
2621 | | |
2622 | 12 | for (const auto &Base : Bases) { |
2623 | 12 | int64_t BaseOffset = Context.toBits( |
2624 | 12 | Layout.getBaseClassOffset(Base.first->getAsCXXRecordDecl())); |
2625 | 12 | int64_t BaseSize = Base.second; |
2626 | 12 | if (BaseOffset != CurOffsetInBits) |
2627 | 0 | return llvm::None; |
2628 | 12 | CurOffsetInBits = BaseOffset + BaseSize; |
2629 | 12 | } |
2630 | 89 | } |
2631 | | |
2632 | 116 | for (const auto *Field : RD->fields())89 { |
2633 | 116 | if (!Field->getType()->isReferenceType() && |
2634 | 107 | !Context.hasUniqueObjectRepresentations(Field->getType())) |
2635 | 3 | return llvm::None; |
2636 | | |
2637 | 113 | int64_t FieldSizeInBits = |
2638 | 113 | Context.toBits(Context.getTypeSizeInChars(Field->getType())); |
2639 | 113 | if (Field->isBitField()) { |
2640 | 27 | int64_t BitfieldSize = Field->getBitWidthValue(Context); |
2641 | | |
2642 | 27 | if (BitfieldSize > FieldSizeInBits) |
2643 | 6 | return llvm::None; |
2644 | 21 | FieldSizeInBits = BitfieldSize; |
2645 | 21 | } |
2646 | | |
2647 | 107 | int64_t FieldOffsetInBits = Context.getFieldOffset(Field); |
2648 | | |
2649 | 107 | if (FieldOffsetInBits != CurOffsetInBits) |
2650 | 9 | return llvm::None; |
2651 | | |
2652 | 98 | CurOffsetInBits = FieldSizeInBits + FieldOffsetInBits; |
2653 | 98 | } |
2654 | | |
2655 | 71 | return CurOffsetInBits; |
2656 | 89 | } |
2657 | | |
2658 | 503 | bool ASTContext::hasUniqueObjectRepresentations(QualType Ty) const { |
2659 | | // C++17 [meta.unary.prop]: |
2660 | | // The predicate condition for a template specialization |
2661 | | // has_unique_object_representations<T> shall be |
2662 | | // satisfied if and only if: |
2663 | | // (9.1) - T is trivially copyable, and |
2664 | | // (9.2) - any two objects of type T with the same value have the same |
2665 | | // object representation, where two objects |
2666 | | // of array or non-union class type are considered to have the same value |
2667 | | // if their respective sequences of |
2668 | | // direct subobjects have the same values, and two objects of union type |
2669 | | // are considered to have the same |
2670 | | // value if they have the same active member and the corresponding members |
2671 | | // have the same value. |
2672 | | // The set of scalar types for which this condition holds is |
2673 | | // implementation-defined. [ Note: If a type has padding |
2674 | | // bits, the condition does not hold; otherwise, the condition holds true |
2675 | | // for unsigned integral types. -- end note ] |
2676 | 503 | assert(!Ty.isNull() && "Null QualType sent to unique object rep check"); |
2677 | | |
2678 | | // Arrays are unique only if their element type is unique. |
2679 | 503 | if (Ty->isArrayType()) |
2680 | 24 | return hasUniqueObjectRepresentations(getBaseElementType(Ty)); |
2681 | | |
2682 | | // (9.1) - T is trivially copyable... |
2683 | 479 | if (!Ty.isTriviallyCopyableType(*this)) |
2684 | 108 | return false; |
2685 | | |
2686 | | // All integrals and enums are unique. |
2687 | 371 | if (Ty->isIntegralOrEnumerationType()) |
2688 | 183 | return true; |
2689 | | |
2690 | | // All other pointers are unique. |
2691 | 188 | if (Ty->isPointerType()) |
2692 | 36 | return true; |
2693 | | |
2694 | 152 | if (Ty->isMemberPointerType()) { |
2695 | 36 | const auto *MPT = Ty->getAs<MemberPointerType>(); |
2696 | 36 | return !ABI->getMemberPointerInfo(MPT).HasPadding; |
2697 | 36 | } |
2698 | | |
2699 | 116 | if (Ty->isRecordType()) { |
2700 | 92 | const RecordDecl *Record = Ty->castAs<RecordType>()->getDecl(); |
2701 | | |
2702 | 92 | if (Record->isInvalidDecl()) |
2703 | 3 | return false; |
2704 | | |
2705 | 89 | if (Record->isUnion()) |
2706 | 12 | return unionHasUniqueObjectRepresentations(*this, Record); |
2707 | | |
2708 | 77 | Optional<int64_t> StructSize = |
2709 | 77 | structHasUniqueObjectRepresentations(*this, Record); |
2710 | | |
2711 | 77 | return StructSize && |
2712 | 59 | StructSize.getValue() == static_cast<int64_t>(getTypeSize(Ty)); |
2713 | 77 | } |
2714 | | |
2715 | | // FIXME: More cases to handle here (list by rsmith): |
2716 | | // vectors (careful about, eg, vector of 3 foo) |
2717 | | // _Complex int and friends |
2718 | | // _Atomic T |
2719 | | // Obj-C block pointers |
2720 | | // Obj-C object pointers |
2721 | | // and perhaps OpenCL's various builtin types (pipe, sampler_t, event_t, |
2722 | | // clk_event_t, queue_t, reserve_id_t) |
2723 | | // There're also Obj-C class types and the Obj-C selector type, but I think it |
2724 | | // makes sense for those to return false here. |
2725 | | |
2726 | 24 | return false; |
2727 | 24 | } |
2728 | | |
2729 | 2.59k | unsigned ASTContext::CountNonClassIvars(const ObjCInterfaceDecl *OI) const { |
2730 | 2.59k | unsigned count = 0; |
2731 | | // Count ivars declared in class extension. |
2732 | 2.59k | for (const auto *Ext : OI->known_extensions()) |
2733 | 44 | count += Ext->ivar_size(); |
2734 | | |
2735 | | // Count ivar defined in this class's implementation. This |
2736 | | // includes synthesized ivars. |
2737 | 2.59k | if (ObjCImplementationDecl *ImplDecl = OI->getImplementation()) |
2738 | 2.59k | count += ImplDecl->ivar_size(); |
2739 | | |
2740 | 2.59k | return count; |
2741 | 2.59k | } |
2742 | | |
2743 | 308 | bool ASTContext::isSentinelNullExpr(const Expr *E) { |
2744 | 308 | if (!E) |
2745 | 0 | return false; |
2746 | | |
2747 | | // nullptr_t is always treated as null. |
2748 | 308 | if (E->getType()->isNullPtrType()) return true19 ; |
2749 | | |
2750 | 289 | if (E->getType()->isAnyPointerType() && |
2751 | 262 | E->IgnoreParenCasts()->isNullPointerConstant(*this, |
2752 | 262 | Expr::NPC_ValueDependentIsNull)) |
2753 | 261 | return true; |
2754 | | |
2755 | | // Unfortunately, __null has type 'int'. |
2756 | 28 | if (isa<GNUNullExpr>(E)) return true3 ; |
2757 | | |
2758 | 25 | return false; |
2759 | 25 | } |
2760 | | |
2761 | | /// Get the implementation of ObjCInterfaceDecl, or nullptr if none |
2762 | | /// exists. |
2763 | 1.04M | ObjCImplementationDecl *ASTContext::getObjCImplementation(ObjCInterfaceDecl *D) { |
2764 | 1.04M | llvm::DenseMap<ObjCContainerDecl*, ObjCImplDecl*>::iterator |
2765 | 1.04M | I = ObjCImpls.find(D); |
2766 | 1.04M | if (I != ObjCImpls.end()) |
2767 | 47.7k | return cast<ObjCImplementationDecl>(I->second); |
2768 | 994k | return nullptr; |
2769 | 994k | } |
2770 | | |
2771 | | /// Get the implementation of ObjCCategoryDecl, or nullptr if none |
2772 | | /// exists. |
2773 | 1.37M | ObjCCategoryImplDecl *ASTContext::getObjCImplementation(ObjCCategoryDecl *D) { |
2774 | 1.37M | llvm::DenseMap<ObjCContainerDecl*, ObjCImplDecl*>::iterator |
2775 | 1.37M | I = ObjCImpls.find(D); |
2776 | 1.37M | if (I != ObjCImpls.end()) |
2777 | 3.45k | return cast<ObjCCategoryImplDecl>(I->second); |
2778 | 1.37M | return nullptr; |
2779 | 1.37M | } |
2780 | | |
2781 | | /// Set the implementation of ObjCInterfaceDecl. |
2782 | | void ASTContext::setObjCImplementation(ObjCInterfaceDecl *IFaceD, |
2783 | 4.84k | ObjCImplementationDecl *ImplD) { |
2784 | 4.84k | assert(IFaceD && ImplD && "Passed null params"); |
2785 | 4.84k | ObjCImpls[IFaceD] = ImplD; |
2786 | 4.84k | } |
2787 | | |
2788 | | /// Set the implementation of ObjCCategoryDecl. |
2789 | | void ASTContext::setObjCImplementation(ObjCCategoryDecl *CatD, |
2790 | 516 | ObjCCategoryImplDecl *ImplD) { |
2791 | 516 | assert(CatD && ImplD && "Passed null params"); |
2792 | 516 | ObjCImpls[CatD] = ImplD; |
2793 | 516 | } |
2794 | | |
2795 | | const ObjCMethodDecl * |
2796 | 54 | ASTContext::getObjCMethodRedeclaration(const ObjCMethodDecl *MD) const { |
2797 | 54 | return ObjCMethodRedecls.lookup(MD); |
2798 | 54 | } |
2799 | | |
2800 | | void ASTContext::setObjCMethodRedeclaration(const ObjCMethodDecl *MD, |
2801 | 16 | const ObjCMethodDecl *Redecl) { |
2802 | 16 | assert(!getObjCMethodRedeclaration(MD) && "MD already has a redeclaration"); |
2803 | 16 | ObjCMethodRedecls[MD] = Redecl; |
2804 | 16 | } |
2805 | | |
2806 | | const ObjCInterfaceDecl *ASTContext::getObjContainingInterface( |
2807 | 740 | const NamedDecl *ND) const { |
2808 | 740 | if (const auto *ID = dyn_cast<ObjCInterfaceDecl>(ND->getDeclContext())) |
2809 | 207 | return ID; |
2810 | 533 | if (const auto *CD = dyn_cast<ObjCCategoryDecl>(ND->getDeclContext())) |
2811 | 28 | return CD->getClassInterface(); |
2812 | 505 | if (const auto *IMD = dyn_cast<ObjCImplDecl>(ND->getDeclContext())) |
2813 | 53 | return IMD->getClassInterface(); |
2814 | | |
2815 | 452 | return nullptr; |
2816 | 452 | } |
2817 | | |
2818 | | /// Get the copy initialization expression of VarDecl, or nullptr if |
2819 | | /// none exists. |
2820 | 625 | BlockVarCopyInit ASTContext::getBlockVarCopyInit(const VarDecl *VD) const { |
2821 | 625 | assert(VD && "Passed null params"); |
2822 | 625 | assert(VD->hasAttr<BlocksAttr>() && |
2823 | 625 | "getBlockVarCopyInits - not __block var"); |
2824 | 625 | auto I = BlockVarCopyInits.find(VD); |
2825 | 625 | if (I != BlockVarCopyInits.end()) |
2826 | 80 | return I->second; |
2827 | 545 | return {nullptr, false}; |
2828 | 545 | } |
2829 | | |
2830 | | /// Set the copy initialization expression of a block var decl. |
2831 | | void ASTContext::setBlockVarCopyInit(const VarDecl*VD, Expr *CopyExpr, |
2832 | 24 | bool CanThrow) { |
2833 | 24 | assert(VD && CopyExpr && "Passed null params"); |
2834 | 24 | assert(VD->hasAttr<BlocksAttr>() && |
2835 | 24 | "setBlockVarCopyInits - not __block var"); |
2836 | 24 | BlockVarCopyInits[VD].setExprAndFlag(CopyExpr, CanThrow); |
2837 | 24 | } |
2838 | | |
2839 | | TypeSourceInfo *ASTContext::CreateTypeSourceInfo(QualType T, |
2840 | 99.7M | unsigned DataSize) const { |
2841 | 99.7M | if (!DataSize) |
2842 | 85.7M | DataSize = TypeLoc::getFullDataSizeForType(T); |
2843 | 99.7M | else |
2844 | 99.7M | assert(DataSize == TypeLoc::getFullDataSizeForType(T) && |
2845 | 99.7M | "incorrect data size provided to CreateTypeSourceInfo!"); |
2846 | | |
2847 | 99.7M | auto *TInfo = |
2848 | 99.7M | (TypeSourceInfo*)BumpAlloc.Allocate(sizeof(TypeSourceInfo) + DataSize, 8); |
2849 | 99.7M | new (TInfo) TypeSourceInfo(T); |
2850 | 99.7M | return TInfo; |
2851 | 99.7M | } |
2852 | | |
2853 | | TypeSourceInfo *ASTContext::getTrivialTypeSourceInfo(QualType T, |
2854 | 8.48M | SourceLocation L) const { |
2855 | 8.48M | TypeSourceInfo *DI = CreateTypeSourceInfo(T); |
2856 | 8.48M | DI->getTypeLoc().initialize(const_cast<ASTContext &>(*this), L); |
2857 | 8.48M | return DI; |
2858 | 8.48M | } |
2859 | | |
2860 | | const ASTRecordLayout & |
2861 | 13.6k | ASTContext::getASTObjCInterfaceLayout(const ObjCInterfaceDecl *D) const { |
2862 | 13.6k | return getObjCLayout(D, nullptr); |
2863 | 13.6k | } |
2864 | | |
2865 | | const ASTRecordLayout & |
2866 | | ASTContext::getASTObjCImplementationLayout( |
2867 | 3.58k | const ObjCImplementationDecl *D) const { |
2868 | 3.58k | return getObjCLayout(D->getClassInterface(), D); |
2869 | 3.58k | } |
2870 | | |
2871 | | //===----------------------------------------------------------------------===// |
2872 | | // Type creation/memoization methods |
2873 | | //===----------------------------------------------------------------------===// |
2874 | | |
2875 | | QualType |
2876 | 541k | ASTContext::getExtQualType(const Type *baseType, Qualifiers quals) const { |
2877 | 541k | unsigned fastQuals = quals.getFastQualifiers(); |
2878 | 541k | quals.removeFastQualifiers(); |
2879 | | |
2880 | | // Check if we've already instantiated this type. |
2881 | 541k | llvm::FoldingSetNodeID ID; |
2882 | 541k | ExtQuals::Profile(ID, baseType, quals); |
2883 | 541k | void *insertPos = nullptr; |
2884 | 541k | if (ExtQuals *eq = ExtQualNodes.FindNodeOrInsertPos(ID, insertPos)) { |
2885 | 498k | assert(eq->getQualifiers() == quals); |
2886 | 498k | return QualType(eq, fastQuals); |
2887 | 498k | } |
2888 | | |
2889 | | // If the base type is not canonical, make the appropriate canonical type. |
2890 | 42.2k | QualType canon; |
2891 | 42.2k | if (!baseType->isCanonicalUnqualified()) { |
2892 | 17.1k | SplitQualType canonSplit = baseType->getCanonicalTypeInternal().split(); |
2893 | 17.1k | canonSplit.Quals.addConsistentQualifiers(quals); |
2894 | 17.1k | canon = getExtQualType(canonSplit.Ty, canonSplit.Quals); |
2895 | | |
2896 | | // Re-find the insert position. |
2897 | 17.1k | (void) ExtQualNodes.FindNodeOrInsertPos(ID, insertPos); |
2898 | 17.1k | } |
2899 | | |
2900 | 42.2k | auto *eq = new (*this, TypeAlignment) ExtQuals(baseType, canon, quals); |
2901 | 42.2k | ExtQualNodes.InsertNode(eq, insertPos); |
2902 | 42.2k | return QualType(eq, fastQuals); |
2903 | 42.2k | } |
2904 | | |
2905 | | QualType ASTContext::getAddrSpaceQualType(QualType T, |
2906 | 3.90M | LangAS AddressSpace) const { |
2907 | 3.90M | QualType CanT = getCanonicalType(T); |
2908 | 3.90M | if (CanT.getAddressSpace() == AddressSpace) |
2909 | 3.41M | return T; |
2910 | | |
2911 | | // If we are composing extended qualifiers together, merge together |
2912 | | // into one ExtQuals node. |
2913 | 486k | QualifierCollector Quals; |
2914 | 486k | const Type *TypeNode = Quals.strip(T); |
2915 | | |
2916 | | // If this type already has an address space specified, it cannot get |
2917 | | // another one. |
2918 | 486k | assert(!Quals.hasAddressSpace() && |
2919 | 486k | "Type cannot be in multiple addr spaces!"); |
2920 | 486k | Quals.addAddressSpace(AddressSpace); |
2921 | | |
2922 | 486k | return getExtQualType(TypeNode, Quals); |
2923 | 486k | } |
2924 | | |
2925 | 133 | QualType ASTContext::removeAddrSpaceQualType(QualType T) const { |
2926 | | // If the type is not qualified with an address space, just return it |
2927 | | // immediately. |
2928 | 133 | if (!T.hasAddressSpace()) |
2929 | 29 | return T; |
2930 | | |
2931 | | // If we are composing extended qualifiers together, merge together |
2932 | | // into one ExtQuals node. |
2933 | 104 | QualifierCollector Quals; |
2934 | 104 | const Type *TypeNode; |
2935 | | |
2936 | 104 | while (T.hasAddressSpace()) { |
2937 | 104 | TypeNode = Quals.strip(T); |
2938 | | |
2939 | | // If the type no longer has an address space after stripping qualifiers, |
2940 | | // jump out. |
2941 | 104 | if (!QualType(TypeNode, 0).hasAddressSpace()) |
2942 | 104 | break; |
2943 | | |
2944 | | // There might be sugar in the way. Strip it and try again. |
2945 | 0 | T = T.getSingleStepDesugaredType(*this); |
2946 | 0 | } |
2947 | | |
2948 | 104 | Quals.removeAddressSpace(); |
2949 | | |
2950 | | // Removal of the address space can mean there are no longer any |
2951 | | // non-fast qualifiers, so creating an ExtQualType isn't possible (asserts) |
2952 | | // or required. |
2953 | 104 | if (Quals.hasNonFastQualifiers()) |
2954 | 0 | return getExtQualType(TypeNode, Quals); |
2955 | 104 | else |
2956 | 104 | return QualType(TypeNode, Quals.getFastQualifiers()); |
2957 | 104 | } |
2958 | | |
2959 | | QualType ASTContext::getObjCGCQualType(QualType T, |
2960 | 230 | Qualifiers::GC GCAttr) const { |
2961 | 230 | QualType CanT = getCanonicalType(T); |
2962 | 230 | if (CanT.getObjCGCAttr() == GCAttr) |
2963 | 1 | return T; |
2964 | | |
2965 | 229 | if (const auto *ptr = T->getAs<PointerType>()) { |
2966 | 87 | QualType Pointee = ptr->getPointeeType(); |
2967 | 87 | if (Pointee->isAnyPointerType()) { |
2968 | 12 | QualType ResultType = getObjCGCQualType(Pointee, GCAttr); |
2969 | 12 | return getPointerType(ResultType); |
2970 | 12 | } |
2971 | 217 | } |
2972 | | |
2973 | | // If we are composing extended qualifiers together, merge together |
2974 | | // into one ExtQuals node. |
2975 | 217 | QualifierCollector Quals; |
2976 | 217 | const Type *TypeNode = Quals.strip(T); |
2977 | | |
2978 | | // If this type already has an ObjCGC specified, it cannot get |
2979 | | // another one. |
2980 | 217 | assert(!Quals.hasObjCGCAttr() && |
2981 | 217 | "Type cannot have multiple ObjCGCs!"); |
2982 | 217 | Quals.addObjCGCAttr(GCAttr); |
2983 | | |
2984 | 217 | return getExtQualType(TypeNode, Quals); |
2985 | 217 | } |
2986 | | |
2987 | 158M | QualType ASTContext::removePtrSizeAddrSpace(QualType T) const { |
2988 | 158M | if (const PointerType *Ptr = T->getAs<PointerType>()) { |
2989 | 9.52M | QualType Pointee = Ptr->getPointeeType(); |
2990 | 9.52M | if (isPtrSizeAddressSpace(Pointee.getAddressSpace())) { |
2991 | 3 | return getPointerType(removeAddrSpaceQualType(Pointee)); |
2992 | 3 | } |
2993 | 158M | } |
2994 | 158M | return T; |
2995 | 158M | } |
2996 | | |
2997 | | const FunctionType *ASTContext::adjustFunctionType(const FunctionType *T, |
2998 | 9.11k | FunctionType::ExtInfo Info) { |
2999 | 9.11k | if (T->getExtInfo() == Info) |
3000 | 18 | return T; |
3001 | | |
3002 | 9.09k | QualType Result; |
3003 | 9.09k | if (const auto *FNPT = dyn_cast<FunctionNoProtoType>(T)) { |
3004 | 94 | Result = getFunctionNoProtoType(FNPT->getReturnType(), Info); |
3005 | 8.99k | } else { |
3006 | 8.99k | const auto *FPT = cast<FunctionProtoType>(T); |
3007 | 8.99k | FunctionProtoType::ExtProtoInfo EPI = FPT->getExtProtoInfo(); |
3008 | 8.99k | EPI.ExtInfo = Info; |
3009 | 8.99k | Result = getFunctionType(FPT->getReturnType(), FPT->getParamTypes(), EPI); |
3010 | 8.99k | } |
3011 | | |
3012 | 9.09k | return cast<FunctionType>(Result.getTypePtr()); |
3013 | 9.09k | } |
3014 | | |
3015 | | void ASTContext::adjustDeducedFunctionResultType(FunctionDecl *FD, |
3016 | 5.47k | QualType ResultType) { |
3017 | 5.47k | FD = FD->getMostRecentDecl(); |
3018 | 5.54k | while (true) { |
3019 | 5.54k | const auto *FPT = FD->getType()->castAs<FunctionProtoType>(); |
3020 | 5.54k | FunctionProtoType::ExtProtoInfo EPI = FPT->getExtProtoInfo(); |
3021 | 5.54k | FD->setType(getFunctionType(ResultType, FPT->getParamTypes(), EPI)); |
3022 | 5.54k | if (FunctionDecl *Next = FD->getPreviousDecl()) |
3023 | 71 | FD = Next; |
3024 | 5.47k | else |
3025 | 5.47k | break; |
3026 | 5.54k | } |
3027 | 5.47k | if (ASTMutationListener *L = getASTMutationListener()) |
3028 | 61 | L->DeducedReturnType(FD, ResultType); |
3029 | 5.47k | } |
3030 | | |
3031 | | /// Get a function type and produce the equivalent function type with the |
3032 | | /// specified exception specification. Type sugar that can be present on a |
3033 | | /// declaration of a function with an exception specification is permitted |
3034 | | /// and preserved. Other type sugar (for instance, typedefs) is not. |
3035 | | QualType ASTContext::getFunctionTypeWithExceptionSpec( |
3036 | 652k | QualType Orig, const FunctionProtoType::ExceptionSpecInfo &ESI) { |
3037 | | // Might have some parens. |
3038 | 652k | if (const auto *PT = dyn_cast<ParenType>(Orig)) |
3039 | 15 | return getParenType( |
3040 | 15 | getFunctionTypeWithExceptionSpec(PT->getInnerType(), ESI)); |
3041 | | |
3042 | | // Might be wrapped in a macro qualified type. |
3043 | 652k | if (const auto *MQT = dyn_cast<MacroQualifiedType>(Orig)) |
3044 | 2 | return getMacroQualifiedType( |
3045 | 2 | getFunctionTypeWithExceptionSpec(MQT->getUnderlyingType(), ESI), |
3046 | 2 | MQT->getMacroIdentifier()); |
3047 | | |
3048 | | // Might have a calling-convention attribute. |
3049 | 652k | if (const auto *AT = dyn_cast<AttributedType>(Orig)) |
3050 | 2 | return getAttributedType( |
3051 | 2 | AT->getAttrKind(), |
3052 | 2 | getFunctionTypeWithExceptionSpec(AT->getModifiedType(), ESI), |
3053 | 2 | getFunctionTypeWithExceptionSpec(AT->getEquivalentType(), ESI)); |
3054 | | |
3055 | | // Anything else must be a function type. Rebuild it with the new exception |
3056 | | // specification. |
3057 | 652k | const auto *Proto = Orig->castAs<FunctionProtoType>(); |
3058 | 652k | return getFunctionType( |
3059 | 652k | Proto->getReturnType(), Proto->getParamTypes(), |
3060 | 652k | Proto->getExtProtoInfo().withExceptionSpec(ESI)); |
3061 | 652k | } |
3062 | | |
3063 | | bool ASTContext::hasSameFunctionTypeIgnoringExceptionSpec(QualType T, |
3064 | 629k | QualType U) { |
3065 | 629k | return hasSameType(T, U) || |
3066 | 23.0k | (getLangOpts().CPlusPlus17 && |
3067 | 191 | hasSameType(getFunctionTypeWithExceptionSpec(T, EST_None), |
3068 | 191 | getFunctionTypeWithExceptionSpec(U, EST_None))); |
3069 | 629k | } |
3070 | | |
3071 | 324 | QualType ASTContext::getFunctionTypeWithoutPtrSizes(QualType T) { |
3072 | 324 | if (const auto *Proto = T->getAs<FunctionProtoType>()) { |
3073 | 250 | QualType RetTy = removePtrSizeAddrSpace(Proto->getReturnType()); |
3074 | 250 | SmallVector<QualType, 16> Args(Proto->param_types()); |
3075 | 581 | for (unsigned i = 0, n = Args.size(); i != n; ++i331 ) |
3076 | 331 | Args[i] = removePtrSizeAddrSpace(Args[i]); |
3077 | 250 | return getFunctionType(RetTy, Args, Proto->getExtProtoInfo()); |
3078 | 250 | } |
3079 | | |
3080 | 74 | if (const FunctionNoProtoType *Proto = T->getAs<FunctionNoProtoType>()) { |
3081 | 74 | QualType RetTy = removePtrSizeAddrSpace(Proto->getReturnType()); |
3082 | 74 | return getFunctionNoProtoType(RetTy, Proto->getExtInfo()); |
3083 | 74 | } |
3084 | | |
3085 | 0 | return T; |
3086 | 0 | } |
3087 | | |
3088 | 162 | bool ASTContext::hasSameFunctionTypeIgnoringPtrSizes(QualType T, QualType U) { |
3089 | 162 | return hasSameType(T, U) || |
3090 | 162 | hasSameType(getFunctionTypeWithoutPtrSizes(T), |
3091 | 162 | getFunctionTypeWithoutPtrSizes(U)); |
3092 | 162 | } |
3093 | | |
3094 | | void ASTContext::adjustExceptionSpec( |
3095 | | FunctionDecl *FD, const FunctionProtoType::ExceptionSpecInfo &ESI, |
3096 | 576k | bool AsWritten) { |
3097 | | // Update the type. |
3098 | 576k | QualType Updated = |
3099 | 576k | getFunctionTypeWithExceptionSpec(FD->getType(), ESI); |
3100 | 576k | FD->setType(Updated); |
3101 | | |
3102 | 576k | if (!AsWritten) |
3103 | 546k | return; |
3104 | | |
3105 | | // Update the type in the type source information too. |
3106 | 29.6k | if (TypeSourceInfo *TSInfo = FD->getTypeSourceInfo()) { |
3107 | | // If the type and the type-as-written differ, we may need to update |
3108 | | // the type-as-written too. |
3109 | 29.6k | if (TSInfo->getType() != FD->getType()) |
3110 | 29.6k | Updated = getFunctionTypeWithExceptionSpec(TSInfo->getType(), ESI); |
3111 | | |
3112 | | // FIXME: When we get proper type location information for exceptions, |
3113 | | // we'll also have to rebuild the TypeSourceInfo. For now, we just patch |
3114 | | // up the TypeSourceInfo; |
3115 | 29.6k | assert(TypeLoc::getFullDataSizeForType(Updated) == |
3116 | 29.6k | TypeLoc::getFullDataSizeForType(TSInfo->getType()) && |
3117 | 29.6k | "TypeLoc size mismatch from updating exception specification"); |
3118 | 29.6k | TSInfo->overrideType(Updated); |
3119 | 29.6k | } |
3120 | 29.6k | } |
3121 | | |
3122 | | /// getComplexType - Return the uniqued reference to the type for a complex |
3123 | | /// number with the specified element type. |
3124 | 358k | QualType ASTContext::getComplexType(QualType T) const { |
3125 | | // Unique pointers, to guarantee there is only one pointer of a particular |
3126 | | // structure. |
3127 | 358k | llvm::FoldingSetNodeID ID; |
3128 | 358k | ComplexType::Profile(ID, T); |
3129 | | |
3130 | 358k | void *InsertPos = nullptr; |
3131 | 358k | if (ComplexType *CT = ComplexTypes.FindNodeOrInsertPos(ID, InsertPos)) |
3132 | 12.4k | return QualType(CT, 0); |
3133 | | |
3134 | | // If the pointee type isn't canonical, this won't be a canonical type either, |
3135 | | // so fill in the canonical type field. |
3136 | 346k | QualType Canonical; |
3137 | 346k | if (!T.isCanonical()) { |
3138 | 4 | Canonical = getComplexType(getCanonicalType(T)); |
3139 | | |
3140 | | // Get the new insert position for the node we care about. |
3141 | 4 | ComplexType *NewIP = ComplexTypes.FindNodeOrInsertPos(ID, InsertPos); |
3142 | 4 | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
3143 | 4 | } |
3144 | 346k | auto *New = new (*this, TypeAlignment) ComplexType(T, Canonical); |
3145 | 346k | Types.push_back(New); |
3146 | 346k | ComplexTypes.InsertNode(New, InsertPos); |
3147 | 346k | return QualType(New, 0); |
3148 | 346k | } |
3149 | | |
3150 | | /// getPointerType - Return the uniqued reference to the type for a pointer to |
3151 | | /// the specified type. |
3152 | 21.8M | QualType ASTContext::getPointerType(QualType T) const { |
3153 | | // Unique pointers, to guarantee there is only one pointer of a particular |
3154 | | // structure. |
3155 | 21.8M | llvm::FoldingSetNodeID ID; |
3156 | 21.8M | PointerType::Profile(ID, T); |
3157 | | |
3158 | 21.8M | void *InsertPos = nullptr; |
3159 | 21.8M | if (PointerType *PT = PointerTypes.FindNodeOrInsertPos(ID, InsertPos)) |
3160 | 17.1M | return QualType(PT, 0); |
3161 | | |
3162 | | // If the pointee type isn't canonical, this won't be a canonical type either, |
3163 | | // so fill in the canonical type field. |
3164 | 4.65M | QualType Canonical; |
3165 | 4.65M | if (!T.isCanonical()) { |
3166 | 1.66M | Canonical = getPointerType(getCanonicalType(T)); |
3167 | | |
3168 | | // Get the new insert position for the node we care about. |
3169 | 1.66M | PointerType *NewIP = PointerTypes.FindNodeOrInsertPos(ID, InsertPos); |
3170 | 1.66M | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
3171 | 1.66M | } |
3172 | 4.65M | auto *New = new (*this, TypeAlignment) PointerType(T, Canonical); |
3173 | 4.65M | Types.push_back(New); |
3174 | 4.65M | PointerTypes.InsertNode(New, InsertPos); |
3175 | 4.65M | return QualType(New, 0); |
3176 | 4.65M | } |
3177 | | |
3178 | 34 | QualType ASTContext::getAdjustedType(QualType Orig, QualType New) const { |
3179 | 34 | llvm::FoldingSetNodeID ID; |
3180 | 34 | AdjustedType::Profile(ID, Orig, New); |
3181 | 34 | void *InsertPos = nullptr; |
3182 | 34 | AdjustedType *AT = AdjustedTypes.FindNodeOrInsertPos(ID, InsertPos); |
3183 | 34 | if (AT) |
3184 | 7 | return QualType(AT, 0); |
3185 | | |
3186 | 27 | QualType Canonical = getCanonicalType(New); |
3187 | | |
3188 | | // Get the new insert position for the node we care about. |
3189 | 27 | AT = AdjustedTypes.FindNodeOrInsertPos(ID, InsertPos); |
3190 | 27 | assert(!AT && "Shouldn't be in the map!"); |
3191 | | |
3192 | 27 | AT = new (*this, TypeAlignment) |
3193 | 27 | AdjustedType(Type::Adjusted, Orig, New, Canonical); |
3194 | 27 | Types.push_back(AT); |
3195 | 27 | AdjustedTypes.InsertNode(AT, InsertPos); |
3196 | 27 | return QualType(AT, 0); |
3197 | 27 | } |
3198 | | |
3199 | 101k | QualType ASTContext::getDecayedType(QualType T) const { |
3200 | 101k | assert((T->isArrayType() || T->isFunctionType()) && "T does not decay"); |
3201 | | |
3202 | 101k | QualType Decayed; |
3203 | | |
3204 | | // C99 6.7.5.3p7: |
3205 | | // A declaration of a parameter as "array of type" shall be |
3206 | | // adjusted to "qualified pointer to type", where the type |
3207 | | // qualifiers (if any) are those specified within the [ and ] of |
3208 | | // the array type derivation. |
3209 | 101k | if (T->isArrayType()) |
3210 | 99.3k | Decayed = getArrayDecayedType(T); |
3211 | | |
3212 | | // C99 6.7.5.3p8: |
3213 | | // A declaration of a parameter as "function returning type" |
3214 | | // shall be adjusted to "pointer to function returning type", as |
3215 | | // in 6.3.2.1. |
3216 | 101k | if (T->isFunctionType()) |
3217 | 1.89k | Decayed = getPointerType(T); |
3218 | | |
3219 | 101k | llvm::FoldingSetNodeID ID; |
3220 | 101k | AdjustedType::Profile(ID, T, Decayed); |
3221 | 101k | void *InsertPos = nullptr; |
3222 | 101k | AdjustedType *AT = AdjustedTypes.FindNodeOrInsertPos(ID, InsertPos); |
3223 | 101k | if (AT) |
3224 | 65.8k | return QualType(AT, 0); |
3225 | | |
3226 | 35.3k | QualType Canonical = getCanonicalType(Decayed); |
3227 | | |
3228 | | // Get the new insert position for the node we care about. |
3229 | 35.3k | AT = AdjustedTypes.FindNodeOrInsertPos(ID, InsertPos); |
3230 | 35.3k | assert(!AT && "Shouldn't be in the map!"); |
3231 | | |
3232 | 35.3k | AT = new (*this, TypeAlignment) DecayedType(T, Decayed, Canonical); |
3233 | 35.3k | Types.push_back(AT); |
3234 | 35.3k | AdjustedTypes.InsertNode(AT, InsertPos); |
3235 | 35.3k | return QualType(AT, 0); |
3236 | 35.3k | } |
3237 | | |
3238 | | /// getBlockPointerType - Return the uniqued reference to the type for |
3239 | | /// a pointer to the specified block. |
3240 | 123k | QualType ASTContext::getBlockPointerType(QualType T) const { |
3241 | 123k | assert(T->isFunctionType() && "block of function types only"); |
3242 | | // Unique pointers, to guarantee there is only one block of a particular |
3243 | | // structure. |
3244 | 123k | llvm::FoldingSetNodeID ID; |
3245 | 123k | BlockPointerType::Profile(ID, T); |
3246 | | |
3247 | 123k | void *InsertPos = nullptr; |
3248 | 123k | if (BlockPointerType *PT = |
3249 | 41.9k | BlockPointerTypes.FindNodeOrInsertPos(ID, InsertPos)) |
3250 | 41.9k | return QualType(PT, 0); |
3251 | | |
3252 | | // If the block pointee type isn't canonical, this won't be a canonical |
3253 | | // type either so fill in the canonical type field. |
3254 | 81.3k | QualType Canonical; |
3255 | 81.3k | if (!T.isCanonical()) { |
3256 | 42.1k | Canonical = getBlockPointerType(getCanonicalType(T)); |
3257 | | |
3258 | | // Get the new insert position for the node we care about. |
3259 | 42.1k | BlockPointerType *NewIP = |
3260 | 42.1k | BlockPointerTypes.FindNodeOrInsertPos(ID, InsertPos); |
3261 | 42.1k | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
3262 | 42.1k | } |
3263 | 81.3k | auto *New = new (*this, TypeAlignment) BlockPointerType(T, Canonical); |
3264 | 81.3k | Types.push_back(New); |
3265 | 81.3k | BlockPointerTypes.InsertNode(New, InsertPos); |
3266 | 81.3k | return QualType(New, 0); |
3267 | 81.3k | } |
3268 | | |
3269 | | /// getLValueReferenceType - Return the uniqued reference to the type for an |
3270 | | /// lvalue reference to the specified type. |
3271 | | QualType |
3272 | 8.63M | ASTContext::getLValueReferenceType(QualType T, bool SpelledAsLValue) const { |
3273 | 8.63M | assert(getCanonicalType(T) != OverloadTy && |
3274 | 8.63M | "Unresolved overloaded function type"); |
3275 | | |
3276 | | // Unique pointers, to guarantee there is only one pointer of a particular |
3277 | | // structure. |
3278 | 8.63M | llvm::FoldingSetNodeID ID; |
3279 | 8.63M | ReferenceType::Profile(ID, T, SpelledAsLValue); |
3280 | | |
3281 | 8.63M | void *InsertPos = nullptr; |
3282 | 8.63M | if (LValueReferenceType *RT = |
3283 | 6.60M | LValueReferenceTypes.FindNodeOrInsertPos(ID, InsertPos)) |
3284 | 6.60M | return QualType(RT, 0); |
3285 | | |
3286 | 2.02M | const auto *InnerRef = T->getAs<ReferenceType>(); |
3287 | | |
3288 | | // If the referencee type isn't canonical, this won't be a canonical type |
3289 | | // either, so fill in the canonical type field. |
3290 | 2.02M | QualType Canonical; |
3291 | 2.02M | if (!SpelledAsLValue || InnerRef1.99M || !T.isCanonical()1.99M ) { |
3292 | 1.31M | QualType PointeeType = (InnerRef ? InnerRef->getPointeeType()31.8k : T); |
3293 | 1.34M | Canonical = getLValueReferenceType(getCanonicalType(PointeeType)); |
3294 | | |
3295 | | // Get the new insert position for the node we care about. |
3296 | 1.34M | LValueReferenceType *NewIP = |
3297 | 1.34M | LValueReferenceTypes.FindNodeOrInsertPos(ID, InsertPos); |
3298 | 1.34M | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
3299 | 1.34M | } |
3300 | | |
3301 | 2.02M | auto *New = new (*this, TypeAlignment) LValueReferenceType(T, Canonical, |
3302 | 2.02M | SpelledAsLValue); |
3303 | 2.02M | Types.push_back(New); |
3304 | 2.02M | LValueReferenceTypes.InsertNode(New, InsertPos); |
3305 | | |
3306 | 2.02M | return QualType(New, 0); |
3307 | 2.02M | } |
3308 | | |
3309 | | /// getRValueReferenceType - Return the uniqued reference to the type for an |
3310 | | /// rvalue reference to the specified type. |
3311 | 1.05M | QualType ASTContext::getRValueReferenceType(QualType T) const { |
3312 | | // Unique pointers, to guarantee there is only one pointer of a particular |
3313 | | // structure. |
3314 | 1.05M | llvm::FoldingSetNodeID ID; |
3315 | 1.05M | ReferenceType::Profile(ID, T, false); |
3316 | | |
3317 | 1.05M | void *InsertPos = nullptr; |
3318 | 1.05M | if (RValueReferenceType *RT = |
3319 | 530k | RValueReferenceTypes.FindNodeOrInsertPos(ID, InsertPos)) |
3320 | 530k | return QualType(RT, 0); |
3321 | | |
3322 | 528k | const auto *InnerRef = T->getAs<ReferenceType>(); |
3323 | | |
3324 | | // If the referencee type isn't canonical, this won't be a canonical type |
3325 | | // either, so fill in the canonical type field. |
3326 | 528k | QualType Canonical; |
3327 | 528k | if (InnerRef || !T.isCanonical()527k ) { |
3328 | 334k | QualType PointeeType = (InnerRef ? InnerRef->getPointeeType()539 : T); |
3329 | 335k | Canonical = getRValueReferenceType(getCanonicalType(PointeeType)); |
3330 | | |
3331 | | // Get the new insert position for the node we care about. |
3332 | 335k | RValueReferenceType *NewIP = |
3333 | 335k | RValueReferenceTypes.FindNodeOrInsertPos(ID, InsertPos); |
3334 | 335k | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
3335 | 335k | } |
3336 | | |
3337 | 528k | auto *New = new (*this, TypeAlignment) RValueReferenceType(T, Canonical); |
3338 | 528k | Types.push_back(New); |
3339 | 528k | RValueReferenceTypes.InsertNode(New, InsertPos); |
3340 | 528k | return QualType(New, 0); |
3341 | 528k | } |
3342 | | |
3343 | | /// getMemberPointerType - Return the uniqued reference to the type for a |
3344 | | /// member pointer to the specified type, in the specified class. |
3345 | 62.5k | QualType ASTContext::getMemberPointerType(QualType T, const Type *Cls) const { |
3346 | | // Unique pointers, to guarantee there is only one pointer of a particular |
3347 | | // structure. |
3348 | 62.5k | llvm::FoldingSetNodeID ID; |
3349 | 62.5k | MemberPointerType::Profile(ID, T, Cls); |
3350 | | |
3351 | 62.5k | void *InsertPos = nullptr; |
3352 | 62.5k | if (MemberPointerType *PT = |
3353 | 16.6k | MemberPointerTypes.FindNodeOrInsertPos(ID, InsertPos)) |
3354 | 16.6k | return QualType(PT, 0); |
3355 | | |
3356 | | // If the pointee or class type isn't canonical, this won't be a canonical |
3357 | | // type either, so fill in the canonical type field. |
3358 | 45.9k | QualType Canonical; |
3359 | 45.9k | if (!T.isCanonical() || !Cls->isCanonicalUnqualified()19.8k ) { |
3360 | 26.3k | Canonical = getMemberPointerType(getCanonicalType(T),getCanonicalType(Cls)); |
3361 | | |
3362 | | // Get the new insert position for the node we care about. |
3363 | 26.3k | MemberPointerType *NewIP = |
3364 | 26.3k | MemberPointerTypes.FindNodeOrInsertPos(ID, InsertPos); |
3365 | 26.3k | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
3366 | 26.3k | } |
3367 | 45.9k | auto *New = new (*this, TypeAlignment) MemberPointerType(T, Cls, Canonical); |
3368 | 45.9k | Types.push_back(New); |
3369 | 45.9k | MemberPointerTypes.InsertNode(New, InsertPos); |
3370 | 45.9k | return QualType(New, 0); |
3371 | 45.9k | } |
3372 | | |
3373 | | /// getConstantArrayType - Return the unique reference to the type for an |
3374 | | /// array of the specified element type. |
3375 | | QualType ASTContext::getConstantArrayType(QualType EltTy, |
3376 | | const llvm::APInt &ArySizeIn, |
3377 | | const Expr *SizeExpr, |
3378 | | ArrayType::ArraySizeModifier ASM, |
3379 | 4.67M | unsigned IndexTypeQuals) const { |
3380 | 4.67M | assert((EltTy->isDependentType() || |
3381 | 4.67M | EltTy->isIncompleteType() || EltTy->isConstantSizeType()) && |
3382 | 4.67M | "Constant array of VLAs is illegal!"); |
3383 | | |
3384 | | // We only need the size as part of the type if it's instantiation-dependent. |
3385 | 4.67M | if (SizeExpr && !SizeExpr->isInstantiationDependent()287k ) |
3386 | 287k | SizeExpr = nullptr; |
3387 | | |
3388 | | // Convert the array size into a canonical width matching the pointer size for |
3389 | | // the target. |
3390 | 4.67M | llvm::APInt ArySize(ArySizeIn); |
3391 | 4.67M | ArySize = ArySize.zextOrTrunc(Target->getMaxPointerWidth()); |
3392 | | |
3393 | 4.67M | llvm::FoldingSetNodeID ID; |
3394 | 4.67M | ConstantArrayType::Profile(ID, *this, EltTy, ArySize, SizeExpr, ASM, |
3395 | 4.67M | IndexTypeQuals); |
3396 | | |
3397 | 4.67M | void *InsertPos = nullptr; |
3398 | 4.67M | if (ConstantArrayType *ATP = |
3399 | 4.15M | ConstantArrayTypes.FindNodeOrInsertPos(ID, InsertPos)) |
3400 | 4.15M | return QualType(ATP, 0); |
3401 | | |
3402 | | // If the element type isn't canonical or has qualifiers, or the array bound |
3403 | | // is instantiation-dependent, this won't be a canonical type either, so fill |
3404 | | // in the canonical type field. |
3405 | 522k | QualType Canon; |
3406 | 522k | if (!EltTy.isCanonical() || EltTy.hasLocalQualifiers()420k || SizeExpr328k ) { |
3407 | 193k | SplitQualType canonSplit = getCanonicalType(EltTy).split(); |
3408 | 193k | Canon = getConstantArrayType(QualType(canonSplit.Ty, 0), ArySize, nullptr, |
3409 | 193k | ASM, IndexTypeQuals); |
3410 | 193k | Canon = getQualifiedType(Canon, canonSplit.Quals); |
3411 | | |
3412 | | // Get the new insert position for the node we care about. |
3413 | 193k | ConstantArrayType *NewIP = |
3414 | 193k | ConstantArrayTypes.FindNodeOrInsertPos(ID, InsertPos); |
3415 | 193k | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
3416 | 193k | } |
3417 | | |
3418 | 522k | void *Mem = Allocate( |
3419 | 522k | ConstantArrayType::totalSizeToAlloc<const Expr *>(SizeExpr ? 115 : 0), |
3420 | 522k | TypeAlignment); |
3421 | 522k | auto *New = new (Mem) |
3422 | 522k | ConstantArrayType(EltTy, Canon, ArySize, SizeExpr, ASM, IndexTypeQuals); |
3423 | 522k | ConstantArrayTypes.InsertNode(New, InsertPos); |
3424 | 522k | Types.push_back(New); |
3425 | 522k | return QualType(New, 0); |
3426 | 522k | } |
3427 | | |
3428 | | /// getVariableArrayDecayedType - Turns the given type, which may be |
3429 | | /// variably-modified, into the corresponding type with all the known |
3430 | | /// sizes replaced with [*]. |
3431 | 22.6M | QualType ASTContext::getVariableArrayDecayedType(QualType type) const { |
3432 | | // Vastly most common case. |
3433 | 22.6M | if (!type->isVariablyModifiedType()) return type22.6M ; |
3434 | | |
3435 | 243 | QualType result; |
3436 | | |
3437 | 243 | SplitQualType split = type.getSplitDesugaredType(); |
3438 | 243 | const Type *ty = split.Ty; |
3439 | 243 | switch (ty->getTypeClass()) { |
3440 | 0 | #define TYPE(Class, Base) |
3441 | 0 | #define ABSTRACT_TYPE(Class, Base) |
3442 | 0 | #define NON_CANONICAL_TYPE(Class, Base) case Type::Class: |
3443 | 0 | #include "clang/AST/TypeNodes.inc" |
3444 | 0 | llvm_unreachable("didn't desugar past all non-canonical types?"); |
3445 | | |
3446 | | // These types should never be variably-modified. |
3447 | 0 | case Type::Builtin: |
3448 | 0 | case Type::Complex: |
3449 | 0 | case Type::Vector: |
3450 | 0 | case Type::DependentVector: |
3451 | 0 | case Type::ExtVector: |
3452 | 0 | case Type::DependentSizedExtVector: |
3453 | 0 | case Type::ConstantMatrix: |
3454 | 0 | case Type::DependentSizedMatrix: |
3455 | 0 | case Type::DependentAddressSpace: |
3456 | 0 | case Type::ObjCObject: |
3457 | 0 | case Type::ObjCInterface: |
3458 | 0 | case Type::ObjCObjectPointer: |
3459 | 0 | case Type::Record: |
3460 | 0 | case Type::Enum: |
3461 | 0 | case Type::UnresolvedUsing: |
3462 | 0 | case Type::TypeOfExpr: |
3463 | 0 | case Type::TypeOf: |
3464 | 0 | case Type::Decltype: |
3465 | 0 | case Type::UnaryTransform: |
3466 | 0 | case Type::DependentName: |
3467 | 0 | case Type::InjectedClassName: |
3468 | 0 | case Type::TemplateSpecialization: |
3469 | 0 | case Type::DependentTemplateSpecialization: |
3470 | 0 | case Type::TemplateTypeParm: |
3471 | 0 | case Type::SubstTemplateTypeParmPack: |
3472 | 0 | case Type::Auto: |
3473 | 0 | case Type::DeducedTemplateSpecialization: |
3474 | 0 | case Type::PackExpansion: |
3475 | 0 | case Type::ExtInt: |
3476 | 0 | case Type::DependentExtInt: |
3477 | 0 | llvm_unreachable("type should never be variably-modified"); |
3478 | | |
3479 | | // These types can be variably-modified but should never need to |
3480 | | // further decay. |
3481 | 0 | case Type::FunctionNoProto: |
3482 | 3 | case Type::FunctionProto: |
3483 | 3 | case Type::BlockPointer: |
3484 | 3 | case Type::MemberPointer: |
3485 | 3 | case Type::Pipe: |
3486 | 3 | return type; |
3487 | | |
3488 | | // These types can be variably-modified. All these modifications |
3489 | | // preserve structure except as noted by comments. |
3490 | | // TODO: if we ever care about optimizing VLAs, there are no-op |
3491 | | // optimizations available here. |
3492 | 175 | case Type::Pointer: |
3493 | 175 | result = getPointerType(getVariableArrayDecayedType( |
3494 | 175 | cast<PointerType>(ty)->getPointeeType())); |
3495 | 175 | break; |
3496 | | |
3497 | 1 | case Type::LValueReference: { |
3498 | 1 | const auto *lv = cast<LValueReferenceType>(ty); |
3499 | 1 | result = getLValueReferenceType( |
3500 | 1 | getVariableArrayDecayedType(lv->getPointeeType()), |
3501 | 1 | lv->isSpelledAsLValue()); |
3502 | 1 | break; |
3503 | 3 | } |
3504 | | |
3505 | 0 | case Type::RValueReference: { |
3506 | 0 | const auto *lv = cast<RValueReferenceType>(ty); |
3507 | 0 | result = getRValueReferenceType( |
3508 | 0 | getVariableArrayDecayedType(lv->getPointeeType())); |
3509 | 0 | break; |
3510 | 3 | } |
3511 | | |
3512 | 0 | case Type::Atomic: { |
3513 | 0 | const auto *at = cast<AtomicType>(ty); |
3514 | 0 | result = getAtomicType(getVariableArrayDecayedType(at->getValueType())); |
3515 | 0 | break; |
3516 | 3 | } |
3517 | | |
3518 | 0 | case Type::ConstantArray: { |
3519 | 0 | const auto *cat = cast<ConstantArrayType>(ty); |
3520 | 0 | result = getConstantArrayType( |
3521 | 0 | getVariableArrayDecayedType(cat->getElementType()), |
3522 | 0 | cat->getSize(), |
3523 | 0 | cat->getSizeExpr(), |
3524 | 0 | cat->getSizeModifier(), |
3525 | 0 | cat->getIndexTypeCVRQualifiers()); |
3526 | 0 | break; |
3527 | 3 | } |
3528 | | |
3529 | 0 | case Type::DependentSizedArray: { |
3530 | 0 | const auto *dat = cast<DependentSizedArrayType>(ty); |
3531 | 0 | result = getDependentSizedArrayType( |
3532 | 0 | getVariableArrayDecayedType(dat->getElementType()), |
3533 | 0 | dat->getSizeExpr(), |
3534 | 0 | dat->getSizeModifier(), |
3535 | 0 | dat->getIndexTypeCVRQualifiers(), |
3536 | 0 | dat->getBracketsRange()); |
3537 | 0 | break; |
3538 | 3 | } |
3539 | | |
3540 | | // Turn incomplete types into [*] types. |
3541 | 0 | case Type::IncompleteArray: { |
3542 | 0 | const auto *iat = cast<IncompleteArrayType>(ty); |
3543 | 0 | result = getVariableArrayType( |
3544 | 0 | getVariableArrayDecayedType(iat->getElementType()), |
3545 | 0 | /*size*/ nullptr, |
3546 | 0 | ArrayType::Normal, |
3547 | 0 | iat->getIndexTypeCVRQualifiers(), |
3548 | 0 | SourceRange()); |
3549 | 0 | break; |
3550 | 3 | } |
3551 | | |
3552 | | // Turn VLA types into [*] types. |
3553 | 64 | case Type::VariableArray: { |
3554 | 64 | const auto *vat = cast<VariableArrayType>(ty); |
3555 | 64 | result = getVariableArrayType( |
3556 | 64 | getVariableArrayDecayedType(vat->getElementType()), |
3557 | 64 | /*size*/ nullptr, |
3558 | 64 | ArrayType::Star, |
3559 | 64 | vat->getIndexTypeCVRQualifiers(), |
3560 | 64 | vat->getBracketsRange()); |
3561 | 64 | break; |
3562 | 240 | } |
3563 | 240 | } |
3564 | | |
3565 | | // Apply the top-level qualifiers from the original. |
3566 | 240 | return getQualifiedType(result, split.Quals); |
3567 | 240 | } |
3568 | | |
3569 | | /// getVariableArrayType - Returns a non-unique reference to the type for a |
3570 | | /// variable array of the specified element type. |
3571 | | QualType ASTContext::getVariableArrayType(QualType EltTy, |
3572 | | Expr *NumElts, |
3573 | | ArrayType::ArraySizeModifier ASM, |
3574 | | unsigned IndexTypeQuals, |
3575 | 9.89k | SourceRange Brackets) const { |
3576 | | // Since we don't unique expressions, it isn't possible to unique VLA's |
3577 | | // that have an expression provided for their size. |
3578 | 9.89k | QualType Canon; |
3579 | | |
3580 | | // Be sure to pull qualifiers off the element type. |
3581 | 9.89k | if (!EltTy.isCanonical() || EltTy.hasLocalQualifiers()8.70k ) { |
3582 | 1.21k | SplitQualType canonSplit = getCanonicalType(EltTy).split(); |
3583 | 1.21k | Canon = getVariableArrayType(QualType(canonSplit.Ty, 0), NumElts, ASM, |
3584 | 1.21k | IndexTypeQuals, Brackets); |
3585 | 1.21k | Canon = getQualifiedType(Canon, canonSplit.Quals); |
3586 | 1.21k | } |
3587 | | |
3588 | 9.89k | auto *New = new (*this, TypeAlignment) |
3589 | 9.89k | VariableArrayType(EltTy, Canon, NumElts, ASM, IndexTypeQuals, Brackets); |
3590 | | |
3591 | 9.89k | VariableArrayTypes.push_back(New); |
3592 | 9.89k | Types.push_back(New); |
3593 | 9.89k | return QualType(New, 0); |
3594 | 9.89k | } |
3595 | | |
3596 | | /// getDependentSizedArrayType - Returns a non-unique reference to |
3597 | | /// the type for a dependently-sized array of the specified element |
3598 | | /// type. |
3599 | | QualType ASTContext::getDependentSizedArrayType(QualType elementType, |
3600 | | Expr *numElements, |
3601 | | ArrayType::ArraySizeModifier ASM, |
3602 | | unsigned elementTypeQuals, |
3603 | 19.1k | SourceRange brackets) const { |
3604 | 19.1k | assert((!numElements || numElements->isTypeDependent() || |
3605 | 19.1k | numElements->isValueDependent()) && |
3606 | 19.1k | "Size must be type- or value-dependent!"); |
3607 | | |
3608 | | // Dependently-sized array types that do not have a specified number |
3609 | | // of elements will have their sizes deduced from a dependent |
3610 | | // initializer. We do no canonicalization here at all, which is okay |
3611 | | // because they can't be used in most locations. |
3612 | 19.1k | if (!numElements) { |
3613 | 44 | auto *newType |
3614 | 44 | = new (*this, TypeAlignment) |
3615 | 44 | DependentSizedArrayType(*this, elementType, QualType(), |
3616 | 44 | numElements, ASM, elementTypeQuals, |
3617 | 44 | brackets); |
3618 | 44 | Types.push_back(newType); |
3619 | 44 | return QualType(newType, 0); |
3620 | 44 | } |
3621 | | |
3622 | | // Otherwise, we actually build a new type every time, but we |
3623 | | // also build a canonical type. |
3624 | | |
3625 | 19.0k | SplitQualType canonElementType = getCanonicalType(elementType).split(); |
3626 | | |
3627 | 19.0k | void *insertPos = nullptr; |
3628 | 19.0k | llvm::FoldingSetNodeID ID; |
3629 | 19.0k | DependentSizedArrayType::Profile(ID, *this, |
3630 | 19.0k | QualType(canonElementType.Ty, 0), |
3631 | 19.0k | ASM, elementTypeQuals, numElements); |
3632 | | |
3633 | | // Look for an existing type with these properties. |
3634 | 19.0k | DependentSizedArrayType *canonTy = |
3635 | 19.0k | DependentSizedArrayTypes.FindNodeOrInsertPos(ID, insertPos); |
3636 | | |
3637 | | // If we don't have one, build one. |
3638 | 19.0k | if (!canonTy) { |
3639 | 12.8k | canonTy = new (*this, TypeAlignment) |
3640 | 12.8k | DependentSizedArrayType(*this, QualType(canonElementType.Ty, 0), |
3641 | 12.8k | QualType(), numElements, ASM, elementTypeQuals, |
3642 | 12.8k | brackets); |
3643 | 12.8k | DependentSizedArrayTypes.InsertNode(canonTy, insertPos); |
3644 | 12.8k | Types.push_back(canonTy); |
3645 | 12.8k | } |
3646 | | |
3647 | | // Apply qualifiers from the element type to the array. |
3648 | 19.0k | QualType canon = getQualifiedType(QualType(canonTy,0), |
3649 | 19.0k | canonElementType.Quals); |
3650 | | |
3651 | | // If we didn't need extra canonicalization for the element type or the size |
3652 | | // expression, then just use that as our result. |
3653 | 19.0k | if (QualType(canonElementType.Ty, 0) == elementType && |
3654 | 9.22k | canonTy->getSizeExpr() == numElements) |
3655 | 8.72k | return canon; |
3656 | | |
3657 | | // Otherwise, we need to build a type which follows the spelling |
3658 | | // of the element type. |
3659 | 10.3k | auto *sugaredType |
3660 | 10.3k | = new (*this, TypeAlignment) |
3661 | 10.3k | DependentSizedArrayType(*this, elementType, canon, numElements, |
3662 | 10.3k | ASM, elementTypeQuals, brackets); |
3663 | 10.3k | Types.push_back(sugaredType); |
3664 | 10.3k | return QualType(sugaredType, 0); |
3665 | 10.3k | } |
3666 | | |
3667 | | QualType ASTContext::getIncompleteArrayType(QualType elementType, |
3668 | | ArrayType::ArraySizeModifier ASM, |
3669 | 96.7k | unsigned elementTypeQuals) const { |
3670 | 96.7k | llvm::FoldingSetNodeID ID; |
3671 | 96.7k | IncompleteArrayType::Profile(ID, elementType, ASM, elementTypeQuals); |
3672 | | |
3673 | 96.7k | void *insertPos = nullptr; |
3674 | 96.7k | if (IncompleteArrayType *iat = |
3675 | 43.5k | IncompleteArrayTypes.FindNodeOrInsertPos(ID, insertPos)) |
3676 | 43.5k | return QualType(iat, 0); |
3677 | | |
3678 | | // If the element type isn't canonical, this won't be a canonical type |
3679 | | // either, so fill in the canonical type field. We also have to pull |
3680 | | // qualifiers off the element type. |
3681 | 53.2k | QualType canon; |
3682 | | |
3683 | 53.2k | if (!elementType.isCanonical() || elementType.hasLocalQualifiers()24.2k ) { |
3684 | 31.8k | SplitQualType canonSplit = getCanonicalType(elementType).split(); |
3685 | 31.8k | canon = getIncompleteArrayType(QualType(canonSplit.Ty, 0), |
3686 | 31.8k | ASM, elementTypeQuals); |
3687 | 31.8k | canon = getQualifiedType(canon, canonSplit.Quals); |
3688 | | |
3689 | | // Get the new insert position for the node we care about. |
3690 | 31.8k | IncompleteArrayType *existing = |
3691 | 31.8k | IncompleteArrayTypes.FindNodeOrInsertPos(ID, insertPos); |
3692 | 31.8k | assert(!existing && "Shouldn't be in the map!"); (void) existing; |
3693 | 31.8k | } |
3694 | | |
3695 | 53.2k | auto *newType = new (*this, TypeAlignment) |
3696 | 53.2k | IncompleteArrayType(elementType, canon, ASM, elementTypeQuals); |
3697 | | |
3698 | 53.2k | IncompleteArrayTypes.InsertNode(newType, insertPos); |
3699 | 53.2k | Types.push_back(newType); |
3700 | 53.2k | return QualType(newType, 0); |
3701 | 53.2k | } |
3702 | | |
3703 | | ASTContext::BuiltinVectorTypeInfo |
3704 | 218k | ASTContext::getBuiltinVectorTypeInfo(const BuiltinType *Ty) const { |
3705 | 218k | #define SVE_INT_ELTTY(BITS, ELTS, SIGNED, NUMVECTORS) \ |
3706 | 125k | {getIntTypeForBitwidth(BITS, SIGNED), llvm::ElementCount::getScalable(ELTS), \ |
3707 | 125k | NUMVECTORS}; |
3708 | | |
3709 | 218k | #define SVE_ELTTY(ELTTY, ELTS, NUMVECTORS) \ |
3710 | 92.8k | {ELTTY, llvm::ElementCount::getScalable(ELTS), NUMVECTORS}; |
3711 | | |
3712 | 218k | switch (Ty->getKind()) { |
3713 | 0 | default: |
3714 | 0 | llvm_unreachable("Unsupported builtin vector type"); |
3715 | 11.7k | case BuiltinType::SveInt8: |
3716 | 11.7k | return SVE_INT_ELTTY(8, 16, true, 1); |
3717 | 11.0k | case BuiltinType::SveUint8: |
3718 | 11.0k | return SVE_INT_ELTTY(8, 16, false, 1); |
3719 | 103 | case BuiltinType::SveInt8x2: |
3720 | 103 | return SVE_INT_ELTTY(8, 16, true, 2); |
3721 | 103 | case BuiltinType::SveUint8x2: |
3722 | 103 | return SVE_INT_ELTTY(8, 16, false, 2); |
3723 | 93 | case BuiltinType::SveInt8x3: |
3724 | 93 | return SVE_INT_ELTTY(8, 16, true, 3); |
3725 | 93 | case BuiltinType::SveUint8x3: |
3726 | 93 | return SVE_INT_ELTTY(8, 16, false, 3); |
3727 | 93 | case BuiltinType::SveInt8x4: |
3728 | 93 | return SVE_INT_ELTTY(8, 16, true, 4); |
3729 | 93 | case BuiltinType::SveUint8x4: |
3730 | 93 | return SVE_INT_ELTTY(8, 16, false, 4); |
3731 | 14.5k | case BuiltinType::SveInt16: |
3732 | 14.5k | return SVE_INT_ELTTY(16, 8, true, 1); |
3733 | 12.8k | case BuiltinType::SveUint16: |
3734 | 12.8k | return SVE_INT_ELTTY(16, 8, false, 1); |
3735 | 103 | case BuiltinType::SveInt16x2: |
3736 | 103 | return SVE_INT_ELTTY(16, 8, true, 2); |
3737 | 103 | case BuiltinType::SveUint16x2: |
3738 | 103 | return SVE_INT_ELTTY(16, 8, false, 2); |
3739 | 93 | case BuiltinType::SveInt16x3: |
3740 | 93 | return SVE_INT_ELTTY(16, 8, true, 3); |
3741 | 93 | case BuiltinType::SveUint16x3: |
3742 | 93 | return SVE_INT_ELTTY(16, 8, false, 3); |
3743 | 93 | case BuiltinType::SveInt16x4: |
3744 | 93 | return SVE_INT_ELTTY(16, 8, true, 4); |
3745 | 93 | case BuiltinType::SveUint16x4: |
3746 | 93 | return SVE_INT_ELTTY(16, 8, false, 4); |
3747 | 18.1k | case BuiltinType::SveInt32: |
3748 | 18.1k | return SVE_INT_ELTTY(32, 4, true, 1); |
3749 | 18.3k | case BuiltinType::SveUint32: |
3750 | 18.3k | return SVE_INT_ELTTY(32, 4, false, 1); |
3751 | 103 | case BuiltinType::SveInt32x2: |
3752 | 103 | return SVE_INT_ELTTY(32, 4, true, 2); |
3753 | 103 | case BuiltinType::SveUint32x2: |
3754 | 103 | return SVE_INT_ELTTY(32, 4, false, 2); |
3755 | 93 | case BuiltinType::SveInt32x3: |
3756 | 93 | return SVE_INT_ELTTY(32, 4, true, 3); |
3757 | 93 | case BuiltinType::SveUint32x3: |
3758 | 93 | return SVE_INT_ELTTY(32, 4, false, 3); |
3759 | 93 | case BuiltinType::SveInt32x4: |
3760 | 93 | return SVE_INT_ELTTY(32, 4, true, 4); |
3761 | 93 | case BuiltinType::SveUint32x4: |
3762 | 93 | return SVE_INT_ELTTY(32, 4, false, 4); |
3763 | 17.4k | case BuiltinType::SveInt64: |
3764 | 17.4k | return SVE_INT_ELTTY(64, 2, true, 1); |
3765 | 19.2k | case BuiltinType::SveUint64: |
3766 | 19.2k | return SVE_INT_ELTTY(64, 2, false, 1); |
3767 | 103 | case BuiltinType::SveInt64x2: |
3768 | 103 | return SVE_INT_ELTTY(64, 2, true, 2); |
3769 | 103 | case BuiltinType::SveUint64x2: |
3770 | 103 | return SVE_INT_ELTTY(64, 2, false, 2); |
3771 | 93 | case BuiltinType::SveInt64x3: |
3772 | 93 | return SVE_INT_ELTTY(64, 2, true, 3); |
3773 | 93 | case BuiltinType::SveUint64x3: |
3774 | 93 | return SVE_INT_ELTTY(64, 2, false, 3); |
3775 | 93 | case BuiltinType::SveInt64x4: |
3776 | 93 | return SVE_INT_ELTTY(64, 2, true, 4); |
3777 | 93 | case BuiltinType::SveUint64x4: |
3778 | 93 | return SVE_INT_ELTTY(64, 2, false, 4); |
3779 | 58.7k | case BuiltinType::SveBool: |
3780 | 58.7k | return SVE_ELTTY(BoolTy, 16, 1); |
3781 | 10.0k | case BuiltinType::SveFloat16: |
3782 | 10.0k | return SVE_ELTTY(HalfTy, 8, 1); |
3783 | 103 | case BuiltinType::SveFloat16x2: |
3784 | 103 | return SVE_ELTTY(HalfTy, 8, 2); |
3785 | 93 | case BuiltinType::SveFloat16x3: |
3786 | 93 | return SVE_ELTTY(HalfTy, 8, 3); |
3787 | 93 | case BuiltinType::SveFloat16x4: |
3788 | 93 | return SVE_ELTTY(HalfTy, 8, 4); |
3789 | 10.5k | case BuiltinType::SveFloat32: |
3790 | 10.5k | return SVE_ELTTY(FloatTy, 4, 1); |
3791 | 103 | case BuiltinType::SveFloat32x2: |
3792 | 103 | return SVE_ELTTY(FloatTy, 4, 2); |
3793 | 93 | case BuiltinType::SveFloat32x3: |
3794 | 93 | return SVE_ELTTY(FloatTy, 4, 3); |
3795 | 93 | case BuiltinType::SveFloat32x4: |
3796 | 93 | return SVE_ELTTY(FloatTy, 4, 4); |
3797 | 10.2k | case BuiltinType::SveFloat64: |
3798 | 10.2k | return SVE_ELTTY(DoubleTy, 2, 1); |
3799 | 103 | case BuiltinType::SveFloat64x2: |
3800 | 103 | return SVE_ELTTY(DoubleTy, 2, 2); |
3801 | 93 | case BuiltinType::SveFloat64x3: |
3802 | 93 | return SVE_ELTTY(DoubleTy, 2, 3); |
3803 | 93 | case BuiltinType::SveFloat64x4: |
3804 | 93 | return SVE_ELTTY(DoubleTy, 2, 4); |
3805 | 2.04k | case BuiltinType::SveBFloat16: |
3806 | 2.04k | return SVE_ELTTY(BFloat16Ty, 8, 1); |
3807 | 108 | case BuiltinType::SveBFloat16x2: |
3808 | 108 | return SVE_ELTTY(BFloat16Ty, 8, 2); |
3809 | 120 | case BuiltinType::SveBFloat16x3: |
3810 | 120 | return SVE_ELTTY(BFloat16Ty, 8, 3); |
3811 | 142 | case BuiltinType::SveBFloat16x4: |
3812 | 142 | return SVE_ELTTY(BFloat16Ty, 8, 4);0 |
3813 | 218k | } |
3814 | 218k | } |
3815 | | |
3816 | | /// getScalableVectorType - Return the unique reference to a scalable vector |
3817 | | /// type of the specified element type and size. VectorType must be a built-in |
3818 | | /// type. |
3819 | | QualType ASTContext::getScalableVectorType(QualType EltTy, |
3820 | 1.05M | unsigned NumElts) const { |
3821 | 1.05M | if (Target->hasAArch64SVETypes()) { |
3822 | 1.05M | uint64_t EltTySize = getTypeSize(EltTy); |
3823 | 1.05M | #define SVE_VECTOR_TYPE(Name, MangledName, Id, SingletonId, NumEls, ElBits, \ |
3824 | 1.05M | IsSigned, IsFP, IsBF) \ |
3825 | 9.80M | if (!EltTy->isBooleanType() && \ |
3826 | 6.03M | ((EltTy->hasIntegerRepresentation() && \ |
3827 | 3.32M | EltTy->hasSignedIntegerRepresentation() == IsSigned) || \ |
3828 | 4.18M | (EltTy->hasFloatingRepresentation() && !EltTy->isBFloat16Type()2.71M && \ |
3829 | 2.07M | IsFP && !IsBF0 ) || \ |
3830 | 3.66M | (EltTy->hasFloatingRepresentation() && EltTy->isBFloat16Type()2.19M && \ |
3831 | 104k | IsBF && !IsFP0 )) && \ |
3832 | 2.38M | EltTySize == ElBits && NumElts == NumEls989k ) { \ |
3833 | 973k | return SingletonId; \ |
3834 | 973k | } |
3835 | 1.05M | #define SVE_PREDICATE_TYPE(Name, MangledName, Id, SingletonId, NumEls) \ |
3836 | 78.4k | if (EltTy->isBooleanType() && NumElts == NumEls) \ |
3837 | 78.4k | return SingletonId; |
3838 | 8.83M | #include "clang/Basic/AArch64SVEACLETypes.def"1.05M |
3839 | 8.83M | } |
3840 | 0 | return QualType(); |
3841 | 1.05M | } |
3842 | | |
3843 | | /// getVectorType - Return the unique reference to a vector type of |
3844 | | /// the specified element type and size. VectorType must be a built-in type. |
3845 | | QualType ASTContext::getVectorType(QualType vecType, unsigned NumElts, |
3846 | 4.99M | VectorType::VectorKind VecKind) const { |
3847 | 4.99M | assert(vecType->isBuiltinType()); |
3848 | | |
3849 | | // Check if we've already instantiated a vector of this type. |
3850 | 4.99M | llvm::FoldingSetNodeID ID; |
3851 | 4.99M | VectorType::Profile(ID, vecType, NumElts, Type::Vector, VecKind); |
3852 | | |
3853 | 4.99M | void *InsertPos = nullptr; |
3854 | 4.99M | if (VectorType *VTP = VectorTypes.FindNodeOrInsertPos(ID, InsertPos)) |
3855 | 4.95M | return QualType(VTP, 0); |
3856 | | |
3857 | | // If the element type isn't canonical, this won't be a canonical type either, |
3858 | | // so fill in the canonical type field. |
3859 | 45.3k | QualType Canonical; |
3860 | 45.3k | if (!vecType.isCanonical()) { |
3861 | 10.9k | Canonical = getVectorType(getCanonicalType(vecType), NumElts, VecKind); |
3862 | | |
3863 | | // Get the new insert position for the node we care about. |
3864 | 10.9k | VectorType *NewIP = VectorTypes.FindNodeOrInsertPos(ID, InsertPos); |
3865 | 10.9k | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
3866 | 10.9k | } |
3867 | 45.3k | auto *New = new (*this, TypeAlignment) |
3868 | 45.3k | VectorType(vecType, NumElts, Canonical, VecKind); |
3869 | 45.3k | VectorTypes.InsertNode(New, InsertPos); |
3870 | 45.3k | Types.push_back(New); |
3871 | 45.3k | return QualType(New, 0); |
3872 | 45.3k | } |
3873 | | |
3874 | | QualType |
3875 | | ASTContext::getDependentVectorType(QualType VecType, Expr *SizeExpr, |
3876 | | SourceLocation AttrLoc, |
3877 | 85 | VectorType::VectorKind VecKind) const { |
3878 | 85 | llvm::FoldingSetNodeID ID; |
3879 | 85 | DependentVectorType::Profile(ID, *this, getCanonicalType(VecType), SizeExpr, |
3880 | 85 | VecKind); |
3881 | 85 | void *InsertPos = nullptr; |
3882 | 85 | DependentVectorType *Canon = |
3883 | 85 | DependentVectorTypes.FindNodeOrInsertPos(ID, InsertPos); |
3884 | 85 | DependentVectorType *New; |
3885 | | |
3886 | 85 | if (Canon) { |
3887 | 5 | New = new (*this, TypeAlignment) DependentVectorType( |
3888 | 5 | *this, VecType, QualType(Canon, 0), SizeExpr, AttrLoc, VecKind); |
3889 | 80 | } else { |
3890 | 80 | QualType CanonVecTy = getCanonicalType(VecType); |
3891 | 80 | if (CanonVecTy == VecType) { |
3892 | 40 | New = new (*this, TypeAlignment) DependentVectorType( |
3893 | 40 | *this, VecType, QualType(), SizeExpr, AttrLoc, VecKind); |
3894 | | |
3895 | 40 | DependentVectorType *CanonCheck = |
3896 | 40 | DependentVectorTypes.FindNodeOrInsertPos(ID, InsertPos); |
3897 | 40 | assert(!CanonCheck && |
3898 | 40 | "Dependent-sized vector_size canonical type broken"); |
3899 | 40 | (void)CanonCheck; |
3900 | 40 | DependentVectorTypes.InsertNode(New, InsertPos); |
3901 | 40 | } else { |
3902 | 40 | QualType CanonTy = getDependentVectorType(CanonVecTy, SizeExpr, |
3903 | 40 | SourceLocation(), VecKind); |
3904 | 40 | New = new (*this, TypeAlignment) DependentVectorType( |
3905 | 40 | *this, VecType, CanonTy, SizeExpr, AttrLoc, VecKind); |
3906 | 40 | } |
3907 | 80 | } |
3908 | | |
3909 | 85 | Types.push_back(New); |
3910 | 85 | return QualType(New, 0); |
3911 | 85 | } |
3912 | | |
3913 | | /// getExtVectorType - Return the unique reference to an extended vector type of |
3914 | | /// the specified element type and size. VectorType must be a built-in type. |
3915 | | QualType |
3916 | 25.4k | ASTContext::getExtVectorType(QualType vecType, unsigned NumElts) const { |
3917 | 25.4k | assert(vecType->isBuiltinType() || vecType->isDependentType()); |
3918 | | |
3919 | | // Check if we've already instantiated a vector of this type. |
3920 | 25.4k | llvm::FoldingSetNodeID ID; |
3921 | 25.4k | VectorType::Profile(ID, vecType, NumElts, Type::ExtVector, |
3922 | 25.4k | VectorType::GenericVector); |
3923 | 25.4k | void *InsertPos = nullptr; |
3924 | 25.4k | if (VectorType *VTP = VectorTypes.FindNodeOrInsertPos(ID, InsertPos)) |
3925 | 21.5k | return QualType(VTP, 0); |
3926 | | |
3927 | | // If the element type isn't canonical, this won't be a canonical type either, |
3928 | | // so fill in the canonical type field. |
3929 | 3.88k | QualType Canonical; |
3930 | 3.88k | if (!vecType.isCanonical()) { |
3931 | 735 | Canonical = getExtVectorType(getCanonicalType(vecType), NumElts); |
3932 | | |
3933 | | // Get the new insert position for the node we care about. |
3934 | 735 | VectorType *NewIP = VectorTypes.FindNodeOrInsertPos(ID, InsertPos); |
3935 | 735 | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
3936 | 735 | } |
3937 | 3.88k | auto *New = new (*this, TypeAlignment) |
3938 | 3.88k | ExtVectorType(vecType, NumElts, Canonical); |
3939 | 3.88k | VectorTypes.InsertNode(New, InsertPos); |
3940 | 3.88k | Types.push_back(New); |
3941 | 3.88k | return QualType(New, 0); |
3942 | 3.88k | } |
3943 | | |
3944 | | QualType |
3945 | | ASTContext::getDependentSizedExtVectorType(QualType vecType, |
3946 | | Expr *SizeExpr, |
3947 | 42 | SourceLocation AttrLoc) const { |
3948 | 42 | llvm::FoldingSetNodeID ID; |
3949 | 42 | DependentSizedExtVectorType::Profile(ID, *this, getCanonicalType(vecType), |
3950 | 42 | SizeExpr); |
3951 | | |
3952 | 42 | void *InsertPos = nullptr; |
3953 | 42 | DependentSizedExtVectorType *Canon |
3954 | 42 | = DependentSizedExtVectorTypes.FindNodeOrInsertPos(ID, InsertPos); |
3955 | 42 | DependentSizedExtVectorType *New; |
3956 | 42 | if (Canon) { |
3957 | | // We already have a canonical version of this array type; use it as |
3958 | | // the canonical type for a newly-built type. |
3959 | 8 | New = new (*this, TypeAlignment) |
3960 | 8 | DependentSizedExtVectorType(*this, vecType, QualType(Canon, 0), |
3961 | 8 | SizeExpr, AttrLoc); |
3962 | 34 | } else { |
3963 | 34 | QualType CanonVecTy = getCanonicalType(vecType); |
3964 | 34 | if (CanonVecTy == vecType) { |
3965 | 25 | New = new (*this, TypeAlignment) |
3966 | 25 | DependentSizedExtVectorType(*this, vecType, QualType(), SizeExpr, |
3967 | 25 | AttrLoc); |
3968 | | |
3969 | 25 | DependentSizedExtVectorType *CanonCheck |
3970 | 25 | = DependentSizedExtVectorTypes.FindNodeOrInsertPos(ID, InsertPos); |
3971 | 25 | assert(!CanonCheck && "Dependent-sized ext_vector canonical type broken"); |
3972 | 25 | (void)CanonCheck; |
3973 | 25 | DependentSizedExtVectorTypes.InsertNode(New, InsertPos); |
3974 | 9 | } else { |
3975 | 9 | QualType CanonExtTy = getDependentSizedExtVectorType(CanonVecTy, SizeExpr, |
3976 | 9 | SourceLocation()); |
3977 | 9 | New = new (*this, TypeAlignment) DependentSizedExtVectorType( |
3978 | 9 | *this, vecType, CanonExtTy, SizeExpr, AttrLoc); |
3979 | 9 | } |
3980 | 34 | } |
3981 | | |
3982 | 42 | Types.push_back(New); |
3983 | 42 | return QualType(New, 0); |
3984 | 42 | } |
3985 | | |
3986 | | QualType ASTContext::getConstantMatrixType(QualType ElementTy, unsigned NumRows, |
3987 | 399 | unsigned NumColumns) const { |
3988 | 399 | llvm::FoldingSetNodeID ID; |
3989 | 399 | ConstantMatrixType::Profile(ID, ElementTy, NumRows, NumColumns, |
3990 | 399 | Type::ConstantMatrix); |
3991 | | |
3992 | 399 | assert(MatrixType::isValidElementType(ElementTy) && |
3993 | 399 | "need a valid element type"); |
3994 | 399 | assert(ConstantMatrixType::isDimensionValid(NumRows) && |
3995 | 399 | ConstantMatrixType::isDimensionValid(NumColumns) && |
3996 | 399 | "need valid matrix dimensions"); |
3997 | 399 | void *InsertPos = nullptr; |
3998 | 399 | if (ConstantMatrixType *MTP = MatrixTypes.FindNodeOrInsertPos(ID, InsertPos)) |
3999 | 153 | return QualType(MTP, 0); |
4000 | | |
4001 | 246 | QualType Canonical; |
4002 | 246 | if (!ElementTy.isCanonical()) { |
4003 | 84 | Canonical = |
4004 | 84 | getConstantMatrixType(getCanonicalType(ElementTy), NumRows, NumColumns); |
4005 | | |
4006 | 84 | ConstantMatrixType *NewIP = MatrixTypes.FindNodeOrInsertPos(ID, InsertPos); |
4007 | 84 | assert(!NewIP && "Matrix type shouldn't already exist in the map"); |
4008 | 84 | (void)NewIP; |
4009 | 84 | } |
4010 | | |
4011 | 246 | auto *New = new (*this, TypeAlignment) |
4012 | 246 | ConstantMatrixType(ElementTy, NumRows, NumColumns, Canonical); |
4013 | 246 | MatrixTypes.InsertNode(New, InsertPos); |
4014 | 246 | Types.push_back(New); |
4015 | 246 | return QualType(New, 0); |
4016 | 246 | } |
4017 | | |
4018 | | QualType ASTContext::getDependentSizedMatrixType(QualType ElementTy, |
4019 | | Expr *RowExpr, |
4020 | | Expr *ColumnExpr, |
4021 | 49 | SourceLocation AttrLoc) const { |
4022 | 49 | QualType CanonElementTy = getCanonicalType(ElementTy); |
4023 | 49 | llvm::FoldingSetNodeID ID; |
4024 | 49 | DependentSizedMatrixType::Profile(ID, *this, CanonElementTy, RowExpr, |
4025 | 49 | ColumnExpr); |
4026 | | |
4027 | 49 | void *InsertPos = nullptr; |
4028 | 49 | DependentSizedMatrixType *Canon = |
4029 | 49 | DependentSizedMatrixTypes.FindNodeOrInsertPos(ID, InsertPos); |
4030 | | |
4031 | 49 | if (!Canon) { |
4032 | 38 | Canon = new (*this, TypeAlignment) DependentSizedMatrixType( |
4033 | 38 | *this, CanonElementTy, QualType(), RowExpr, ColumnExpr, AttrLoc); |
4034 | 38 | #ifndef NDEBUG |
4035 | 38 | DependentSizedMatrixType *CanonCheck = |
4036 | 38 | DependentSizedMatrixTypes.FindNodeOrInsertPos(ID, InsertPos); |
4037 | 38 | assert(!CanonCheck && "Dependent-sized matrix canonical type broken"); |
4038 | 38 | #endif |
4039 | 38 | DependentSizedMatrixTypes.InsertNode(Canon, InsertPos); |
4040 | 38 | Types.push_back(Canon); |
4041 | 38 | } |
4042 | | |
4043 | | // Already have a canonical version of the matrix type |
4044 | | // |
4045 | | // If it exactly matches the requested type, use it directly. |
4046 | 49 | if (Canon->getElementType() == ElementTy && Canon->getRowExpr() == RowExpr4 && |
4047 | 4 | Canon->getRowExpr() == ColumnExpr) |
4048 | 0 | return QualType(Canon, 0); |
4049 | | |
4050 | | // Use Canon as the canonical type for newly-built type. |
4051 | 49 | DependentSizedMatrixType *New = new (*this, TypeAlignment) |
4052 | 49 | DependentSizedMatrixType(*this, ElementTy, QualType(Canon, 0), RowExpr, |
4053 | 49 | ColumnExpr, AttrLoc); |
4054 | 49 | Types.push_back(New); |
4055 | 49 | return QualType(New, 0); |
4056 | 49 | } |
4057 | | |
4058 | | QualType ASTContext::getDependentAddressSpaceType(QualType PointeeType, |
4059 | | Expr *AddrSpaceExpr, |
4060 | 52 | SourceLocation AttrLoc) const { |
4061 | 52 | assert(AddrSpaceExpr->isInstantiationDependent()); |
4062 | | |
4063 | 52 | QualType canonPointeeType = getCanonicalType(PointeeType); |
4064 | | |
4065 | 52 | void *insertPos = nullptr; |
4066 | 52 | llvm::FoldingSetNodeID ID; |
4067 | 52 | DependentAddressSpaceType::Profile(ID, *this, canonPointeeType, |
4068 | 52 | AddrSpaceExpr); |
4069 | | |
4070 | 52 | DependentAddressSpaceType *canonTy = |
4071 | 52 | DependentAddressSpaceTypes.FindNodeOrInsertPos(ID, insertPos); |
4072 | | |
4073 | 52 | if (!canonTy) { |
4074 | 36 | canonTy = new (*this, TypeAlignment) |
4075 | 36 | DependentAddressSpaceType(*this, canonPointeeType, |
4076 | 36 | QualType(), AddrSpaceExpr, AttrLoc); |
4077 | 36 | DependentAddressSpaceTypes.InsertNode(canonTy, insertPos); |
4078 | 36 | Types.push_back(canonTy); |
4079 | 36 | } |
4080 | | |
4081 | 52 | if (canonPointeeType == PointeeType && |
4082 | 0 | canonTy->getAddrSpaceExpr() == AddrSpaceExpr) |
4083 | 0 | return QualType(canonTy, 0); |
4084 | | |
4085 | 52 | auto *sugaredType |
4086 | 52 | = new (*this, TypeAlignment) |
4087 | 52 | DependentAddressSpaceType(*this, PointeeType, QualType(canonTy, 0), |
4088 | 52 | AddrSpaceExpr, AttrLoc); |
4089 | 52 | Types.push_back(sugaredType); |
4090 | 52 | return QualType(sugaredType, 0); |
4091 | 52 | } |
4092 | | |
4093 | | /// Determine whether \p T is canonical as the result type of a function. |
4094 | 11.5M | static bool isCanonicalResultType(QualType T) { |
4095 | 11.5M | return T.isCanonical() && |
4096 | 7.81M | (T.getObjCLifetime() == Qualifiers::OCL_None || |
4097 | 0 | T.getObjCLifetime() == Qualifiers::OCL_ExplicitNone); |
4098 | 11.5M | } |
4099 | | |
4100 | | /// getFunctionNoProtoType - Return a K&R style C function type like 'int()'. |
4101 | | QualType |
4102 | | ASTContext::getFunctionNoProtoType(QualType ResultTy, |
4103 | 158k | const FunctionType::ExtInfo &Info) const { |
4104 | | // Unique functions, to guarantee there is only one function of a particular |
4105 | | // structure. |
4106 | 158k | llvm::FoldingSetNodeID ID; |
4107 | 158k | FunctionNoProtoType::Profile(ID, ResultTy, Info); |
4108 | | |
4109 | 158k | void *InsertPos = nullptr; |
4110 | 158k | if (FunctionNoProtoType *FT = |
4111 | 144k | FunctionNoProtoTypes.FindNodeOrInsertPos(ID, InsertPos)) |
4112 | 144k | return QualType(FT, 0); |
4113 | | |
4114 | 14.1k | QualType Canonical; |
4115 | 14.1k | if (!isCanonicalResultType(ResultTy)) { |
4116 | 2.66k | Canonical = |
4117 | 2.66k | getFunctionNoProtoType(getCanonicalFunctionResultType(ResultTy), Info); |
4118 | | |
4119 | | // Get the new insert position for the node we care about. |
4120 | 2.66k | FunctionNoProtoType *NewIP = |
4121 | 2.66k | FunctionNoProtoTypes.FindNodeOrInsertPos(ID, InsertPos); |
4122 | 2.66k | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
4123 | 2.66k | } |
4124 | | |
4125 | 14.1k | auto *New = new (*this, TypeAlignment) |
4126 | 14.1k | FunctionNoProtoType(ResultTy, Canonical, Info); |
4127 | 14.1k | Types.push_back(New); |
4128 | 14.1k | FunctionNoProtoTypes.InsertNode(New, InsertPos); |
4129 | 14.1k | return QualType(New, 0); |
4130 | 14.1k | } |
4131 | | |
4132 | | CanQualType |
4133 | 7.16M | ASTContext::getCanonicalFunctionResultType(QualType ResultType) const { |
4134 | 7.16M | CanQualType CanResultType = getCanonicalType(ResultType); |
4135 | | |
4136 | | // Canonical result types do not have ARC lifetime qualifiers. |
4137 | 7.16M | if (CanResultType.getQualifiers().hasObjCLifetime()) { |
4138 | 49 | Qualifiers Qs = CanResultType.getQualifiers(); |
4139 | 49 | Qs.removeObjCLifetime(); |
4140 | 49 | return CanQualType::CreateUnsafe( |
4141 | 49 | getQualifiedType(CanResultType.getUnqualifiedType(), Qs)); |
4142 | 49 | } |
4143 | | |
4144 | 7.16M | return CanResultType; |
4145 | 7.16M | } |
4146 | | |
4147 | | static bool isCanonicalExceptionSpecification( |
4148 | 13.2M | const FunctionProtoType::ExceptionSpecInfo &ESI, bool NoexceptInType) { |
4149 | 13.2M | if (ESI.Type == EST_None) |
4150 | 11.5M | return true; |
4151 | 1.70M | if (!NoexceptInType) |
4152 | 1.63M | return false; |
4153 | | |
4154 | | // C++17 onwards: exception specification is part of the type, as a simple |
4155 | | // boolean "can this function type throw". |
4156 | 63.8k | if (ESI.Type == EST_BasicNoexcept) |
4157 | 26.4k | return true; |
4158 | | |
4159 | | // A noexcept(expr) specification is (possibly) canonical if expr is |
4160 | | // value-dependent. |
4161 | 37.4k | if (ESI.Type == EST_DependentNoexcept) |
4162 | 3.11k | return true; |
4163 | | |
4164 | | // A dynamic exception specification is canonical if it only contains pack |
4165 | | // expansions (so we can't tell whether it's non-throwing) and all its |
4166 | | // contained types are canonical. |
4167 | 34.2k | if (ESI.Type == EST_Dynamic) { |
4168 | 41 | bool AnyPackExpansions = false; |
4169 | 53 | for (QualType ET : ESI.Exceptions) { |
4170 | 53 | if (!ET.isCanonical()) |
4171 | 19 | return false; |
4172 | 34 | if (ET->getAs<PackExpansionType>()) |
4173 | 3 | AnyPackExpansions = true; |
4174 | 34 | } |
4175 | 22 | return AnyPackExpansions; |
4176 | 34.2k | } |
4177 | | |
4178 | 34.2k | return false; |
4179 | 34.2k | } |
4180 | | |
4181 | | QualType ASTContext::getFunctionTypeInternal( |
4182 | | QualType ResultTy, ArrayRef<QualType> ArgArray, |
4183 | 28.0M | const FunctionProtoType::ExtProtoInfo &EPI, bool OnlyWantCanonical) const { |
4184 | 28.0M | size_t NumArgs = ArgArray.size(); |
4185 | | |
4186 | | // Unique functions, to guarantee there is only one function of a particular |
4187 | | // structure. |
4188 | 28.0M | llvm::FoldingSetNodeID ID; |
4189 | 28.0M | FunctionProtoType::Profile(ID, ResultTy, ArgArray.begin(), NumArgs, EPI, |
4190 | 28.0M | *this, true); |
4191 | | |
4192 | 28.0M | QualType Canonical; |
4193 | 28.0M | bool Unique = false; |
4194 | | |
4195 | 28.0M | void *InsertPos = nullptr; |
4196 | 28.0M | if (FunctionProtoType *FPT = |
4197 | 14.8M | FunctionProtoTypes.FindNodeOrInsertPos(ID, InsertPos)) { |
4198 | 14.8M | QualType Existing = QualType(FPT, 0); |
4199 | | |
4200 | | // If we find a pre-existing equivalent FunctionProtoType, we can just reuse |
4201 | | // it so long as our exception specification doesn't contain a dependent |
4202 | | // noexcept expression, or we're just looking for a canonical type. |
4203 | | // Otherwise, we're going to need to create a type |
4204 | | // sugar node to hold the concrete expression. |
4205 | 14.8M | if (OnlyWantCanonical || !isComputedNoexcept(EPI.ExceptionSpec.Type)13.1M || |
4206 | 1.45k | EPI.ExceptionSpec.NoexceptExpr == FPT->getNoexceptExpr()) |
4207 | 14.8M | return Existing; |
4208 | | |
4209 | | // We need a new type sugar node for this one, to hold the new noexcept |
4210 | | // expression. We do no canonicalization here, but that's OK since we don't |
4211 | | // expect to see the same noexcept expression much more than once. |
4212 | 1.03k | Canonical = getCanonicalType(Existing); |
4213 | 1.03k | Unique = true; |
4214 | 1.03k | } |
4215 | | |
4216 | 13.2M | bool NoexceptInType = getLangOpts().CPlusPlus17; |
4217 | 13.2M | bool IsCanonicalExceptionSpec = |
4218 | 13.2M | isCanonicalExceptionSpecification(EPI.ExceptionSpec, NoexceptInType); |
4219 | | |
4220 | | // Determine whether the type being created is already canonical or not. |
4221 | 13.2M | bool isCanonical = !Unique && IsCanonicalExceptionSpec13.2M && |
4222 | 11.5M | isCanonicalResultType(ResultTy) && !EPI.HasTrailingReturn7.79M ; |
4223 | 28.5M | for (unsigned i = 0; i != NumArgs && isCanonical21.3M ; ++i15.3M ) |
4224 | 15.3M | if (!ArgArray[i].isCanonicalAsParam()) |
4225 | 1.69M | isCanonical = false; |
4226 | | |
4227 | 13.2M | if (OnlyWantCanonical) |
4228 | 13.2M | assert(isCanonical && |
4229 | 13.2M | "given non-canonical parameters constructing canonical type"); |
4230 | | |
4231 | | // If this type isn't canonical, get the canonical version of it if we don't |
4232 | | // already have it. The exception spec is only partially part of the |
4233 | | // canonical type, and only in C++17 onwards. |
4234 | 13.2M | if (!isCanonical && Canonical.isNull()7.15M ) { |
4235 | 7.15M | SmallVector<QualType, 16> CanonicalArgs; |
4236 | 7.15M | CanonicalArgs.reserve(NumArgs); |
4237 | 22.0M | for (unsigned i = 0; i != NumArgs; ++i14.8M ) |
4238 | 14.8M | CanonicalArgs.push_back(getCanonicalParamType(ArgArray[i])); |
4239 | | |
4240 | 7.15M | llvm::SmallVector<QualType, 8> ExceptionTypeStorage; |
4241 | 7.15M | FunctionProtoType::ExtProtoInfo CanonicalEPI = EPI; |
4242 | 7.15M | CanonicalEPI.HasTrailingReturn = false; |
4243 | | |
4244 | 7.15M | if (IsCanonicalExceptionSpec) { |
4245 | | // Exception spec is already OK. |
4246 | 1.66M | } else if (NoexceptInType) { |
4247 | 34.0k | switch (EPI.ExceptionSpec.Type) { |
4248 | 33.2k | case EST_Unparsed: 740 case EST_Unevaluated: 32.5k case EST_Uninstantiated: |
4249 | | // We don't know yet. It shouldn't matter what we pick here; no-one |
4250 | | // should ever look at this. |
4251 | 33.2k | LLVM_FALLTHROUGH; |
4252 | 33.5k | case EST_None: 33.2k case EST_MSAny: 33.2k case EST_NoexceptFalse: |
4253 | 33.5k | CanonicalEPI.ExceptionSpec.Type = EST_None; |
4254 | 33.5k | break; |
4255 | | |
4256 | | // A dynamic exception specification is almost always "not noexcept", |
4257 | | // with the exception that a pack expansion might expand to no types. |
4258 | 38 | case EST_Dynamic: { |
4259 | 38 | bool AnyPacks = false; |
4260 | 51 | for (QualType ET : EPI.ExceptionSpec.Exceptions) { |
4261 | 51 | if (ET->getAs<PackExpansionType>()) |
4262 | 4 | AnyPacks = true; |
4263 | 51 | ExceptionTypeStorage.push_back(getCanonicalType(ET)); |
4264 | 51 | } |
4265 | 38 | if (!AnyPacks) |
4266 | 34 | CanonicalEPI.ExceptionSpec.Type = EST_None; |
4267 | 4 | else { |
4268 | 4 | CanonicalEPI.ExceptionSpec.Type = EST_Dynamic; |
4269 | 4 | CanonicalEPI.ExceptionSpec.Exceptions = ExceptionTypeStorage; |
4270 | 4 | } |
4271 | 38 | break; |
4272 | 33.2k | } |
4273 | | |
4274 | 184 | case EST_DynamicNone: |
4275 | 184 | case EST_BasicNoexcept: |
4276 | 338 | case EST_NoexceptTrue: |
4277 | 494 | case EST_NoThrow: |
4278 | 494 | CanonicalEPI.ExceptionSpec.Type = EST_BasicNoexcept; |
4279 | 494 | break; |
4280 | | |
4281 | 0 | case EST_DependentNoexcept: |
4282 | 0 | llvm_unreachable("dependent noexcept is already canonical"); |
4283 | 1.63M | } |
4284 | 1.63M | } else { |
4285 | 1.63M | CanonicalEPI.ExceptionSpec = FunctionProtoType::ExceptionSpecInfo(); |
4286 | 1.63M | } |
4287 | | |
4288 | | // Adjust the canonical function result type. |
4289 | 7.15M | CanQualType CanResultTy = getCanonicalFunctionResultType(ResultTy); |
4290 | 7.15M | Canonical = |
4291 | 7.15M | getFunctionTypeInternal(CanResultTy, CanonicalArgs, CanonicalEPI, true); |
4292 | | |
4293 | | // Get the new insert position for the node we care about. |
4294 | 7.15M | FunctionProtoType *NewIP = |
4295 | 7.15M | FunctionProtoTypes.FindNodeOrInsertPos(ID, InsertPos); |
4296 | 7.15M | assert(!NewIP && "Shouldn't be in the map!"); (void)NewIP; |
4297 | 7.15M | } |
4298 | | |
4299 | | // Compute the needed size to hold this FunctionProtoType and the |
4300 | | // various trailing objects. |
4301 | 13.2M | auto ESH = FunctionProtoType::getExceptionSpecSize( |
4302 | 13.2M | EPI.ExceptionSpec.Type, EPI.ExceptionSpec.Exceptions.size()); |
4303 | 13.2M | size_t Size = FunctionProtoType::totalSizeToAlloc< |
4304 | 13.2M | QualType, SourceLocation, FunctionType::FunctionTypeExtraBitfields, |
4305 | 13.2M | FunctionType::ExceptionType, Expr *, FunctionDecl *, |
4306 | 13.2M | FunctionProtoType::ExtParameterInfo, Qualifiers>( |
4307 | 13.2M | NumArgs, EPI.Variadic, |
4308 | 13.2M | FunctionProtoType::hasExtraBitfields(EPI.ExceptionSpec.Type), |
4309 | 13.2M | ESH.NumExceptionType, ESH.NumExprPtr, ESH.NumFunctionDeclPtr, |
4310 | 13.2M | EPI.ExtParameterInfos ? NumArgs15.3k : 0, |
4311 | 13.2M | EPI.TypeQuals.hasNonFastQualifiers() ? 1454 : 0); |
4312 | | |
4313 | 13.2M | auto *FTP = (FunctionProtoType *)Allocate(Size, TypeAlignment); |
4314 | 13.2M | FunctionProtoType::ExtProtoInfo newEPI = EPI; |
4315 | 13.2M | new (FTP) FunctionProtoType(ResultTy, ArgArray, Canonical, newEPI); |
4316 | 13.2M | Types.push_back(FTP); |
4317 | 13.2M | if (!Unique) |
4318 | 13.2M | FunctionProtoTypes.InsertNode(FTP, InsertPos); |
4319 | 13.2M | return QualType(FTP, 0); |
4320 | 13.2M | } |
4321 | | |
4322 | 169 | QualType ASTContext::getPipeType(QualType T, bool ReadOnly) const { |
4323 | 169 | llvm::FoldingSetNodeID ID; |
4324 | 169 | PipeType::Profile(ID, T, ReadOnly); |
4325 | | |
4326 | 169 | void *InsertPos = nullptr; |
4327 | 169 | if (PipeType *PT = PipeTypes.FindNodeOrInsertPos(ID, InsertPos)) |
4328 | 63 | return QualType(PT, 0); |
4329 | | |
4330 | | // If the pipe element type isn't canonical, this won't be a canonical type |
4331 | | // either, so fill in the canonical type field. |
4332 | 106 | QualType Canonical; |
4333 | 106 | if (!T.isCanonical()) { |
4334 | 18 | Canonical = getPipeType(getCanonicalType(T), ReadOnly); |
4335 | | |
4336 | | // Get the new insert position for the node we care about. |
4337 | 18 | PipeType *NewIP = PipeTypes.FindNodeOrInsertPos(ID, InsertPos); |
4338 | 18 | assert(!NewIP && "Shouldn't be in the map!"); |
4339 | 18 | (void)NewIP; |
4340 | 18 | } |
4341 | 106 | auto *New = new (*this, TypeAlignment) PipeType(T, Canonical, ReadOnly); |
4342 | 106 | Types.push_back(New); |
4343 | 106 | PipeTypes.InsertNode(New, InsertPos); |
4344 | 106 | return QualType(New, 0); |
4345 | 106 | } |
4346 | | |
4347 | 4.05M | QualType ASTContext::adjustStringLiteralBaseType(QualType Ty) const { |
4348 | | // OpenCL v1.1 s6.5.3: a string literal is in the constant address space. |
4349 | 555 | return LangOpts.OpenCL ? getAddrSpaceQualType(Ty, LangAS::opencl_constant) |
4350 | 4.05M | : Ty; |
4351 | 4.05M | } |
4352 | | |
4353 | 117 | QualType ASTContext::getReadPipeType(QualType T) const { |
4354 | 117 | return getPipeType(T, true); |
4355 | 117 | } |
4356 | | |
4357 | 26 | QualType ASTContext::getWritePipeType(QualType T) const { |
4358 | 26 | return getPipeType(T, false); |
4359 | 26 | } |
4360 | | |
4361 | 919 | QualType ASTContext::getExtIntType(bool IsUnsigned, unsigned NumBits) const { |
4362 | 919 | llvm::FoldingSetNodeID ID; |
4363 | 919 | ExtIntType::Profile(ID, IsUnsigned, NumBits); |
4364 | | |
4365 | 919 | void *InsertPos = nullptr; |
4366 | 919 | if (ExtIntType *EIT = ExtIntTypes.FindNodeOrInsertPos(ID, InsertPos)) |
4367 | 469 | return QualType(EIT, 0); |
4368 | | |
4369 | 450 | auto *New = new (*this, TypeAlignment) ExtIntType(IsUnsigned, NumBits); |
4370 | 450 | ExtIntTypes.InsertNode(New, InsertPos); |
4371 | 450 | Types.push_back(New); |
4372 | 450 | return QualType(New, 0); |
4373 | 450 | } |
4374 | | |
4375 | | QualType ASTContext::getDependentExtIntType(bool IsUnsigned, |
4376 | 4 | Expr *NumBitsExpr) const { |
4377 | 4 | assert(NumBitsExpr->isInstantiationDependent() && "Only good for dependent"); |
4378 | 4 | llvm::FoldingSetNodeID ID; |
4379 | 4 | DependentExtIntType::Profile(ID, *this, IsUnsigned, NumBitsExpr); |
4380 | | |
4381 | 4 | void *InsertPos = nullptr; |
4382 | 4 | if (DependentExtIntType *Existing = |
4383 | 1 | DependentExtIntTypes.FindNodeOrInsertPos(ID, InsertPos)) |
4384 | 1 | return QualType(Existing, 0); |
4385 | | |
4386 | 3 | auto *New = new (*this, TypeAlignment) |
4387 | 3 | DependentExtIntType(*this, IsUnsigned, NumBitsExpr); |
4388 | 3 | DependentExtIntTypes.InsertNode(New, InsertPos); |
4389 | | |
4390 | 3 | Types.push_back(New); |
4391 | 3 | return QualType(New, 0); |
4392 | 3 | } |
4393 | | |
4394 | | #ifndef NDEBUG |
4395 | 3.21M | static bool NeedsInjectedClassNameType(const RecordDecl *D) { |
4396 | 3.21M | if (!isa<CXXRecordDecl>(D)) return false650k ; |
4397 | 2.56M | const auto *RD = cast<CXXRecordDecl>(D); |
4398 | 2.56M | if (isa<ClassTemplatePartialSpecializationDecl>(RD)) |
4399 | 153k | return true; |
4400 | 2.41M | if (RD->getDescribedClassTemplate() && |
4401 | 505k | !isa<ClassTemplateSpecializationDecl>(RD)) |
4402 | 505k | return true; |
4403 | 1.90M | return false; |
4404 | 1.90M | } |
4405 | | #endif |
4406 | | |
4407 | | /// getInjectedClassNameType - Return the unique reference to the |
4408 | | /// injected class name type for the specified templated declaration. |
4409 | | QualType ASTContext::getInjectedClassNameType(CXXRecordDecl *Decl, |
4410 | 658k | QualType TST) const { |
4411 | 658k | assert(NeedsInjectedClassNameType(Decl)); |
4412 | 658k | if (Decl->TypeForDecl) { |
4413 | 0 | assert(isa<InjectedClassNameType>(Decl->TypeForDecl)); |
4414 | 658k | } else if (CXXRecordDecl *PrevDecl = Decl->getPreviousDecl()) { |
4415 | 115k | assert(PrevDecl->TypeForDecl && "previous declaration has no type"); |
4416 | 115k | Decl->TypeForDecl = PrevDecl->TypeForDecl; |
4417 | 115k | assert(isa<InjectedClassNameType>(Decl->TypeForDecl)); |
4418 | 542k | } else { |
4419 | 542k | Type *newType = |
4420 | 542k | new (*this, TypeAlignment) InjectedClassNameType(Decl, TST); |
4421 | 542k | Decl->TypeForDecl = newType; |
4422 | 542k | Types.push_back(newType); |
4423 | 542k | } |
4424 | 658k | return QualType(Decl->TypeForDecl, 0); |
4425 | 658k | } |
4426 | | |
4427 | | /// getTypeDeclType - Return the unique reference to the type for the |
4428 | | /// specified type declaration. |
4429 | 4.48M | QualType ASTContext::getTypeDeclTypeSlow(const TypeDecl *Decl) const { |
4430 | 4.48M | assert(Decl && "Passed null for Decl param"); |
4431 | 4.48M | assert(!Decl->TypeForDecl && "TypeForDecl present in slow case"); |
4432 | | |
4433 | 4.48M | if (const auto *Typedef = dyn_cast<TypedefNameDecl>(Decl)) |
4434 | 1.31M | return getTypedefType(Typedef); |
4435 | | |
4436 | 3.16M | assert(!isa<TemplateTypeParmDecl>(Decl) && |
4437 | 3.16M | "Template type parameter types are always available."); |
4438 | | |
4439 | 3.16M | if (const auto *Record = dyn_cast<RecordDecl>(Decl)) { |
4440 | 2.55M | assert(Record->isFirstDecl() && "struct/union has previous declaration"); |
4441 | 2.55M | assert(!NeedsInjectedClassNameType(Record)); |
4442 | 2.55M | return getRecordType(Record); |
4443 | 607k | } else if (const auto *Enum = dyn_cast<EnumDecl>(Decl)) { |
4444 | 606k | assert(Enum->isFirstDecl() && "enum has previous declaration"); |
4445 | 606k | return getEnumType(Enum); |
4446 | 998 | } else if (const auto *Using = dyn_cast<UnresolvedUsingTypenameDecl>(Decl)) { |
4447 | 998 | Type *newType = new (*this, TypeAlignment) UnresolvedUsingType(Using); |
4448 | 998 | Decl->TypeForDecl = newType; |
4449 | 998 | Types.push_back(newType); |
4450 | 998 | } else |
4451 | 0 | llvm_unreachable("TypeDecl without a type?"); |
4452 | | |
4453 | 998 | return QualType(Decl->TypeForDecl, 0); |
4454 | 3.16M | } |
4455 | | |
4456 | | /// getTypedefType - Return the unique reference to the type for the |
4457 | | /// specified typedef name decl. |
4458 | | QualType ASTContext::getTypedefType(const TypedefNameDecl *Decl, |
4459 | 3.11M | QualType Underlying) const { |
4460 | 3.11M | if (Decl->TypeForDecl) return QualType(Decl->TypeForDecl, 0)202k ; |
4461 | | |
4462 | 2.90M | if (Underlying.isNull()) |
4463 | 2.70M | Underlying = Decl->getUnderlyingType(); |
4464 | 2.90M | QualType Canonical = getCanonicalType(Underlying); |
4465 | 2.90M | auto *newType = new (*this, TypeAlignment) |
4466 | 2.90M | TypedefType(Type::Typedef, Decl, Underlying, Canonical); |
4467 | 2.90M | Decl->TypeForDecl = newType; |
4468 | 2.90M | Types.push_back(newType); |
4469 | 2.90M | return QualType(newType, 0); |
4470 | 2.90M | } |
4471 | | |
4472 | 10.3M | QualType ASTContext::getRecordType(const RecordDecl *Decl) const { |
4473 | 10.3M | if (Decl->TypeForDecl) return QualType(Decl->TypeForDecl, 0)6.76M ; |
4474 | | |
4475 | 3.57M | if (const RecordDecl *PrevDecl = Decl->getPreviousDecl()) |
4476 | 739 | if (PrevDecl->TypeForDecl) |
4477 | 739 | return QualType(Decl->TypeForDecl = PrevDecl->TypeForDecl, 0); |
4478 | | |
4479 | 3.56M | auto *newType = new (*this, TypeAlignment) RecordType(Decl); |
4480 | 3.56M | Decl->TypeForDecl = newType; |
4481 | 3.56M | Types.push_back(newType); |
4482 | 3.56M | return QualType(newType, 0); |
4483 | 3.56M | } |
4484 | | |
4485 | 1.71M | QualType ASTContext::getEnumType(const EnumDecl *Decl) const { |
4486 | 1.71M | if (Decl->TypeForDecl) return QualType(Decl->TypeForDecl, 0)1.10M ; |
4487 | | |
4488 | 610k | if (const EnumDecl *PrevDecl = Decl->getPreviousDecl()) |
4489 | 52 | if (PrevDecl->TypeForDecl) |
4490 | 52 | return QualType(Decl->TypeForDecl = PrevDecl->TypeForDecl, 0); |
4491 | | |
4492 | 610k | auto *newType = new (*this, TypeAlignment) EnumType(Decl); |
4493 | 610k | Decl->TypeForDecl = newType; |
4494 | 610k | Types.push_back(newType); |
4495 | 610k | return QualType(newType, 0); |
4496 | 610k | } |
4497 | | |
4498 | | QualType ASTContext::getAttributedType(attr::Kind attrKind, |
4499 | | QualType modifiedType, |
4500 | 4.29M | QualType equivalentType) { |
4501 | 4.29M | llvm::FoldingSetNodeID id; |
4502 | 4.29M | AttributedType::Profile(id, attrKind, modifiedType, equivalentType); |
4503 | | |
4504 | 4.29M | void *insertPos = nullptr; |
4505 | 4.29M | AttributedType *type = AttributedTypes.FindNodeOrInsertPos(id, insertPos); |
4506 | 4.29M | if (type) return QualType(type, 0)3.78M ; |
4507 | | |
4508 | 509k | QualType canon = getCanonicalType(equivalentType); |
4509 | 509k | type = new (*this, TypeAlignment) |
4510 | 509k | AttributedType(canon, attrKind, modifiedType, equivalentType); |
4511 | | |
4512 | 509k | Types.push_back(type); |
4513 | 509k | AttributedTypes.InsertNode(type, insertPos); |
4514 | | |
4515 | 509k | return QualType(type, 0); |
4516 | 509k | } |
4517 | | |
4518 | | /// Retrieve a substitution-result type. |
4519 | | QualType |
4520 | | ASTContext::getSubstTemplateTypeParmType(const TemplateTypeParmType *Parm, |
4521 | 4.53M | QualType Replacement) const { |
4522 | 4.53M | assert(Replacement.isCanonical() |
4523 | 4.53M | && "replacement types must always be canonical"); |
4524 | | |
4525 | 4.53M | llvm::FoldingSetNodeID ID; |
4526 | 4.53M | SubstTemplateTypeParmType::Profile(ID, Parm, Replacement); |
4527 | 4.53M | void *InsertPos = nullptr; |
4528 | 4.53M | SubstTemplateTypeParmType *SubstParm |
4529 | 4.53M | = SubstTemplateTypeParmTypes.FindNodeOrInsertPos(ID, InsertPos); |
4530 | | |
4531 | 4.53M | if (!SubstParm) { |
4532 | 1.63M | SubstParm = new (*this, TypeAlignment) |
4533 | 1.63M | SubstTemplateTypeParmType(Parm, Replacement); |
4534 | 1.63M | Types.push_back(SubstParm); |
4535 | 1.63M | SubstTemplateTypeParmTypes.InsertNode(SubstParm, InsertPos); |
4536 | 1.63M | } |
4537 | | |
4538 | 4.53M | return QualType(SubstParm, 0); |
4539 | 4.53M | } |
4540 | | |
4541 | | /// Retrieve a |
4542 | | QualType ASTContext::getSubstTemplateTypeParmPackType( |
4543 | | const TemplateTypeParmType *Parm, |
4544 | 68.1k | const TemplateArgument &ArgPack) { |
4545 | 68.1k | #ifndef NDEBUG |
4546 | 166k | for (const auto &P : ArgPack.pack_elements()) { |
4547 | 166k | assert(P.getKind() == TemplateArgument::Type &&"Pack contains a non-type"); |
4548 | 166k | assert(P.getAsType().isCanonical() && "Pack contains non-canonical type"); |
4549 | 166k | } |
4550 | 68.1k | #endif |
4551 | | |
4552 | 68.1k | llvm::FoldingSetNodeID ID; |
4553 | 68.1k | SubstTemplateTypeParmPackType::Profile(ID, Parm, ArgPack); |
4554 | 68.1k | void *InsertPos = nullptr; |
4555 | 68.1k | if (SubstTemplateTypeParmPackType *SubstParm |
4556 | 32.3k | = SubstTemplateTypeParmPackTypes.FindNodeOrInsertPos(ID, InsertPos)) |
4557 | 32.3k | return QualType(SubstParm, 0); |
4558 | | |
4559 | 35.8k | QualType Canon; |
4560 | 35.8k | if (!Parm->isCanonicalUnqualified()) { |
4561 | 19.0k | Canon = getCanonicalType(QualType(Parm, 0)); |
4562 | 19.0k | Canon = getSubstTemplateTypeParmPackType(cast<TemplateTypeParmType>(Canon), |
4563 | 19.0k | ArgPack); |
4564 | 19.0k | SubstTemplateTypeParmPackTypes.FindNodeOrInsertPos(ID, InsertPos); |
4565 | 19.0k | } |
4566 | | |
4567 | 35.8k | auto *SubstParm |
4568 | 35.8k | = new (*this, TypeAlignment) SubstTemplateTypeParmPackType(Parm, Canon, |
4569 | 35.8k | ArgPack); |
4570 | 35.8k | Types.push_back(SubstParm); |
4571 | 35.8k | SubstTemplateTypeParmPackTypes.InsertNode(SubstParm, InsertPos); |
4572 | 35.8k | return QualType(SubstParm, 0); |
4573 | 35.8k | } |
4574 | | |
4575 | | /// Retrieve the template type parameter type for a template |
4576 | | /// parameter or parameter pack with the given depth, index, and (optionally) |
4577 | | /// name. |
4578 | | QualType ASTContext::getTemplateTypeParmType(unsigned Depth, unsigned Index, |
4579 | | bool ParameterPack, |
4580 | 8.85M | TemplateTypeParmDecl *TTPDecl) const { |
4581 | 8.85M | llvm::FoldingSetNodeID ID; |
4582 | 8.85M | TemplateTypeParmType::Profile(ID, Depth, Index, ParameterPack, TTPDecl); |
4583 | 8.85M | void *InsertPos = nullptr; |
4584 | 8.85M | TemplateTypeParmType *TypeParm |
4585 | 8.85M | = TemplateTypeParmTypes.FindNodeOrInsertPos(ID, InsertPos); |
4586 | | |
4587 | 8.85M | if (TypeParm) |
4588 | 5.09M | return QualType(TypeParm, 0); |
4589 | | |
4590 | 3.75M | if (TTPDecl) { |
4591 | 3.70M | QualType Canon = getTemplateTypeParmType(Depth, Index, ParameterPack); |
4592 | 3.70M | TypeParm = new (*this, TypeAlignment) TemplateTypeParmType(TTPDecl, Canon); |
4593 | | |
4594 | 3.70M | TemplateTypeParmType *TypeCheck |
4595 | 3.70M | = TemplateTypeParmTypes.FindNodeOrInsertPos(ID, InsertPos); |
4596 | 3.70M | assert(!TypeCheck && "Template type parameter canonical type broken"); |
4597 | 3.70M | (void)TypeCheck; |
4598 | 3.70M | } else |
4599 | 52.4k | TypeParm = new (*this, TypeAlignment) |
4600 | 52.4k | TemplateTypeParmType(Depth, Index, ParameterPack); |
4601 | | |
4602 | 3.75M | Types.push_back(TypeParm); |
4603 | 3.75M | TemplateTypeParmTypes.InsertNode(TypeParm, InsertPos); |
4604 | | |
4605 | 3.75M | return QualType(TypeParm, 0); |
4606 | 3.75M | } |
4607 | | |
4608 | | TypeSourceInfo * |
4609 | | ASTContext::getTemplateSpecializationTypeInfo(TemplateName Name, |
4610 | | SourceLocation NameLoc, |
4611 | | const TemplateArgumentListInfo &Args, |
4612 | 209k | QualType Underlying) const { |
4613 | 209k | assert(!Name.getAsDependentTemplateName() && |
4614 | 209k | "No dependent template names here!"); |
4615 | 209k | QualType TST = getTemplateSpecializationType(Name, Args, Underlying); |
4616 | | |
4617 | 209k | TypeSourceInfo *DI = CreateTypeSourceInfo(TST); |
4618 | 209k | TemplateSpecializationTypeLoc TL = |
4619 | 209k | DI->getTypeLoc().castAs<TemplateSpecializationTypeLoc>(); |
4620 | 209k | TL.setTemplateKeywordLoc(SourceLocation()); |
4621 | 209k | TL.setTemplateNameLoc(NameLoc); |
4622 | 209k | TL.setLAngleLoc(Args.getLAngleLoc()); |
4623 | 209k | TL.setRAngleLoc(Args.getRAngleLoc()); |
4624 | 628k | for (unsigned i = 0, e = TL.getNumArgs(); i != e; ++i419k ) |
4625 | 419k | TL.setArgLocInfo(i, Args[i].getLocInfo()); |
4626 | 209k | return DI; |
4627 | 209k | } |
4628 | | |
4629 | | QualType |
4630 | | ASTContext::getTemplateSpecializationType(TemplateName Template, |
4631 | | const TemplateArgumentListInfo &Args, |
4632 | 6.12M | QualType Underlying) const { |
4633 | 6.12M | assert(!Template.getAsDependentTemplateName() && |
4634 | 6.12M | "No dependent template names here!"); |
4635 | | |
4636 | 6.12M | SmallVector<TemplateArgument, 4> ArgVec; |
4637 | 6.12M | ArgVec.reserve(Args.size()); |
4638 | 6.12M | for (const TemplateArgumentLoc &Arg : Args.arguments()) |
4639 | 9.58M | ArgVec.push_back(Arg.getArgument()); |
4640 | | |
4641 | 6.12M | return getTemplateSpecializationType(Template, ArgVec, Underlying); |
4642 | 6.12M | } |
4643 | | |
4644 | | #ifndef NDEBUG |
4645 | 0 | static bool hasAnyPackExpansions(ArrayRef<TemplateArgument> Args) { |
4646 | 0 | for (const TemplateArgument &Arg : Args) |
4647 | 0 | if (Arg.isPackExpansion()) |
4648 | 0 | return true; |
4649 | |
|
4650 | 0 | return true; |
4651 | 0 | } |
4652 | | #endif |
4653 | | |
4654 | | QualType |
4655 | | ASTContext::getTemplateSpecializationType(TemplateName Template, |
4656 | | ArrayRef<TemplateArgument> Args, |
4657 | 8.46M | QualType Underlying) const { |
4658 | 8.46M | assert(!Template.getAsDependentTemplateName() && |
4659 | 8.46M | "No dependent template names here!"); |
4660 | | // Look through qualified template names. |
4661 | 8.46M | if (QualifiedTemplateName *QTN = Template.getAsQualifiedTemplateName()) |
4662 | 84.9k | Template = TemplateName(QTN->getTemplateDecl()); |
4663 | | |
4664 | 8.46M | bool IsTypeAlias = |
4665 | 8.46M | Template.getAsTemplateDecl() && |
4666 | 8.46M | isa<TypeAliasTemplateDecl>(Template.getAsTemplateDecl()); |
4667 | 8.46M | QualType CanonType; |
4668 | 8.46M | if (!Underlying.isNull()) |
4669 | 7.83M | CanonType = getCanonicalType(Underlying); |
4670 | 628k | else { |
4671 | | // We can get here with an alias template when the specialization contains |
4672 | | // a pack expansion that does not match up with a parameter pack. |
4673 | 628k | assert((!IsTypeAlias || hasAnyPackExpansions(Args)) && |
4674 | 628k | "Caller must compute aliased type"); |
4675 | 628k | IsTypeAlias = false; |
4676 | 628k | CanonType = getCanonicalTemplateSpecializationType(Template, Args); |
4677 | 628k | } |
4678 | | |
4679 | | // Allocate the (non-canonical) template specialization type, but don't |
4680 | | // try to unique it: these types typically have location information that |
4681 | | // we don't unique and don't want to lose. |
4682 | 8.46M | void *Mem = Allocate(sizeof(TemplateSpecializationType) + |
4683 | 8.46M | sizeof(TemplateArgument) * Args.size() + |
4684 | 7.71M | (IsTypeAlias? sizeof(QualType)742k : 0), |
4685 | 8.46M | TypeAlignment); |
4686 | 8.46M | auto *Spec |
4687 | 8.46M | = new (Mem) TemplateSpecializationType(Template, Args, CanonType, |
4688 | 7.71M | IsTypeAlias ? Underlying742k : QualType()); |
4689 | | |
4690 | 8.46M | Types.push_back(Spec); |
4691 | 8.46M | return QualType(Spec, 0); |
4692 | 8.46M | } |
4693 | | |
4694 | | QualType ASTContext::getCanonicalTemplateSpecializationType( |
4695 | 4.08M | TemplateName Template, ArrayRef<TemplateArgument> Args) const { |
4696 | 4.08M | assert(!Template.getAsDependentTemplateName() && |
4697 | 4.08M | "No dependent template names here!"); |
4698 | | |
4699 | | // Look through qualified template names. |
4700 | 4.08M | if (QualifiedTemplateName *QTN = Template.getAsQualifiedTemplateName()) |
4701 | 38.4k | Template = TemplateName(QTN->getTemplateDecl()); |
4702 | | |
4703 | | // Build the canonical template specialization type. |
4704 | 4.08M | TemplateName CanonTemplate = getCanonicalTemplateName(Template); |
4705 | 4.08M | SmallVector<TemplateArgument, 4> CanonArgs; |
4706 | 4.08M | unsigned NumArgs = Args.size(); |
4707 | 4.08M | CanonArgs.reserve(NumArgs); |
4708 | 4.08M | for (const TemplateArgument &Arg : Args) |
4709 | 7.10M | CanonArgs.push_back(getCanonicalTemplateArgument(Arg)); |
4710 | | |
4711 | | // Determine whether this canonical template specialization type already |
4712 | | // exists. |
4713 | 4.08M | llvm::FoldingSetNodeID ID; |
4714 | 4.08M | TemplateSpecializationType::Profile(ID, CanonTemplate, |
4715 | 4.08M | CanonArgs, *this); |
4716 | | |
4717 | 4.08M | void *InsertPos = nullptr; |
4718 | 4.08M | TemplateSpecializationType *Spec |
4719 | 4.08M | = TemplateSpecializationTypes.FindNodeOrInsertPos(ID, InsertPos); |
4720 | | |
4721 | 4.08M | if (!Spec) { |
4722 | | // Allocate a new canonical template specialization type. |
4723 | 1.59M | void *Mem = Allocate((sizeof(TemplateSpecializationType) + |
4724 | 1.59M | sizeof(TemplateArgument) * NumArgs), |
4725 | 1.59M | TypeAlignment); |
4726 | 1.59M | Spec = new (Mem) TemplateSpecializationType(CanonTemplate, |
4727 | 1.59M | CanonArgs, |
4728 | 1.59M | QualType(), QualType()); |
4729 | 1.59M | Types.push_back(Spec); |
4730 | 1.59M | TemplateSpecializationTypes.InsertNode(Spec, InsertPos); |
4731 | 1.59M | } |
4732 | | |
4733 | 4.08M | assert(Spec->isDependentType() && |
4734 | 4.08M | "Non-dependent template-id type must have a canonical type"); |
4735 | 4.08M | return QualType(Spec, 0); |
4736 | 4.08M | } |
4737 | | |
4738 | | QualType ASTContext::getElaboratedType(ElaboratedTypeKeyword Keyword, |
4739 | | NestedNameSpecifier *NNS, |
4740 | | QualType NamedType, |
4741 | 2.37M | TagDecl *OwnedTagDecl) const { |
4742 | 2.37M | llvm::FoldingSetNodeID ID; |
4743 | 2.37M | ElaboratedType::Profile(ID, Keyword, NNS, NamedType, OwnedTagDecl); |
4744 | | |
4745 | 2.37M | void *InsertPos = nullptr; |
4746 | 2.37M | ElaboratedType *T = ElaboratedTypes.FindNodeOrInsertPos(ID, InsertPos); |
4747 | 2.37M | if (T) |
4748 | 575k | return QualType(T, 0); |
4749 | | |
4750 | 1.79M | QualType Canon = NamedType; |
4751 | 1.79M | if (!Canon.isCanonical()) { |
4752 | 938k | Canon = getCanonicalType(NamedType); |
4753 | 938k | ElaboratedType *CheckT = ElaboratedTypes.FindNodeOrInsertPos(ID, InsertPos); |
4754 | 938k | assert(!CheckT && "Elaborated canonical type broken"); |
4755 | 938k | (void)CheckT; |
4756 | 938k | } |
4757 | | |
4758 | 1.79M | void *Mem = Allocate(ElaboratedType::totalSizeToAlloc<TagDecl *>(!!OwnedTagDecl), |
4759 | 1.79M | TypeAlignment); |
4760 | 1.79M | T = new (Mem) ElaboratedType(Keyword, NNS, NamedType, Canon, OwnedTagDecl); |
4761 | | |
4762 | 1.79M | Types.push_back(T); |
4763 | 1.79M | ElaboratedTypes.InsertNode(T, InsertPos); |
4764 | 1.79M | return QualType(T, 0); |
4765 | 1.79M | } |
4766 | | |
4767 | | QualType |
4768 | 376k | ASTContext::getParenType(QualType InnerType) const { |
4769 | 376k | llvm::FoldingSetNodeID ID; |
4770 | 376k | ParenType::Profile(ID, InnerType); |
4771 | | |
4772 | 376k | void *InsertPos = nullptr; |
4773 | 376k | ParenType *T = ParenTypes.FindNodeOrInsertPos(ID, InsertPos); |
4774 | 376k | if (T) |
4775 | 134k | return QualType(T, 0); |
4776 | | |
4777 | 242k | QualType Canon = InnerType; |
4778 | 242k | if (!Canon.isCanonical()) { |
4779 | 216k | Canon = getCanonicalType(InnerType); |
4780 | 216k | ParenType *CheckT = ParenTypes.FindNodeOrInsertPos(ID, InsertPos); |
4781 | 216k | assert(!CheckT && "Paren canonical type broken"); |
4782 | 216k | (void)CheckT; |
4783 | 216k | } |
4784 | | |
4785 | 242k | T = new (*this, TypeAlignment) ParenType(InnerType, Canon); |
4786 | 242k | Types.push_back(T); |
4787 | 242k | ParenTypes.InsertNode(T, InsertPos); |
4788 | 242k | return QualType(T, 0); |
4789 | 242k | } |
4790 | | |
4791 | | QualType |
4792 | | ASTContext::getMacroQualifiedType(QualType UnderlyingTy, |
4793 | 317k | const IdentifierInfo *MacroII) const { |
4794 | 317k | QualType Canon = UnderlyingTy; |
4795 | 317k | if (!Canon.isCanonical()) |
4796 | 317k | Canon = getCanonicalType(UnderlyingTy); |
4797 | | |
4798 | 317k | auto *newType = new (*this, TypeAlignment) |
4799 | 317k | MacroQualifiedType(UnderlyingTy, Canon, MacroII); |
4800 | 317k | Types.push_back(newType); |
4801 | 317k | return QualType(newType, 0); |
4802 | 317k | } |
4803 | | |
4804 | | QualType ASTContext::getDependentNameType(ElaboratedTypeKeyword Keyword, |
4805 | | NestedNameSpecifier *NNS, |
4806 | | const IdentifierInfo *Name, |
4807 | 2.26M | QualType Canon) const { |
4808 | 2.26M | if (Canon.isNull()) { |
4809 | 2.00M | NestedNameSpecifier *CanonNNS = getCanonicalNestedNameSpecifier(NNS); |
4810 | 2.00M | if (CanonNNS != NNS) |
4811 | 951k | Canon = getDependentNameType(Keyword, CanonNNS, Name); |
4812 | 2.00M | } |
4813 | | |
4814 | 2.26M | llvm::FoldingSetNodeID ID; |
4815 | 2.26M | DependentNameType::Profile(ID, Keyword, NNS, Name); |
4816 | | |
4817 | 2.26M | void *InsertPos = nullptr; |
4818 | 2.26M | DependentNameType *T |
4819 | 2.26M | = DependentNameTypes.FindNodeOrInsertPos(ID, InsertPos); |
4820 | 2.26M | if (T) |
4821 | 687k | return QualType(T, 0); |
4822 | | |
4823 | 1.57M | T = new (*this, TypeAlignment) DependentNameType(Keyword, NNS, Name, Canon); |
4824 | 1.57M | Types.push_back(T); |
4825 | 1.57M | DependentNameTypes.InsertNode(T, InsertPos); |
4826 | 1.57M | return QualType(T, 0); |
4827 | 1.57M | } |
4828 | | |
4829 | | QualType |
4830 | | ASTContext::getDependentTemplateSpecializationType( |
4831 | | ElaboratedTypeKeyword Keyword, |
4832 | | NestedNameSpecifier *NNS, |
4833 | | const IdentifierInfo *Name, |
4834 | 110k | const TemplateArgumentListInfo &Args) const { |
4835 | | // TODO: avoid this copy |
4836 | 110k | SmallVector<TemplateArgument, 16> ArgCopy; |
4837 | 233k | for (unsigned I = 0, E = Args.size(); I != E; ++I122k ) |
4838 | 122k | ArgCopy.push_back(Args[I].getArgument()); |
4839 | 110k | return getDependentTemplateSpecializationType(Keyword, NNS, Name, ArgCopy); |
4840 | 110k | } |
4841 | | |
4842 | | QualType |
4843 | | ASTContext::getDependentTemplateSpecializationType( |
4844 | | ElaboratedTypeKeyword Keyword, |
4845 | | NestedNameSpecifier *NNS, |
4846 | | const IdentifierInfo *Name, |
4847 | 280k | ArrayRef<TemplateArgument> Args) const { |
4848 | 280k | assert((!NNS || NNS->isDependent()) && |
4849 | 280k | "nested-name-specifier must be dependent"); |
4850 | | |
4851 | 280k | llvm::FoldingSetNodeID ID; |
4852 | 280k | DependentTemplateSpecializationType::Profile(ID, *this, Keyword, NNS, |
4853 | 280k | Name, Args); |
4854 | | |
4855 | 280k | void *InsertPos = nullptr; |
4856 | 280k | DependentTemplateSpecializationType *T |
4857 | 280k | = DependentTemplateSpecializationTypes.FindNodeOrInsertPos(ID, InsertPos); |
4858 | 280k | if (T) |
4859 | 59.5k | return QualType(T, 0); |
4860 | | |
4861 | 221k | NestedNameSpecifier *CanonNNS = getCanonicalNestedNameSpecifier(NNS); |
4862 | | |
4863 | 221k | ElaboratedTypeKeyword CanonKeyword = Keyword; |
4864 | 221k | if (Keyword == ETK_None) CanonKeyword = ETK_Typename2.62k ; |
4865 | | |
4866 | 221k | bool AnyNonCanonArgs = false; |
4867 | 221k | unsigned NumArgs = Args.size(); |
4868 | 221k | SmallVector<TemplateArgument, 16> CanonArgs(NumArgs); |
4869 | 468k | for (unsigned I = 0; I != NumArgs; ++I247k ) { |
4870 | 247k | CanonArgs[I] = getCanonicalTemplateArgument(Args[I]); |
4871 | 247k | if (!CanonArgs[I].structurallyEquals(Args[I])) |
4872 | 145k | AnyNonCanonArgs = true; |
4873 | 247k | } |
4874 | | |
4875 | 221k | QualType Canon; |
4876 | 221k | if (AnyNonCanonArgs || CanonNNS != NNS89.5k || CanonKeyword != Keyword81.7k ) { |
4877 | 139k | Canon = getDependentTemplateSpecializationType(CanonKeyword, CanonNNS, |
4878 | 139k | Name, |
4879 | 139k | CanonArgs); |
4880 | | |
4881 | | // Find the insert position again. |
4882 | 139k | DependentTemplateSpecializationTypes.FindNodeOrInsertPos(ID, InsertPos); |
4883 | 139k | } |
4884 | | |
4885 | 221k | void *Mem = Allocate((sizeof(DependentTemplateSpecializationType) + |
4886 | 221k | sizeof(TemplateArgument) * NumArgs), |
4887 | 221k | TypeAlignment); |
4888 | 221k | T = new (Mem) DependentTemplateSpecializationType(Keyword, NNS, |
4889 | 221k | Name, Args, Canon); |
4890 | 221k | Types.push_back(T); |
4891 | 221k | DependentTemplateSpecializationTypes.InsertNode(T, InsertPos); |
4892 | 221k | return QualType(T, 0); |
4893 | 221k | } |
4894 | | |
4895 | 672k | TemplateArgument ASTContext::getInjectedTemplateArg(NamedDecl *Param) { |
4896 | 672k | TemplateArgument Arg; |
4897 | 672k | if (const auto *TTP = dyn_cast<TemplateTypeParmDecl>(Param)) { |
4898 | 543k | QualType ArgType = getTypeDeclType(TTP); |
4899 | 543k | if (TTP->isParameterPack()) |
4900 | 17.9k | ArgType = getPackExpansionType(ArgType, None); |
4901 | | |
4902 | 543k | Arg = TemplateArgument(ArgType); |
4903 | 128k | } else if (auto *NTTP = dyn_cast<NonTypeTemplateParmDecl>(Param)) { |
4904 | 127k | QualType T = |
4905 | 127k | NTTP->getType().getNonPackExpansionType().getNonLValueExprType(*this); |
4906 | | // For class NTTPs, ensure we include the 'const' so the type matches that |
4907 | | // of a real template argument. |
4908 | | // FIXME: It would be more faithful to model this as something like an |
4909 | | // lvalue-to-rvalue conversion applied to a const-qualified lvalue. |
4910 | 127k | if (T->isRecordType()) |
4911 | 46 | T.addConst(); |
4912 | 127k | Expr *E = new (*this) DeclRefExpr( |
4913 | 127k | *this, NTTP, /*enclosing*/ false, T, |
4914 | 127k | Expr::getValueKindForType(NTTP->getType()), NTTP->getLocation()); |
4915 | | |
4916 | 127k | if (NTTP->isParameterPack()) |
4917 | 3.13k | E = new (*this) PackExpansionExpr(DependentTy, E, NTTP->getLocation(), |
4918 | 3.13k | None); |
4919 | 127k | Arg = TemplateArgument(E); |
4920 | 1.23k | } else { |
4921 | 1.23k | auto *TTP = cast<TemplateTemplateParmDecl>(Param); |
4922 | 1.23k | if (TTP->isParameterPack()) |
4923 | 74 | Arg = TemplateArgument(TemplateName(TTP), Optional<unsigned>()); |
4924 | 1.16k | else |
4925 | 1.16k | Arg = TemplateArgument(TemplateName(TTP)); |
4926 | 1.23k | } |
4927 | | |
4928 | 672k | if (Param->isTemplateParameterPack()) |
4929 | 21.2k | Arg = TemplateArgument::CreatePackCopy(*this, Arg); |
4930 | | |
4931 | 672k | return Arg; |
4932 | 672k | } |
4933 | | |
4934 | | void |
4935 | | ASTContext::getInjectedTemplateArgs(const TemplateParameterList *Params, |
4936 | 384k | SmallVectorImpl<TemplateArgument> &Args) { |
4937 | 384k | Args.reserve(Args.size() + Params->size()); |
4938 | | |
4939 | 384k | for (NamedDecl *Param : *Params) |
4940 | 671k | Args.push_back(getInjectedTemplateArg(Param)); |
4941 | 384k | } |
4942 | | |
4943 | | QualType ASTContext::getPackExpansionType(QualType Pattern, |
4944 | | Optional<unsigned> NumExpansions, |
4945 | 582k | bool ExpectPackInType) { |
4946 | 582k | assert((!ExpectPackInType || Pattern->containsUnexpandedParameterPack()) && |
4947 | 582k | "Pack expansions must expand one or more parameter packs"); |
4948 | | |
4949 | 582k | llvm::FoldingSetNodeID ID; |
4950 | 582k | PackExpansionType::Profile(ID, Pattern, NumExpansions); |
4951 | | |
4952 | 582k | void *InsertPos = nullptr; |
4953 | 582k | PackExpansionType *T = PackExpansionTypes.FindNodeOrInsertPos(ID, InsertPos); |
4954 | 582k | if (T) |
4955 | 340k | return QualType(T, 0); |
4956 | | |
4957 | 242k | QualType Canon; |
4958 | 242k | if (!Pattern.isCanonical()) { |
4959 | 218k | Canon = getPackExpansionType(getCanonicalType(Pattern), NumExpansions, |
4960 | 218k | /*ExpectPackInType=*/false); |
4961 | | |
4962 | | // Find the insert position again, in case we inserted an element into |
4963 | | // PackExpansionTypes and invalidated our insert position. |
4964 | 218k | PackExpansionTypes.FindNodeOrInsertPos(ID, InsertPos); |
4965 | 218k | } |
4966 | | |
4967 | 242k | T = new (*this, TypeAlignment) |
4968 | 242k | PackExpansionType(Pattern, Canon, NumExpansions); |
4969 | 242k | Types.push_back(T); |
4970 | 242k | PackExpansionTypes.InsertNode(T, InsertPos); |
4971 | 242k | return QualType(T, 0); |
4972 | 242k | } |
4973 | | |
4974 | | /// CmpProtocolNames - Comparison predicate for sorting protocols |
4975 | | /// alphabetically. |
4976 | | static int CmpProtocolNames(ObjCProtocolDecl *const *LHS, |
4977 | 2.15k | ObjCProtocolDecl *const *RHS) { |
4978 | 2.15k | return DeclarationName::compare((*LHS)->getDeclName(), (*RHS)->getDeclName()); |
4979 | 2.15k | } |
4980 | | |
4981 | 121k | static bool areSortedAndUniqued(ArrayRef<ObjCProtocolDecl *> Protocols) { |
4982 | 121k | if (Protocols.empty()) return true104k ; |
4983 | | |
4984 | 17.5k | if (Protocols[0]->getCanonicalDecl() != Protocols[0]) |
4985 | 101 | return false; |
4986 | | |
4987 | 18.2k | for (unsigned i = 1; 17.4k i != Protocols.size(); ++i853 ) |
4988 | 1.19k | if (CmpProtocolNames(&Protocols[i - 1], &Protocols[i]) >= 0 || |
4989 | 855 | Protocols[i]->getCanonicalDecl() != Protocols[i]) |
4990 | 340 | return false; |
4991 | 17.0k | return true; |
4992 | 17.4k | } |
4993 | | |
4994 | | static void |
4995 | 441 | SortAndUniqueProtocols(SmallVectorImpl<ObjCProtocolDecl *> &Protocols) { |
4996 | | // Sort protocols, keyed by name. |
4997 | 441 | llvm::array_pod_sort(Protocols.begin(), Protocols.end(), CmpProtocolNames); |
4998 | | |
4999 | | // Canonicalize. |
5000 | 441 | for (ObjCProtocolDecl *&P : Protocols) |
5001 | 1.09k | P = P->getCanonicalDecl(); |
5002 | | |
5003 | | // Remove duplicates. |
5004 | 441 | auto ProtocolsEnd = std::unique(Protocols.begin(), Protocols.end()); |
5005 | 441 | Protocols.erase(ProtocolsEnd, Protocols.end()); |
5006 | 441 | } |
5007 | | |
5008 | | QualType ASTContext::getObjCObjectType(QualType BaseType, |
5009 | | ObjCProtocolDecl * const *Protocols, |
5010 | 50.4k | unsigned NumProtocols) const { |
5011 | 50.4k | return getObjCObjectType(BaseType, {}, |
5012 | 50.4k | llvm::makeArrayRef(Protocols, NumProtocols), |
5013 | 50.4k | /*isKindOf=*/false); |
5014 | 50.4k | } |
5015 | | |
5016 | | QualType ASTContext::getObjCObjectType( |
5017 | | QualType baseType, |
5018 | | ArrayRef<QualType> typeArgs, |
5019 | | ArrayRef<ObjCProtocolDecl *> protocols, |
5020 | 286k | bool isKindOf) const { |
5021 | | // If the base type is an interface and there aren't any protocols or |
5022 | | // type arguments to add, then the interface type will do just fine. |
5023 | 286k | if (typeArgs.empty() && protocols.empty()84.6k && !isKindOf54.6k && |
5024 | 53.2k | isa<ObjCInterfaceType>(baseType)) |
5025 | 399 | return baseType; |
5026 | | |
5027 | | // Look in the folding set for an existing type. |
5028 | 286k | llvm::FoldingSetNodeID ID; |
5029 | 286k | ObjCObjectTypeImpl::Profile(ID, baseType, typeArgs, protocols, isKindOf); |
5030 | 286k | void *InsertPos = nullptr; |
5031 | 286k | if (ObjCObjectType *QT = ObjCObjectTypes.FindNodeOrInsertPos(ID, InsertPos)) |
5032 | 164k | return QualType(QT, 0); |
5033 | | |
5034 | | // Determine the type arguments to be used for canonicalization, |
5035 | | // which may be explicitly specified here or written on the base |
5036 | | // type. |
5037 | 121k | ArrayRef<QualType> effectiveTypeArgs = typeArgs; |
5038 | 121k | if (effectiveTypeArgs.empty()) { |
5039 | 69.2k | if (const auto *baseObject = baseType->getAs<ObjCObjectType>()) |
5040 | 7.46k | effectiveTypeArgs = baseObject->getTypeArgs(); |
5041 | 69.2k | } |
5042 | | |
5043 | | // Build the canonical type, which has the canonical base type and a |
5044 | | // sorted-and-uniqued list of protocols and the type arguments |
5045 | | // canonicalized. |
5046 | 121k | QualType canonical; |
5047 | 121k | bool typeArgsAreCanonical = std::all_of(effectiveTypeArgs.begin(), |
5048 | 121k | effectiveTypeArgs.end(), |
5049 | 57.8k | [&](QualType type) { |
5050 | 57.8k | return type.isCanonical(); |
5051 | 57.8k | }); |
5052 | 121k | bool protocolsSorted = areSortedAndUniqued(protocols); |
5053 | 121k | if (!typeArgsAreCanonical || !protocolsSorted90.7k || !baseType.isCanonical()90.2k ) { |
5054 | | // Determine the canonical type arguments. |
5055 | 31.4k | ArrayRef<QualType> canonTypeArgs; |
5056 | 31.4k | SmallVector<QualType, 4> canonTypeArgsVec; |
5057 | 31.4k | if (!typeArgsAreCanonical) { |
5058 | 30.9k | canonTypeArgsVec.reserve(effectiveTypeArgs.size()); |
5059 | 30.9k | for (auto typeArg : effectiveTypeArgs) |
5060 | 37.5k | canonTypeArgsVec.push_back(getCanonicalType(typeArg)); |
5061 | 30.9k | canonTypeArgs = canonTypeArgsVec; |
5062 | 499 | } else { |
5063 | 499 | canonTypeArgs = effectiveTypeArgs; |
5064 | 499 | } |
5065 | | |
5066 | 31.4k | ArrayRef<ObjCProtocolDecl *> canonProtocols; |
5067 | 31.4k | SmallVector<ObjCProtocolDecl*, 8> canonProtocolsVec; |
5068 | 31.4k | if (!protocolsSorted) { |
5069 | 441 | canonProtocolsVec.append(protocols.begin(), protocols.end()); |
5070 | 441 | SortAndUniqueProtocols(canonProtocolsVec); |
5071 | 441 | canonProtocols = canonProtocolsVec; |
5072 | 30.9k | } else { |
5073 | 30.9k | canonProtocols = protocols; |
5074 | 30.9k | } |
5075 | | |
5076 | 31.4k | canonical = getObjCObjectType(getCanonicalType(baseType), canonTypeArgs, |
5077 | 31.4k | canonProtocols, isKindOf); |
5078 | | |
5079 | | // Regenerate InsertPos. |
5080 | 31.4k | ObjCObjectTypes.FindNodeOrInsertPos(ID, InsertPos); |
5081 | 31.4k | } |
5082 | | |
5083 | 121k | unsigned size = sizeof(ObjCObjectTypeImpl); |
5084 | 121k | size += typeArgs.size() * sizeof(QualType); |
5085 | 121k | size += protocols.size() * sizeof(ObjCProtocolDecl *); |
5086 | 121k | void *mem = Allocate(size, TypeAlignment); |
5087 | 121k | auto *T = |
5088 | 121k | new (mem) ObjCObjectTypeImpl(canonical, baseType, typeArgs, protocols, |
5089 | 121k | isKindOf); |
5090 | | |
5091 | 121k | Types.push_back(T); |
5092 | 121k | ObjCObjectTypes.InsertNode(T, InsertPos); |
5093 | 121k | return QualType(T, 0); |
5094 | 121k | } |
5095 | | |
5096 | | /// Apply Objective-C protocol qualifiers to the given type. |
5097 | | /// If this is for the canonical type of a type parameter, we can apply |
5098 | | /// protocol qualifiers on the ObjCObjectPointerType. |
5099 | | QualType |
5100 | | ASTContext::applyObjCProtocolQualifiers(QualType type, |
5101 | | ArrayRef<ObjCProtocolDecl *> protocols, bool &hasError, |
5102 | 30.7k | bool allowOnPointerType) const { |
5103 | 30.7k | hasError = false; |
5104 | | |
5105 | 30.7k | if (const auto *objT = dyn_cast<ObjCTypeParamType>(type.getTypePtr())) { |
5106 | 2.33k | return getObjCTypeParamType(objT->getDecl(), protocols); |
5107 | 2.33k | } |
5108 | | |
5109 | | // Apply protocol qualifiers to ObjCObjectPointerType. |
5110 | 28.3k | if (allowOnPointerType) { |
5111 | 2.40k | if (const auto *objPtr = |
5112 | 2.40k | dyn_cast<ObjCObjectPointerType>(type.getTypePtr())) { |
5113 | 2.40k | const ObjCObjectType *objT = objPtr->getObjectType(); |
5114 | | // Merge protocol lists and construct ObjCObjectType. |
5115 | 2.40k | SmallVector<ObjCProtocolDecl*, 8> protocolsVec; |
5116 | 2.40k | protocolsVec.append(objT->qual_begin(), |
5117 | 2.40k | objT->qual_end()); |
5118 | 2.40k | protocolsVec.append(protocols.begin(), protocols.end()); |
5119 | 2.40k | ArrayRef<ObjCProtocolDecl *> protocols = protocolsVec; |
5120 | 2.40k | type = getObjCObjectType( |
5121 | 2.40k | objT->getBaseType(), |
5122 | 2.40k | objT->getTypeArgsAsWritten(), |
5123 | 2.40k | protocols, |
5124 | 2.40k | objT->isKindOfTypeAsWritten()); |
5125 | 2.40k | return getObjCObjectPointerType(type); |
5126 | 2.40k | } |
5127 | 25.9k | } |
5128 | | |
5129 | | // Apply protocol qualifiers to ObjCObjectType. |
5130 | 25.9k | if (const auto *objT = dyn_cast<ObjCObjectType>(type.getTypePtr())){ |
5131 | | // FIXME: Check for protocols to which the class type is already |
5132 | | // known to conform. |
5133 | | |
5134 | 6.32k | return getObjCObjectType(objT->getBaseType(), |
5135 | 6.32k | objT->getTypeArgsAsWritten(), |
5136 | 6.32k | protocols, |
5137 | 6.32k | objT->isKindOfTypeAsWritten()); |
5138 | 6.32k | } |
5139 | | |
5140 | | // If the canonical type is ObjCObjectType, ... |
5141 | 19.6k | if (type->isObjCObjectType()) { |
5142 | | // Silently overwrite any existing protocol qualifiers. |
5143 | | // TODO: determine whether that's the right thing to do. |
5144 | | |
5145 | | // FIXME: Check for protocols to which the class type is already |
5146 | | // known to conform. |
5147 | 62 | return getObjCObjectType(type, {}, protocols, false); |
5148 | 62 | } |
5149 | | |
5150 | | // id<protocol-list> |
5151 | 19.6k | if (type->isObjCIdType()) { |
5152 | 18.6k | const auto *objPtr = type->castAs<ObjCObjectPointerType>(); |
5153 | 18.6k | type = getObjCObjectType(ObjCBuiltinIdTy, {}, protocols, |
5154 | 18.6k | objPtr->isKindOfType()); |
5155 | 18.6k | return getObjCObjectPointerType(type); |
5156 | 18.6k | } |
5157 | | |
5158 | | // Class<protocol-list> |
5159 | 971 | if (type->isObjCClassType()) { |
5160 | 970 | const auto *objPtr = type->castAs<ObjCObjectPointerType>(); |
5161 | 970 | type = getObjCObjectType(ObjCBuiltinClassTy, {}, protocols, |
5162 | 970 | objPtr->isKindOfType()); |
5163 | 970 | return getObjCObjectPointerType(type); |
5164 | 970 | } |
5165 | | |
5166 | 1 | hasError = true; |
5167 | 1 | return type; |
5168 | 1 | } |
5169 | | |
5170 | | QualType |
5171 | | ASTContext::getObjCTypeParamType(const ObjCTypeParamDecl *Decl, |
5172 | 91.7k | ArrayRef<ObjCProtocolDecl *> protocols) const { |
5173 | | // Look in the folding set for an existing type. |
5174 | 91.7k | llvm::FoldingSetNodeID ID; |
5175 | 91.7k | ObjCTypeParamType::Profile(ID, Decl, Decl->getUnderlyingType(), protocols); |
5176 | 91.7k | void *InsertPos = nullptr; |
5177 | 91.7k | if (ObjCTypeParamType *TypeParam = |
5178 | 0 | ObjCTypeParamTypes.FindNodeOrInsertPos(ID, InsertPos)) |
5179 | 0 | return QualType(TypeParam, 0); |
5180 | | |
5181 | | // We canonicalize to the underlying type. |
5182 | 91.7k | QualType Canonical = getCanonicalType(Decl->getUnderlyingType()); |
5183 | 91.7k | if (!protocols.empty()) { |
5184 | |
|