Coverage Report

Created: 2020-02-25 14:32

/Users/buildslave/jenkins/workspace/coverage/llvm-project/clang/lib/CodeGen/CGCleanup.h
Source:

//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H
#define LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H

#include "EHScopeStack.h"

#include "Address.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"

namespace llvm {
class BasicBlock;
class Value;
class ConstantInt;
class AllocaInst;
}

namespace clang {
class FunctionDecl;
namespace CodeGen {
class CodeGenModule;
class CodeGenFunction;

/// The MS C++ ABI needs a pointer to RTTI data plus some flags to describe the
/// type of a catch handler, so we use this wrapper.
struct CatchTypeInfo {
  llvm::Constant *RTTI;
  unsigned Flags;
};

/// A protected scope for zero-cost EH handling.
class EHScope {
  llvm::BasicBlock *CachedLandingPad;
  llvm::BasicBlock *CachedEHDispatchBlock;

  EHScopeStack::stable_iterator EnclosingEHScope;

  class CommonBitFields {
    friend class EHScope;
    unsigned Kind : 3;
  };
  enum { NumCommonBits = 3 };

protected:
  class CatchBitFields {
    friend class EHCatchScope;
    unsigned : NumCommonBits;

    unsigned NumHandlers : 32 - NumCommonBits;
  };

  class CleanupBitFields {
    friend class EHCleanupScope;
    unsigned : NumCommonBits;

    /// Whether this cleanup needs to be run along normal edges.
    unsigned IsNormalCleanup : 1;

    /// Whether this cleanup needs to be run along exception edges.
    unsigned IsEHCleanup : 1;

    /// Whether this cleanup is currently active.
    unsigned IsActive : 1;

    /// Whether this cleanup is a lifetime marker
    unsigned IsLifetimeMarker : 1;

    /// Whether the normal cleanup should test the activation flag.
    unsigned TestFlagInNormalCleanup : 1;

    /// Whether the EH cleanup should test the activation flag.
    unsigned TestFlagInEHCleanup : 1;

    /// The amount of extra storage needed by the Cleanup.
    /// Always a multiple of the scope-stack alignment.
    unsigned CleanupSize : 12;
  };

  class FilterBitFields {
    friend class EHFilterScope;
    unsigned : NumCommonBits;

    unsigned NumFilters : 32 - NumCommonBits;
  };

  union {
    CommonBitFields CommonBits;
    CatchBitFields CatchBits;
    CleanupBitFields CleanupBits;
    FilterBitFields FilterBits;
  };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter, PadEnd };

  EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
    : CachedLandingPad(nullptr), CachedEHDispatchBlock(nullptr),
      EnclosingEHScope(enclosingEHScope) {
    CommonBits.Kind = kind;
  }

  Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *block) {
    CachedLandingPad = block;
  }

  llvm::BasicBlock *getCachedEHDispatchBlock() const {
    return CachedEHDispatchBlock;
  }

  void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
    CachedEHDispatchBlock = block;
  }

  bool hasEHBranches() const {
    if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
      return !block->use_empty();
    return false;
  }

  EHScopeStack::stable_iterator getEnclosingEHScope() const {
    return EnclosingEHScope;
  }
};
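
The anonymous union of bit-field classes above packs every scope's state into a single 32-bit word: the low NumCommonBits bits always hold the Kind tag, and each derived view skips those bits with an unnamed bit-field before declaring its own payload. The following minimal standalone sketch shows the same layout trick; Record, PayloadBits and Count are illustrative names, and the sketch leans on the same compiler behavior EHScope itself assumes, namely that overlaid bit-field structs share one storage unit.

#include <cassert>

// Standalone sketch (illustrative names) of the union-of-bit-fields layout
// used by EHScope: one word, tag in the low bits, payload around it.
class Record {
  class CommonBits {
    friend class Record;
    unsigned Kind : 3;                     // the tag, as in CommonBitFields
  };
  enum { NumCommonBits = 3 };

  class PayloadBits {
    friend class Record;
    unsigned : NumCommonBits;              // leave the tag bits alone
    unsigned Count : 32 - NumCommonBits;   // derived-specific payload
  };

  union {
    CommonBits Common;
    PayloadBits Payload;
  };

public:
  enum Kind { A, B, C };

  Record(Kind K, unsigned Count) {
    Common.Kind = K;        // write the tag, as the EHScope constructor does
    Payload.Count = Count;  // then the payload, as a derived constructor does
  }

  Kind getKind() const { return static_cast<Kind>(Common.Kind); }
  unsigned getCount() const { return Payload.Count; }
};

int main() {
  Record R(Record::B, 12345);
  assert(R.getKind() == Record::B);   // the tag survives the payload write
  assert(R.getCount() == 12345);
  return 0;
}

This mirrors how EHCatchScope, EHCleanupScope and EHFilterScope write NumHandlers, the cleanup flags and NumFilters through CatchBits, CleanupBits and FilterBits while getKind() keeps reading the shared tag.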

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C \@finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    CatchTypeInfo Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    bool isCatchAll() const { return Type.RTTI == nullptr; }
  };

private:
  friend class EHScopeStack;

  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned numHandlers,
               EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Catch, enclosingEHScope) {
    CatchBits.NumHandlers = numHandlers;
    assert(CatchBits.NumHandlers == numHandlers && "NumHandlers overflow?");
  }

  unsigned getNumHandlers() const {
    return CatchBits.NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, CatchTypeInfo{nullptr, 0}, Block);
  }

  void setHandler(unsigned I, llvm::Constant *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = CatchTypeInfo{Type, 0};
    getHandlers()[I].Block = Block;
  }

  void setHandler(unsigned I, CatchTypeInfo Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  // Clear all handler blocks.
  // FIXME: it's better to always call clearHandlerBlocks in DTOR and have a
  // 'takeHandler' or some such function which removes ownership from the
  // EHCatchScope object if the handlers should live longer than EHCatchScope.
  void clearHandlerBlocks() {
    for (unsigned I = 0, N = getNumHandlers(); I != N; ++I)
      delete getHandler(I).Block;
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};
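
EHCatchScope keeps its Handler array in the bytes immediately following the object: callers allocate getSizeForNumHandlers(N) bytes in one shot, and the private getHandlers() accessors recover the array with reinterpret_cast<Handler*>(this + 1). Below is a standalone sketch of that trailing-storage pattern, using plain operator new where Clang would use the EHScopeStack's own allocator; CatchRecord and its members are made-up names for illustration.

#include <cassert>
#include <cstddef>
#include <new>

// Standalone sketch of the "records appended after the object" pattern used
// by EHCatchScope (and EHFilterScope).
struct CatchRecord {
  struct Handler {
    const char *TypeName;   // stands in for CatchTypeInfo
    int BlockId;            // stands in for the handler basic block
  };

  unsigned NumHandlers;

  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(CatchRecord) + N * sizeof(Handler);
  }

  // The handlers live in the bytes immediately following the object.
  Handler *getHandlers() { return reinterpret_cast<Handler *>(this + 1); }

  void setHandler(unsigned I, const char *Type, int Block) {
    assert(I < NumHandlers);
    getHandlers()[I] = Handler{Type, Block};
  }
};

int main() {
  // Allocate one block big enough for the header plus two trailing handlers.
  void *Buffer = ::operator new(CatchRecord::getSizeForNumHandlers(2));
  auto *Scope = new (Buffer) CatchRecord{2};

  Scope->setHandler(0, "int", 1);
  Scope->setHandler(1, "catch-all", 2);
  assert(Scope->getHandlers()[1].BlockId == 2);

  ::operator delete(Buffer);
  return 0;
}

The same convention is the reason EHScopeStack::iterator, further down, has to ask each scope for its size before it can step to the next one.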

/// A cleanup scope which generates the cleanup blocks lazily.
class alignas(8) EHCleanupScope : public EHScope {
  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge.  This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  llvm::AllocaInst *ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them.  This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;
  };
  mutable struct ExtInfo *ExtInfo;

  /// The number of fixups required by enclosing scopes (not including
  /// this one).  If this is the top cleanup scope, all the fixups
  /// from this index onwards belong to this scope.
  unsigned FixupDepth;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(EHCleanupScope) + Size;
  }

  size_t getAllocatedSize() const {
    return sizeof(EHCleanupScope) + CleanupBits.CleanupSize;
  }

  EHCleanupScope(bool isNormal, bool isEH, bool isActive,
                 unsigned cleanupSize, unsigned fixupDepth,
                 EHScopeStack::stable_iterator enclosingNormal,
                 EHScopeStack::stable_iterator enclosingEH)
      : EHScope(EHScope::Cleanup, enclosingEH),
        EnclosingNormal(enclosingNormal), NormalBlock(nullptr),
        ActiveFlag(nullptr), ExtInfo(nullptr), FixupDepth(fixupDepth) {
    CleanupBits.IsNormalCleanup = isNormal;
    CleanupBits.IsEHCleanup = isEH;
    CleanupBits.IsActive = isActive;
    CleanupBits.IsLifetimeMarker = false;
    CleanupBits.TestFlagInNormalCleanup = false;
    CleanupBits.TestFlagInEHCleanup = false;
    CleanupBits.CleanupSize = cleanupSize;

    assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow");
  }

  void Destroy() {
    delete ExtInfo;
  }
  // Objects of EHCleanupScope are not destructed. Use Destroy().
  ~EHCleanupScope() = delete;

  bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }

  bool isEHCleanup() const { return CleanupBits.IsEHCleanup; }

  bool isActive() const { return CleanupBits.IsActive; }
  void setActive(bool A) { CleanupBits.IsActive = A; }

  bool isLifetimeMarker() const { return CleanupBits.IsLifetimeMarker; }
  void setLifetimeMarker() { CleanupBits.IsLifetimeMarker = true; }

  bool hasActiveFlag() const { return ActiveFlag != nullptr; }
  Address getActiveFlag() const {
    return Address(ActiveFlag, CharUnits::One());
  }
  void setActiveFlag(Address Var) {
    assert(Var.getAlignment().isOne());
    ActiveFlag = cast<llvm::AllocaInst>(Var.getPointer());
  }

  void setTestFlagInNormalCleanup() {
    CleanupBits.TestFlagInNormalCleanup = true;
  }
  bool shouldTestFlagInNormalCleanup() const {
    return CleanupBits.TestFlagInNormalCleanup;
  }

  void setTestFlagInEHCleanup() {
    CleanupBits.TestFlagInEHCleanup = true;
  }
  bool shouldTestFlagInEHCleanup() const {
    return CleanupBits.TestFlagInEHCleanup;
  }

  unsigned getFixupDepth() const { return FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }

  size_t getCleanupSize() const { return CleanupBits.CleanupSize; }
  void *getCleanupBuffer() { return this + 1; }

  EHScopeStack::Cleanup *getCleanup() {
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope.  A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it.  Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block).second)
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope.  A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through.  Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block).second;
  }

  /// Determines if this cleanup scope has any branch throughs.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
};
// NOTE: there's a bunch of different data classes tacked on after an
// EHCleanupScope. It is asserted (in EHScopeStack::pushCleanup*) that
// they don't require greater alignment than ScopeStackAlignment. So,
// EHCleanupScope ought to have alignment equal to that -- not more
// (would be misaligned by the stack allocator), and not less (would
// break the appended classes).
static_assert(alignof(EHCleanupScope) == EHScopeStack::ScopeStackAlignment,
              "EHCleanupScope expected alignment");

/// An exceptions scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
class EHFilterScope : public EHScope {
  // Essentially ends in a flexible array member:
  // llvm::Value *FilterTypes[0];

  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  EHFilterScope(unsigned numFilters)
    : EHScope(Filter, EHScopeStack::stable_end()) {
    FilterBits.NumFilters = numFilters;
    assert(FilterBits.NumFilters == numFilters && "NumFilters overflow");
  }

  static size_t getSizeForNumFilters(unsigned numFilters) {
    return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return FilterBits.NumFilters; }

  void setFilter(unsigned i, llvm::Value *filterValue) {
    assert(i < getNumFilters());
    getFilters()[i] = filterValue;
  }

  llvm::Value *getFilter(unsigned i) const {
    assert(i < getNumFilters());
    return getFilters()[i];
  }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Filter;
  }
};
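
EHFilterScope is documented above as the implementation vehicle for C++ exception specifications, i.e. the pre-C++17 dynamic exception specifications. The snippet below shows the language feature itself; the expectation that it is lowered to a filter scope holding one filter per listed type follows from the comment above rather than from anything else in this header, so treat that part as an inference. throw(...) specifications are deprecated in C++11 and removed in C++17, so the example needs an older language mode (for example -std=c++14).

#include <stdexcept>

// Pre-C++17 dynamic exception specification: only int and std::runtime_error
// may escape f(). Per the comment on EHFilterScope above, this is the kind of
// construct a filter scope implements; an exception of any other type that
// reaches the filter calls std::unexpected().
void f(bool useInt) throw(int, std::runtime_error) {
  if (useInt)
    throw 42;                        // allowed: matches the first listed type
  throw std::runtime_error("boom");  // allowed: matches the second listed type
}

int main() {
  try {
    f(true);
  } catch (int n) {
    return n == 42 ? 0 : 1;
  } catch (std::runtime_error &) {
    return 2;
  }
  return 3;
}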

/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
public:
  EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Terminate, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Terminate;
  }
};

class EHPadEndScope : public EHScope {
public:
  EHPadEndScope(EHScopeStack::stable_iterator enclosingEHScope)
      : EHScope(PadEnd, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHPadEndScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == PadEnd;
  }
};

/// A non-stable pointer into the scope stack.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(nullptr) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  iterator &operator++() {
    size_t Size;
    switch (get()->getKind()) {
    case EHScope::Catch:
      Size = EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope *>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Size = EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope *>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Size = static_cast<const EHCleanupScope *>(get())->getAllocatedSize();
      break;

    case EHScope::Terminate:
      Size = EHTerminateScope::getSize();
      break;

    case EHScope::PadEnd:
      Size = EHPadEndScope::getSize();
      break;
    }
    Ptr += llvm::alignTo(Size, ScopeStackAlignment);
    return *this;
  }

  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  bool encloses(iterator other) const { return Ptr >= other.Ptr; }
  bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};
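
operator++ above can only advance by asking the current scope how big it is, because catch, filter and cleanup scopes carry variable amounts of trailing data, and it then rounds that size up to ScopeStackAlignment. Here is a standalone sketch of the same walk over variable-length records packed in a flat byte buffer; Header, NumItems and the local alignTo helper are illustrative stand-ins, not Clang or LLVM APIs.

#include <cassert>
#include <cstddef>
#include <cstring>
#include <initializer_list>
#include <vector>

// Standalone sketch of iterating variable-length records in a flat buffer,
// the way EHScopeStack::iterator::operator++ steps from one EHScope to the
// next. alignTo stands in for llvm::alignTo.
static size_t alignTo(size_t N, size_t Align) {
  return (N + Align - 1) / Align * Align;
}

struct Header {
  unsigned Kind;       // discriminator, like EHScope::getKind()
  unsigned NumItems;   // number of trailing payload entries
  size_t size() const {
    return sizeof(Header) + NumItems * sizeof(int);  // header + trailing array
  }
};

int main() {
  constexpr size_t Alignment = 8;  // plays the role of ScopeStackAlignment
  std::vector<char> Buffer;

  // Append two records, each padded to the alignment the iterator assumes.
  for (unsigned NumItems : {3u, 1u}) {
    Header H{0, NumItems};
    size_t Bytes = alignTo(H.size(), Alignment);
    size_t Offset = Buffer.size();
    Buffer.resize(Offset + Bytes);
    std::memcpy(Buffer.data() + Offset, &H, sizeof(H));
  }

  // Walk the buffer: read a header, then advance by its aligned size.
  unsigned Seen = 0;
  for (char *Ptr = Buffer.data(), *End = Buffer.data() + Buffer.size();
       Ptr != End;) {
    Header H;
    std::memcpy(&H, Ptr, sizeof(H));         // like iterator::get()
    Ptr += alignTo(H.size(), Alignment);     // like iterator::operator++
    ++Seen;
  }
  assert(Seen == 2);
  return 0;
}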

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}

inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when not empty");

  EHCatchScope &scope = cast<EHCatchScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  deallocate(EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers()));
}

inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when not empty");

  EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  deallocate(EHTerminateScope::getSize());
}

inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}
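
find() and stabilize() convert between a raw pointer into the stack's buffer and a stable_iterator that stores the distance from EndOfBuffer. The point of measuring from the end is that the offset stays meaningful while raw pointers do not, assuming (as the arithmetic here suggests) that the stack grows toward lower addresses and keeps existing scopes at a fixed distance from the end of the buffer when it has to reallocate. A standalone sketch of that idea follows, with a std::vector standing in for the real buffer management; ToyStack and its members are invented for illustration.

#include <cassert>
#include <cstddef>
#include <vector>

// Standalone sketch of the stable_iterator idea: identify a position by its
// distance from the end of the buffer so it survives reallocation and
// further pushes, mirroring what EHScopeStack::stabilize()/find() do.
struct ToyStack {
  std::vector<char> Buffer;          // data occupies the *tail* of Buffer

  // Push N bytes "downward": new storage appears in front of the old data.
  char *push(size_t N) {
    Buffer.insert(Buffer.begin(), N, '\0');  // may reallocate and move data
    return Buffer.data();                    // like StartOfData
  }

  // stabilize(): pointer -> distance from the end of the buffer.
  size_t stabilize(const char *Ptr) const {
    return static_cast<size_t>(Buffer.data() + Buffer.size() - Ptr);
  }

  // find(): distance from the end -> pointer into the current buffer.
  char *find(size_t Size) {
    return Buffer.data() + Buffer.size() - Size;
  }
};

int main() {
  ToyStack Stack;
  char *First = Stack.push(16);
  First[0] = 'x';                          // mark the first record
  size_t Stable = Stack.stabilize(First);  // remember it stably

  Stack.push(1024);                        // likely reallocates the buffer

  // The raw pointer `First` may now dangle, but the stable offset still
  // names the same record.
  assert(Stack.find(Stable)[0] == 'x');
  return 0;
}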

/// The exceptions personality for a function.
struct EHPersonality {
  const char *PersonalityFn;

  // If this is non-null, this personality requires a non-standard
  // function for rethrowing an exception after a catchall cleanup.
  // This function must have prototype void(void*).
  const char *CatchallRethrowFn;

  static const EHPersonality &get(CodeGenModule &CGM, const FunctionDecl *FD);
  static const EHPersonality &get(CodeGenFunction &CGF);

  static const EHPersonality GNU_C;
  static const EHPersonality GNU_C_SJLJ;
  static const EHPersonality GNU_C_SEH;
  static const EHPersonality GNU_ObjC;
  static const EHPersonality GNU_ObjC_SJLJ;
  static const EHPersonality GNU_ObjC_SEH;
  static const EHPersonality GNUstep_ObjC;
  static const EHPersonality GNU_ObjCXX;
  static const EHPersonality NeXT_ObjC;
  static const EHPersonality GNU_CPlusPlus;
  static const EHPersonality GNU_CPlusPlus_SJLJ;
  static const EHPersonality GNU_CPlusPlus_SEH;
  static const EHPersonality MSVC_except_handler;
  static const EHPersonality MSVC_C_specific_handler;
  static const EHPersonality MSVC_CxxFrameHandler3;
  static const EHPersonality GNU_Wasm_CPlusPlus;

  /// Does this personality use landingpads or the family of pad instructions
  /// designed to form funclets?
  bool usesFuncletPads() const {
    return isMSVCPersonality() || isWasmPersonality();
  }

  bool isMSVCPersonality() const {
    return this == &MSVC_except_handler || this == &MSVC_C_specific_handler ||
           this == &MSVC_CxxFrameHandler3;
  }

  bool isWasmPersonality() const { return this == &GNU_Wasm_CPlusPlus; }

  bool isMSVCXXPersonality() const { return this == &MSVC_CxxFrameHandler3; }
};
}
}

#endif
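
isMSVCPersonality(), isWasmPersonality() and isMSVCXXPersonality() classify a personality purely by object identity: every supported personality is a distinct static constant declared in the struct, so comparing this against their addresses needs no extra tag field. Below is a standalone sketch of that singleton-identity pattern; the Personality type and the personality-routine strings are illustrative stand-ins, not Clang's actual tables.

#include <cassert>

// Standalone sketch of identity-based classification: each known
// "personality" is a distinct static constant, so `this` comparisons
// identify it without any extra tag field.
struct Personality {
  const char *Fn;

  static const Personality CxxItanium;
  static const Personality CxxMSVC;
  static const Personality CxxWasm;

  bool isMSVC() const { return this == &CxxMSVC; }
  bool isWasm() const { return this == &CxxWasm; }
  bool usesFuncletPads() const { return isMSVC() || isWasm(); }
};

// Illustrative symbol names only; do not take them as Clang's actual tables.
const Personality Personality::CxxItanium = {"__gxx_personality_v0"};
const Personality Personality::CxxMSVC    = {"__CxxFrameHandler3"};
const Personality Personality::CxxWasm    = {"__gxx_wasm_personality_v0"};

int main() {
  const Personality &P = Personality::CxxMSVC;
  assert(P.usesFuncletPads() && P.isMSVC() && !P.isWasm());
  return 0;
}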