GitHub Repository: freebsd/freebsd-src
Path: blob/main/contrib/llvm-project/clang/lib/CodeGen/CGCleanup.h
//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H
#define LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H

#include "EHScopeStack.h"

#include "Address.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/Instruction.h"

namespace llvm {
class BasicBlock;
class Value;
class ConstantInt;
}

namespace clang {
class FunctionDecl;
namespace CodeGen {
class CodeGenModule;
class CodeGenFunction;

/// The MS C++ ABI needs a pointer to RTTI data plus some flags to describe the
/// type of a catch handler, so we use this wrapper.
struct CatchTypeInfo {
  llvm::Constant *RTTI;
  unsigned Flags;
};

/// A protected scope for zero-cost EH handling.
class EHScope {
public:
  enum Kind { Cleanup, Catch, Terminate, Filter };

private:
  llvm::BasicBlock *CachedLandingPad;
  llvm::BasicBlock *CachedEHDispatchBlock;

  EHScopeStack::stable_iterator EnclosingEHScope;

  class CommonBitFields {
    friend class EHScope;
    LLVM_PREFERRED_TYPE(Kind)
    unsigned Kind : 3;
  };
  enum { NumCommonBits = 3 };

protected:
  class CatchBitFields {
    friend class EHCatchScope;
    unsigned : NumCommonBits;

    unsigned NumHandlers : 32 - NumCommonBits;
  };

  class CleanupBitFields {
    friend class EHCleanupScope;
    unsigned : NumCommonBits;

    /// Whether this cleanup needs to be run along normal edges.
    LLVM_PREFERRED_TYPE(bool)
    unsigned IsNormalCleanup : 1;

    /// Whether this cleanup needs to be run along exception edges.
    LLVM_PREFERRED_TYPE(bool)
    unsigned IsEHCleanup : 1;

    /// Whether this cleanup is currently active.
    LLVM_PREFERRED_TYPE(bool)
    unsigned IsActive : 1;

    /// Whether this cleanup is a lifetime marker
    LLVM_PREFERRED_TYPE(bool)
    unsigned IsLifetimeMarker : 1;

    /// Whether the normal cleanup should test the activation flag.
    LLVM_PREFERRED_TYPE(bool)
    unsigned TestFlagInNormalCleanup : 1;

    /// Whether the EH cleanup should test the activation flag.
    LLVM_PREFERRED_TYPE(bool)
    unsigned TestFlagInEHCleanup : 1;

    /// The amount of extra storage needed by the Cleanup.
    /// Always a multiple of the scope-stack alignment.
    unsigned CleanupSize : 12;
  };

  class FilterBitFields {
    friend class EHFilterScope;
    unsigned : NumCommonBits;

    unsigned NumFilters : 32 - NumCommonBits;
  };

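  // Each of the bit-field classes above starts with an unnamed
  // 'unsigned : NumCommonBits' member, so in the union below they all
  // overlay the low bits holding CommonBits.Kind. A scope writes the
  // member matching its kind (CatchBits, CleanupBits, or FilterBits),
  // and getKind() can still read the kind back through CommonBits.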
  union {
    CommonBitFields CommonBits;
    CatchBitFields CatchBits;
    CleanupBitFields CleanupBits;
    FilterBitFields FilterBits;
  };

public:
  EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
      : CachedLandingPad(nullptr), CachedEHDispatchBlock(nullptr),
        EnclosingEHScope(enclosingEHScope) {
    CommonBits.Kind = kind;
  }

  Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *block) {
    CachedLandingPad = block;
  }

  llvm::BasicBlock *getCachedEHDispatchBlock() const {
    return CachedEHDispatchBlock;
  }

  void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
    CachedEHDispatchBlock = block;
  }

  bool hasEHBranches() const {
    if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
      return !block->use_empty();
    return false;
  }

  EHScopeStack::stable_iterator getEnclosingEHScope() const {
    return EnclosingEHScope;
  }
};

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C \@finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    CatchTypeInfo Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    bool isCatchAll() const { return Type.RTTI == nullptr; }
  };

private:
  friend class EHScopeStack;

  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned numHandlers,
               EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Catch, enclosingEHScope) {
    CatchBits.NumHandlers = numHandlers;
    assert(CatchBits.NumHandlers == numHandlers && "NumHandlers overflow?");
  }

  unsigned getNumHandlers() const {
    return CatchBits.NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, CatchTypeInfo{nullptr, 0}, Block);
  }

  void setHandler(unsigned I, llvm::Constant *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = CatchTypeInfo{Type, 0};
    getHandlers()[I].Block = Block;
  }

  void setHandler(unsigned I, CatchTypeInfo Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  // Clear all handler blocks.
  // FIXME: it's better to always call clearHandlerBlocks in DTOR and have a
  // 'takeHandler' or some such function which removes ownership from the
  // EHCatchScope object if the handlers should live longer than EHCatchScope.
  void clearHandlerBlocks() {
    for (unsigned I = 0, N = getNumHandlers(); I != N; ++I)
      delete getHandler(I).Block;
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};
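
// Memory layout of a catch scope with N handlers, as allocated on the
// EHScopeStack with getSizeForNumHandlers(N):
//
//   [ EHCatchScope ][ Handler 0 ][ Handler 1 ] ... [ Handler N-1 ]
//
// getHandlers() depends on this layout: 'this + 1' is the address just past
// the EHCatchScope object, where the Handler array begins. Roughly speaking,
// a statement like
//   try { ... } catch (int) { ... } catch (...) { ... }
// gets a catch scope with two handlers: one registered via setHandler() with
// the type info for 'int', and a catch-all registered via
// setCatchAllHandler().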

/// A cleanup scope which generates the cleanup blocks lazily.
class alignas(8) EHCleanupScope : public EHScope {
  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge. This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  Address ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them. This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;
  };
  mutable struct ExtInfo *ExtInfo;

  /// Erases auxiliary allocas and their uses for an unused cleanup.
  /// Cleanups should mark these allocas as 'used' if the cleanup is
  /// emitted; otherwise these instructions will be erased.
  struct AuxillaryAllocas {
    SmallVector<llvm::Instruction *, 1> AuxAllocas;
    bool used = false;

    // Records a potentially unused instruction to be erased later.
    void Add(llvm::AllocaInst *Alloca) { AuxAllocas.push_back(Alloca); }

    // Mark all recorded instructions as used. These will not be erased later.
    void MarkUsed() {
      used = true;
      AuxAllocas.clear();
    }

    ~AuxillaryAllocas() {
      if (used)
        return;
      llvm::SetVector<llvm::Instruction *> Uses;
      for (auto *Inst : llvm::reverse(AuxAllocas))
        CollectUses(Inst, Uses);
      // Delete uses in the reverse order of insertion.
      for (auto *I : llvm::reverse(Uses))
        I->eraseFromParent();
    }

  private:
    void CollectUses(llvm::Instruction *I,
                     llvm::SetVector<llvm::Instruction *> &Uses) {
      if (!I || !Uses.insert(I))
        return;
      for (auto *User : I->users())
        CollectUses(cast<llvm::Instruction>(User), Uses);
    }
  };
  mutable struct AuxillaryAllocas *AuxAllocas;

  AuxillaryAllocas &getAuxillaryAllocas() {
    if (!AuxAllocas) {
      AuxAllocas = new struct AuxillaryAllocas();
    }
    return *AuxAllocas;
  }

  /// The number of fixups required by enclosing scopes (not including
  /// this one). If this is the top cleanup scope, all the fixups
  /// from this index onwards belong to this scope.
  unsigned FixupDepth;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(EHCleanupScope) + Size;
  }

  size_t getAllocatedSize() const {
    return sizeof(EHCleanupScope) + CleanupBits.CleanupSize;
  }

  EHCleanupScope(bool isNormal, bool isEH, unsigned cleanupSize,
                 unsigned fixupDepth,
                 EHScopeStack::stable_iterator enclosingNormal,
                 EHScopeStack::stable_iterator enclosingEH)
      : EHScope(EHScope::Cleanup, enclosingEH),
        EnclosingNormal(enclosingNormal), NormalBlock(nullptr),
        ActiveFlag(Address::invalid()), ExtInfo(nullptr), AuxAllocas(nullptr),
        FixupDepth(fixupDepth) {
    CleanupBits.IsNormalCleanup = isNormal;
    CleanupBits.IsEHCleanup = isEH;
    CleanupBits.IsActive = true;
    CleanupBits.IsLifetimeMarker = false;
    CleanupBits.TestFlagInNormalCleanup = false;
    CleanupBits.TestFlagInEHCleanup = false;
    CleanupBits.CleanupSize = cleanupSize;

    assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow");
  }

  void Destroy() {
    if (AuxAllocas)
      delete AuxAllocas;
    delete ExtInfo;
  }
  void AddAuxAllocas(llvm::SmallVector<llvm::AllocaInst *> Allocas) {
    for (auto *Alloca : Allocas)
      getAuxillaryAllocas().Add(Alloca);
  }
  void MarkEmitted() { getAuxillaryAllocas().MarkUsed(); }
  // Objects of EHCleanupScope are not destructed. Use Destroy().
  ~EHCleanupScope() = delete;

  bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }

  bool isEHCleanup() const { return CleanupBits.IsEHCleanup; }

  bool isActive() const { return CleanupBits.IsActive; }
  void setActive(bool A) { CleanupBits.IsActive = A; }

  bool isLifetimeMarker() const { return CleanupBits.IsLifetimeMarker; }
  void setLifetimeMarker() { CleanupBits.IsLifetimeMarker = true; }

  bool hasActiveFlag() const { return ActiveFlag.isValid(); }
  Address getActiveFlag() const {
    return ActiveFlag;
  }
  void setActiveFlag(RawAddress Var) {
    assert(Var.getAlignment().isOne());
    ActiveFlag = Var;
  }

  void setTestFlagInNormalCleanup() {
    CleanupBits.TestFlagInNormalCleanup = true;
  }
  bool shouldTestFlagInNormalCleanup() const {
    return CleanupBits.TestFlagInNormalCleanup;
  }

  void setTestFlagInEHCleanup() {
    CleanupBits.TestFlagInEHCleanup = true;
  }
  bool shouldTestFlagInEHCleanup() const {
    return CleanupBits.TestFlagInEHCleanup;
  }

  unsigned getFixupDepth() const { return FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }

  size_t getCleanupSize() const { return CleanupBits.CleanupSize; }
  void *getCleanupBuffer() { return this + 1; }

  EHScopeStack::Cleanup *getCleanup() {
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope. A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it. Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block).second)
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope. A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through. Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block).second;
  }

  /// Determines if this cleanup scope has any branch throughs.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
};
// NOTE: there's a bunch of different data classes tacked on after an
// EHCleanupScope. It is asserted (in EHScopeStack::pushCleanup*) that
// they don't require greater alignment than ScopeStackAlignment. So,
// EHCleanupScope ought to have alignment equal to that -- not more
// (would be misaligned by the stack allocator), and not less (would
// break the appended classes).
static_assert(alignof(EHCleanupScope) == EHScopeStack::ScopeStackAlignment,
              "EHCleanupScope expected alignment");

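// The trailing-storage idea from EHCatchScope is used here as well: a pushed
// cleanup lives in the CleanupBits.CleanupSize bytes immediately following
// the EHCleanupScope object,
//
//   [ EHCleanupScope ][ cleanup object (CleanupSize bytes) ]
//
// which is why getCleanupBuffer() and getCleanup() return 'this + 1', and why
// getSizeForCleanupSize() and getAllocatedSize() simply add the cleanup size
// to sizeof(EHCleanupScope). The alignment pinned by the static_assert above
// is what keeps that trailing object properly aligned.
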
/// An exceptions scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
class EHFilterScope : public EHScope {
  // Essentially ends in a flexible array member:
  //   llvm::Value *FilterTypes[0];

  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  EHFilterScope(unsigned numFilters)
    : EHScope(Filter, EHScopeStack::stable_end()) {
    FilterBits.NumFilters = numFilters;
    assert(FilterBits.NumFilters == numFilters && "NumFilters overflow");
  }

  static size_t getSizeForNumFilters(unsigned numFilters) {
    return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return FilterBits.NumFilters; }

  void setFilter(unsigned i, llvm::Value *filterValue) {
    assert(i < getNumFilters());
    getFilters()[i] = filterValue;
  }

  llvm::Value *getFilter(unsigned i) const {
    assert(i < getNumFilters());
    return getFilters()[i];
  }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Filter;
  }
};

/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
public:
  EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Terminate, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Terminate;
  }
};

/// A non-stable pointer into the scope stack.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(nullptr) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  iterator &operator++() {
    size_t Size;
    switch (get()->getKind()) {
    case EHScope::Catch:
      Size = EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope *>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Size = EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope *>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Size = static_cast<const EHCleanupScope *>(get())->getAllocatedSize();
      break;

    case EHScope::Terminate:
      Size = EHTerminateScope::getSize();
      break;
    }
    Ptr += llvm::alignTo(Size, ScopeStackAlignment);
    return *this;
  }

  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  bool encloses(iterator other) const { return Ptr >= other.Ptr; }
  bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};
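
// Traversal note: the innermost scope starts at StartOfData, so begin() is
// the innermost scope and operator++ above walks outward, advancing by each
// scope's size rounded up to ScopeStackAlignment, until it reaches end(),
// i.e. EndOfBuffer.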

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}

inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when not empty");

  EHCatchScope &scope = cast<EHCatchScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  deallocate(EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers()));
}

inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when not empty");

  EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  deallocate(EHTerminateScope::getSize());
}

inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}
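
// find() and stabilize() show how the two iterator kinds relate: a
// stable_iterator records a distance back from EndOfBuffer (sp.Size), which
// is unaffected by pushes and pops at the StartOfData end of the buffer
// (hence "stable"), while a plain iterator is just a raw pointer into the
// buffer.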

/// The exceptions personality for a function.
struct EHPersonality {
  const char *PersonalityFn;

  // If this is non-null, this personality requires a non-standard
  // function for rethrowing an exception after a catchall cleanup.
  // This function must have prototype void(void*).
  const char *CatchallRethrowFn;

  static const EHPersonality &get(CodeGenModule &CGM, const FunctionDecl *FD);
  static const EHPersonality &get(CodeGenFunction &CGF);

  static const EHPersonality GNU_C;
  static const EHPersonality GNU_C_SJLJ;
  static const EHPersonality GNU_C_SEH;
  static const EHPersonality GNU_ObjC;
  static const EHPersonality GNU_ObjC_SJLJ;
  static const EHPersonality GNU_ObjC_SEH;
  static const EHPersonality GNUstep_ObjC;
  static const EHPersonality GNU_ObjCXX;
  static const EHPersonality NeXT_ObjC;
  static const EHPersonality GNU_CPlusPlus;
  static const EHPersonality GNU_CPlusPlus_SJLJ;
  static const EHPersonality GNU_CPlusPlus_SEH;
  static const EHPersonality MSVC_except_handler;
  static const EHPersonality MSVC_C_specific_handler;
  static const EHPersonality MSVC_CxxFrameHandler3;
  static const EHPersonality GNU_Wasm_CPlusPlus;
  static const EHPersonality XL_CPlusPlus;
  static const EHPersonality ZOS_CPlusPlus;

  /// Does this personality use landingpads or the family of pad instructions
  /// designed to form funclets?
  bool usesFuncletPads() const {
    return isMSVCPersonality() || isWasmPersonality();
  }

  bool isMSVCPersonality() const {
    return this == &MSVC_except_handler || this == &MSVC_C_specific_handler ||
           this == &MSVC_CxxFrameHandler3;
  }

  bool isWasmPersonality() const { return this == &GNU_Wasm_CPlusPlus; }

  bool isMSVCXXPersonality() const { return this == &MSVC_CxxFrameHandler3; }
};
}
}

#endif