GitHub Repository: freebsd/freebsd-src
Path: blob/main/contrib/llvm-project/clang/lib/CodeGen/CGClass.cpp
//===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "ABIInfoImpl.h"
#include "CGBlocks.h"
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGRecordLayout.h"
#include "CodeGenFunction.h"
#include "TargetInfo.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/Basic/TargetBuiltins.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Transforms/Utils/SanitizerStats.h"
#include <optional>

using namespace clang;
using namespace CodeGen;

/// Return the best known alignment for an unknown pointer to a
/// particular class.
CharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) {
  if (!RD->hasDefinition())
    return CharUnits::One(); // Hopefully won't be used anywhere.

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use the full alignment.
  if (RD->isEffectivelyFinal())
    return layout.getAlignment();

  // Otherwise, we have to assume it could be a subclass.
  return layout.getNonVirtualAlignment();
}
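// Illustration (not part of the upstream source; B and D below are
// hypothetical):
//
//   struct B { virtual ~B(); };
//   struct D final : B { alignas(32) char buf[32]; };
//
// A 'D *' may assume D's full 32-byte alignment because nothing can derive
// from D, while a 'B *' must assume only B's non-virtual alignment: it
// might point at a base subobject placed at a less-aligned offset inside
// some derived object.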
/// Return the smallest possible amount of storage that might be allocated
/// starting from the beginning of an object of a particular class.
///
/// This may be smaller than sizeof(RD) if RD has virtual base classes.
CharUnits CodeGenModule::getMinimumClassObjectSize(const CXXRecordDecl *RD) {
  if (!RD->hasDefinition())
    return CharUnits::One();

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use the full size.
  if (RD->isEffectivelyFinal())
    return layout.getSize();

  // Otherwise, we have to assume it could be a subclass.
  return std::max(layout.getNonVirtualSize(), CharUnits::One());
}
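// Worked example (illustrative; V and S are hypothetical):
//
//   struct V { int pad; };
//   struct S : virtual V { };
//
// sizeof(S) covers S's vptr plus the V subobject, but when S is a base
// subobject of a further-derived class, V is shared with the complete
// object and laid out elsewhere.  Storage known only to hold "some S
// subobject" may therefore be as small as S's non-virtual part, which is
// what this function conservatively returns.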
/// Return the best known alignment for a pointer to a virtual base,
/// given the alignment of a pointer to the derived class.
CharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign,
                                           const CXXRecordDecl *derivedClass,
                                           const CXXRecordDecl *vbaseClass) {
  // The basic idea here is that an underaligned derived pointer might
  // indicate an underaligned base pointer.

  assert(vbaseClass->isCompleteDefinition());
  auto &baseLayout = getContext().getASTRecordLayout(vbaseClass);
  CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment();

  return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass,
                                   expectedVBaseAlign);
}

CharUnits
CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign,
                                         const CXXRecordDecl *baseDecl,
                                         CharUnits expectedTargetAlign) {
  // If the base is an incomplete type (which is, alas, possible with
  // member pointers), be pessimistic.
  if (!baseDecl->isCompleteDefinition())
    return std::min(actualBaseAlign, expectedTargetAlign);

  auto &baseLayout = getContext().getASTRecordLayout(baseDecl);
  CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment();

  // If the class is properly aligned, assume the target offset is, too.
  //
  // This actually isn't necessarily the right thing to do --- if the
  // class is a complete object, but it's only properly aligned for a
  // base subobject, then the alignments of things relative to it are
  // probably off as well.  (Note that this requires the alignment of
  // the target to be greater than the NV alignment of the derived
  // class.)
  //
  // However, our approach to this kind of under-alignment can only
  // ever be best effort; after all, we're never going to propagate
  // alignments through variables or parameters.  Note, in particular,
  // that constructing a polymorphic type in an address that's less
  // than pointer-aligned will generally trap in the constructor,
  // unless we someday add some sort of attribute to change the
  // assumed alignment of 'this'.  So our goal here is pretty much
  // just to allow the user to explicitly say that a pointer is
  // under-aligned and then safely access its fields and vtables.
  if (actualBaseAlign >= expectedBaseAlign) {
    return expectedTargetAlign;
  }

  // Otherwise, we might be offset by an arbitrary multiple of the
  // actual alignment.  The correct adjustment is to take the min of
  // the two alignments.
  return std::min(actualBaseAlign, expectedTargetAlign);
}
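// Worked example (illustrative): suppose a pointer is only known to be
// 4-byte aligned while its class expects 16, and the target subobject
// expects 8.  The object may start at any multiple of 4, so a fixed
// offset from it can only be assumed min(4, 8) = 4-byte aligned.  Had the
// pointer carried the full 16-byte alignment, the layout-computed offset
// would preserve the target's full 8-byte alignment, as the fast path
// above assumes.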
Address CodeGenFunction::LoadCXXThisAddress() {
  assert(CurFuncDecl && "loading 'this' without a func declaration?");
  auto *MD = cast<CXXMethodDecl>(CurFuncDecl);

  // Lazily compute CXXThisAlignment.
  if (CXXThisAlignment.isZero()) {
    // Just use the best known alignment for the parent.
    // TODO: if we're currently emitting a complete-object ctor/dtor,
    // we can always use the complete-object alignment.
    CXXThisAlignment = CGM.getClassPointerAlignment(MD->getParent());
  }

  return makeNaturalAddressForPointer(
      LoadCXXThis(), MD->getFunctionObjectParameterType(), CXXThisAlignment,
      false, nullptr, nullptr, KnownNonNull);
}

/// Emit the address of a field using a member data pointer.
///
/// \param E Only used for emergency diagnostics
Address
CodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base,
                                                 llvm::Value *memberPtr,
                                       const MemberPointerType *memberPtrType,
                                                 LValueBaseInfo *BaseInfo,
                                                 TBAAAccessInfo *TBAAInfo) {
  // Ask the ABI to compute the actual address.
  llvm::Value *ptr =
    CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base,
                                                 memberPtr, memberPtrType);

  QualType memberType = memberPtrType->getPointeeType();
  CharUnits memberAlign =
      CGM.getNaturalTypeAlignment(memberType, BaseInfo, TBAAInfo);
  memberAlign =
    CGM.getDynamicOffsetAlignment(base.getAlignment(),
                            memberPtrType->getClass()->getAsCXXRecordDecl(),
                                  memberAlign);
  return Address(ptr, ConvertTypeForMem(memberPtrType->getPointeeType()),
                 memberAlign);
}

CharUnits CodeGenModule::computeNonVirtualBaseClassOffset(
    const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start,
    CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const ASTContext &Context = getContext();
  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const auto *BaseDecl =
        cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}
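// Path example (illustrative; the structs are hypothetical):
//
//   struct A { int a; };
//   struct B { int b; };
//   struct C : A, B { };
//
// For the cast path C -> B, Layout(C).getBaseClassOffset(B) is typically
// sizeof(A) == 4 bytes, and a longer path would keep accumulating each
// step's non-virtual offset into 'Offset'.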
llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
      computeNonVirtualBaseClassOffset(ClassDecl, PathBegin, PathEnd);
  if (Offset.isZero())
    return nullptr;

  llvm::Type *PtrDiffTy =
      getTypes().ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
Address
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This,
                                                  const CXXRecordDecl *Derived,
                                                  const CXXRecordDecl *Base,
                                                  bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This.getElementType() == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  Address V = This;
  if (!Offset.isZero()) {
    V = V.withElementType(Int8Ty);
    V = Builder.CreateConstInBoundsByteGEP(V, Offset);
  }
  return V.withElementType(ConvertType(Base));
}

static Address
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr,
                                CharUnits nonVirtualOffset,
                                llvm::Value *virtualOffset,
                                const CXXRecordDecl *derivedClass,
                                const CXXRecordDecl *nearestVBase) {
  // Assert that we have something to do.
  assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr);

  // Compute the offset from the static and dynamic components.
  llvm::Value *baseOffset;
  if (!nonVirtualOffset.isZero()) {
    llvm::Type *OffsetType =
        (CGF.CGM.getTarget().getCXXABI().isItaniumFamily() &&
         CGF.CGM.getItaniumVTableContext().isRelativeLayout())
            ? CGF.Int32Ty
            : CGF.PtrDiffTy;
    baseOffset =
        llvm::ConstantInt::get(OffsetType, nonVirtualOffset.getQuantity());
    if (virtualOffset) {
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
    }
  } else {
    baseOffset = virtualOffset;
  }

  // Apply the base offset.
  llvm::Value *ptr = addr.emitRawPointer(CGF);
  ptr = CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, ptr, baseOffset, "add.ptr");

  // If we have a virtual component, the alignment of the result will
  // be relative only to the known alignment of that vbase.
  CharUnits alignment;
  if (virtualOffset) {
    assert(nearestVBase && "virtual offset without vbase?");
    alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(),
                                          derivedClass, nearestVBase);
  } else {
    alignment = addr.getAlignment();
  }
  alignment = alignment.alignmentAtOffset(nonVirtualOffset);

  return Address(ptr, CGF.Int8Ty, alignment);
}

Address CodeGenFunction::GetAddressOfBaseClass(
    Address Value, const CXXRecordDecl *Derived,
    CastExpr::path_const_iterator PathBegin,
    CastExpr::path_const_iterator PathEnd, bool NullCheckValue,
    SourceLocation Loc) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = nullptr;

  // Sema has done some convenient canonicalization here: if the
  // access path involved any virtual steps, the conversion path will
  // *start* with a step down to the correct virtual base subobject,
  // and hence will not require any further steps.
  if ((*Start)->isVirtual()) {
    VBase = cast<CXXRecordDecl>(
        (*Start)->getType()->castAs<RecordType>()->getDecl());
    ++Start;
  }

  // Compute the static offset of the ultimate destination within its
  // allocating subobject (the virtual base, if there is one, or else
  // the "complete" object that we see).
  CharUnits NonVirtualOffset = CGM.computeNonVirtualBaseClassOffset(
      VBase ? VBase : Derived, Start, PathEnd);

  // If there's a virtual step, we can sometimes "devirtualize" it.
  // For now, that's limited to when the derived type is final.
  // TODO: "devirtualize" this for accesses to known-complete objects.
  if (VBase && Derived->hasAttr<FinalAttr>()) {
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
    NonVirtualOffset += vBaseOffset;
    VBase = nullptr; // we no longer have a virtual step
  }

  // Get the base pointer type.
  llvm::Type *BaseValueTy = ConvertType((PathEnd[-1])->getType());
  llvm::Type *PtrTy = llvm::PointerType::get(
      CGM.getLLVMContext(), Value.getType()->getPointerAddressSpace());

  QualType DerivedTy = getContext().getRecordType(Derived);
  CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived);

  // If the static offset is zero and we don't have a virtual step,
  // just do a bitcast; null checks are unnecessary.
  if (NonVirtualOffset.isZero() && !VBase) {
    if (sanitizePerformTypeCheck()) {
      SanitizerSet SkippedChecks;
      SkippedChecks.set(SanitizerKind::Null, !NullCheckValue);
      EmitTypeCheck(TCK_Upcast, Loc, Value.emitRawPointer(*this), DerivedTy,
                    DerivedAlign, SkippedChecks);
    }
    return Value.withElementType(BaseValueTy);
  }

  llvm::BasicBlock *origBB = nullptr;
  llvm::BasicBlock *endBB = nullptr;

  // Skip over the offset (and the vtable load) if we're supposed to
  // null-check the pointer.
  if (NullCheckValue) {
    origBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
    endBB = createBasicBlock("cast.end");

    llvm::Value *isNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(isNull, endBB, notNullBB);
    EmitBlock(notNullBB);
  }

  if (sanitizePerformTypeCheck()) {
    SanitizerSet SkippedChecks;
    SkippedChecks.set(SanitizerKind::Null, true);
    EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc,
                  Value.emitRawPointer(*this), DerivedTy, DerivedAlign,
                  SkippedChecks);
  }

  // Compute the virtual offset.
  llvm::Value *VirtualOffset = nullptr;
  if (VBase) {
    VirtualOffset =
      CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived, VBase);
  }

  // Apply both offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset, Derived, VBase);

  // Cast to the destination type.
  Value = Value.withElementType(BaseValueTy);

  // Build a phi if we needed a null check.
  if (NullCheckValue) {
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    Builder.CreateBr(endBB);
    EmitBlock(endBB);

    llvm::PHINode *PHI = Builder.CreatePHI(PtrTy, 2, "cast.result");
    PHI->addIncoming(Value.emitRawPointer(*this), notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(PtrTy), origBB);
    Value = Value.withPointer(PHI, NotKnownNonNull);
  }

  return Value;
}
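// Illustration (not upstream code): for a hypothetical
// 'struct D final : virtual V { };', upcasting a 'D *' to 'V *' would
// normally require loading the vbase offset through D's vtable.  Because
// D is final, the code above folds that virtual step into a constant
// offset taken from D's record layout, so the upcast becomes a plain GEP
// with no vtable load.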
Address
CodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr,
                                          const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
      getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedValueTy = ConvertType(DerivedTy);

  llvm::Value *NonVirtualOffset =
      CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return BaseAddr.withElementType(DerivedValueTy);
  }

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = nullptr;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  Address Addr = BaseAddr.withElementType(Int8Ty);
  Addr = Builder.CreateInBoundsGEP(
      Addr, Builder.CreateNeg(NonVirtualOffset), Int8Ty,
      CGM.getClassPointerAlignment(Derived), "sub.ptr");

  // Just cast.
  Addr = Addr.withElementType(DerivedValueTy);

  // Produce a PHI if we had a null-check.
  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::Value *Value = Addr.emitRawPointer(*this);
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
    return Address(PHI, Addr.getElementType(),
                   CGM.getClassPointerAlignment(Derived));
  }

  return Addr;
}

llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD,
                                              bool ForVirtualBase,
                                              bool Delegating) {
  if (!CGM.getCXXABI().NeedsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return nullptr;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  uint64_t SubVTTIndex;

  if (Delegating) {
    // If this is a delegating constructor call, just load the VTT.
    return LoadCXXVTT();
  } else if (RD == Base) {
    // If the record matches the base, this is the complete ctor/dtor
    // variant calling the base variant in a class with virtual bases.
    assert(!CGM.getCXXABI().NeedsVTTParameter(CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    llvm::Value *VTT = LoadCXXVTT();
    return Builder.CreateConstInBoundsGEP1_64(VoidPtrTy, VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    llvm::GlobalValue *VTT = CGM.getVTables().GetAddrOfVTT(RD);
    return Builder.CreateConstInBoundsGEP2_64(
        VTT->getValueType(), VTT, 0, SubVTTIndex);
  }
}
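// Background sketch (illustrative; the classes are hypothetical): in the
// Itanium ABI a VTT ("vtable table") is an array of vtable pointers used
// while constructing classes with virtual bases, e.g.
//
//   struct V { virtual void f(); };
//   struct A : virtual V { };
//   struct B : virtual V { };
//   struct C : A, B { };   // C's VTT has entries for A-in-C and B-in-C
//
// When C's complete constructor calls A's base constructor, it passes
// &VTT[SubVTTIndex] so that A can install construction vtables that are
// correct for an A subobject laid out inside a C.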
namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor final : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      // We are already inside a destructor, so presumably the object being
      // destroyed should have the expected type.
      QualType ThisTy = D->getFunctionObjectParameterType();
      Address Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual,
                                /*Delegating=*/false, Addr, ThisTy);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker
      : ConstEvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef ConstEvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(const ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; }
  };
} // end anonymous namespace

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(Init);
  return Checker.UsesThis;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  Address ThisPtr = CGF.LoadCXXThisAddress();

  const Type *BaseType = BaseInit->getBaseClass();
  const auto *BaseClassDecl =
      cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  Address V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  AggValueSlot AggSlot =
      AggValueSlot::forAddr(
          V, Qualifiers(),
          AggValueSlot::IsDestructed,
          AggValueSlot::DoesNotNeedGCBarriers,
          AggValueSlot::IsNotAliased,
          CGF.getOverlapForBaseInit(ClassDecl, BaseClassDecl, isBaseVirtual));

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) {
  auto *CD = dyn_cast<CXXConstructorDecl>(D);
  if (!(CD && CD->isCopyOrMoveConstructor()) &&
      !D->isCopyAssignmentOperator() && !D->isMoveAssignmentOperator())
    return false;

  // We can emit a memcpy for a trivial copy or move constructor/assignment.
  if (D->isTrivial() && !D->getParent()->mayInsertExtraPadding())
    return true;

  // We *must* emit a memcpy for a defaulted union copy or move op.
  if (D->getParent()->isUnion() && D->isDefaulted())
    return true;

  return false;
}
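// Example (illustrative; P is hypothetical): for
//
//   struct P { int x, y; };   // trivial copy ctor and copy assignment
//
// 'P b = a;' can be lowered to a memcpy of sizeof(P) bytes.  For a union
// with a defaulted copy or move operation the memcpy form is required,
// since only a bytewise copy is guaranteed to preserve whichever member
// is currently active.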
static void EmitLValueForAnyFieldInitialization(CodeGenFunction &CGF,
                                                CXXCtorInitializer *MemberInit,
                                                LValue &LHS) {
  FieldDecl *Field = MemberInit->getAnyMember();
  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to the
    // field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    for (const auto *I : IndirectField->chain())
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(I));
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  ApplyDebugLocation Loc(CGF, MemberInit->getSourceLocation());
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS;

  // If a base constructor is being emitted, create an LValue that has the
  // non-virtual alignment.
  if (CGF.CurGD.getCtorType() == Ctor_Base)
    LHS = CGF.MakeNaturalAlignPointeeAddrLValue(ThisPtr, RecordTy);
  else
    LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  EmitLValueForAnyFieldInitialization(CGF, MemberInit, LHS);

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isDefaulted() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (CE && isMemcpyEquivalentSpecialMember(CE->getConstructor()))) {
      unsigned SrcArgIndex =
          CGF.CGM.getCXXABI().getSrcArgforCopyCtor(Constructor, Args);
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS, Src, FieldType,
                            CGF.getOverlapForFieldInit(Field),
                            LHS.isVolatileQualified());
      // Ensure that we destroy the objects if an exception is thrown later in
      // the constructor.
      QualType::DestructionKind dtorKind = FieldType.isDestructedType();
      if (CGF.needsEHCleanup(dtorKind))
        CGF.pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
      return;
    }
  }

  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit());
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
                                              Expr *Init) {
  QualType FieldType = Field->getType();
  switch (getEvaluationKind(FieldType)) {
  case TEK_Scalar:
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
    break;
  case TEK_Complex:
    EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
    break;
  case TEK_Aggregate: {
    AggValueSlot Slot = AggValueSlot::forLValue(
        LHS, AggValueSlot::IsDestructed, AggValueSlot::DoesNotNeedGCBarriers,
        AggValueSlot::IsNotAliased, getOverlapForFieldInit(Field),
        AggValueSlot::IsNotZeroed,
        // Checks are made by the code that calls constructor.
        AggValueSlot::IsSanitizerChecked);
    EmitAggExpr(Init, Slot);
    break;
  }
  }

  // Ensure that we destroy this object if an exception is thrown
  // later in the constructor.
  QualType::DestructionKind dtorKind = FieldType.isDestructedType();
  if (needsEHCleanup(dtorKind))
    pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
bool CodeGenFunction::IsConstructorDelegationValid(
    const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always exclude this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->castAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}
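// Sketch of the optimization this guards (illustrative; X is
// hypothetical): for
//
//   struct X { X(int); };   // no virtual bases, not variadic or delegating
//
// the complete-object constructor (C1 in Itanium mangling) can be emitted
// as a simple call to the base-object constructor (C2) instead of
// duplicating the initializer list, because with no virtual bases the two
// variants do identical work.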
// Emit code in ctor (Prologue==true) or dtor (Prologue==false)
// to poison the extra field paddings inserted under
// -fsanitize-address-field-padding=1|2.
void CodeGenFunction::EmitAsanPrologueOrEpilogue(bool Prologue) {
  ASTContext &Context = getContext();
  const CXXRecordDecl *ClassDecl =
      Prologue ? cast<CXXConstructorDecl>(CurGD.getDecl())->getParent()
               : cast<CXXDestructorDecl>(CurGD.getDecl())->getParent();
  if (!ClassDecl->mayInsertExtraPadding()) return;

  struct SizeAndOffset {
    uint64_t Size;
    uint64_t Offset;
  };

  unsigned PtrSize = CGM.getDataLayout().getPointerSizeInBits();
  const ASTRecordLayout &Info = Context.getASTRecordLayout(ClassDecl);

  // Populate sizes and offsets of fields.
  SmallVector<SizeAndOffset, 16> SSV(Info.getFieldCount());
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i)
    SSV[i].Offset =
        Context.toCharUnitsFromBits(Info.getFieldOffset(i)).getQuantity();

  size_t NumFields = 0;
  for (const auto *Field : ClassDecl->fields()) {
    const FieldDecl *D = Field;
    auto FieldInfo = Context.getTypeInfoInChars(D->getType());
    CharUnits FieldSize = FieldInfo.Width;
    assert(NumFields < SSV.size());
    SSV[NumFields].Size = D->isBitField() ? 0 : FieldSize.getQuantity();
    NumFields++;
  }
  assert(NumFields == SSV.size());
  if (SSV.size() <= 1) return;

  // We will insert calls to __asan_* run-time functions.
  // LLVM AddressSanitizer pass may decide to inline them later.
  llvm::Type *Args[2] = {IntPtrTy, IntPtrTy};
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, Args, false);
  llvm::FunctionCallee F = CGM.CreateRuntimeFunction(
      FTy, Prologue ? "__asan_poison_intra_object_redzone"
                    : "__asan_unpoison_intra_object_redzone");

  llvm::Value *ThisPtr = LoadCXXThis();
  ThisPtr = Builder.CreatePtrToInt(ThisPtr, IntPtrTy);
  uint64_t TypeSize = Info.getNonVirtualSize().getQuantity();
  // For each field check if it has sufficient padding,
  // if so (un)poison it with a call.
  for (size_t i = 0; i < SSV.size(); i++) {
    uint64_t AsanAlignment = 8;
    uint64_t NextField = i == SSV.size() - 1 ? TypeSize : SSV[i + 1].Offset;
    uint64_t PoisonSize = NextField - SSV[i].Offset - SSV[i].Size;
    uint64_t EndOffset = SSV[i].Offset + SSV[i].Size;
    if (PoisonSize < AsanAlignment || !SSV[i].Size ||
        (NextField % AsanAlignment) != 0)
      continue;
    Builder.CreateCall(
        F, {Builder.CreateAdd(ThisPtr, Builder.getIntN(PtrSize, EndOffset)),
            Builder.getIntN(PtrSize, PoisonSize)});
  }
}
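// Worked example (illustrative; T is hypothetical): under
// -fsanitize-address-field-padding, a class such as
//
//   struct T { char c; /* redzone */ int i; /* redzone */ };
//
// gets extra padding inserted between fields.  For each field the loop
// above computes [EndOffset, EndOffset + PoisonSize), the bytes between
// the end of the field and the start of the next one (or the end of the
// object), and poisons that range in the ctor / unpoisons it in the dtor,
// provided the gap is at least the 8-byte ASan shadow granularity.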
/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  EmitAsanPrologueOrEpilogue(true);
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  assert((CGM.getTarget().getCXXABI().hasConstructorVariants() ||
          CtorType == Ctor_Complete) &&
         "can only generate complete ctor for this ABI");

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args, Ctor->getEndLoc());
    return;
  }

  const FunctionDecl *Definition = nullptr;
  Stmt *Body = Ctor->getBody(Definition);
  assert(Definition == Ctor && "emitting wrong constructor body");

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = isa_and_nonnull<CXXTryStmt>(Body);
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  incrementProfileCounter(Body);
  maybeCreateMCDCCondBitmap();

  RunCleanupsScope RunCleanups(*this);

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  RunCleanups.ForceCleanup();

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
  /// RAII object to indicate that codegen is copying the value representation
  /// instead of the object representation. Useful when copying a struct or
  /// class which has uninitialized members and we're only performing
  /// lvalue-to-rvalue conversion on the object but not its members.
  class CopyingValueRepresentation {
  public:
    explicit CopyingValueRepresentation(CodeGenFunction &CGF)
        : CGF(CGF), OldSanOpts(CGF.SanOpts) {
      CGF.SanOpts.set(SanitizerKind::Bool, false);
      CGF.SanOpts.set(SanitizerKind::Enum, false);
    }
    ~CopyingValueRepresentation() {
      CGF.SanOpts = OldSanOpts;
    }
  private:
    CodeGenFunction &CGF;
    SanitizerSet OldSanOpts;
  };
} // end anonymous namespace

namespace {
  class FieldMemcpyizer {
  public:
    FieldMemcpyizer(CodeGenFunction &CGF, const CXXRecordDecl *ClassDecl,
                    const VarDecl *SrcRec)
      : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec),
        RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)),
        FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0),
        LastFieldOffset(0), LastAddedFieldIndex(0) {}

    bool isMemcpyableField(FieldDecl *F) const {
      // Never memcpy fields when we are adding poisoned paddings.
      if (CGF.getContext().getLangOpts().SanitizeAddressFieldPadding)
        return false;
      Qualifiers Qual = F->getType().getQualifiers();
      if (Qual.hasVolatile() || Qual.hasObjCLifetime())
        return false;
      return true;
    }

    void addMemcpyableField(FieldDecl *F) {
      if (isEmptyFieldForLayout(CGF.getContext(), F))
        return;
      if (!FirstField)
        addInitialField(F);
      else
        addNextField(F);
    }

    CharUnits getMemcpySize(uint64_t FirstByteOffset) const {
      ASTContext &Ctx = CGF.getContext();
      unsigned LastFieldSize =
          LastField->isBitField()
              ? LastField->getBitWidthValue(Ctx)
              : Ctx.toBits(
                    Ctx.getTypeInfoDataSizeInChars(LastField->getType()).Width);
      uint64_t MemcpySizeBits = LastFieldOffset + LastFieldSize -
                                FirstByteOffset + Ctx.getCharWidth() - 1;
      CharUnits MemcpySize = Ctx.toCharUnitsFromBits(MemcpySizeBits);
      return MemcpySize;
    }

    void emitMemcpy() {
      // Give the subclass a chance to bail out if it feels the memcpy isn't
      // worth it (e.g. hasn't aggregated enough data).
      if (!FirstField) {
        return;
      }

      uint64_t FirstByteOffset;
      if (FirstField->isBitField()) {
        const CGRecordLayout &RL =
          CGF.getTypes().getCGRecordLayout(FirstField->getParent());
        const CGBitFieldInfo &BFInfo = RL.getBitFieldInfo(FirstField);
        // FirstFieldOffset is not appropriate for bitfields,
        // we need to use the storage offset instead.
        FirstByteOffset = CGF.getContext().toBits(BFInfo.StorageOffset);
      } else {
        FirstByteOffset = FirstFieldOffset;
      }

      CharUnits MemcpySize = getMemcpySize(FirstByteOffset);
      QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
      Address ThisPtr = CGF.LoadCXXThisAddress();
      LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy);
      LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField);
      llvm::Value *SrcPtr =
          CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec));
      LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField);

      emitMemcpyIR(
          Dest.isBitField() ? Dest.getBitFieldAddress() : Dest.getAddress(),
          Src.isBitField() ? Src.getBitFieldAddress() : Src.getAddress(),
          MemcpySize);
      reset();
    }

    void reset() {
      FirstField = nullptr;
    }

  protected:
    CodeGenFunction &CGF;
    const CXXRecordDecl *ClassDecl;

  private:
    void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) {
      DestPtr = DestPtr.withElementType(CGF.Int8Ty);
      SrcPtr = SrcPtr.withElementType(CGF.Int8Ty);
      CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity());
    }

    void addInitialField(FieldDecl *F) {
      FirstField = F;
      LastField = F;
      FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex());
      LastFieldOffset = FirstFieldOffset;
      LastAddedFieldIndex = F->getFieldIndex();
    }

    void addNextField(FieldDecl *F) {
      // For the most part, the following invariant will hold:
      //   F->getFieldIndex() == LastAddedFieldIndex + 1
      // The one exception is that Sema won't add a copy-initializer for an
      // unnamed bitfield, which will show up here as a gap in the sequence.
      assert(F->getFieldIndex() >= LastAddedFieldIndex + 1 &&
             "Cannot aggregate fields out of order.");
      LastAddedFieldIndex = F->getFieldIndex();

      // The 'first' and 'last' fields are chosen by offset, rather than field
      // index. This allows the code to support bitfields, as well as regular
      // fields.
      uint64_t FOffset = RecLayout.getFieldOffset(F->getFieldIndex());
      if (FOffset < FirstFieldOffset) {
        FirstField = F;
        FirstFieldOffset = FOffset;
      } else if (FOffset >= LastFieldOffset) {
        LastField = F;
        LastFieldOffset = FOffset;
      }
    }

    const VarDecl *SrcRec;
    const ASTRecordLayout &RecLayout;
    FieldDecl *FirstField;
    FieldDecl *LastField;
    uint64_t FirstFieldOffset, LastFieldOffset;
    unsigned LastAddedFieldIndex;
  };
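  // Usage sketch (illustrative; Q is hypothetical): given a defaulted copy
  // constructor for
  //
  //   struct Q { int a; int b; short c; };
  //
  // the subclasses below feed the field initializers for 'a', 'b', and 'c'
  // into this aggregator one at a time; since the fields are contiguous
  // and trivially copyable, emitMemcpy() coalesces them into a single
  // memcpy covering [offsetof(Q, a), offsetof(Q, c) + sizeof(short)).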
  class ConstructorMemcpyizer : public FieldMemcpyizer {
  private:
    /// Get source argument for copy constructor. Returns null if not a copy
    /// constructor.
    static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF,
                                               const CXXConstructorDecl *CD,
                                               FunctionArgList &Args) {
      if (CD->isCopyOrMoveConstructor() && CD->isDefaulted())
        return Args[CGF.CGM.getCXXABI().getSrcArgforCopyCtor(CD, Args)];
      return nullptr;
    }

    // Returns true if a CXXCtorInitializer represents a member initialization
    // that can be rolled into a memcpy.
    bool isMemberInitMemcpyable(CXXCtorInitializer *MemberInit) const {
      if (!MemcpyableCtor)
        return false;
      FieldDecl *Field = MemberInit->getMember();
      assert(Field && "No field for member init.");
      QualType FieldType = Field->getType();
      CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());

      // Bail out on non-memcpyable, not-trivially-copyable members.
      if (!(CE && isMemcpyEquivalentSpecialMember(CE->getConstructor())) &&
          !(FieldType.isTriviallyCopyableType(CGF.getContext()) ||
            FieldType->isReferenceType()))
        return false;

      // Bail out on volatile fields.
      if (!isMemcpyableField(Field))
        return false;

      // Otherwise we're good.
      return true;
    }

  public:
    ConstructorMemcpyizer(CodeGenFunction &CGF, const CXXConstructorDecl *CD,
                          FunctionArgList &Args)
      : FieldMemcpyizer(CGF, CD->getParent(),
                        getTrivialCopySource(CGF, CD, Args)),
        ConstructorDecl(CD),
        MemcpyableCtor(CD->isDefaulted() &&
                       CD->isCopyOrMoveConstructor() &&
                       CGF.getLangOpts().getGC() == LangOptions::NonGC),
        Args(Args) { }

    void addMemberInitializer(CXXCtorInitializer *MemberInit) {
      if (isMemberInitMemcpyable(MemberInit)) {
        AggregatedInits.push_back(MemberInit);
        addMemcpyableField(MemberInit->getMember());
      } else {
        emitAggregatedInits();
        EmitMemberInitializer(CGF, ConstructorDecl->getParent(), MemberInit,
                              ConstructorDecl, Args);
      }
    }

    void emitAggregatedInits() {
      if (AggregatedInits.size() <= 1) {
        // This memcpy is too small to be worthwhile. Fall back on default
        // codegen.
        if (!AggregatedInits.empty()) {
          CopyingValueRepresentation CVR(CGF);
          EmitMemberInitializer(CGF, ConstructorDecl->getParent(),
                                AggregatedInits[0], ConstructorDecl, Args);
          AggregatedInits.clear();
        }
        reset();
        return;
      }

      pushEHDestructors();
      emitMemcpy();
      AggregatedInits.clear();
    }

    void pushEHDestructors() {
      Address ThisPtr = CGF.LoadCXXThisAddress();
      QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
      LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy);

      for (unsigned i = 0; i < AggregatedInits.size(); ++i) {
        CXXCtorInitializer *MemberInit = AggregatedInits[i];
        QualType FieldType = MemberInit->getAnyMember()->getType();
        QualType::DestructionKind dtorKind = FieldType.isDestructedType();
        if (!CGF.needsEHCleanup(dtorKind))
          continue;
        LValue FieldLHS = LHS;
        EmitLValueForAnyFieldInitialization(CGF, MemberInit, FieldLHS);
        CGF.pushEHDestroy(dtorKind, FieldLHS.getAddress(), FieldType);
      }
    }

    void finish() {
      emitAggregatedInits();
    }

  private:
    const CXXConstructorDecl *ConstructorDecl;
    bool MemcpyableCtor;
    FunctionArgList &Args;
    SmallVector<CXXCtorInitializer*, 16> AggregatedInits;
  };

  class AssignmentMemcpyizer : public FieldMemcpyizer {
  private:
    // Returns the memcpyable field copied by the given statement, if one
    // exists. Otherwise returns null.
    FieldDecl *getMemcpyableField(Stmt *S) {
      if (!AssignmentsMemcpyable)
        return nullptr;
      if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) {
        // Recognise trivial assignments.
        if (BO->getOpcode() != BO_Assign)
          return nullptr;
        MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS());
        if (!ME)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        Stmt *RHS = BO->getRHS();
        if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS))
          RHS = EC->getSubExpr();
        if (!RHS)
          return nullptr;
        if (MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS)) {
          if (ME2->getMemberDecl() == Field)
            return Field;
        }
        return nullptr;
      } else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) {
        CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl());
        if (!(MD && isMemcpyEquivalentSpecialMember(MD)))
          return nullptr;
        MemberExpr *IOA = dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument());
        if (!IOA)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0));
        if (!Arg0 || Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl()))
          return nullptr;
        return Field;
      } else if (CallExpr *CE = dyn_cast<CallExpr>(S)) {
        FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl());
        if (!FD || FD->getBuiltinID() != Builtin::BI__builtin_memcpy)
          return nullptr;
        Expr *DstPtr = CE->getArg(0);
        if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr))
          DstPtr = DC->getSubExpr();
        UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr);
        if (!DUO || DUO->getOpcode() != UO_AddrOf)
          return nullptr;
        MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr());
        if (!ME)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        Expr *SrcPtr = CE->getArg(1);
        if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr))
          SrcPtr = SC->getSubExpr();
        UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr);
        if (!SUO || SUO->getOpcode() != UO_AddrOf)
          return nullptr;
        MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr());
        if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl()))
          return nullptr;
        return Field;
      }

      return nullptr;
    }

    bool AssignmentsMemcpyable;
    SmallVector<Stmt*, 16> AggregatedStmts;

  public:
    AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD,
                         FunctionArgList &Args)
      : FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]),
        AssignmentsMemcpyable(CGF.getLangOpts().getGC() == LangOptions::NonGC) {
      assert(Args.size() == 2);
    }

    void emitAssignment(Stmt *S) {
      FieldDecl *F = getMemcpyableField(S);
      if (F) {
        addMemcpyableField(F);
        AggregatedStmts.push_back(S);
      } else {
        emitAggregatedStmts();
        CGF.EmitStmt(S);
      }
    }

    void emitAggregatedStmts() {
      if (AggregatedStmts.size() <= 1) {
        if (!AggregatedStmts.empty()) {
          CopyingValueRepresentation CVR(CGF);
          CGF.EmitStmt(AggregatedStmts[0]);
        }
        reset();
      }

      emitMemcpy();
      AggregatedStmts.clear();
    }

    void finish() {
      emitAggregatedStmts();
    }
  };
} // end anonymous namespace

static bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) {
  const Type *BaseType = BaseInit->getBaseClass();
  const auto *BaseClassDecl =
      cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());
  return BaseClassDecl->isDynamicClass();
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
                                          E = CD->init_end();

  // Virtual base initializers first, if any. They aren't needed if:
  // - This is a base ctor variant
  // - There are no vbases
  // - The class is abstract, so a complete object of it cannot be constructed
  //
  // The check for an abstract class is necessary because sema may not have
  // marked virtual base destructors referenced.
  bool ConstructVBases = CtorType != Ctor_Base &&
                         ClassDecl->getNumVBases() != 0 &&
                         !ClassDecl->isAbstract();

  // In the Microsoft C++ ABI, there are no constructor variants. Instead, the
  // constructor of a class with virtual bases takes an additional parameter to
  // conditionally construct the virtual bases. Emit that check here.
  llvm::BasicBlock *BaseCtorContinueBB = nullptr;
  if (ConstructVBases &&
      !CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    BaseCtorContinueBB =
        CGM.getCXXABI().EmitCtorCompleteObjectHandler(*this, ClassDecl);
    assert(BaseCtorContinueBB);
  }

  for (; B != E && (*B)->isBaseInitializer() && (*B)->isBaseVirtual(); B++) {
    if (!ConstructVBases)
      continue;
    SaveAndRestore ThisRAII(CXXThisValue);
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B);
  }

  if (BaseCtorContinueBB) {
    // Complete object handler should continue to the remaining initializers.
    Builder.CreateBr(BaseCtorContinueBB);
    EmitBlock(BaseCtorContinueBB);
  }

  // Then, non-virtual base initializers.
  for (; B != E && (*B)->isBaseInitializer(); B++) {
    assert(!(*B)->isBaseVirtual());
    SaveAndRestore ThisRAII(CXXThisValue);
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B);
  }

  InitializeVTablePointers(ClassDecl);

  // And finally, initialize class members.
  FieldConstructionScope FCS(*this, LoadCXXThisAddress());
  ConstructorMemcpyizer CM(*this, CD, Args);
  for (; B != E; B++) {
    CXXCtorInitializer *Member = (*B);
    assert(!Member->isBaseInitializer());
    assert(Member->isAnyMemberInitializer() &&
           "Delegating initializer on non-delegating constructor");
    CM.addMemberInitializer(Member);
  }
  CM.finish();
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (const auto *Field : BaseClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;

  // Check non-virtual bases.
  for (const auto &I : BaseClassDecl->bases()) {
    if (I.isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
      cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (const auto &I : BaseClassDecl->vbases()) {
      const CXXRecordDecl *VirtualBase =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());

  // The destructor for an implicit anonymous union member is never invoked.
  if (FieldClassDecl->isUnion() && FieldClassDecl->isAnonymousStructOrUnion())
    return true;

  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF,
                                               const CXXDestructorDecl *Dtor) {
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  if (!ClassDecl->isDynamicClass())
    return true;

  // For a final class, the vtable pointer is known to already point to the
  // class's vtable.
  if (ClassDecl->isEffectivelyFinal())
    return true;

  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  for (const auto *Field : ClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field))
      return false;

  return true;
}
1436
1437
/// EmitDestructorBody - Emits the body of the current destructor.
1438
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
1439
const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
1440
CXXDtorType DtorType = CurGD.getDtorType();
1441
1442
// For an abstract class, non-base destructors are never used (and can't
1443
// be emitted in general, because vbase dtors may not have been validated
1444
// by Sema), but the Itanium ABI doesn't make them optional and Clang may
1445
// in fact emit references to them from other compilations, so emit them
1446
// as functions containing a trap instruction.
1447
if (DtorType != Dtor_Base && Dtor->getParent()->isAbstract()) {
1448
llvm::CallInst *TrapCall = EmitTrapCall(llvm::Intrinsic::trap);
1449
TrapCall->setDoesNotReturn();
1450
TrapCall->setDoesNotThrow();
1451
Builder.CreateUnreachable();
1452
Builder.ClearInsertionPoint();
1453
return;
1454
}
1455
1456
Stmt *Body = Dtor->getBody();
1457
if (Body) {
1458
incrementProfileCounter(Body);
1459
maybeCreateMCDCCondBitmap();
1460
}
1461
1462
// The call to operator delete in a deleting destructor happens
1463
// outside of the function-try-block, which means it's always
1464
// possible to delegate the destructor body to the complete
1465
// destructor. Do so.
1466
if (DtorType == Dtor_Deleting) {
1467
RunCleanupsScope DtorEpilogue(*this);
1468
EnterDtorCleanups(Dtor, Dtor_Deleting);
1469
if (HaveInsertPoint()) {
1470
QualType ThisTy = Dtor->getFunctionObjectParameterType();
1471
EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
1472
/*Delegating=*/false, LoadCXXThisAddress(), ThisTy);
1473
}
1474
return;
1475
}
1476
1477
// If the body is a function-try-block, enter the try before
1478
// anything else.
1479
bool isTryBody = isa_and_nonnull<CXXTryStmt>(Body);
1480
if (isTryBody)
1481
EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
1482
EmitAsanPrologueOrEpilogue(false);
1483
1484
// Enter the epilogue cleanups.
1485
RunCleanupsScope DtorEpilogue(*this);
1486
1487
// If this is the complete variant, just invoke the base variant;
1488
// the epilogue will destruct the virtual bases. But we can't do
1489
// this optimization if the body is a function-try-block, because
1490
// we'd introduce *two* handler blocks. In the Microsoft ABI, we
1491
// always delegate because we might not have a definition in this TU.
1492
switch (DtorType) {
1493
case Dtor_Comdat: llvm_unreachable("not expecting a COMDAT");
1494
case Dtor_Deleting: llvm_unreachable("already handled deleting case");
1495
1496
case Dtor_Complete:
1497
assert((Body || getTarget().getCXXABI().isMicrosoft()) &&
1498
"can't emit a dtor without a body for non-Microsoft ABIs");
1499
1500
// Enter the cleanup scopes for virtual bases.
1501
EnterDtorCleanups(Dtor, Dtor_Complete);
1502
1503
if (!isTryBody) {
1504
QualType ThisTy = Dtor->getFunctionObjectParameterType();
1505
EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
1506
/*Delegating=*/false, LoadCXXThisAddress(), ThisTy);
1507
break;
1508
}
1509
1510
// Fallthrough: act like we're in the base variant.
1511
[[fallthrough]];
1512
1513
case Dtor_Base:
1514
assert(Body);
1515
1516
// Enter the cleanup scopes for fields and non-virtual bases.
1517
EnterDtorCleanups(Dtor, Dtor_Base);
1518
1519
// Initialize the vtable pointers before entering the body.
1520
if (!CanSkipVTablePointerInitialization(*this, Dtor)) {
1521
// Insert the llvm.launder.invariant.group intrinsic before initializing
1522
// the vptrs to cancel any previous assumptions we might have made.
1523
if (CGM.getCodeGenOpts().StrictVTablePointers &&
1524
CGM.getCodeGenOpts().OptimizationLevel > 0)
1525
CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
1526
InitializeVTablePointers(Dtor->getParent());
1527
}
1528
1529
if (isTryBody)
1530
EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
1531
else if (Body)
1532
EmitStmt(Body);
1533
else {
1534
assert(Dtor->isImplicit() && "bodyless dtor not implicit");
1535
// nothing to do besides what's in the epilogue
1536
}
1537
// -fapple-kext must inline any call to this dtor into
1538
// the caller's body.
1539
if (getLangOpts().AppleKext)
1540
CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
1541
1542
break;
1543
}
1544
1545
// Jump out through the epilogue cleanups.
1546
DtorEpilogue.ForceCleanup();
1547
1548
// Exit the try if applicable.
1549
if (isTryBody)
1550
ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
1551
}
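
// Illustrative note: given a hypothetical class with a virtual base,
//
//   struct A { virtual ~A(); };
//   struct B : virtual A { ~B(); int *p; };
//
// the Itanium ABI emits up to three variants of ~B(): the base-object
// destructor (D2), which destroys fields and non-virtual bases only; the
// complete-object destructor (D1), emitted above as a call to D2 followed by
// the virtual-base (A) cleanups; and the deleting destructor (D0), which
// delegates to D1 and then calls operator delete.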

void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) {
  const CXXMethodDecl *AssignOp = cast<CXXMethodDecl>(CurGD.getDecl());
  const Stmt *RootS = AssignOp->getBody();
  assert(isa<CompoundStmt>(RootS) &&
         "Body of an implicit assignment operator should be compound stmt.");
  const CompoundStmt *RootCS = cast<CompoundStmt>(RootS);

  LexicalScope Scope(*this, RootCS->getSourceRange());

  incrementProfileCounter(RootCS);
  maybeCreateMCDCCondBitmap();
  AssignmentMemcpyizer AM(*this, AssignOp, Args);
  for (auto *I : RootCS->body())
    AM.emitAssignment(I);
  AM.finish();
}
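
// Illustrative sketch: for a hypothetical class like
//
//   struct S { int a, b, c; std::string s; };
//
// the implicit S::operator=(const S&) is modeled in the AST as a sequence of
// member-wise assignments. The AssignmentMemcpyizer scans that sequence and
// coalesces runs of trivially-assignable members (a, b, c) into a single
// memcpy, while still emitting a real assignment for the non-trivial
// member (s).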

namespace {
llvm::Value *LoadThisForDtorDelete(CodeGenFunction &CGF,
                                   const CXXDestructorDecl *DD) {
  if (Expr *ThisArg = DD->getOperatorDeleteThisArg())
    return CGF.EmitScalarExpr(ThisArg);
  return CGF.LoadCXXThis();
}

/// Call the operator delete associated with the current destructor.
struct CallDtorDelete final : EHScopeStack::Cleanup {
  CallDtorDelete() {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
    const CXXRecordDecl *ClassDecl = Dtor->getParent();
    CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
                       LoadThisForDtorDelete(CGF, Dtor),
                       CGF.getContext().getTagDeclType(ClassDecl));
  }
};

void EmitConditionalDtorDeleteCall(CodeGenFunction &CGF,
                                   llvm::Value *ShouldDeleteCondition,
                                   bool ReturnAfterDelete) {
  llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete");
  llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue");
  llvm::Value *ShouldCallDelete
    = CGF.Builder.CreateIsNull(ShouldDeleteCondition);
  CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB);

  CGF.EmitBlock(callDeleteBB);
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
                     LoadThisForDtorDelete(CGF, Dtor),
                     CGF.getContext().getTagDeclType(ClassDecl));
  assert(Dtor->getOperatorDelete()->isDestroyingOperatorDelete() ==
             ReturnAfterDelete &&
         "unexpected value for ReturnAfterDelete");
  if (ReturnAfterDelete)
    CGF.EmitBranchThroughCleanup(CGF.ReturnBlock);
  else
    CGF.Builder.CreateBr(continueBB);

  CGF.EmitBlock(continueBB);
}

struct CallDtorDeleteConditional final : EHScopeStack::Cleanup {
  llvm::Value *ShouldDeleteCondition;

public:
  CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition)
      : ShouldDeleteCondition(ShouldDeleteCondition) {
    assert(ShouldDeleteCondition != nullptr);
  }

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    EmitConditionalDtorDeleteCall(CGF, ShouldDeleteCondition,
                                  /*ReturnAfterDelete*/false);
  }
};

class DestroyField final : public EHScopeStack::Cleanup {
  const FieldDecl *field;
  CodeGenFunction::Destroyer *destroyer;
  bool useEHCleanupForArray;

public:
  DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
               bool useEHCleanupForArray)
      : field(field), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    // Find the address of the field.
    Address thisValue = CGF.LoadCXXThisAddress();
    QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
    LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
    LValue LV = CGF.EmitLValueForField(ThisLV, field);
    assert(LV.isSimple());

    CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                    flags.isForNormalCleanup() && useEHCleanupForArray);
  }
};

class DeclAsInlineDebugLocation {
  CGDebugInfo *DI;
  llvm::MDNode *InlinedAt;
  std::optional<ApplyDebugLocation> Location;

public:
  DeclAsInlineDebugLocation(CodeGenFunction &CGF, const NamedDecl &Decl)
      : DI(CGF.getDebugInfo()) {
    if (!DI)
      return;
    InlinedAt = DI->getInlinedAt();
    DI->setInlinedAt(CGF.Builder.getCurrentDebugLocation());
    Location.emplace(CGF, Decl.getLocation());
  }

  ~DeclAsInlineDebugLocation() {
    if (!DI)
      return;
    Location.reset();
    DI->setInlinedAt(InlinedAt);
  }
};

static void EmitSanitizerDtorCallback(
    CodeGenFunction &CGF, StringRef Name, llvm::Value *Ptr,
    std::optional<CharUnits::QuantityType> PoisonSize = {}) {
  CodeGenFunction::SanitizerScope SanScope(&CGF);
  // Pass in void pointer and size of region as arguments to runtime
  // function
  SmallVector<llvm::Value *, 2> Args = {Ptr};
  SmallVector<llvm::Type *, 2> ArgTypes = {CGF.VoidPtrTy};

  if (PoisonSize.has_value()) {
    Args.emplace_back(llvm::ConstantInt::get(CGF.SizeTy, *PoisonSize));
    ArgTypes.emplace_back(CGF.SizeTy);
  }

  llvm::FunctionType *FnType =
      llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false);
  llvm::FunctionCallee Fn = CGF.CGM.CreateRuntimeFunction(FnType, Name);

  CGF.EmitNounwindRuntimeCall(Fn, Args);
}

static void
EmitSanitizerDtorFieldsCallback(CodeGenFunction &CGF, llvm::Value *Ptr,
                                CharUnits::QuantityType PoisonSize) {
  EmitSanitizerDtorCallback(CGF, "__sanitizer_dtor_callback_fields", Ptr,
                            PoisonSize);
}

/// Poison base class with a trivial destructor.
struct SanitizeDtorTrivialBase final : EHScopeStack::Cleanup {
  const CXXRecordDecl *BaseClass;
  bool BaseIsVirtual;
  SanitizeDtorTrivialBase(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

    Address Addr = CGF.GetAddressOfDirectBaseInCompleteClass(
        CGF.LoadCXXThisAddress(), DerivedClass, BaseClass, BaseIsVirtual);

    const ASTRecordLayout &BaseLayout =
        CGF.getContext().getASTRecordLayout(BaseClass);
    CharUnits BaseSize = BaseLayout.getSize();

    if (!BaseSize.isPositive())
      return;

    // Use the base class declaration location as inline DebugLocation. All
    // fields of the class are destroyed.
    DeclAsInlineDebugLocation InlineHere(CGF, *BaseClass);
    EmitSanitizerDtorFieldsCallback(CGF, Addr.emitRawPointer(CGF),
                                    BaseSize.getQuantity());

    // Prevent the current stack frame from disappearing from the stack trace.
    CGF.CurFn->addFnAttr("disable-tail-calls", "true");
  }
};

class SanitizeDtorFieldRange final : public EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;
  unsigned StartIndex;
  unsigned EndIndex;

public:
  SanitizeDtorFieldRange(const CXXDestructorDecl *Dtor, unsigned StartIndex,
                         unsigned EndIndex)
      : Dtor(Dtor), StartIndex(StartIndex), EndIndex(EndIndex) {}

  // Generate function call for handling object poisoning.
  // Disables tail call elimination, to prevent the current stack frame
  // from disappearing from the stack trace.
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    const ASTContext &Context = CGF.getContext();
    const ASTRecordLayout &Layout =
        Context.getASTRecordLayout(Dtor->getParent());

    // The first field in the range is trivial, so its offset should already
    // be char-aligned; still, round the start offset up to a whole char just
    // in case.
    CharUnits PoisonStart = Context.toCharUnitsFromBits(
        Layout.getFieldOffset(StartIndex) + Context.getCharWidth() - 1);
    llvm::ConstantInt *OffsetSizePtr =
        llvm::ConstantInt::get(CGF.SizeTy, PoisonStart.getQuantity());

    llvm::Value *OffsetPtr =
        CGF.Builder.CreateGEP(CGF.Int8Ty, CGF.LoadCXXThis(), OffsetSizePtr);

    CharUnits PoisonEnd;
    if (EndIndex >= Layout.getFieldCount()) {
      PoisonEnd = Layout.getNonVirtualSize();
    } else {
      PoisonEnd =
          Context.toCharUnitsFromBits(Layout.getFieldOffset(EndIndex));
    }
    CharUnits PoisonSize = PoisonEnd - PoisonStart;
    if (!PoisonSize.isPositive())
      return;

    // Use the top field declaration location as inline DebugLocation.
    DeclAsInlineDebugLocation InlineHere(
        CGF, **std::next(Dtor->getParent()->field_begin(), StartIndex));
    EmitSanitizerDtorFieldsCallback(CGF, OffsetPtr, PoisonSize.getQuantity());

    // Prevent the current stack frame from disappearing from the stack trace.
    CGF.CurFn->addFnAttr("disable-tail-calls", "true");
  }
};
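
// Sketch of the effect under -fsanitize=memory with
// -fsanitize-memory-use-after-dtor (types here are purely illustrative):
//
//   struct T { int a; int b; std::string s; ~T(); };
//
// After ~T() runs, the contiguous range covering the trivially-destructible
// fields a and b is reported to the runtime via
// __sanitizer_dtor_callback_fields, so a later read of those bytes can be
// flagged as a use-after-destruction; the non-trivial member s is poisoned
// from within its own destructor instead.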

class SanitizeDtorVTable final : public EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;

public:
  SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}

  // Generate function call for handling vtable pointer poisoning.
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    assert(Dtor->getParent()->isDynamicClass());
    (void)Dtor;
    // Poison vtable and vtable ptr if they exist for this class.
    llvm::Value *VTablePtr = CGF.LoadCXXThis();

    // Pass in void pointer and size of region as arguments to runtime
    // function
    EmitSanitizerDtorCallback(CGF, "__sanitizer_dtor_callback_vptr",
                              VTablePtr);
  }
};

class SanitizeDtorCleanupBuilder {
  ASTContext &Context;
  EHScopeStack &EHStack;
  const CXXDestructorDecl *DD;
  std::optional<unsigned> StartIndex;

public:
  SanitizeDtorCleanupBuilder(ASTContext &Context, EHScopeStack &EHStack,
                             const CXXDestructorDecl *DD)
      : Context(Context), EHStack(EHStack), DD(DD), StartIndex(std::nullopt) {}
  void PushCleanupForField(const FieldDecl *Field) {
    if (isEmptyFieldForLayout(Context, Field))
      return;
    unsigned FieldIndex = Field->getFieldIndex();
    if (FieldHasTrivialDestructorBody(Context, Field)) {
      if (!StartIndex)
        StartIndex = FieldIndex;
    } else if (StartIndex) {
      EHStack.pushCleanup<SanitizeDtorFieldRange>(NormalAndEHCleanup, DD,
                                                  *StartIndex, FieldIndex);
      StartIndex = std::nullopt;
    }
  }
  void End() {
    if (StartIndex)
      EHStack.pushCleanup<SanitizeDtorFieldRange>(NormalAndEHCleanup, DD,
                                                  *StartIndex, -1);
  }
};
} // end anonymous namespace

/// Emit all code that comes at the end of a class's destructor. This is to
/// call destructors on members and base classes in reverse order of their
/// construction.
///
/// For a deleting destructor, this also handles the case where a destroying
/// operator delete completely overrides the definition.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) &&
         "Should not emit dtor epilogue for non-exported trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    if (CXXStructorImplicitParamValue) {
      // If there is an implicit param to the deleting dtor, it's a boolean
      // telling us whether operator delete should actually be called.
      if (DD->getOperatorDelete()->isDestroyingOperatorDelete())
        EmitConditionalDtorDeleteCall(*this, CXXStructorImplicitParamValue,
                                      /*ReturnAfterDelete*/true);
      else
        EHStack.pushCleanup<CallDtorDeleteConditional>(
            NormalAndEHCleanup, CXXStructorImplicitParamValue);
    } else {
      if (DD->getOperatorDelete()->isDestroyingOperatorDelete()) {
        const CXXRecordDecl *ClassDecl = DD->getParent();
        EmitDeleteCall(DD->getOperatorDelete(),
                       LoadThisForDtorDelete(*this, DD),
                       getContext().getTagDeclType(ClassDecl));
        EmitBranchThroughCleanup(ReturnBlock);
      } else {
        EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
      }
    }
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {
    // Poison the vtable pointer such that accessing it after the base and
    // member destructors have run is invalid.
    if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
        SanOpts.has(SanitizerKind::Memory) && ClassDecl->getNumVBases() &&
        ClassDecl->isPolymorphic())
      EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (const auto &Base : ClassDecl->vbases()) {
      auto *BaseClassDecl =
          cast<CXXRecordDecl>(Base.getType()->castAs<RecordType>()->getDecl());

      if (BaseClassDecl->hasTrivialDestructor()) {
        // Under SanitizeMemoryUseAfterDtor, poison the trivial base class
        // memory. For non-trivial base classes the same is done in the class
        // destructor.
        if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
            SanOpts.has(SanitizerKind::Memory) && !BaseClassDecl->isEmpty())
          EHStack.pushCleanup<SanitizeDtorTrivialBase>(NormalAndEHCleanup,
                                                       BaseClassDecl,
                                                       /*BaseIsVirtual*/ true);
      } else {
        EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, BaseClassDecl,
                                          /*BaseIsVirtual*/ true);
      }
    }

    return;
  }

  assert(DtorType == Dtor_Base);
  // Poison the vtable pointer if the class has no virtual bases but
  // inherits virtual functions.
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
      SanOpts.has(SanitizerKind::Memory) && !ClassDecl->getNumVBases() &&
      ClassDecl->isPolymorphic())
    EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

  // Destroy non-virtual bases.
  for (const auto &Base : ClassDecl->bases()) {
    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    if (BaseClassDecl->hasTrivialDestructor()) {
      if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
          SanOpts.has(SanitizerKind::Memory) && !BaseClassDecl->isEmpty())
        EHStack.pushCleanup<SanitizeDtorTrivialBase>(NormalAndEHCleanup,
                                                     BaseClassDecl,
                                                     /*BaseIsVirtual*/ false);
    } else {
      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, BaseClassDecl,
                                        /*BaseIsVirtual*/ false);
    }
  }

  // Poison fields such that accessing them after their destructors have run,
  // and before the base class destructor runs, is invalid.
  bool SanitizeFields = CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
                        SanOpts.has(SanitizerKind::Memory);
  SanitizeDtorCleanupBuilder SanitizeBuilder(getContext(), EHStack, DD);

  // Destroy direct fields.
  for (const auto *Field : ClassDecl->fields()) {
    if (SanitizeFields)
      SanitizeBuilder.PushCleanupForField(Field);

    QualType type = Field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind)
      continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion())
      continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(
        cleanupKind, Field, getDestroyer(dtorKind), cleanupKind & EHCleanup);
  }

  if (SanitizeFields)
    SanitizeBuilder.End();
}
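
// The net effect is that destruction mirrors construction in reverse. For a
// hypothetical
//
//   struct D : B1, B2 { M1 m1; M2 m2; ~D(); };
//
// the base-object destructor runs the body of ~D(), then destroys m2, m1,
// B2, B1 in that order, because the cleanups above are pushed in forward
// order and popped in reverse.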

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(
    const CXXConstructorDecl *ctor, const ArrayType *arrayType,
    Address arrayBegin, const CXXConstructExpr *E, bool NewPointerIsChecked,
    bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
      emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin, E,
                             NewPointerIsChecked, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param arrayBase a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                                 llvm::Value *numElements,
                                                 Address arrayBase,
                                                 const CXXConstructExpr *E,
                                                 bool NewPointerIsChecked,
                                                 bool zeroInitialize) {
  // It's legal for numElements to be zero. This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays. There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = nullptr;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

    // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Type *elementType = arrayBase.getElementType();
  llvm::Value *arrayBegin = arrayBase.emitRawPointer(*this);
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(
      elementType, arrayBegin, numElements, "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  // The alignment of the base, adjusted by the size of a single element,
  // provides a conservative estimate of the alignment of every element.
  // (This assumes we never start tracking offsetted alignments.)
  //
  // Note that these are complete objects and so we don't need to
  // use the non-virtual size or alignment.
  QualType type = getContext().getTypeDeclType(ctor->getParent());
  CharUnits eltAlignment =
      arrayBase.getAlignment()
          .alignmentOfArrayElement(getContext().getTypeSizeInChars(type));
  Address curAddr = Address(cur, elementType, eltAlignment);

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(curAddr, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment,
                                     *destroyer);
    }
    auto currAVS = AggValueSlot::forAddr(
        curAddr, type.getQualifiers(), AggValueSlot::IsDestructed,
        AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
        AggValueSlot::DoesNotOverlap, AggValueSlot::IsNotZeroed,
        NewPointerIsChecked ? AggValueSlot::IsSanitizerChecked
                            : AggValueSlot::IsNotSanitizerChecked);
    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false,
                           /*Delegating=*/false, currAVS, E);
  }

  // Go to the next element.
  llvm::Value *next = Builder.CreateInBoundsGEP(
      elementType, cur, llvm::ConstantInt::get(SizeTy, 1), "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}
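
// For a dynamic count, e.g. 'new A[n]', the emitted control flow looks
// roughly like this (IR sketch; names and mangling are illustrative, and the
// zero-check branch below shows its final, patched form):
//
//   %isempty = icmp eq i64 %n, 0
//   br i1 %isempty, label %arrayctor.cont, label %new.ctorloop
//   ...
//   arrayctor.loop:
//     %cur = phi ptr [ %begin, %entry ], [ %next, %arrayctor.loop ]
//     call void @_ZN1AC1Ev(ptr %cur)            ; A::A()
//     %next = getelementptr %struct.A, ptr %cur, i64 1
//     %done = icmp eq ptr %next, %end
//     br i1 %done, label %arrayctor.cont, label %arrayctor.loop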

void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       Address addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            /*Delegating=*/false, addr, type);
}

void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating,
                                             AggValueSlot ThisAVS,
                                             const CXXConstructExpr *E) {
  CallArgList Args;
  Address This = ThisAVS.getAddress();
  LangAS SlotAS = ThisAVS.getQualifiers().getAddressSpace();
  LangAS ThisAS = D->getFunctionObjectParameterType().getAddressSpace();
  llvm::Value *ThisPtr =
      getAsNaturalPointerTo(This, D->getThisType()->getPointeeType());

  if (SlotAS != ThisAS) {
    unsigned TargetThisAS = getContext().getTargetAddressSpace(ThisAS);
    llvm::Type *NewType =
        llvm::PointerType::get(getLLVMContext(), TargetThisAS);
    ThisPtr = getTargetHooks().performAddrSpaceCast(*this, ThisPtr, ThisAS,
                                                    SlotAS, NewType);
  }

  // Push the this ptr.
  Args.add(RValue::get(ThisPtr), D->getThisType());

  // If this is a trivial constructor, emit a memcpy now before we lose
  // the alignment information on the argument.
  // FIXME: It would be better to preserve alignment information into CallArg.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");

    const Expr *Arg = E->getArg(0);
    LValue Src = EmitLValue(Arg);
    QualType DestTy = getContext().getTypeDeclType(D->getParent());
    LValue Dest = MakeAddrLValue(This, DestTy);
    EmitAggregateCopyCtor(Dest, Src, ThisAVS.mayOverlap());
    return;
  }

  // Add the rest of the user-supplied arguments.
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
  EvaluationOrder Order = E->isListInitialization()
                              ? EvaluationOrder::ForceLeftToRight
                              : EvaluationOrder::Default;
  EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor(),
               /*ParamsToSkip*/ 0, Order);

  EmitCXXConstructorCall(D, Type, ForVirtualBase, Delegating, This, Args,
                         ThisAVS.mayOverlap(), E->getExprLoc(),
                         ThisAVS.isSanitizerChecked());
}
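
// For example, copy-constructing a hypothetical trivially copyable type,
//
//   struct P { int x, y; };
//   P b(a);   // trivial copy constructor
//
// takes the isMemcpyEquivalentSpecialMember() path above: no constructor call
// is emitted at all, just an aggregate copy (effectively a memcpy) from the
// source lvalue into the destination slot.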

static bool canEmitDelegateCallArgs(CodeGenFunction &CGF,
                                    const CXXConstructorDecl *Ctor,
                                    CXXCtorType Type, CallArgList &Args) {
  // We can't forward a variadic call.
  if (Ctor->isVariadic())
    return false;

  if (CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
    // If the parameters are callee-cleanup, it's not safe to forward.
    for (auto *P : Ctor->parameters())
      if (P->needsDestruction(CGF.getContext()))
        return false;

    // Likewise if they're inalloca.
    const CGFunctionInfo &Info =
        CGF.CGM.getTypes().arrangeCXXConstructorCall(Args, Ctor, Type, 0, 0);
    if (Info.usesInAlloca())
      return false;
  }

  // Anything else should be OK.
  return true;
}
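
// For instance, an inherited constructor whose signature is variadic,
//
//   struct Base { Base(int, ...); };
//   struct Derived : Base { using Base::Base; };
//
// cannot have its arguments re-forwarded as a plain delegate call, so the
// caller below falls back to EmitInlinedInheritingCXXConstructorCall()
// instead.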

void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating,
                                             Address This,
                                             CallArgList &Args,
                                             AggValueSlot::Overlap_t Overlap,
                                             SourceLocation Loc,
                                             bool NewPointerIsChecked) {
  const CXXRecordDecl *ClassDecl = D->getParent();

  if (!NewPointerIsChecked)
    EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, Loc, This,
                  getContext().getRecordType(ClassDecl), CharUnits::Zero());

  if (D->isTrivial() && D->isDefaultConstructor()) {
    assert(Args.size() == 1 && "trivial default ctor with args");
    return;
  }

  // If this is a trivial constructor, just emit what's needed. If this is a
  // union copy constructor, we must emit a memcpy, because the AST does not
  // model that copy.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(Args.size() == 2 && "unexpected argcount for trivial ctor");
    QualType SrcTy = D->getParamDecl(0)->getType().getNonReferenceType();
    Address Src = makeNaturalAddressForPointer(
        Args[1].getRValue(*this).getScalarVal(), SrcTy);
    LValue SrcLVal = MakeAddrLValue(Src, SrcTy);
    QualType DestTy = getContext().getTypeDeclType(ClassDecl);
    LValue DestLVal = MakeAddrLValue(This, DestTy);
    EmitAggregateCopyCtor(DestLVal, SrcLVal, Overlap);
    return;
  }

  bool PassPrototypeArgs = true;
  // Check whether we can actually emit the constructor before trying to do so.
  if (auto Inherited = D->getInheritedConstructor()) {
    PassPrototypeArgs = getTypes().inheritingCtorHasParams(Inherited, Type);
    if (PassPrototypeArgs && !canEmitDelegateCallArgs(*this, D, Type, Args)) {
      EmitInlinedInheritingCXXConstructorCall(D, Type, ForVirtualBase,
                                              Delegating, Args);
      return;
    }
  }

  // Insert any ABI-specific implicit constructor arguments.
  CGCXXABI::AddedStructorArgCounts ExtraArgs =
      CGM.getCXXABI().addImplicitConstructorArgs(*this, D, Type, ForVirtualBase,
                                                 Delegating, Args);

  // Emit the call.
  llvm::Constant *CalleePtr = CGM.getAddrOfCXXStructor(GlobalDecl(D, Type));
  const CGFunctionInfo &Info = CGM.getTypes().arrangeCXXConstructorCall(
      Args, D, Type, ExtraArgs.Prefix, ExtraArgs.Suffix, PassPrototypeArgs);
  CGCallee Callee = CGCallee::forDirect(CalleePtr, GlobalDecl(D, Type));
  EmitCall(Info, Callee, ReturnValueSlot(), Args, nullptr, false, Loc);

  // Generate vtable assumptions if we're constructing a complete object
  // with a vtable. We don't do this for base subobjects for two reasons:
  // first, it's incorrect for classes with virtual bases, and second, we're
  // about to overwrite the vptrs anyway.
  // We also have to make sure we can refer to the vtable:
  // - We can refer to the vtable if it's safe to speculatively emit it.
  // FIXME: If the vtable is used by the ctor/dtor, or if the vtable is
  // external and we are sure its definition is not hidden, then we are
  // always safe to refer to it.
  // FIXME: It looks like InstCombine is very inefficient on dealing with
  // assumes. Make assumption loads require -fstrict-vtable-pointers
  // temporarily.
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      ClassDecl->isDynamicClass() && Type != Ctor_Base &&
      CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl) &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    EmitVTableAssumptionLoads(ClassDecl, This);
}

void CodeGenFunction::EmitInheritedCXXConstructorCall(
    const CXXConstructorDecl *D, bool ForVirtualBase, Address This,
    bool InheritedFromVBase, const CXXInheritedCtorInitExpr *E) {
  CallArgList Args;
  CallArg ThisArg(RValue::get(getAsNaturalPointerTo(
                      This, D->getThisType()->getPointeeType())),
                  D->getThisType());

  // Forward the parameters.
  if (InheritedFromVBase &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    // Nothing to do; this construction is not responsible for constructing
    // the base class containing the inherited constructor.
    // FIXME: Can we just pass undef's for the remaining arguments if we don't
    // have constructor variants?
    Args.push_back(ThisArg);
  } else if (!CXXInheritedCtorInitExprArgs.empty()) {
    // The inheriting constructor was inlined; just inject its arguments.
    assert(CXXInheritedCtorInitExprArgs.size() >= D->getNumParams() &&
           "wrong number of parameters for inherited constructor call");
    Args = CXXInheritedCtorInitExprArgs;
    Args[0] = ThisArg;
  } else {
    // The inheriting constructor was not inlined. Emit delegating arguments.
    Args.push_back(ThisArg);
    const auto *OuterCtor = cast<CXXConstructorDecl>(CurCodeDecl);
    assert(OuterCtor->getNumParams() == D->getNumParams());
    assert(!OuterCtor->isVariadic() && "should have been inlined");

    for (const auto *Param : OuterCtor->parameters()) {
      assert(getContext().hasSameUnqualifiedType(
          OuterCtor->getParamDecl(Param->getFunctionScopeIndex())->getType(),
          Param->getType()));
      EmitDelegateCallArg(Args, Param, E->getLocation());

      // Forward __attribute__(pass_object_size).
      if (Param->hasAttr<PassObjectSizeAttr>()) {
        auto *POSParam = SizeArguments[Param];
        assert(POSParam && "missing pass_object_size value for forwarding");
        EmitDelegateCallArg(Args, POSParam, E->getLocation());
      }
    }
  }

  EmitCXXConstructorCall(D, Ctor_Base, ForVirtualBase, /*Delegating*/false,
                         This, Args, AggValueSlot::MayOverlap,
                         E->getLocation(), /*NewPointerIsChecked*/true);
}
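
// This handles constructor inheritance, e.g. for a hypothetical
//
//   struct Base { Base(int); };
//   struct Derived : Base { using Base::Base; };
//
// Constructing Derived(42) calls the inherited Base(int) on the Base
// subobject. The three branches above cover the virtual-base no-op case, the
// case where the inheriting constructor was inlined into its caller, and the
// ordinary case of re-forwarding the enclosing constructor's parameters.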

void CodeGenFunction::EmitInlinedInheritingCXXConstructorCall(
    const CXXConstructorDecl *Ctor, CXXCtorType CtorType, bool ForVirtualBase,
    bool Delegating, CallArgList &Args) {
  GlobalDecl GD(Ctor, CtorType);
  InlinedInheritingConstructorScope Scope(*this, GD);
  ApplyInlineDebugLocation DebugScope(*this, GD);
  RunCleanupsScope RunCleanups(*this);

  // Save the arguments to be passed to the inherited constructor.
  CXXInheritedCtorInitExprArgs = Args;

  FunctionArgList Params;
  QualType RetType = BuildFunctionArgList(CurGD, Params);
  FnRetTy = RetType;

  // Insert any ABI-specific implicit constructor arguments.
  CGM.getCXXABI().addImplicitConstructorArgs(*this, Ctor, CtorType,
                                             ForVirtualBase, Delegating, Args);

  // Emit a simplified prolog. We only need to emit the implicit params.
  assert(Args.size() >= Params.size() && "too few arguments for call");
  for (unsigned I = 0, N = Args.size(); I != N; ++I) {
    if (I < Params.size() && isa<ImplicitParamDecl>(Params[I])) {
      const RValue &RV = Args[I].getRValue(*this);
      assert(!RV.isComplex() && "complex indirect params not supported");
      ParamValue Val = RV.isScalar()
                           ? ParamValue::forDirect(RV.getScalarVal())
                           : ParamValue::forIndirect(RV.getAggregateAddress());
      EmitParmDecl(*Params[I], Val, I + 1);
    }
  }

  // Create a return value slot if the ABI implementation wants one.
  // FIXME: This is dumb, we should ask the ABI not to try to set the return
  // value instead.
  if (!RetType->isVoidType())
    ReturnValue = CreateIRTemp(RetType, "retval.inhctor");

  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
  CXXThisValue = CXXABIThisValue;

  // Directly emit the constructor initializers.
  EmitCtorPrologue(Ctor, CtorType, Params);
}

void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) {
  llvm::Value *VTableGlobal =
      CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass);
  if (!VTableGlobal)
    return;

  // We can just use the base offset in the complete class.
  CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset();

  if (!NonVirtualOffset.isZero())
    This =
        ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr,
                                        Vptr.VTableClass, Vptr.NearestVBase);

  llvm::Value *VPtrValue =
      GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass);
  llvm::Value *Cmp =
      Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables");
  Builder.CreateAssumption(Cmp);
}

void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl,
                                                Address This) {
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl))
    for (const VPtr &Vptr : getVTablePointers(ClassDecl))
      EmitVTableAssumptionLoad(Vptr, This);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                                Address This, Address Src,
                                                const CXXConstructExpr *E) {
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(getAsNaturalPointerTo(This, D->getThisType())),
           D->getThisType());

  // Push the src ptr.
  QualType QT = *(FPT->param_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  llvm::Value *Val = getAsNaturalPointerTo(Src, D->getThisType());
  llvm::Value *SrcVal = Builder.CreateBitCast(Val, t);
  Args.add(RValue::get(SrcVal), QT);

  // Skip over first argument (Src).
  EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(),
               /*ParamsToSkip*/ 1);

  EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase*/false,
                         /*Delegating*/false, This, Args,
                         AggValueSlot::MayOverlap, E->getExprLoc(),
                         /*NewPointerIsChecked*/false);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args,
                                                SourceLocation Loc) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  Address This = LoadCXXThisAddress();
  DelegateArgs.add(RValue::get(getAsNaturalPointerTo(
                       This, (*I)->getType()->getPointeeType())),
                   (*I)->getType());
  ++I;

  // FIXME: The location of the VTT parameter in the parameter list is
  // specific to the Itanium ABI and shouldn't be hardcoded here.
  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    assert(I != E && "cannot skip vtt parameter, already done with args");
    assert((*I)->getType()->isPointerType() &&
           "skipping parameter not of vtt type");
    ++I;
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    // FIXME: per-argument source location
    EmitDelegateCallArg(DelegateArgs, param, Loc);
  }

  EmitCXXConstructorCall(Ctor, CtorType, /*ForVirtualBase=*/false,
                         /*Delegating=*/true, This, DelegateArgs,
                         AggValueSlot::MayOverlap, Loc,
                         /*NewPointerIsChecked=*/true);
}

namespace {
struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;
  Address Addr;
  CXXDtorType Type;

  CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr,
                         CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    // We are calling the destructor from within the constructor.
    // Therefore, "this" should have the expected type.
    QualType ThisTy = Dtor->getFunctionObjectParameterType();
    CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                              /*Delegating=*/true, Addr, ThisTy);
  }
};
} // end anonymous namespace

void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  Address ThisPtr = LoadCXXThisAddress();

  AggValueSlot AggSlot =
      AggValueSlot::forAddr(ThisPtr, Qualifiers(),
                            AggValueSlot::IsDestructed,
                            AggValueSlot::DoesNotNeedGCBarriers,
                            AggValueSlot::IsNotAliased,
                            AggValueSlot::MayOverlap,
                            AggValueSlot::IsNotZeroed,
                            // Checks are made by the code that calls constructor.
                            AggValueSlot::IsSanitizerChecked);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
        CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}
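
// For example, with a hypothetical delegating constructor
//
//   struct W { W() : W(0) {}  W(int); };
//
// the body of W() first performs a full-object construction via W(int) on
// 'this'. If W has a non-trivial destructor and exceptions are enabled, the
// cleanup pushed above destroys the already-constructed object should the
// remainder of the delegating constructor throw.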

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            bool Delegating, Address This,
                                            QualType ThisTy) {
  CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase,
                                     Delegating, This, ThisTy);
}

namespace {
struct CallLocalDtor final : EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;
  Address Addr;
  QualType Ty;

  CallLocalDtor(const CXXDestructorDecl *D, Address Addr, QualType Ty)
      : Dtor(D), Addr(Addr), Ty(Ty) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false,
                              /*Delegating=*/false, Addr, Ty);
  }
};
} // end anonymous namespace

void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            QualType T, Address Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr, T);
}

void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, T, Addr);
}

void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) {
  // Compute the address point.
  llvm::Value *VTableAddressPoint =
      CGM.getCXXABI().getVTableAddressPointInStructor(
          *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase);

  if (!VTableAddressPoint)
    return;

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = nullptr;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.

    VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset(
        *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase);
    NonVirtualOffset = Vptr.OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Vptr.Base.getBaseOffset();
  }

  // Apply the offsets.
  Address VTableField = LoadCXXThisAddress();
  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(
        *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass,
        Vptr.NearestVBase);

  // Finally, store the address point. Use the same LLVM types as the field to
  // support optimization.
  unsigned GlobalsAS = CGM.getDataLayout().getDefaultGlobalsAddressSpace();
  llvm::Type *PtrTy = llvm::PointerType::get(CGM.getLLVMContext(), GlobalsAS);
  // vtable field is derived from `this` pointer, therefore they should be in
  // the same addr space. Note that this might not be LLVM address space 0.
  VTableField = VTableField.withElementType(PtrTy);

  if (auto AuthenticationInfo = CGM.getVTablePointerAuthInfo(
          this, Vptr.Base.getBase(), VTableField.emitRawPointer(*this)))
    VTableAddressPoint =
        EmitPointerAuthSign(*AuthenticationInfo, VTableAddressPoint);

  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(PtrTy);
  CGM.DecorateInstructionWithTBAA(Store, TBAAInfo);
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass);
}
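
// As a sketch: for a hypothetical dynamic class hierarchy
//
//   struct A { virtual void f(); };
//   struct B : virtual A { virtual void g(); };
//
// constructing a complete B stores two vtable pointers: one for the B
// subobject and one for the virtual A base, each computed from the class's
// vtable address point plus the offsets above. Under
// -fstrict-vtable-pointers the stores are additionally tagged with
// invariant.group metadata so the optimizer may assume the vptr does not
// change between such stores.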

CodeGenFunction::VPtrsVector
CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) {
  CodeGenFunction::VPtrsVector VPtrsResult;
  VisitedVirtualBasesSetTy VBases;
  getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()),
                    /*NearestVBase=*/nullptr,
                    /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                    /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases,
                    VPtrsResult);
  return VPtrsResult;
}

void CodeGenFunction::getVTablePointers(BaseSubobject Base,
                                        const CXXRecordDecl *NearestVBase,
                                        CharUnits OffsetFromNearestVBase,
                                        bool BaseIsNonVirtualPrimaryBase,
                                        const CXXRecordDecl *VTableClass,
                                        VisitedVirtualBasesSetTy &VBases,
                                        VPtrsVector &Vptrs) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass};
    Vptrs.push_back(Vptr);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (const auto &I : RD->bases()) {
    auto *BaseDecl =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I.isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl).second)
        continue;

      const ASTRecordLayout &Layout =
          getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
          OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    getVTablePointers(
        BaseSubobject(BaseDecl, BaseOffset),
        I.isVirtual() ? BaseDecl : NearestVBase, BaseOffsetFromNearestVBase,
        BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs);
  }
}

void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Initialize the vtable pointers for this class and all of its bases.
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD))
    for (const VPtr &Vptr : getVTablePointers(RD))
      InitializeVTablePointer(Vptr);

  if (RD->getNumVBases())
    CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD);
}

llvm::Value *CodeGenFunction::GetVTablePtr(Address This,
                                           llvm::Type *VTableTy,
                                           const CXXRecordDecl *RD,
                                           VTableAuthMode AuthMode) {
  Address VTablePtrSrc = This.withElementType(VTableTy);
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTableTy);
  CGM.DecorateInstructionWithTBAA(VTable, TBAAInfo);

  if (auto AuthenticationInfo =
          CGM.getVTablePointerAuthInfo(this, RD, This.emitRawPointer(*this))) {
    if (AuthMode != VTableAuthMode::UnsafeUbsanStrip) {
      VTable = cast<llvm::Instruction>(
          EmitPointerAuthAuth(*AuthenticationInfo, VTable));
      if (AuthMode == VTableAuthMode::MustTrap) {
        // This is clearly suboptimal, but until we can rely on the
        // authentication intrinsic trapping, and can force an authentication
        // to occur, we don't really have a choice.
        VTable =
            cast<llvm::Instruction>(Builder.CreateBitCast(VTable, Int8PtrTy));
        Builder.CreateLoad(RawAddress(VTable, Int8Ty, CGM.getPointerAlign()),
                           /* IsVolatile */ true);
      }
    } else {
      VTable = cast<llvm::Instruction>(EmitPointerAuthAuth(
          CGPointerAuthInfo(0, PointerAuthenticationMode::Strip, false, false,
                            nullptr),
          VTable));
    }
  }

  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    CGM.DecorateInstructionWithInvariantGroup(VTable, RD);

  return VTable;
}

// If a class has a single non-virtual base and does not introduce or override
// virtual member functions or fields, it will have the same layout as its base.
// This function returns the least derived such class.
//
// Casting an instance of a base class to such a derived class is technically
// undefined behavior, but it is a relatively common hack for introducing member
// functions on class instances with specific properties (e.g. llvm::Operator)
// that works under most compilers and should not have security implications, so
// we allow it by default. It can be disabled with -fsanitize=cfi-cast-strict.
static const CXXRecordDecl *
LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) {
  if (!RD->field_empty())
    return RD;

  if (RD->getNumVBases() != 0)
    return RD;

  if (RD->getNumBases() != 1)
    return RD;

  for (const CXXMethodDecl *MD : RD->methods()) {
    if (MD->isVirtual()) {
      // Virtual member functions are only ok if they are implicit destructors
      // because the implicit destructor will have the same semantics as the
      // base class's destructor if no fields are added.
      if (isa<CXXDestructorDecl>(MD) && MD->isImplicit())
        continue;
      return RD;
    }
  }

  return LeastDerivedClassWithSameLayout(
      RD->bases_begin()->getType()->getAsCXXRecordDecl());
}
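
// For example, in a hypothetical hierarchy
//
//   struct Base { virtual void f(); };
//   struct Wrapper : Base { Base *self(); };  // no fields, nothing virtual
//
// LeastDerivedClassWithSameLayout(Wrapper) returns Base: Wrapper adds no
// fields and no virtual functions, so both classes have identical layout,
// and a CFI check against Base is the right granularity for a pointer that
// may have been "downcast" to Wrapper.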

void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
                                                   llvm::Value *VTable,
                                                   SourceLocation Loc) {
  if (SanOpts.has(SanitizerKind::CFIVCall))
    EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc);
  else if (CGM.getCodeGenOpts().WholeProgramVTables &&
           // Don't insert type test assumes if we are forcing public
           // visibility.
           !CGM.AlwaysHasLTOVisibilityPublic(RD)) {
    QualType Ty = QualType(RD->getTypeForDecl(), 0);
    llvm::Metadata *MD = CGM.CreateMetadataIdentifierForType(Ty);
    llvm::Value *TypeId =
        llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);

    // If we already know that the call has hidden LTO visibility, emit
    // @llvm.type.test(). Otherwise emit @llvm.public.type.test(), which WPD
    // will convert to @llvm.type.test() if we assert at link time that we have
    // whole program visibility.
    llvm::Intrinsic::ID IID = CGM.HasHiddenLTOVisibility(RD)
                                  ? llvm::Intrinsic::type_test
                                  : llvm::Intrinsic::public_type_test;
    llvm::Value *TypeTest =
        Builder.CreateCall(CGM.getIntrinsic(IID), {VTable, TypeId});
    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest);
  }
}

void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD,
                                                llvm::Value *VTable,
                                                CFITypeCheckKind TCK,
                                                SourceLocation Loc) {
  if (!SanOpts.has(SanitizerKind::CFICastStrict))
    RD = LeastDerivedClassWithSameLayout(RD);

  EmitVTablePtrCheck(RD, VTable, TCK, Loc);
}

void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T, Address Derived,
                                                bool MayBeNull,
                                                CFITypeCheckKind TCK,
                                                SourceLocation Loc) {
  if (!getLangOpts().CPlusPlus)
    return;

  auto *ClassTy = T->getAs<RecordType>();
  if (!ClassTy)
    return;

  const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(ClassTy->getDecl());

  if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass())
    return;

  if (!SanOpts.has(SanitizerKind::CFICastStrict))
    ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl);

  llvm::BasicBlock *ContBlock = nullptr;

  if (MayBeNull) {
    llvm::Value *DerivedNotNull =
        Builder.CreateIsNotNull(Derived.emitRawPointer(*this), "cast.nonnull");

    llvm::BasicBlock *CheckBlock = createBasicBlock("cast.check");
    ContBlock = createBasicBlock("cast.cont");

    Builder.CreateCondBr(DerivedNotNull, CheckBlock, ContBlock);

    EmitBlock(CheckBlock);
  }

  llvm::Value *VTable;
  std::tie(VTable, ClassDecl) =
      CGM.getCXXABI().LoadVTablePtr(*this, Derived, ClassDecl);

  EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc);

  if (MayBeNull) {
    Builder.CreateBr(ContBlock);
    EmitBlock(ContBlock);
  }
}

void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,
                                         llvm::Value *VTable,
                                         CFITypeCheckKind TCK,
                                         SourceLocation Loc) {
  if (!CGM.getCodeGenOpts().SanitizeCfiCrossDso &&
      !CGM.HasHiddenLTOVisibility(RD))
    return;

  SanitizerMask M;
  llvm::SanitizerStatKind SSK;
  switch (TCK) {
  case CFITCK_VCall:
    M = SanitizerKind::CFIVCall;
    SSK = llvm::SanStat_CFI_VCall;
    break;
  case CFITCK_NVCall:
    M = SanitizerKind::CFINVCall;
    SSK = llvm::SanStat_CFI_NVCall;
    break;
  case CFITCK_DerivedCast:
    M = SanitizerKind::CFIDerivedCast;
    SSK = llvm::SanStat_CFI_DerivedCast;
    break;
  case CFITCK_UnrelatedCast:
    M = SanitizerKind::CFIUnrelatedCast;
    SSK = llvm::SanStat_CFI_UnrelatedCast;
    break;
  case CFITCK_ICall:
  case CFITCK_NVMFCall:
  case CFITCK_VMFCall:
    llvm_unreachable("unexpected sanitizer kind");
  }

  std::string TypeName = RD->getQualifiedNameAsString();
  if (getContext().getNoSanitizeList().containsType(M, TypeName))
    return;

  SanitizerScope SanScope(this);
  EmitSanitizerStatReport(SSK);

  llvm::Metadata *MD =
      CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
  llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD);

  llvm::Value *TypeTest = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, TypeId});

  llvm::Constant *StaticData[] = {
      llvm::ConstantInt::get(Int8Ty, TCK),
      EmitCheckSourceLocation(Loc),
      EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)),
  };

  auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD);
  if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && CrossDsoTypeId) {
    EmitCfiSlowPathCheck(M, TypeTest, CrossDsoTypeId, VTable, StaticData);
    return;
  }

  if (CGM.getCodeGenOpts().SanitizeTrap.has(M)) {
    EmitTrapCheck(TypeTest, SanitizerHandler::CFICheckFail);
    return;
  }

  llvm::Value *AllVtables = llvm::MetadataAsValue::get(
      CGM.getLLVMContext(),
      llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
  llvm::Value *ValidVtable = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, AllVtables});
  EmitCheck(std::make_pair(TypeTest, M), SanitizerHandler::CFICheckFail,
            StaticData, {VTable, ValidVtable});
}

bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) {
  if (!CGM.getCodeGenOpts().WholeProgramVTables ||
      !CGM.HasHiddenLTOVisibility(RD))
    return false;

  if (CGM.getCodeGenOpts().VirtualFunctionElimination)
    return true;

  if (!SanOpts.has(SanitizerKind::CFIVCall) ||
      !CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIVCall))
    return false;

  std::string TypeName = RD->getQualifiedNameAsString();
  return !getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall,
                                                        TypeName);
}

llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad(
    const CXXRecordDecl *RD, llvm::Value *VTable, llvm::Type *VTableTy,
    uint64_t VTableByteOffset) {
  SanitizerScope SanScope(this);

  EmitSanitizerStatReport(llvm::SanStat_CFI_VCall);

  llvm::Metadata *MD =
      CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
  llvm::Value *TypeId = llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);

  llvm::Value *CheckedLoad = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),
      {VTable, llvm::ConstantInt::get(Int32Ty, VTableByteOffset), TypeId});
  llvm::Value *CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);

  std::string TypeName = RD->getQualifiedNameAsString();
  if (SanOpts.has(SanitizerKind::CFIVCall) &&
      !getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall,
                                                     TypeName)) {
    EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIVCall),
              SanitizerHandler::CFICheckFail, {}, {});
  }

  return Builder.CreateBitCast(Builder.CreateExtractValue(CheckedLoad, 0),
                               VTableTy);
}
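
// A sketch of the IR this produces for a virtual call through RD's vtable
// (offset and type-id string are illustrative):
//
//   %pair = call { ptr, i1 } @llvm.type.checked.load(ptr %vtable, i32 8,
//                                                    metadata !"_ZTS2RD")
//   %fn   = extractvalue { ptr, i1 } %pair, 0
//   %ok   = extractvalue { ptr, i1 } %pair, 1
//
// The i1 result feeds the CFI check above, while the loaded function pointer
// replaces a plain vtable load; keeping the load behind the intrinsic is what
// lets whole-program virtual-function elimination reason about the call.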
2960
2961
void CodeGenFunction::EmitForwardingCallToLambda(
    const CXXMethodDecl *callOperator, CallArgList &callArgs,
    const CGFunctionInfo *calleeFnInfo, llvm::Constant *calleePtr) {
  // Get the address of the call operator.
  if (!calleeFnInfo)
    calleeFnInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);

  if (!calleePtr)
    calleePtr =
        CGM.GetAddrOfFunction(GlobalDecl(callOperator),
                              CGM.getTypes().GetFunctionType(*calleeFnInfo));

  // Prepare the return slot.
  const FunctionProtoType *FPT =
      callOperator->getType()->castAs<FunctionProtoType>();
  QualType resultType = FPT->getReturnType();
  ReturnValueSlot returnSlot;
  if (!resultType->isVoidType() &&
      calleeFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      !hasScalarEvaluationKind(calleeFnInfo->getReturnType()))
    returnSlot =
        ReturnValueSlot(ReturnValue, resultType.isVolatileQualified(),
                        /*IsUnused=*/false, /*IsExternallyDestructed=*/true);

  // We don't need to separately arrange the call arguments because
  // the call can't be variadic anyway --- it's impossible to forward
  // variadic arguments.

  // Now emit our call.
  auto callee = CGCallee::forDirect(calleePtr, GlobalDecl(callOperator));
  RValue RV = EmitCall(*calleeFnInfo, callee, returnSlot, callArgs);

  // If necessary, copy the returned value into the slot.
  if (!resultType->isVoidType() && returnSlot.isNull()) {
    if (getLangOpts().ObjCAutoRefCount && resultType->isObjCRetainableType()) {
      RV = RValue::get(EmitARCRetainAutoreleasedReturnValue(RV.getScalarVal()));
    }
    EmitReturnOfRValue(RV, resultType);
  } else
    EmitBranchThroughCleanup(ReturnBlock);
}

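/// Emit the body of the invoke function for a block that wraps a lambda.
/// The block's sole capture is the lambda object itself, so the invoke
/// function passes the captured lambda as 'this' and forwards its remaining
/// parameters to the lambda's call operator.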
void CodeGenFunction::EmitLambdaBlockInvokeBody() {
  const BlockDecl *BD = BlockInfo->getBlockDecl();
  const VarDecl *variable = BD->capture_begin()->getVariable();
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();
  const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();

  if (CallOp->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function");
    return;
  }

  // Start building arguments for forwarding call
  CallArgList CallArgs;

  QualType ThisType =
      getContext().getPointerType(getContext().getRecordType(Lambda));
  Address ThisPtr = GetAddrOfBlockDecl(variable);
  CallArgs.add(RValue::get(getAsNaturalPointerTo(ThisPtr, ThisType)), ThisType);

  // Add the rest of the parameters.
  for (auto *param : BD->parameters())
    EmitDelegateCallArg(CallArgs, param, param->getBeginLoc());

  assert(!Lambda->isGenericLambda() &&
         "generic lambda interconversion to block not implemented");
  EmitForwardingCallToLambda(CallOp, CallArgs);
}

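/// Emit the body of a lambda's static invoker, i.e. the function returned by
/// the lambda's conversion to function pointer.  Illustrative example only:
///   int (*fp)(int) = [](int x) { return x + 1; };
/// The invoker is called without a lambda object, so an unused temporary is
/// materialized to stand in for 'this' before delegating to operator().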
void CodeGenFunction::EmitLambdaStaticInvokeBody(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  const CXXRecordDecl *Lambda = MD->getParent();

  // Start building arguments for forwarding call
  CallArgList CallArgs;

  QualType LambdaType = getContext().getRecordType(Lambda);
  QualType ThisType = getContext().getPointerType(LambdaType);
  Address ThisPtr = CreateMemTemp(LambdaType, "unused.capture");
  CallArgs.add(RValue::get(ThisPtr.emitRawPointer(*this)), ThisType);

  EmitLambdaDelegatingInvokeBody(MD, CallArgs);
}

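/// Shared tail of the invoker bodies above: forward the remaining
/// parameters, resolve the call operator to invoke (for a generic lambda,
/// the specialization matching the invoker's template arguments), and emit
/// the forwarding call, taking the separate inalloca path when needed.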
void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD,
                                                     CallArgList &CallArgs) {
  // Add the rest of the forwarded parameters.
  for (auto *Param : MD->parameters())
    EmitDelegateCallArg(CallArgs, Param, Param->getBeginLoc());

  const CXXRecordDecl *Lambda = MD->getParent();
  const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
  // For a generic lambda, find the corresponding call operator specialization
  // to which the call to the static-invoker shall be forwarded.
  if (Lambda->isGenericLambda()) {
    assert(MD->isFunctionTemplateSpecialization());
    const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs();
    FunctionTemplateDecl *CallOpTemplate =
        CallOp->getDescribedFunctionTemplate();
    void *InsertPos = nullptr;
    FunctionDecl *CorrespondingCallOpSpecialization =
        CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos);
    assert(CorrespondingCallOpSpecialization);
    CallOp = cast<CXXMethodDecl>(CorrespondingCallOpSpecialization);
  }

  // Special lambda forwarding when there are inalloca parameters.
  if (hasInAllocaArg(MD)) {
    const CGFunctionInfo *ImplFnInfo = nullptr;
    llvm::Function *ImplFn = nullptr;
    EmitLambdaInAllocaImplFn(CallOp, &ImplFnInfo, &ImplFn);

    EmitForwardingCallToLambda(CallOp, CallArgs, ImplFnInfo, ImplFn);
    return;
  }

  EmitForwardingCallToLambda(CallOp, CallArgs);
}

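/// Emit the body of a call operator whose real body was split into a
/// separate __impl function because it has an inalloca argument (as used,
/// for example, by the 32-bit Windows ABI for by-value arguments with
/// nontrivial copy semantics).  The emitted operator() only forwards its
/// 'this' argument and delegates.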
void CodeGenFunction::EmitLambdaInAllocaCallOpBody(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  // Forward %this argument.
  CallArgList CallArgs;
  QualType LambdaType = getContext().getRecordType(MD->getParent());
  QualType ThisType = getContext().getPointerType(LambdaType);
  llvm::Value *ThisArg = CurFn->getArg(0);
  CallArgs.add(RValue::get(ThisArg), ThisType);

  EmitLambdaDelegatingInvokeBody(MD, CallArgs);
}

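/// Create (or reuse) the internal __impl function that holds the original
/// call operator body for the inalloca case, and arrange a delegate-call
/// function info for it so the forwarding call can pass the argument block
/// through unchanged.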
void CodeGenFunction::EmitLambdaInAllocaImplFn(
    const CXXMethodDecl *CallOp, const CGFunctionInfo **ImplFnInfo,
    llvm::Function **ImplFn) {
  const CGFunctionInfo &FnInfo =
      CGM.getTypes().arrangeCXXMethodDeclaration(CallOp);
  llvm::Function *CallOpFn =
      cast<llvm::Function>(CGM.GetAddrOfFunction(GlobalDecl(CallOp)));

  // Emit function containing the original call op body. __invoke will delegate
  // to this function.
  SmallVector<CanQualType, 4> ArgTypes;
  for (auto I = FnInfo.arg_begin(); I != FnInfo.arg_end(); ++I)
    ArgTypes.push_back(I->type);
  *ImplFnInfo = &CGM.getTypes().arrangeLLVMFunctionInfo(
      FnInfo.getReturnType(), FnInfoOpts::IsDelegateCall, ArgTypes,
      FnInfo.getExtInfo(), {}, FnInfo.getRequiredArgs());

  // Create the mangled name as if this were a method named __impl. If for
  // some reason the name doesn't look as expected, then just tack __impl
  // onto the front.
  // TODO: Use the name mangler to produce the right name instead of using
  // string replacement.
  StringRef CallOpName = CallOpFn->getName();
  std::string ImplName;
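  // Note: find_first_of looks for the first occurrence of any character in
  // the set "<lambda" and returns npos (nonzero, hence true) when none is
  // found, so the else branch is taken only when the name starts with one
  // of those characters.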
  if (size_t Pos = CallOpName.find_first_of("<lambda"))
    ImplName = ("?__impl@" + CallOpName.drop_front(Pos)).str();
  else
    ImplName = ("__impl" + CallOpName).str();

  llvm::Function *Fn = CallOpFn->getParent()->getFunction(ImplName);
  if (!Fn) {
    Fn = llvm::Function::Create(CGM.getTypes().GetFunctionType(**ImplFnInfo),
                                llvm::GlobalValue::InternalLinkage, ImplName,
                                CGM.getModule());
    CGM.SetInternalFunctionAttributes(CallOp, Fn, **ImplFnInfo);

    const GlobalDecl &GD = GlobalDecl(CallOp);
    const auto *D = cast<FunctionDecl>(GD.getDecl());
    CodeGenFunction(CGM).GenerateCode(GD, Fn, **ImplFnInfo);
    CGM.SetLLVMFunctionAttributesForDefinition(D, Fn);
  }
  *ImplFn = Fn;
}