// Path: blob/main/contrib/llvm-project/clang/lib/CodeGen/CGClass.cpp
// 35233 views
//===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "ABIInfoImpl.h"
#include "CGBlocks.h"
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGRecordLayout.h"
#include "CodeGenFunction.h"
#include "TargetInfo.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/Basic/TargetBuiltins.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Transforms/Utils/SanitizerStats.h"
#include <optional>

using namespace clang;
using namespace CodeGen;

/// Return the best known alignment for an unknown pointer to a
/// particular class.
CharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) {
  if (!RD->hasDefinition())
    return CharUnits::One(); // Hopefully won't be used anywhere.

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use the full alignment.
  if (RD->isEffectivelyFinal())
    return layout.getAlignment();

  // Otherwise, we have to assume it could be a subclass.
  return layout.getNonVirtualAlignment();
}

/// Return the smallest possible amount of storage that might be allocated
/// starting from the beginning of an object of a particular class.
///
/// This may be smaller than sizeof(RD) if RD has virtual base classes.
CharUnits CodeGenModule::getMinimumClassObjectSize(const CXXRecordDecl *RD) {
  if (!RD->hasDefinition())
    return CharUnits::One();

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use the full alignment.
  if (RD->isEffectivelyFinal())
    return layout.getSize();

  // Otherwise, we have to assume it could be a subclass.
  return std::max(layout.getNonVirtualSize(), CharUnits::One());
}

/// Return the best known alignment for a pointer to a virtual base,
/// given the alignment of a pointer to the derived class.
CharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign,
                                           const CXXRecordDecl *derivedClass,
                                           const CXXRecordDecl *vbaseClass) {
  // The basic idea here is that an underaligned derived pointer might
  // indicate an underaligned base pointer.

  assert(vbaseClass->isCompleteDefinition());
  auto &baseLayout = getContext().getASTRecordLayout(vbaseClass);
  CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment();

  return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass,
                                   expectedVBaseAlign);
}

CharUnits
CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign,
                                         const CXXRecordDecl *baseDecl,
                                         CharUnits expectedTargetAlign) {
  // If the base is an incomplete type (which is, alas, possible with
  // member pointers), be pessimistic.
  if (!baseDecl->isCompleteDefinition())
    return std::min(actualBaseAlign, expectedTargetAlign);

  auto &baseLayout = getContext().getASTRecordLayout(baseDecl);
  CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment();

  // If the class is properly aligned, assume the target offset is, too.
  //
  // This actually isn't necessarily the right thing to do --- if the
  // class is a complete object, but it's only properly aligned for a
  // base subobject, then the alignments of things relative to it are
  // probably off as well. (Note that this requires the alignment of
  // the target to be greater than the NV alignment of the derived
  // class.)
  //
  // However, our approach to this kind of under-alignment can only
  // ever be best effort; after all, we're never going to propagate
  // alignments through variables or parameters. Note, in particular,
  // that constructing a polymorphic type in an address that's less
  // than pointer-aligned will generally trap in the constructor,
  // unless we someday add some sort of attribute to change the
  // assumed alignment of 'this'. So our goal here is pretty much
  // just to allow the user to explicitly say that a pointer is
  // under-aligned and then safely access its fields and vtables.
  if (actualBaseAlign >= expectedBaseAlign) {
    return expectedTargetAlign;
  }

  // Otherwise, we might be offset by an arbitrary multiple of the
  // actual alignment. The correct adjustment is to take the min of
  // the two alignments.
  return std::min(actualBaseAlign, expectedTargetAlign);
}

/// Load the address of 'this' in the current ctor/dtor/method, with the best
/// alignment that can be assumed for a pointer to the enclosing class.
Address CodeGenFunction::LoadCXXThisAddress() {
  assert(CurFuncDecl && "loading 'this' without a func declaration?");
  auto *MD = cast<CXXMethodDecl>(CurFuncDecl);

  // Lazily compute CXXThisAlignment.
  if (CXXThisAlignment.isZero()) {
    // Just use the best known alignment for the parent.
    // TODO: if we're currently emitting a complete-object ctor/dtor,
    // we can always use the complete-object alignment.
    CXXThisAlignment = CGM.getClassPointerAlignment(MD->getParent());
  }

  return makeNaturalAddressForPointer(
      LoadCXXThis(), MD->getFunctionObjectParameterType(), CXXThisAlignment,
      false, nullptr, nullptr, KnownNonNull);
}

/// Emit the address of a field using a member data pointer.
///
/// \param E Only used for emergency diagnostics
Address
CodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base,
                                                 llvm::Value *memberPtr,
                                      const MemberPointerType *memberPtrType,
                                                 LValueBaseInfo *BaseInfo,
                                                 TBAAAccessInfo *TBAAInfo) {
  // Ask the ABI to compute the actual address.
  llvm::Value *ptr =
    CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base,
                                                 memberPtr, memberPtrType);

  QualType memberType = memberPtrType->getPointeeType();
  CharUnits memberAlign =
      CGM.getNaturalTypeAlignment(memberType, BaseInfo, TBAAInfo);
  // The pointed-to object may be at a dynamic offset within the base, so
  // the usable alignment is limited by the base pointer's alignment.
  memberAlign =
    CGM.getDynamicOffsetAlignment(base.getAlignment(),
                            memberPtrType->getClass()->getAsCXXRecordDecl(),
                                  memberAlign);
  return Address(ptr, ConvertTypeForMem(memberPtrType->getPointeeType()),
                 memberAlign);
}

/// Walk a (purely non-virtual) base path and accumulate the static byte
/// offset from DerivedClass to the final base in the path.
CharUnits CodeGenModule::computeNonVirtualBaseClassOffset(
    const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start,
    CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const ASTContext &Context = getContext();
  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const auto *BaseDecl =
        cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

/// Return the non-virtual base-class offset for the given path as an LLVM
/// ptrdiff_t constant, or null when the offset is zero.
llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
      computeNonVirtualBaseClassOffset(ClassDecl, PathBegin, PathEnd);
  if (Offset.isZero())
    return nullptr;

  llvm::Type *PtrDiffTy =
      getTypes().ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
Address
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This,
                                                  const CXXRecordDecl *Derived,
                                                  const CXXRecordDecl *Base,
                                                  bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This.getElementType() == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  Address V = This;
  if (!Offset.isZero()) {
    V = V.withElementType(Int8Ty);
    V = Builder.CreateConstInBoundsByteGEP(V, Offset);
  }
  return V.withElementType(ConvertType(Base));
}

/// Apply a combined static (non-virtual) and dynamic (virtual) byte offset
/// to 'addr', returning an i8* Address with the best provable alignment.
static Address
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr,
                                CharUnits nonVirtualOffset,
                                llvm::Value *virtualOffset,
                                const CXXRecordDecl *derivedClass,
                                const CXXRecordDecl *nearestVBase) {
  // Assert that we have something to do.
  assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr);

  // Compute the offset from the static and dynamic components.
  llvm::Value *baseOffset;
  if (!nonVirtualOffset.isZero()) {
    // Under the Itanium relative-vtables ABI, offsets are 32-bit.
    llvm::Type *OffsetType =
        (CGF.CGM.getTarget().getCXXABI().isItaniumFamily() &&
         CGF.CGM.getItaniumVTableContext().isRelativeLayout())
            ? CGF.Int32Ty
            : CGF.PtrDiffTy;
    baseOffset =
        llvm::ConstantInt::get(OffsetType, nonVirtualOffset.getQuantity());
    if (virtualOffset) {
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
    }
  } else {
    baseOffset = virtualOffset;
  }

  // Apply the base offset.
  llvm::Value *ptr = addr.emitRawPointer(CGF);
  ptr = CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, ptr, baseOffset, "add.ptr");

  // If we have a virtual component, the alignment of the result will
  // be relative only to the known alignment of that vbase.
  CharUnits alignment;
  if (virtualOffset) {
    assert(nearestVBase && "virtual offset without vbase?");
    alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(),
                                          derivedClass, nearestVBase);
  } else {
    alignment = addr.getAlignment();
  }
  alignment = alignment.alignmentAtOffset(nonVirtualOffset);

  return Address(ptr, CGF.Int8Ty, alignment);
}

/// Convert 'Value', a pointer to a Derived object, to a pointer to the base
/// subobject named by the cast path, optionally guarding with a null check.
Address CodeGenFunction::GetAddressOfBaseClass(
    Address Value, const CXXRecordDecl *Derived,
    CastExpr::path_const_iterator PathBegin,
    CastExpr::path_const_iterator PathEnd, bool NullCheckValue,
    SourceLocation Loc) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = nullptr;

  // Sema has done some convenient canonicalization here: if the
  // access path involved any virtual steps, the conversion path will
  // *start* with a step down to the correct virtual base subobject,
  // and hence will not require any further steps.
  if ((*Start)->isVirtual()) {
    VBase = cast<CXXRecordDecl>(
        (*Start)->getType()->castAs<RecordType>()->getDecl());
    ++Start;
  }

  // Compute the static offset of the ultimate destination within its
  // allocating subobject (the virtual base, if there is one, or else
  // the "complete" object that we see).
  CharUnits NonVirtualOffset = CGM.computeNonVirtualBaseClassOffset(
      VBase ? VBase : Derived, Start, PathEnd);

  // If there's a virtual step, we can sometimes "devirtualize" it.
  // For now, that's limited to when the derived type is final.
  // TODO: "devirtualize" this for accesses to known-complete objects.
  if (VBase && Derived->hasAttr<FinalAttr>()) {
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
    NonVirtualOffset += vBaseOffset;
    VBase = nullptr; // we no longer have a virtual step
  }

  // Get the base pointer type.
  llvm::Type *BaseValueTy = ConvertType((PathEnd[-1])->getType());
  llvm::Type *PtrTy = llvm::PointerType::get(
      CGM.getLLVMContext(), Value.getType()->getPointerAddressSpace());

  QualType DerivedTy = getContext().getRecordType(Derived);
  CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived);

  // If the static offset is zero and we don't have a virtual step,
  // just do a bitcast; null checks are unnecessary.
  if (NonVirtualOffset.isZero() && !VBase) {
    if (sanitizePerformTypeCheck()) {
      SanitizerSet SkippedChecks;
      SkippedChecks.set(SanitizerKind::Null, !NullCheckValue);
      EmitTypeCheck(TCK_Upcast, Loc, Value.emitRawPointer(*this), DerivedTy,
                    DerivedAlign, SkippedChecks);
    }
    return Value.withElementType(BaseValueTy);
  }

  llvm::BasicBlock *origBB = nullptr;
  llvm::BasicBlock *endBB = nullptr;

  // Skip over the offset (and the vtable load) if we're supposed to
  // null-check the pointer.
  if (NullCheckValue) {
    origBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
    endBB = createBasicBlock("cast.end");

    llvm::Value *isNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(isNull, endBB, notNullBB);
    EmitBlock(notNullBB);
  }

  if (sanitizePerformTypeCheck()) {
    SanitizerSet SkippedChecks;
    SkippedChecks.set(SanitizerKind::Null, true);
    EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc,
                  Value.emitRawPointer(*this), DerivedTy, DerivedAlign,
                  SkippedChecks);
  }

  // Compute the virtual offset.
  llvm::Value *VirtualOffset = nullptr;
  if (VBase) {
    VirtualOffset =
        CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived, VBase);
  }

  // Apply both offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset, Derived, VBase);

  // Cast to the destination type.
  Value = Value.withElementType(BaseValueTy);

  // Build a phi if we needed a null check.
  if (NullCheckValue) {
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    Builder.CreateBr(endBB);
    EmitBlock(endBB);

    llvm::PHINode *PHI = Builder.CreatePHI(PtrTy, 2, "cast.result");
    PHI->addIncoming(Value.emitRawPointer(*this), notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(PtrTy), origBB);
    Value = Value.withPointer(PHI, NotKnownNonNull);
  }

  return Value;
}

/// Convert 'BaseAddr', a pointer to a base subobject, back to a pointer to
/// the Derived object by subtracting the (purely non-virtual) path offset.
Address
CodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr,
                                          const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                        CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedValueTy = ConvertType(DerivedTy);

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return BaseAddr.withElementType(DerivedValueTy);
  }

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = nullptr;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset (negated, since we are going from base to derived).
  Address Addr = BaseAddr.withElementType(Int8Ty);
  Addr = Builder.CreateInBoundsGEP(
      Addr, Builder.CreateNeg(NonVirtualOffset), Int8Ty,
      CGM.getClassPointerAlignment(Derived), "sub.ptr");

  // Just cast.
  Addr = Addr.withElementType(DerivedValueTy);

  // Produce a PHI if we had a null-check.
  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::Value *Value = Addr.emitRawPointer(*this);
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
    return Address(PHI, Addr.getElementType(),
                   CGM.getClassPointerAlignment(Derived));
  }

  return Addr;
}

/// Compute the VTT argument to pass to a base-subobject constructor or
/// destructor call, or return null when the callee takes no VTT parameter.
llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD,
                                              bool ForVirtualBase,
                                              bool Delegating) {
  if (!CGM.getCXXABI().NeedsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return nullptr;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  uint64_t SubVTTIndex;

  if (Delegating) {
    // If this is a delegating constructor call, just load the VTT.
    return LoadCXXVTT();
  } else if (RD == Base) {
    // If the record matches the base, this is the complete ctor/dtor
    // variant calling the base variant in a class with virtual bases.
    assert(!CGM.getCXXABI().NeedsVTTParameter(CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    llvm::Value *VTT = LoadCXXVTT();
    return Builder.CreateConstInBoundsGEP1_64(VoidPtrTy, VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    llvm::GlobalValue *VTT = CGM.getVTables().GetAddrOfVTT(RD);
    return Builder.CreateConstInBoundsGEP2_64(
        VTT->getValueType(), VTT, 0, SubVTTIndex);
  }
}

namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor final : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      // We are already inside a destructor, so presumably the object being
      // destroyed should have the expected type.
      QualType ThisTy = D->getFunctionObjectParameterType();
      Address Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual,
                                /*Delegating=*/false, Addr, ThisTy);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker
      : ConstEvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef ConstEvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(const ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code? If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; }
  };
} // end anonymous namespace

/// Return whether a base-initializer expression mentions 'this' (explicitly
/// or implicitly), forcing the vtable pointers to be set up first.
static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(Init);
  return Checker.UsesThis;
}

/// Emit a single base-class initializer from a constructor's init list,
/// registering an EH cleanup to destroy the base if a later init throws.
static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  Address ThisPtr = CGF.LoadCXXThisAddress();

  const Type *BaseType = BaseInit->getBaseClass();
  const auto *BaseClassDecl =
      cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  Address V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  AggValueSlot AggSlot =
      AggValueSlot::forAddr(
          V, Qualifiers(),
          AggValueSlot::IsDestructed,
          AggValueSlot::DoesNotNeedGCBarriers,
          AggValueSlot::IsNotAliased,
          CGF.getOverlapForBaseInit(ClassDecl, BaseClassDecl, isBaseVirtual));

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

/// Return whether calling the given special member (copy/move ctor or
/// copy/move assignment) can be replaced with a plain memcpy.
static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) {
  auto *CD = dyn_cast<CXXConstructorDecl>(D);
  if (!(CD && CD->isCopyOrMoveConstructor()) &&
      !D->isCopyAssignmentOperator() && !D->isMoveAssignmentOperator())
    return false;

  // We can emit a memcpy for a trivial copy or move constructor/assignment.
  if (D->isTrivial() && !D->getParent()->mayInsertExtraPadding())
    return true;

  // We *must* emit a memcpy for a defaulted union copy or move op.
  if (D->getParent()->isUnion() && D->isDefaulted())
    return true;

  return false;
}

/// Rewrite LHS in place into the lvalue of the field being initialized,
/// drilling through anonymous unions for indirect member initializers.
static void EmitLValueForAnyFieldInitialization(CodeGenFunction &CGF,
                                                CXXCtorInitializer *MemberInit,
                                                LValue &LHS) {
  FieldDecl *Field = MemberInit->getAnyMember();
  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to the field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    for (const auto *I : IndirectField->chain())
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(I));
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }
}

/// Emit a single member initializer from a constructor's init list.
static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  ApplyDebugLocation Loc(CGF, MemberInit->getSourceLocation());
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS;

  // If a base constructor is being emitted, create an LValue that has the
  // non-virtual alignment.
  if (CGF.CurGD.getCtorType() == Ctor_Base)
    LHS = CGF.MakeNaturalAlignPointeeAddrLValue(ThisPtr, RecordTy);
  else
    LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  EmitLValueForAnyFieldInitialization(CGF, MemberInit, LHS);

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isDefaulted() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (CE && isMemcpyEquivalentSpecialMember(CE->getConstructor()))) {
      unsigned SrcArgIndex =
          CGF.CGM.getCXXABI().getSrcArgforCopyCtor(Constructor, Args);
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS, Src, FieldType,
                            CGF.getOverlapForFieldInit(Field),
                            LHS.isVolatileQualified());
      // Ensure that we destroy the objects if an exception is thrown later in
      // the constructor.
      QualType::DestructionKind dtorKind = FieldType.isDestructedType();
      if (CGF.needsEHCleanup(dtorKind))
        CGF.pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
      return;
    }
  }

  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit());
}

/// Emit the initializer expression 'Init' into the field lvalue 'LHS',
/// dispatching on the field type's evaluation kind, and push an EH cleanup
/// to destroy the field if a later initializer throws.
void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
                                              Expr *Init) {
  QualType FieldType = Field->getType();
  switch (getEvaluationKind(FieldType)) {
  case TEK_Scalar:
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      // Bitfield (or otherwise non-simple) lvalue: evaluate then store.
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
    break;
  case TEK_Complex:
    EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
    break;
  case TEK_Aggregate: {
    AggValueSlot Slot = AggValueSlot::forLValue(
        LHS, AggValueSlot::IsDestructed, AggValueSlot::DoesNotNeedGCBarriers,
        AggValueSlot::IsNotAliased, getOverlapForFieldInit(Field),
        AggValueSlot::IsNotZeroed,
        // Checks are made by the code that calls constructor.
        AggValueSlot::IsSanitizerChecked);
    EmitAggExpr(Init, Slot);
    break;
  }
  }

  // Ensure that we destroy this object if an exception is thrown
  // later in the constructor.
  QualType::DestructionKind dtorKind = FieldType.isDestructedType();
  if (needsEHCleanup(dtorKind))
    pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
bool CodeGenFunction::IsConstructorDelegationValid(
    const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers. This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always exclude this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->castAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}

// Emit code in ctor (Prologue==true) or dtor (Prologue==false)
// to poison the extra field paddings inserted under
// -fsanitize-address-field-padding=1|2.
void CodeGenFunction::EmitAsanPrologueOrEpilogue(bool Prologue) {
  ASTContext &Context = getContext();
  const CXXRecordDecl *ClassDecl =
      Prologue ? cast<CXXConstructorDecl>(CurGD.getDecl())->getParent()
               : cast<CXXDestructorDecl>(CurGD.getDecl())->getParent();
  if (!ClassDecl->mayInsertExtraPadding()) return;

  struct SizeAndOffset {
    uint64_t Size;
    uint64_t Offset;
  };

  unsigned PtrSize = CGM.getDataLayout().getPointerSizeInBits();
  const ASTRecordLayout &Info = Context.getASTRecordLayout(ClassDecl);

  // Populate sizes and offsets of fields.
  SmallVector<SizeAndOffset, 16> SSV(Info.getFieldCount());
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i)
    SSV[i].Offset =
        Context.toCharUnitsFromBits(Info.getFieldOffset(i)).getQuantity();

  size_t NumFields = 0;
  for (const auto *Field : ClassDecl->fields()) {
    const FieldDecl *D = Field;
    auto FieldInfo = Context.getTypeInfoInChars(D->getType());
    CharUnits FieldSize = FieldInfo.Width;
    assert(NumFields < SSV.size());
    SSV[NumFields].Size = D->isBitField() ? 0 : FieldSize.getQuantity();
    NumFields++;
  }
  assert(NumFields == SSV.size());
  if (SSV.size() <= 1) return;

  // We will insert calls to __asan_* run-time functions.
  // LLVM AddressSanitizer pass may decide to inline them later.
  llvm::Type *Args[2] = {IntPtrTy, IntPtrTy};
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, Args, false);
  llvm::FunctionCallee F = CGM.CreateRuntimeFunction(
      FTy, Prologue ? "__asan_poison_intra_object_redzone"
                    : "__asan_unpoison_intra_object_redzone");

  llvm::Value *ThisPtr = LoadCXXThis();
  ThisPtr = Builder.CreatePtrToInt(ThisPtr, IntPtrTy);
  uint64_t TypeSize = Info.getNonVirtualSize().getQuantity();
  // For each field check if it has sufficient padding,
  // if so (un)poison it with a call.
  for (size_t i = 0; i < SSV.size(); i++) {
    uint64_t AsanAlignment = 8;
    uint64_t NextField = i == SSV.size() - 1 ? TypeSize : SSV[i + 1].Offset;
    uint64_t PoisonSize = NextField - SSV[i].Offset - SSV[i].Size;
    uint64_t EndOffset = SSV[i].Offset + SSV[i].Size;
    if (PoisonSize < AsanAlignment || !SSV[i].Size ||
        (NextField % AsanAlignment) != 0)
      continue;
    Builder.CreateCall(
        F, {Builder.CreateAdd(ThisPtr, Builder.getIntN(PtrSize, EndOffset)),
            Builder.getIntN(PtrSize, PoisonSize)});
  }
}

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  EmitAsanPrologueOrEpilogue(true);
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  assert((CGM.getTarget().getCXXABI().hasConstructorVariants() ||
          CtorType == Ctor_Complete) &&
         "can only generate complete ctor for this ABI");

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args, Ctor->getEndLoc());
    return;
  }

  const FunctionDecl *Definition = nullptr;
  Stmt *Body = Ctor->getBody(Definition);
  assert(Definition == Ctor && "emitting wrong constructor body");

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = isa_and_nonnull<CXXTryStmt>(Body);
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  incrementProfileCounter(Body);
  maybeCreateMCDCCondBitmap();

  RunCleanupsScope RunCleanups(*this);

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  RunCleanups.ForceCleanup();

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
/// RAII object to indicate that codegen is copying the value representation
/// instead of the object representation. Useful when copying a struct or
/// class which has uninitialized members and we're only performing
/// lvalue-to-rvalue conversion on the object but not its members.
class CopyingValueRepresentation {
public:
  explicit CopyingValueRepresentation(CodeGenFunction &CGF)
      : CGF(CGF), OldSanOpts(CGF.SanOpts) {
    // Suppress bool/enum load checks: the padding/invalid bits being copied
    // are not a "use" of the value.
    CGF.SanOpts.set(SanitizerKind::Bool, false);
    CGF.SanOpts.set(SanitizerKind::Enum, false);
  }
  ~CopyingValueRepresentation() {
    CGF.SanOpts = OldSanOpts;
  }
private:
  CodeGenFunction &CGF;
  SanitizerSet OldSanOpts;
};
} // end anonymous namespace

namespace {
class FieldMemcpyizer {
public:
  FieldMemcpyizer(CodeGenFunction &CGF, const CXXRecordDecl *ClassDecl,
                  const VarDecl *SrcRec)
      : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec),
        RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)),
        FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0),
        LastFieldOffset(0), LastAddedFieldIndex(0) {}

  bool isMemcpyableField(FieldDecl *F) const {
    // Never memcpy fields when we are adding poisoned paddings.
    if (CGF.getContext().getLangOpts().SanitizeAddressFieldPadding)
      return false;
    Qualifiers Qual = F->getType().getQualifiers();
    if (Qual.hasVolatile() || Qual.hasObjCLifetime())
      return false;
    return true;
  }

  void addMemcpyableField(FieldDecl *F) {
    if (isEmptyFieldForLayout(CGF.getContext(), F))
      return;
    if (!FirstField)
      addInitialField(F);
    else
      addNextField(F);
  }

  CharUnits getMemcpySize(uint64_t FirstByteOffset) const {
    ASTContext &Ctx = CGF.getContext();
    unsigned LastFieldSize =
        LastField->isBitField()
            ? LastField->getBitWidthValue(Ctx)
            : Ctx.toBits(
                  Ctx.getTypeInfoDataSizeInChars(LastField->getType()).Width);
    uint64_t MemcpySizeBits = LastFieldOffset + LastFieldSize -
                              FirstByteOffset + Ctx.getCharWidth() - 1;
    CharUnits MemcpySize = Ctx.toCharUnitsFromBits(MemcpySizeBits);
    return MemcpySize;
  }

  void emitMemcpy() {
    // Give the subclass a chance to bail out if it feels the memcpy isn't
    // worth it (e.g. Hasn't aggregated enough data).
    if (!FirstField) {
      return;
    }

    uint64_t FirstByteOffset;
    if (FirstField->isBitField()) {
      const CGRecordLayout &RL =
        CGF.getTypes().getCGRecordLayout(FirstField->getParent());
      const CGBitFieldInfo &BFInfo = RL.getBitFieldInfo(FirstField);
      // FirstFieldOffset is not appropriate for bitfields,
      // we need to use the storage offset instead.
      FirstByteOffset = CGF.getContext().toBits(BFInfo.StorageOffset);
    } else {
      FirstByteOffset = FirstFieldOffset;
    }

    CharUnits MemcpySize = getMemcpySize(FirstByteOffset);
    QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
    Address ThisPtr = CGF.LoadCXXThisAddress();
    LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy);
    LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField);
    llvm::Value *SrcPtr = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec));
    LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
    LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField);

    emitMemcpyIR(
        Dest.isBitField() ? Dest.getBitFieldAddress() : Dest.getAddress(),
        Src.isBitField() ?
Src.getBitFieldAddress() : Src.getAddress(),988MemcpySize);989reset();990}991992void reset() {993FirstField = nullptr;994}995996protected:997CodeGenFunction &CGF;998const CXXRecordDecl *ClassDecl;9991000private:1001void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) {1002DestPtr = DestPtr.withElementType(CGF.Int8Ty);1003SrcPtr = SrcPtr.withElementType(CGF.Int8Ty);1004CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity());1005}10061007void addInitialField(FieldDecl *F) {1008FirstField = F;1009LastField = F;1010FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex());1011LastFieldOffset = FirstFieldOffset;1012LastAddedFieldIndex = F->getFieldIndex();1013}10141015void addNextField(FieldDecl *F) {1016// For the most part, the following invariant will hold:1017// F->getFieldIndex() == LastAddedFieldIndex + 11018// The one exception is that Sema won't add a copy-initializer for an1019// unnamed bitfield, which will show up here as a gap in the sequence.1020assert(F->getFieldIndex() >= LastAddedFieldIndex + 1 &&1021"Cannot aggregate fields out of order.");1022LastAddedFieldIndex = F->getFieldIndex();10231024// The 'first' and 'last' fields are chosen by offset, rather than field1025// index. This allows the code to support bitfields, as well as regular1026// fields.1027uint64_t FOffset = RecLayout.getFieldOffset(F->getFieldIndex());1028if (FOffset < FirstFieldOffset) {1029FirstField = F;1030FirstFieldOffset = FOffset;1031} else if (FOffset >= LastFieldOffset) {1032LastField = F;1033LastFieldOffset = FOffset;1034}1035}10361037const VarDecl *SrcRec;1038const ASTRecordLayout &RecLayout;1039FieldDecl *FirstField;1040FieldDecl *LastField;1041uint64_t FirstFieldOffset, LastFieldOffset;1042unsigned LastAddedFieldIndex;1043};10441045class ConstructorMemcpyizer : public FieldMemcpyizer {1046private:1047/// Get source argument for copy constructor. 
Returns null if not a copy1048/// constructor.1049static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF,1050const CXXConstructorDecl *CD,1051FunctionArgList &Args) {1052if (CD->isCopyOrMoveConstructor() && CD->isDefaulted())1053return Args[CGF.CGM.getCXXABI().getSrcArgforCopyCtor(CD, Args)];1054return nullptr;1055}10561057// Returns true if a CXXCtorInitializer represents a member initialization1058// that can be rolled into a memcpy.1059bool isMemberInitMemcpyable(CXXCtorInitializer *MemberInit) const {1060if (!MemcpyableCtor)1061return false;1062FieldDecl *Field = MemberInit->getMember();1063assert(Field && "No field for member init.");1064QualType FieldType = Field->getType();1065CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());10661067// Bail out on non-memcpyable, not-trivially-copyable members.1068if (!(CE && isMemcpyEquivalentSpecialMember(CE->getConstructor())) &&1069!(FieldType.isTriviallyCopyableType(CGF.getContext()) ||1070FieldType->isReferenceType()))1071return false;10721073// Bail out on volatile fields.1074if (!isMemcpyableField(Field))1075return false;10761077// Otherwise we're good.1078return true;1079}10801081public:1082ConstructorMemcpyizer(CodeGenFunction &CGF, const CXXConstructorDecl *CD,1083FunctionArgList &Args)1084: FieldMemcpyizer(CGF, CD->getParent(), getTrivialCopySource(CGF, CD, Args)),1085ConstructorDecl(CD),1086MemcpyableCtor(CD->isDefaulted() &&1087CD->isCopyOrMoveConstructor() &&1088CGF.getLangOpts().getGC() == LangOptions::NonGC),1089Args(Args) { }10901091void addMemberInitializer(CXXCtorInitializer *MemberInit) {1092if (isMemberInitMemcpyable(MemberInit)) {1093AggregatedInits.push_back(MemberInit);1094addMemcpyableField(MemberInit->getMember());1095} else {1096emitAggregatedInits();1097EmitMemberInitializer(CGF, ConstructorDecl->getParent(), MemberInit,1098ConstructorDecl, Args);1099}1100}11011102void emitAggregatedInits() {1103if (AggregatedInits.size() <= 1) {1104// This memcpy is too small to 
be worthwhile. Fall back on default1105// codegen.1106if (!AggregatedInits.empty()) {1107CopyingValueRepresentation CVR(CGF);1108EmitMemberInitializer(CGF, ConstructorDecl->getParent(),1109AggregatedInits[0], ConstructorDecl, Args);1110AggregatedInits.clear();1111}1112reset();1113return;1114}11151116pushEHDestructors();1117emitMemcpy();1118AggregatedInits.clear();1119}11201121void pushEHDestructors() {1122Address ThisPtr = CGF.LoadCXXThisAddress();1123QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);1124LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy);11251126for (unsigned i = 0; i < AggregatedInits.size(); ++i) {1127CXXCtorInitializer *MemberInit = AggregatedInits[i];1128QualType FieldType = MemberInit->getAnyMember()->getType();1129QualType::DestructionKind dtorKind = FieldType.isDestructedType();1130if (!CGF.needsEHCleanup(dtorKind))1131continue;1132LValue FieldLHS = LHS;1133EmitLValueForAnyFieldInitialization(CGF, MemberInit, FieldLHS);1134CGF.pushEHDestroy(dtorKind, FieldLHS.getAddress(), FieldType);1135}1136}11371138void finish() {1139emitAggregatedInits();1140}11411142private:1143const CXXConstructorDecl *ConstructorDecl;1144bool MemcpyableCtor;1145FunctionArgList &Args;1146SmallVector<CXXCtorInitializer*, 16> AggregatedInits;1147};11481149class AssignmentMemcpyizer : public FieldMemcpyizer {1150private:1151// Returns the memcpyable field copied by the given statement, if one1152// exists. 
Otherwise returns null.1153FieldDecl *getMemcpyableField(Stmt *S) {1154if (!AssignmentsMemcpyable)1155return nullptr;1156if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) {1157// Recognise trivial assignments.1158if (BO->getOpcode() != BO_Assign)1159return nullptr;1160MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS());1161if (!ME)1162return nullptr;1163FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());1164if (!Field || !isMemcpyableField(Field))1165return nullptr;1166Stmt *RHS = BO->getRHS();1167if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS))1168RHS = EC->getSubExpr();1169if (!RHS)1170return nullptr;1171if (MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS)) {1172if (ME2->getMemberDecl() == Field)1173return Field;1174}1175return nullptr;1176} else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) {1177CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl());1178if (!(MD && isMemcpyEquivalentSpecialMember(MD)))1179return nullptr;1180MemberExpr *IOA = dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument());1181if (!IOA)1182return nullptr;1183FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl());1184if (!Field || !isMemcpyableField(Field))1185return nullptr;1186MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0));1187if (!Arg0 || Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl()))1188return nullptr;1189return Field;1190} else if (CallExpr *CE = dyn_cast<CallExpr>(S)) {1191FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl());1192if (!FD || FD->getBuiltinID() != Builtin::BI__builtin_memcpy)1193return nullptr;1194Expr *DstPtr = CE->getArg(0);1195if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr))1196DstPtr = DC->getSubExpr();1197UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr);1198if (!DUO || DUO->getOpcode() != UO_AddrOf)1199return nullptr;1200MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr());1201if (!ME)1202return nullptr;1203FieldDecl *Field = 
dyn_cast<FieldDecl>(ME->getMemberDecl());1204if (!Field || !isMemcpyableField(Field))1205return nullptr;1206Expr *SrcPtr = CE->getArg(1);1207if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr))1208SrcPtr = SC->getSubExpr();1209UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr);1210if (!SUO || SUO->getOpcode() != UO_AddrOf)1211return nullptr;1212MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr());1213if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl()))1214return nullptr;1215return Field;1216}12171218return nullptr;1219}12201221bool AssignmentsMemcpyable;1222SmallVector<Stmt*, 16> AggregatedStmts;12231224public:1225AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD,1226FunctionArgList &Args)1227: FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]),1228AssignmentsMemcpyable(CGF.getLangOpts().getGC() == LangOptions::NonGC) {1229assert(Args.size() == 2);1230}12311232void emitAssignment(Stmt *S) {1233FieldDecl *F = getMemcpyableField(S);1234if (F) {1235addMemcpyableField(F);1236AggregatedStmts.push_back(S);1237} else {1238emitAggregatedStmts();1239CGF.EmitStmt(S);1240}1241}12421243void emitAggregatedStmts() {1244if (AggregatedStmts.size() <= 1) {1245if (!AggregatedStmts.empty()) {1246CopyingValueRepresentation CVR(CGF);1247CGF.EmitStmt(AggregatedStmts[0]);1248}1249reset();1250}12511252emitMemcpy();1253AggregatedStmts.clear();1254}12551256void finish() {1257emitAggregatedStmts();1258}1259};1260} // end anonymous namespace12611262static bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) {1263const Type *BaseType = BaseInit->getBaseClass();1264const auto *BaseClassDecl =1265cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());1266return BaseClassDecl->isDynamicClass();1267}12681269/// EmitCtorPrologue - This routine generates necessary code to initialize1270/// base classes and non-static data members belonging to this constructor.1271void CodeGenFunction::EmitCtorPrologue(const 
CXXConstructorDecl *CD,1272CXXCtorType CtorType,1273FunctionArgList &Args) {1274if (CD->isDelegatingConstructor())1275return EmitDelegatingCXXConstructorCall(CD, Args);12761277const CXXRecordDecl *ClassDecl = CD->getParent();12781279CXXConstructorDecl::init_const_iterator B = CD->init_begin(),1280E = CD->init_end();12811282// Virtual base initializers first, if any. They aren't needed if:1283// - This is a base ctor variant1284// - There are no vbases1285// - The class is abstract, so a complete object of it cannot be constructed1286//1287// The check for an abstract class is necessary because sema may not have1288// marked virtual base destructors referenced.1289bool ConstructVBases = CtorType != Ctor_Base &&1290ClassDecl->getNumVBases() != 0 &&1291!ClassDecl->isAbstract();12921293// In the Microsoft C++ ABI, there are no constructor variants. Instead, the1294// constructor of a class with virtual bases takes an additional parameter to1295// conditionally construct the virtual bases. Emit that check here.1296llvm::BasicBlock *BaseCtorContinueBB = nullptr;1297if (ConstructVBases &&1298!CGM.getTarget().getCXXABI().hasConstructorVariants()) {1299BaseCtorContinueBB =1300CGM.getCXXABI().EmitCtorCompleteObjectHandler(*this, ClassDecl);1301assert(BaseCtorContinueBB);1302}13031304for (; B != E && (*B)->isBaseInitializer() && (*B)->isBaseVirtual(); B++) {1305if (!ConstructVBases)1306continue;1307SaveAndRestore ThisRAII(CXXThisValue);1308if (CGM.getCodeGenOpts().StrictVTablePointers &&1309CGM.getCodeGenOpts().OptimizationLevel > 0 &&1310isInitializerOfDynamicClass(*B))1311CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());1312EmitBaseInitializer(*this, ClassDecl, *B);1313}13141315if (BaseCtorContinueBB) {1316// Complete object handler should continue to the remaining initializers.1317Builder.CreateBr(BaseCtorContinueBB);1318EmitBlock(BaseCtorContinueBB);1319}13201321// Then, non-virtual base initializers.1322for (; B != E && (*B)->isBaseInitializer(); B++) 
{1323assert(!(*B)->isBaseVirtual());1324SaveAndRestore ThisRAII(CXXThisValue);1325if (CGM.getCodeGenOpts().StrictVTablePointers &&1326CGM.getCodeGenOpts().OptimizationLevel > 0 &&1327isInitializerOfDynamicClass(*B))1328CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());1329EmitBaseInitializer(*this, ClassDecl, *B);1330}13311332InitializeVTablePointers(ClassDecl);13331334// And finally, initialize class members.1335FieldConstructionScope FCS(*this, LoadCXXThisAddress());1336ConstructorMemcpyizer CM(*this, CD, Args);1337for (; B != E; B++) {1338CXXCtorInitializer *Member = (*B);1339assert(!Member->isBaseInitializer());1340assert(Member->isAnyMemberInitializer() &&1341"Delegating initializer on non-delegating constructor");1342CM.addMemberInitializer(Member);1343}1344CM.finish();1345}13461347static bool1348FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);13491350static bool1351HasTrivialDestructorBody(ASTContext &Context,1352const CXXRecordDecl *BaseClassDecl,1353const CXXRecordDecl *MostDerivedClassDecl)1354{1355// If the destructor is trivial we don't have to check anything else.1356if (BaseClassDecl->hasTrivialDestructor())1357return true;13581359if (!BaseClassDecl->getDestructor()->hasTrivialBody())1360return false;13611362// Check fields.1363for (const auto *Field : BaseClassDecl->fields())1364if (!FieldHasTrivialDestructorBody(Context, Field))1365return false;13661367// Check non-virtual bases.1368for (const auto &I : BaseClassDecl->bases()) {1369if (I.isVirtual())1370continue;13711372const CXXRecordDecl *NonVirtualBase =1373cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());1374if (!HasTrivialDestructorBody(Context, NonVirtualBase,1375MostDerivedClassDecl))1376return false;1377}13781379if (BaseClassDecl == MostDerivedClassDecl) {1380// Check virtual bases.1381for (const auto &I : BaseClassDecl->vbases()) {1382const CXXRecordDecl *VirtualBase 
=1383cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());1384if (!HasTrivialDestructorBody(Context, VirtualBase,1385MostDerivedClassDecl))1386return false;1387}1388}13891390return true;1391}13921393static bool1394FieldHasTrivialDestructorBody(ASTContext &Context,1395const FieldDecl *Field)1396{1397QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());13981399const RecordType *RT = FieldBaseElementType->getAs<RecordType>();1400if (!RT)1401return true;14021403CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());14041405// The destructor for an implicit anonymous union member is never invoked.1406if (FieldClassDecl->isUnion() && FieldClassDecl->isAnonymousStructOrUnion())1407return true;14081409return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);1410}14111412/// CanSkipVTablePointerInitialization - Check whether we need to initialize1413/// any vtable pointers before calling this destructor.1414static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF,1415const CXXDestructorDecl *Dtor) {1416const CXXRecordDecl *ClassDecl = Dtor->getParent();1417if (!ClassDecl->isDynamicClass())1418return true;14191420// For a final class, the vtable pointer is known to already point to the1421// class's vtable.1422if (ClassDecl->isEffectivelyFinal())1423return true;14241425if (!Dtor->hasTrivialBody())1426return false;14271428// Check the fields.1429for (const auto *Field : ClassDecl->fields())1430if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field))1431return false;14321433return true;1434}14351436/// EmitDestructorBody - Emits the body of the current destructor.1437void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {1438const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());1439CXXDtorType DtorType = CurGD.getDtorType();14401441// For an abstract class, non-base destructors are never used (and can't1442// be emitted in general, because vbase dtors may not have been 
validated1443// by Sema), but the Itanium ABI doesn't make them optional and Clang may1444// in fact emit references to them from other compilations, so emit them1445// as functions containing a trap instruction.1446if (DtorType != Dtor_Base && Dtor->getParent()->isAbstract()) {1447llvm::CallInst *TrapCall = EmitTrapCall(llvm::Intrinsic::trap);1448TrapCall->setDoesNotReturn();1449TrapCall->setDoesNotThrow();1450Builder.CreateUnreachable();1451Builder.ClearInsertionPoint();1452return;1453}14541455Stmt *Body = Dtor->getBody();1456if (Body) {1457incrementProfileCounter(Body);1458maybeCreateMCDCCondBitmap();1459}14601461// The call to operator delete in a deleting destructor happens1462// outside of the function-try-block, which means it's always1463// possible to delegate the destructor body to the complete1464// destructor. Do so.1465if (DtorType == Dtor_Deleting) {1466RunCleanupsScope DtorEpilogue(*this);1467EnterDtorCleanups(Dtor, Dtor_Deleting);1468if (HaveInsertPoint()) {1469QualType ThisTy = Dtor->getFunctionObjectParameterType();1470EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,1471/*Delegating=*/false, LoadCXXThisAddress(), ThisTy);1472}1473return;1474}14751476// If the body is a function-try-block, enter the try before1477// anything else.1478bool isTryBody = isa_and_nonnull<CXXTryStmt>(Body);1479if (isTryBody)1480EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);1481EmitAsanPrologueOrEpilogue(false);14821483// Enter the epilogue cleanups.1484RunCleanupsScope DtorEpilogue(*this);14851486// If this is the complete variant, just invoke the base variant;1487// the epilogue will destruct the virtual bases. But we can't do1488// this optimization if the body is a function-try-block, because1489// we'd introduce *two* handler blocks. 
In the Microsoft ABI, we1490// always delegate because we might not have a definition in this TU.1491switch (DtorType) {1492case Dtor_Comdat: llvm_unreachable("not expecting a COMDAT");1493case Dtor_Deleting: llvm_unreachable("already handled deleting case");14941495case Dtor_Complete:1496assert((Body || getTarget().getCXXABI().isMicrosoft()) &&1497"can't emit a dtor without a body for non-Microsoft ABIs");14981499// Enter the cleanup scopes for virtual bases.1500EnterDtorCleanups(Dtor, Dtor_Complete);15011502if (!isTryBody) {1503QualType ThisTy = Dtor->getFunctionObjectParameterType();1504EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,1505/*Delegating=*/false, LoadCXXThisAddress(), ThisTy);1506break;1507}15081509// Fallthrough: act like we're in the base variant.1510[[fallthrough]];15111512case Dtor_Base:1513assert(Body);15141515// Enter the cleanup scopes for fields and non-virtual bases.1516EnterDtorCleanups(Dtor, Dtor_Base);15171518// Initialize the vtable pointers before entering the body.1519if (!CanSkipVTablePointerInitialization(*this, Dtor)) {1520// Insert the llvm.launder.invariant.group intrinsic before initializing1521// the vptrs to cancel any previous assumptions we might have made.1522if (CGM.getCodeGenOpts().StrictVTablePointers &&1523CGM.getCodeGenOpts().OptimizationLevel > 0)1524CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());1525InitializeVTablePointers(Dtor->getParent());1526}15271528if (isTryBody)1529EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());1530else if (Body)1531EmitStmt(Body);1532else {1533assert(Dtor->isImplicit() && "bodyless dtor not implicit");1534// nothing to do besides what's in the epilogue1535}1536// -fapple-kext must inline any call to this dtor into1537// the caller's body.1538if (getLangOpts().AppleKext)1539CurFn->addFnAttr(llvm::Attribute::AlwaysInline);15401541break;1542}15431544// Jump out through the epilogue cleanups.1545DtorEpilogue.ForceCleanup();15461547// Exit the try if 
applicable.1548if (isTryBody)1549ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);1550}15511552void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) {1553const CXXMethodDecl *AssignOp = cast<CXXMethodDecl>(CurGD.getDecl());1554const Stmt *RootS = AssignOp->getBody();1555assert(isa<CompoundStmt>(RootS) &&1556"Body of an implicit assignment operator should be compound stmt.");1557const CompoundStmt *RootCS = cast<CompoundStmt>(RootS);15581559LexicalScope Scope(*this, RootCS->getSourceRange());15601561incrementProfileCounter(RootCS);1562maybeCreateMCDCCondBitmap();1563AssignmentMemcpyizer AM(*this, AssignOp, Args);1564for (auto *I : RootCS->body())1565AM.emitAssignment(I);1566AM.finish();1567}15681569namespace {1570llvm::Value *LoadThisForDtorDelete(CodeGenFunction &CGF,1571const CXXDestructorDecl *DD) {1572if (Expr *ThisArg = DD->getOperatorDeleteThisArg())1573return CGF.EmitScalarExpr(ThisArg);1574return CGF.LoadCXXThis();1575}15761577/// Call the operator delete associated with the current destructor.1578struct CallDtorDelete final : EHScopeStack::Cleanup {1579CallDtorDelete() {}15801581void Emit(CodeGenFunction &CGF, Flags flags) override {1582const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);1583const CXXRecordDecl *ClassDecl = Dtor->getParent();1584CGF.EmitDeleteCall(Dtor->getOperatorDelete(),1585LoadThisForDtorDelete(CGF, Dtor),1586CGF.getContext().getTagDeclType(ClassDecl));1587}1588};15891590void EmitConditionalDtorDeleteCall(CodeGenFunction &CGF,1591llvm::Value *ShouldDeleteCondition,1592bool ReturnAfterDelete) {1593llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete");1594llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue");1595llvm::Value *ShouldCallDelete1596= CGF.Builder.CreateIsNull(ShouldDeleteCondition);1597CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB);15981599CGF.EmitBlock(callDeleteBB);1600const CXXDestructorDecl *Dtor = 
cast<CXXDestructorDecl>(CGF.CurCodeDecl);1601const CXXRecordDecl *ClassDecl = Dtor->getParent();1602CGF.EmitDeleteCall(Dtor->getOperatorDelete(),1603LoadThisForDtorDelete(CGF, Dtor),1604CGF.getContext().getTagDeclType(ClassDecl));1605assert(Dtor->getOperatorDelete()->isDestroyingOperatorDelete() ==1606ReturnAfterDelete &&1607"unexpected value for ReturnAfterDelete");1608if (ReturnAfterDelete)1609CGF.EmitBranchThroughCleanup(CGF.ReturnBlock);1610else1611CGF.Builder.CreateBr(continueBB);16121613CGF.EmitBlock(continueBB);1614}16151616struct CallDtorDeleteConditional final : EHScopeStack::Cleanup {1617llvm::Value *ShouldDeleteCondition;16181619public:1620CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition)1621: ShouldDeleteCondition(ShouldDeleteCondition) {1622assert(ShouldDeleteCondition != nullptr);1623}16241625void Emit(CodeGenFunction &CGF, Flags flags) override {1626EmitConditionalDtorDeleteCall(CGF, ShouldDeleteCondition,1627/*ReturnAfterDelete*/false);1628}1629};16301631class DestroyField final : public EHScopeStack::Cleanup {1632const FieldDecl *field;1633CodeGenFunction::Destroyer *destroyer;1634bool useEHCleanupForArray;16351636public:1637DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,1638bool useEHCleanupForArray)1639: field(field), destroyer(destroyer),1640useEHCleanupForArray(useEHCleanupForArray) {}16411642void Emit(CodeGenFunction &CGF, Flags flags) override {1643// Find the address of the field.1644Address thisValue = CGF.LoadCXXThisAddress();1645QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());1646LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);1647LValue LV = CGF.EmitLValueForField(ThisLV, field);1648assert(LV.isSimple());16491650CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,1651flags.isForNormalCleanup() && useEHCleanupForArray);1652}1653};16541655class DeclAsInlineDebugLocation {1656CGDebugInfo *DI;1657llvm::MDNode *InlinedAt;1658std::optional<ApplyDebugLocation> 
Location;16591660public:1661DeclAsInlineDebugLocation(CodeGenFunction &CGF, const NamedDecl &Decl)1662: DI(CGF.getDebugInfo()) {1663if (!DI)1664return;1665InlinedAt = DI->getInlinedAt();1666DI->setInlinedAt(CGF.Builder.getCurrentDebugLocation());1667Location.emplace(CGF, Decl.getLocation());1668}16691670~DeclAsInlineDebugLocation() {1671if (!DI)1672return;1673Location.reset();1674DI->setInlinedAt(InlinedAt);1675}1676};16771678static void EmitSanitizerDtorCallback(1679CodeGenFunction &CGF, StringRef Name, llvm::Value *Ptr,1680std::optional<CharUnits::QuantityType> PoisonSize = {}) {1681CodeGenFunction::SanitizerScope SanScope(&CGF);1682// Pass in void pointer and size of region as arguments to runtime1683// function1684SmallVector<llvm::Value *, 2> Args = {Ptr};1685SmallVector<llvm::Type *, 2> ArgTypes = {CGF.VoidPtrTy};16861687if (PoisonSize.has_value()) {1688Args.emplace_back(llvm::ConstantInt::get(CGF.SizeTy, *PoisonSize));1689ArgTypes.emplace_back(CGF.SizeTy);1690}16911692llvm::FunctionType *FnType =1693llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false);1694llvm::FunctionCallee Fn = CGF.CGM.CreateRuntimeFunction(FnType, Name);16951696CGF.EmitNounwindRuntimeCall(Fn, Args);1697}16981699static void1700EmitSanitizerDtorFieldsCallback(CodeGenFunction &CGF, llvm::Value *Ptr,1701CharUnits::QuantityType PoisonSize) {1702EmitSanitizerDtorCallback(CGF, "__sanitizer_dtor_callback_fields", Ptr,1703PoisonSize);1704}17051706/// Poison base class with a trivial destructor.1707struct SanitizeDtorTrivialBase final : EHScopeStack::Cleanup {1708const CXXRecordDecl *BaseClass;1709bool BaseIsVirtual;1710SanitizeDtorTrivialBase(const CXXRecordDecl *Base, bool BaseIsVirtual)1711: BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}17121713void Emit(CodeGenFunction &CGF, Flags flags) override {1714const CXXRecordDecl *DerivedClass =1715cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();17161717Address Addr = CGF.GetAddressOfDirectBaseInCompleteClass(1718CGF.LoadCXXThisAddress(), 
DerivedClass, BaseClass, BaseIsVirtual);

    const ASTRecordLayout &BaseLayout =
        CGF.getContext().getASTRecordLayout(BaseClass);
    CharUnits BaseSize = BaseLayout.getSize();

    if (!BaseSize.isPositive())
      return;

    // Use the base class declaration location as inline DebugLocation. All
    // fields of the class are destroyed.
    DeclAsInlineDebugLocation InlineHere(CGF, *BaseClass);
    EmitSanitizerDtorFieldsCallback(CGF, Addr.emitRawPointer(CGF),
                                    BaseSize.getQuantity());

    // Prevent the current stack frame from disappearing from the stack trace.
    CGF.CurFn->addFnAttr("disable-tail-calls", "true");
  }
};

/// Cleanup that poisons a contiguous run of trivially-destructible fields
/// [StartIndex, EndIndex) for -fsanitize-memory-use-after-dtor.
class SanitizeDtorFieldRange final : public EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;
  unsigned StartIndex;
  unsigned EndIndex;

public:
  SanitizeDtorFieldRange(const CXXDestructorDecl *Dtor, unsigned StartIndex,
                         unsigned EndIndex)
      : Dtor(Dtor), StartIndex(StartIndex), EndIndex(EndIndex) {}

  // Generate function call for handling object poisoning.
  // Disables tail call elimination, to prevent the current stack frame
  // from disappearing from the stack trace.
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    const ASTContext &Context = CGF.getContext();
    const ASTRecordLayout &Layout =
        Context.getASTRecordLayout(Dtor->getParent());

    // It's a first trivial field so it should be at the beginning of a char,
    // still round up start offset just in case.
    CharUnits PoisonStart = Context.toCharUnitsFromBits(
        Layout.getFieldOffset(StartIndex) + Context.getCharWidth() - 1);
    llvm::ConstantInt *OffsetSizePtr =
        llvm::ConstantInt::get(CGF.SizeTy, PoisonStart.getQuantity());

    llvm::Value *OffsetPtr =
        CGF.Builder.CreateGEP(CGF.Int8Ty, CGF.LoadCXXThis(), OffsetSizePtr);

    CharUnits PoisonEnd;
    if (EndIndex >= Layout.getFieldCount()) {
      // Range runs to the end of the object: poison up to the non-virtual
      // size (virtual bases are handled separately).
      PoisonEnd = Layout.getNonVirtualSize();
    } else {
      PoisonEnd =
          Context.toCharUnitsFromBits(Layout.getFieldOffset(EndIndex));
    }
    CharUnits PoisonSize = PoisonEnd - PoisonStart;
    if (!PoisonSize.isPositive())
      return;

    // Use the top field declaration location as inline DebugLocation.
    DeclAsInlineDebugLocation InlineHere(
        CGF, **std::next(Dtor->getParent()->field_begin(), StartIndex));
    EmitSanitizerDtorFieldsCallback(CGF, OffsetPtr, PoisonSize.getQuantity());

    // Prevent the current stack frame from disappearing from the stack trace.
    CGF.CurFn->addFnAttr("disable-tail-calls", "true");
  }
};

/// Cleanup that poisons the vtable pointer of a dynamic class for
/// -fsanitize-memory-use-after-dtor.
class SanitizeDtorVTable final : public EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;

public:
  SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}

  // Generate function call for handling vtable pointer poisoning.
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    assert(Dtor->getParent()->isDynamicClass());
    (void)Dtor; // Only used by the assert above in release builds.
    // Poison vtable and vtable ptr if they exist for this class.
    llvm::Value *VTablePtr = CGF.LoadCXXThis();

    // Pass in void pointer and size of region as arguments to runtime
    // function
    EmitSanitizerDtorCallback(CGF, "__sanitizer_dtor_callback_vptr",
                              VTablePtr);
  }
};

/// Accumulates adjacent trivially-destructible fields into a single
/// SanitizeDtorFieldRange cleanup instead of one cleanup per field.
class SanitizeDtorCleanupBuilder {
  ASTContext &Context;
  EHScopeStack &EHStack;
  const CXXDestructorDecl *DD;
  // First field index of the currently open trivial-field run, if any.
  std::optional<unsigned> StartIndex;

public:
  SanitizeDtorCleanupBuilder(ASTContext &Context, EHScopeStack &EHStack,
                             const CXXDestructorDecl *DD)
      : Context(Context), EHStack(EHStack), DD(DD), StartIndex(std::nullopt) {}
  void PushCleanupForField(const FieldDecl *Field) {
    if (isEmptyFieldForLayout(Context, Field))
      return;
    unsigned FieldIndex = Field->getFieldIndex();
    if (FieldHasTrivialDestructorBody(Context, Field)) {
      // Extend (or open) the current run of trivial fields.
      if (!StartIndex)
        StartIndex = FieldIndex;
    } else if (StartIndex) {
      // A non-trivial field ends the run; flush [StartIndex, FieldIndex).
      EHStack.pushCleanup<SanitizeDtorFieldRange>(NormalAndEHCleanup, DD,
                                                  *StartIndex, FieldIndex);
      StartIndex = std::nullopt;
    }
  }
  void End() {
    // Flush a trailing run; -1 (== UINT_MAX) means "to the end of the object".
    if (StartIndex)
      EHStack.pushCleanup<SanitizeDtorFieldRange>(NormalAndEHCleanup, DD,
                                                  *StartIndex, -1);
  }
};
} // end anonymous namespace

/// Emit all code that comes at the end of class's
/// destructor. This is to call destructors on members and base classes
/// in reverse order of their construction.
///
/// For a deleting destructor, this also handles the case where a destroying
/// operator delete completely overrides the definition.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) &&
         "Should not emit dtor epilogue for non-exported trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    if (CXXStructorImplicitParamValue) {
      // If there is an implicit param to the deleting dtor, it's a boolean
      // telling whether this is a deleting destructor.
      if (DD->getOperatorDelete()->isDestroyingOperatorDelete())
        EmitConditionalDtorDeleteCall(*this, CXXStructorImplicitParamValue,
                                      /*ReturnAfterDelete*/true);
      else
        EHStack.pushCleanup<CallDtorDeleteConditional>(
            NormalAndEHCleanup, CXXStructorImplicitParamValue);
    } else {
      if (DD->getOperatorDelete()->isDestroyingOperatorDelete()) {
        // A destroying operator delete is responsible for the destruction
        // itself, so just call it and branch to the function exit.
        const CXXRecordDecl *ClassDecl = DD->getParent();
        EmitDeleteCall(DD->getOperatorDelete(),
                       LoadThisForDtorDelete(*this, DD),
                       getContext().getTagDeclType(ClassDecl));
        EmitBranchThroughCleanup(ReturnBlock);
      } else {
        EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
      }
    }
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {
    // Poison the vtable pointer such that access after the base
    // and member destructors are invoked is invalid.
    if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
        SanOpts.has(SanitizerKind::Memory) && ClassDecl->getNumVBases() &&
        ClassDecl->isPolymorphic())
      EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (const auto &Base : ClassDecl->vbases()) {
      auto *BaseClassDecl =
          cast<CXXRecordDecl>(Base.getType()->castAs<RecordType>()->getDecl());

      if (BaseClassDecl->hasTrivialDestructor()) {
        // Under SanitizeMemoryUseAfterDtor, poison the trivial base class
        // memory. For non-trivial base classes the same is done in the class
        // destructor.
        if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
            SanOpts.has(SanitizerKind::Memory) && !BaseClassDecl->isEmpty())
          EHStack.pushCleanup<SanitizeDtorTrivialBase>(NormalAndEHCleanup,
                                                       BaseClassDecl,
                                                       /*BaseIsVirtual*/ true);
      } else {
        EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, BaseClassDecl,
                                          /*BaseIsVirtual*/ true);
      }
    }

    return;
  }

  assert(DtorType == Dtor_Base);
  // Poison the vtable pointer if it has no virtual bases, but inherits
  // virtual functions.
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
      SanOpts.has(SanitizerKind::Memory) && !ClassDecl->getNumVBases() &&
      ClassDecl->isPolymorphic())
    EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

  // Destroy non-virtual bases.
  for (const auto &Base : ClassDecl->bases()) {
    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    if (BaseClassDecl->hasTrivialDestructor()) {
      if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
          SanOpts.has(SanitizerKind::Memory) && !BaseClassDecl->isEmpty())
        EHStack.pushCleanup<SanitizeDtorTrivialBase>(NormalAndEHCleanup,
                                                     BaseClassDecl,
                                                     /*BaseIsVirtual*/ false);
    } else {
      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, BaseClassDecl,
                                        /*BaseIsVirtual*/ false);
    }
  }

  // Poison fields such that access after their destructors are
  // invoked, and before the base class destructor runs, is invalid.
  bool SanitizeFields = CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
                        SanOpts.has(SanitizerKind::Memory);
  SanitizeDtorCleanupBuilder SanitizeBuilder(getContext(), EHStack, DD);

  // Destroy direct fields.
  for (const auto *Field : ClassDecl->fields()) {
    if (SanitizeFields)
      SanitizeBuilder.PushCleanupForField(Field);

    QualType type = Field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind)
      continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion())
      continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(
        cleanupKind, Field, getDestroyer(dtorKind), cleanupKind & EHCleanup);
  }

  if (SanitizeFields)
    SanitizeBuilder.End();
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(
    const CXXConstructorDecl *ctor, const ArrayType *arrayType,
    Address arrayBegin, const CXXConstructExpr *E, bool NewPointerIsChecked,
    bool zeroInitialize) {
  QualType elementType;
  // Compute the number of elements (flattening nested arrays) and delegate
  // to the count-based overload.
  llvm::Value *numElements =
      emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin, E,
                             NewPointerIsChecked, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param arrayBase a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                                 llvm::Value *numElements,
                                                 Address arrayBase,
                                                 const CXXConstructExpr *E,
                                                 bool NewPointerIsChecked,
                                                 bool zeroInitialize) {
  // It's legal for numElements to be zero.  This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays.  There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = nullptr;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

  // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    // Both successors are loopBB for now; successor 0 is patched to the
    // continuation block once it exists (see setSuccessor below).
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Type *elementType = arrayBase.getElementType();
  llvm::Value *arrayBegin = arrayBase.emitRawPointer(*this);
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(
      elementType, arrayBegin, numElements, "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  // The alignment of the base, adjusted by the size of a single element,
  // provides a conservative estimate of the alignment of every element.
  // (This assumes we never start tracking offsetted alignments.)
  //
  // Note that these are complete objects and so we don't need to
  // use the non-virtual size or alignment.
  QualType type = getContext().getTypeDeclType(ctor->getParent());
  CharUnits eltAlignment =
    arrayBase.getAlignment()
             .alignmentOfArrayElement(getContext().getTypeSizeInChars(type));
  Address curAddr = Address(cur, elementType, eltAlignment);

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(curAddr, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment,
                                     *destroyer);
    }
    auto currAVS = AggValueSlot::forAddr(
        curAddr, type.getQualifiers(), AggValueSlot::IsDestructed,
        AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
        AggValueSlot::DoesNotOverlap, AggValueSlot::IsNotZeroed,
        NewPointerIsChecked ? AggValueSlot::IsSanitizerChecked
                            : AggValueSlot::IsNotSanitizerChecked);
    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false,
                           /*Delegating=*/false, currAVS, E);
  }

  // Go to the next element.
  llvm::Value *next = Builder.CreateInBoundsGEP(
      elementType, cur, llvm::ConstantInt::get(SizeTy, 1), "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}

/// Destroy the object at \p addr by calling its (non-trivial)
/// complete-object destructor. Matches the CodeGenFunction::Destroyer
/// signature so it can be used as a partial-array cleanup callback.
void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       Address addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            /*Delegating=*/false, addr, type);
}

/// Emit a call to constructor \p D for the object described by \p ThisAVS,
/// evaluating the arguments of the construct-expression \p E.
void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating,
                                             AggValueSlot ThisAVS,
                                             const CXXConstructExpr *E) {
  CallArgList Args;
  Address This = ThisAVS.getAddress();
  LangAS SlotAS = ThisAVS.getQualifiers().getAddressSpace();
  LangAS ThisAS = D->getFunctionObjectParameterType().getAddressSpace();
  llvm::Value *ThisPtr =
      getAsNaturalPointerTo(This, D->getThisType()->getPointeeType());

  // If the slot and the 'this' parameter live in different address spaces,
  // cast the pointer to the address space the constructor expects.
  if (SlotAS != ThisAS) {
    unsigned TargetThisAS = getContext().getTargetAddressSpace(ThisAS);
    llvm::Type *NewType =
        llvm::PointerType::get(getLLVMContext(), TargetThisAS);
    ThisPtr = getTargetHooks().performAddrSpaceCast(*this, ThisPtr, ThisAS,
                                                    SlotAS, NewType);
  }

  // Push the this ptr.
  Args.add(RValue::get(ThisPtr), D->getThisType());

  // If this is a trivial constructor, emit a memcpy now before we lose
  // the alignment information on the argument.
  // FIXME: It would be better to preserve alignment information into CallArg.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");

    const Expr *Arg = E->getArg(0);
    LValue Src = EmitLValue(Arg);
    QualType DestTy = getContext().getTypeDeclType(D->getParent());
    LValue Dest = MakeAddrLValue(This, DestTy);
    EmitAggregateCopyCtor(Dest, Src, ThisAVS.mayOverlap());
    return;
  }

  // Add the rest of the user-supplied arguments.
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
  EvaluationOrder Order = E->isListInitialization()
                              ? EvaluationOrder::ForceLeftToRight
                              : EvaluationOrder::Default;
  EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor(),
               /*ParamsToSkip*/ 0, Order);

  EmitCXXConstructorCall(D, Type, ForVirtualBase, Delegating, This, Args,
                         ThisAVS.mayOverlap(), E->getExprLoc(),
                         ThisAVS.isSanitizerChecked());
}

/// Returns whether the arguments of an inheriting constructor can be
/// forwarded directly to the base-class constructor call.
static bool canEmitDelegateCallArgs(CodeGenFunction &CGF,
                                    const CXXConstructorDecl *Ctor,
                                    CXXCtorType Type, CallArgList &Args) {
  // We can't forward a variadic call.
  if (Ctor->isVariadic())
    return false;

  if (CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
    // If the parameters are callee-cleanup, it's not safe to forward.
    for (auto *P : Ctor->parameters())
      if (P->needsDestruction(CGF.getContext()))
        return false;

    // Likewise if they're inalloca.
    const CGFunctionInfo &Info =
        CGF.CGM.getTypes().arrangeCXXConstructorCall(Args, Ctor, Type, 0, 0);
    if (Info.usesInAlloca())
      return false;
  }

  // Anything else should be OK.
  return true;
}

/// Emit a call to constructor \p D with an already-built argument list
/// \p Args (the 'this' pointer is Args[0]).
void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating,
                                             Address This,
                                             CallArgList &Args,
                                             AggValueSlot::Overlap_t Overlap,
                                             SourceLocation Loc,
                                             bool NewPointerIsChecked) {
  const CXXRecordDecl *ClassDecl = D->getParent();

  if (!NewPointerIsChecked)
    EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, Loc, This,
                  getContext().getRecordType(ClassDecl), CharUnits::Zero());

  // A trivial default constructor generates no code at all.
  if (D->isTrivial() && D->isDefaultConstructor()) {
    assert(Args.size() == 1 && "trivial default ctor with args");
    return;
  }

  // If this is a trivial constructor, just emit what's needed. If this is a
  // union copy constructor, we must emit a memcpy, because the AST does not
  // model that copy.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(Args.size() == 2 && "unexpected argcount for trivial ctor");
    QualType SrcTy = D->getParamDecl(0)->getType().getNonReferenceType();
    Address Src = makeNaturalAddressForPointer(
        Args[1].getRValue(*this).getScalarVal(), SrcTy);
    LValue SrcLVal = MakeAddrLValue(Src, SrcTy);
    QualType DestTy = getContext().getTypeDeclType(ClassDecl);
    LValue DestLVal = MakeAddrLValue(This, DestTy);
    EmitAggregateCopyCtor(DestLVal, SrcLVal, Overlap);
    return;
  }

  bool PassPrototypeArgs = true;
  // Check whether we can actually emit the constructor before trying to do so.
  if (auto Inherited = D->getInheritedConstructor()) {
    PassPrototypeArgs = getTypes().inheritingCtorHasParams(Inherited, Type);
    if (PassPrototypeArgs && !canEmitDelegateCallArgs(*this, D, Type, Args)) {
      EmitInlinedInheritingCXXConstructorCall(D, Type, ForVirtualBase,
                                              Delegating, Args);
      return;
    }
  }

  // Insert any ABI-specific implicit constructor arguments.
  CGCXXABI::AddedStructorArgCounts ExtraArgs =
      CGM.getCXXABI().addImplicitConstructorArgs(*this, D, Type, ForVirtualBase,
                                                 Delegating, Args);

  // Emit the call.
  llvm::Constant *CalleePtr = CGM.getAddrOfCXXStructor(GlobalDecl(D, Type));
  const CGFunctionInfo &Info = CGM.getTypes().arrangeCXXConstructorCall(
      Args, D, Type, ExtraArgs.Prefix, ExtraArgs.Suffix, PassPrototypeArgs);
  CGCallee Callee = CGCallee::forDirect(CalleePtr, GlobalDecl(D, Type));
  EmitCall(Info, Callee, ReturnValueSlot(), Args, nullptr, false, Loc);

  // Generate vtable assumptions if we're constructing a complete object
  // with a vtable.  We don't do this for base subobjects for two reasons:
  // first, it's incorrect for classes with virtual bases, and second, we're
  // about to overwrite the vptrs anyway.
  // We also have to make sure if we can refer to vtable:
  // - Otherwise we can refer to vtable if it's safe to speculatively emit.
  // FIXME: If vtable is used by ctor/dtor, or if vtable is external and we are
  // sure that definition of vtable is not hidden,
  // then we are always safe to refer to it.
  // FIXME: It looks like InstCombine is very inefficient on dealing with
  // assumes. Make assumption loads require -fstrict-vtable-pointers
  // temporarily.
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      ClassDecl->isDynamicClass() && Type != Ctor_Base &&
      CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl) &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    EmitVTableAssumptionLoads(ClassDecl, This);
}

/// Emit a call to the base-class constructor \p D named by a C++11
/// inheriting-constructor declaration, forwarding the current
/// constructor's parameters.
void CodeGenFunction::EmitInheritedCXXConstructorCall(
    const CXXConstructorDecl *D, bool ForVirtualBase, Address This,
    bool InheritedFromVBase, const CXXInheritedCtorInitExpr *E) {
  CallArgList Args;
  CallArg ThisArg(RValue::get(getAsNaturalPointerTo(
                      This, D->getThisType()->getPointeeType())),
                  D->getThisType());

  // Forward the parameters.
  if (InheritedFromVBase &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    // Nothing to do; this construction is not responsible for constructing
    // the base class containing the inherited constructor.
    // FIXME: Can we just pass undef's for the remaining arguments if we don't
    // have constructor variants?
    Args.push_back(ThisArg);
  } else if (!CXXInheritedCtorInitExprArgs.empty()) {
    // The inheriting constructor was inlined; just inject its arguments.
    assert(CXXInheritedCtorInitExprArgs.size() >= D->getNumParams() &&
           "wrong number of parameters for inherited constructor call");
    Args = CXXInheritedCtorInitExprArgs;
    Args[0] = ThisArg;
  } else {
    // The inheriting constructor was not inlined. Emit delegating arguments.
    Args.push_back(ThisArg);
    const auto *OuterCtor = cast<CXXConstructorDecl>(CurCodeDecl);
    assert(OuterCtor->getNumParams() == D->getNumParams());
    assert(!OuterCtor->isVariadic() && "should have been inlined");

    for (const auto *Param : OuterCtor->parameters()) {
      assert(getContext().hasSameUnqualifiedType(
          OuterCtor->getParamDecl(Param->getFunctionScopeIndex())->getType(),
          Param->getType()));
      EmitDelegateCallArg(Args, Param, E->getLocation());

      // Forward __attribute__(pass_object_size).
      if (Param->hasAttr<PassObjectSizeAttr>()) {
        auto *POSParam = SizeArguments[Param];
        assert(POSParam && "missing pass_object_size value for forwarding");
        EmitDelegateCallArg(Args, POSParam, E->getLocation());
      }
    }
  }

  EmitCXXConstructorCall(D, Ctor_Base, ForVirtualBase, /*Delegating*/false,
                         This, Args, AggValueSlot::MayOverlap,
                         E->getLocation(), /*NewPointerIsChecked*/true);
}

/// Emit the body of an inheriting constructor inline (used when the
/// arguments cannot be forwarded as a real call; see
/// canEmitDelegateCallArgs).
void CodeGenFunction::EmitInlinedInheritingCXXConstructorCall(
    const CXXConstructorDecl *Ctor, CXXCtorType CtorType, bool ForVirtualBase,
    bool Delegating, CallArgList &Args) {
  GlobalDecl GD(Ctor, CtorType);
  InlinedInheritingConstructorScope Scope(*this, GD);
  ApplyInlineDebugLocation DebugScope(*this, GD);
  RunCleanupsScope RunCleanups(*this);

  // Save the arguments to be passed to the inherited constructor.
  CXXInheritedCtorInitExprArgs = Args;

  FunctionArgList Params;
  QualType RetType = BuildFunctionArgList(CurGD, Params);
  FnRetTy = RetType;

  // Insert any ABI-specific implicit constructor arguments.
  CGM.getCXXABI().addImplicitConstructorArgs(*this, Ctor, CtorType,
                                             ForVirtualBase, Delegating, Args);

  // Emit a simplified prolog. We only need to emit the implicit params.
  assert(Args.size() >= Params.size() && "too few arguments for call");
  for (unsigned I = 0, N = Args.size(); I != N; ++I) {
    if (I < Params.size() && isa<ImplicitParamDecl>(Params[I])) {
      const RValue &RV = Args[I].getRValue(*this);
      assert(!RV.isComplex() && "complex indirect params not supported");
      ParamValue Val = RV.isScalar()
                           ? ParamValue::forDirect(RV.getScalarVal())
                           : ParamValue::forIndirect(RV.getAggregateAddress());
      EmitParmDecl(*Params[I], Val, I + 1);
    }
  }

  // Create a return value slot if the ABI implementation wants one.
  // FIXME: This is dumb, we should ask the ABI not to try to set the return
  // value instead.
  if (!RetType->isVoidType())
    ReturnValue = CreateIRTemp(RetType, "retval.inhctor");

  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
  CXXThisValue = CXXABIThisValue;

  // Directly emit the constructor initializers.
  EmitCtorPrologue(Ctor, CtorType, Params);
}

/// Emit an llvm.assume that the vptr stored at the subobject described by
/// \p Vptr equals the known vtable global, enabling devirtualization.
void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) {
  llvm::Value *VTableGlobal =
      CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass);
  if (!VTableGlobal)
    return;

  // We can just use the base offset in the complete class.
  CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset();

  if (!NonVirtualOffset.isZero())
    This =
        ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr,
                                        Vptr.VTableClass, Vptr.NearestVBase);

  llvm::Value *VPtrValue =
      GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass);
  llvm::Value *Cmp =
      Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables");
  Builder.CreateAssumption(Cmp);
}

/// Emit vtable assumption loads for every vptr of \p ClassDecl.
void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl,
                                                Address This) {
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl))
    for (const VPtr &Vptr : getVTablePointers(ClassDecl))
      EmitVTableAssumptionLoad(Vptr, This);
}

/// Emit a call to the implicitly-defined copy constructor \p D, copying
/// from \p Src into \p This; extra arguments of \p E beyond the source
/// object are forwarded.
void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                                Address This, Address Src,
                                                const CXXConstructExpr *E) {
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(getAsNaturalPointerTo(This, D->getThisType())),
           D->getThisType());

  // Push the src ptr.
  QualType QT = *(FPT->param_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  llvm::Value *Val = getAsNaturalPointerTo(Src, D->getThisType());
  llvm::Value *SrcVal = Builder.CreateBitCast(Val, t);
  Args.add(RValue::get(SrcVal), QT);

  // Skip over first argument (Src).
  EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(),
               /*ParamsToSkip*/ 1);

  EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase*/false,
                         /*Delegating*/false, This, Args,
                         AggValueSlot::MayOverlap, E->getExprLoc(),
                         /*NewPointerIsChecked*/false);
}

/// From within one constructor variant, emit a call to another variant of
/// the same constructor \p Ctor, forwarding the current function's
/// parameters (skipping the VTT where the ABI inserts one).
void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args,
                                                SourceLocation Loc) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  Address This = LoadCXXThisAddress();
  DelegateArgs.add(RValue::get(getAsNaturalPointerTo(
                       This, (*I)->getType()->getPointeeType())),
                   (*I)->getType());
  ++I;

  // FIXME: The location of the VTT parameter in the parameter list is
  // specific to the Itanium ABI and shouldn't be hardcoded here.
  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    assert(I != E && "cannot skip vtt parameter, already done with args");
    assert((*I)->getType()->isPointerType() &&
           "skipping parameter not of vtt type");
    ++I;
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    // FIXME: per-argument source location
    EmitDelegateCallArg(DelegateArgs, param, Loc);
  }

  EmitCXXConstructorCall(Ctor, CtorType, /*ForVirtualBase=*/false,
                         /*Delegating=*/true, This, DelegateArgs,
                         AggValueSlot::MayOverlap, Loc,
                         /*NewPointerIsChecked=*/true);
}

namespace {
/// EH cleanup that destroys the partially-constructed object if the target
/// of a delegating constructor throws after the delegated-to constructor
/// completed.
struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;
  Address Addr;
  CXXDtorType Type;

  CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr,
                         CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    // We are calling the destructor from within the constructor.
    // Therefore, "this" should have the expected type.
    QualType ThisTy = Dtor->getFunctionObjectParameterType();
    CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                              /*Delegating=*/true, Addr, ThisTy);
  }
};
} // end anonymous namespace

/// Emit a C++11 delegating constructor call: evaluate the single
/// mem-initializer of \p Ctor into 'this', and register a cleanup that
/// destroys the object if the delegating constructor's body later throws.
void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  Address ThisPtr = LoadCXXThisAddress();

  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased,
                          AggValueSlot::MayOverlap,
                          AggValueSlot::IsNotZeroed,
                          // Checks are made by the code that calls constructor.
                          AggValueSlot::IsSanitizerChecked);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
        CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}

/// Emit a call to destructor \p DD on the object at \p This, delegating to
/// the C++ ABI implementation.
void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            bool Delegating, Address This,
                                            QualType ThisTy) {
  CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase,
                                     Delegating, This, ThisTy);
}

namespace {
/// Cleanup that runs the complete-object destructor for a local object.
struct CallLocalDtor final : EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;
  Address Addr;
  QualType Ty;

  CallLocalDtor(const CXXDestructorDecl *D, Address Addr, QualType Ty)
      : Dtor(D), Addr(Addr), Ty(Ty) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false,
                              /*Delegating=*/false, Addr, Ty);
  }
};
} // end anonymous namespace

/// Push a cleanup that runs destructor \p D on the object at \p Addr.
void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            QualType T, Address Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr, T);
}

/// Push a destructor cleanup for \p T if it is a class type with a
/// non-trivial destructor; otherwise do nothing.
void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, T, Addr);
}

/// Store the vtable address point for the subobject described by \p Vptr
/// into the object currently being constructed/destroyed.
void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) {
  // Compute the address point.
  llvm::Value *VTableAddressPoint =
      CGM.getCXXABI().getVTableAddressPointInStructor(
          *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase);

  if (!VTableAddressPoint)
    return;

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = nullptr;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.

    VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset(
        *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase);
    NonVirtualOffset = Vptr.OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Vptr.Base.getBaseOffset();
  }

  // Apply the offsets.
  Address VTableField = LoadCXXThisAddress();
  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(
        *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass,
        Vptr.NearestVBase);

  // Finally, store the address point. Use the same LLVM types as the field to
  // support optimization.
  unsigned GlobalsAS = CGM.getDataLayout().getDefaultGlobalsAddressSpace();
  llvm::Type *PtrTy = llvm::PointerType::get(CGM.getLLVMContext(), GlobalsAS);
  // vtable field is derived from `this` pointer, therefore they should be in
  // the same addr space. Note that this might not be LLVM address space 0.
  VTableField = VTableField.withElementType(PtrTy);

  // Under pointer authentication, sign the address point before storing it.
  if (auto AuthenticationInfo = CGM.getVTablePointerAuthInfo(
          this, Vptr.Base.getBase(), VTableField.emitRawPointer(*this)))
    VTableAddressPoint =
        EmitPointerAuthSign(*AuthenticationInfo, VTableAddressPoint);

  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(PtrTy);
  CGM.DecorateInstructionWithTBAA(Store, TBAAInfo);
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass);
}

/// Collect every vptr that must be initialized for an object of type
/// \p VTableClass.
CodeGenFunction::VPtrsVector
CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) {
  CodeGenFunction::VPtrsVector VPtrsResult;
  VisitedVirtualBasesSetTy VBases;
  getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()),
                    /*NearestVBase=*/nullptr,
                    /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                    /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases,
                    VPtrsResult);
  return VPtrsResult;
}

/// Recursive worker for getVTablePointers: walks the (virtual and
/// non-virtual) base hierarchy of \p Base accumulating vptr descriptors
/// into \p Vptrs.
void CodeGenFunction::getVTablePointers(BaseSubobject Base,
                                        const CXXRecordDecl *NearestVBase,
                                        CharUnits OffsetFromNearestVBase,
                                        bool BaseIsNonVirtualPrimaryBase,
                                        const CXXRecordDecl *VTableClass,
                                        VisitedVirtualBasesSetTy &VBases,
                                        VPtrsVector &Vptrs) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass};
    Vptrs.push_back(Vptr);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (const auto &I : RD->bases()) {
    auto *BaseDecl =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I.isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl).second)
        continue;

      // Virtual base offsets are relative to the most-derived class layout.
      const ASTRecordLayout &Layout =
          getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
          OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    getVTablePointers(
        BaseSubobject(BaseDecl, BaseOffset),
        I.isVirtual() ? BaseDecl : NearestVBase, BaseOffsetFromNearestVBase,
        BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs);
  }
}

/// Initialize all vtable pointers of the object of dynamic class \p RD
/// currently being constructed.
void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Initialize the vtable pointers for this class and all of its bases.
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD))
    for (const VPtr &Vptr : getVTablePointers(RD))
      InitializeVTablePointer(Vptr);

  if (RD->getNumVBases())
    CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD);
}

/// Load the vtable pointer from the object at \p This, applying TBAA,
/// pointer authentication, and invariant-group metadata as configured.
llvm::Value *CodeGenFunction::GetVTablePtr(Address This,
                                           llvm::Type *VTableTy,
                                           const CXXRecordDecl *RD,
                                           VTableAuthMode AuthMode) {
  Address VTablePtrSrc = This.withElementType(VTableTy);
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTableTy);
  CGM.DecorateInstructionWithTBAA(VTable, TBAAInfo);

  if (auto AuthenticationInfo =
          CGM.getVTablePointerAuthInfo(this, RD, This.emitRawPointer(*this))) {
    if (AuthMode != VTableAuthMode::UnsafeUbsanStrip) {
      VTable = cast<llvm::Instruction>(
          EmitPointerAuthAuth(*AuthenticationInfo, VTable));
      if (AuthMode == VTableAuthMode::MustTrap) {
        // This is clearly suboptimal but until we have an ability
        // to rely on the authentication intrinsic trapping and force
        // an authentication to occur we don't really have a choice.
        VTable =
            cast<llvm::Instruction>(Builder.CreateBitCast(VTable, Int8PtrTy));
        Builder.CreateLoad(RawAddress(VTable, Int8Ty, CGM.getPointerAlign()),
                           /* IsVolatile */ true);
      }
    } else {
      // UBSan strip mode: remove the signature without authenticating.
      VTable = cast<llvm::Instruction>(EmitPointerAuthAuth(
          CGPointerAuthInfo(0, PointerAuthenticationMode::Strip, false, false,
                            nullptr),
          VTable));
    }
  }

  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    CGM.DecorateInstructionWithInvariantGroup(VTable, RD);

  return VTable;
}

// If a class has a single non-virtual base and does not introduce or override
// virtual member functions or fields, it will have the same layout as its base.
// This function returns the least derived such class.
//
// Casting an instance of a base class to such a derived class is technically
// undefined behavior, but it is a relatively common hack for introducing member
// functions on class instances with specific properties (e.g. llvm::Operator)
// that works under most compilers and should not have security implications, so
// we allow it by default. It can be disabled with -fsanitize=cfi-cast-strict.
static const CXXRecordDecl *
LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) {
  if (!RD->field_empty())
    return RD;

  if (RD->getNumVBases() != 0)
    return RD;

  if (RD->getNumBases() != 1)
    return RD;

  for (const CXXMethodDecl *MD : RD->methods()) {
    if (MD->isVirtual()) {
      // Virtual member functions are only ok if they are implicit destructors
      // because the implicit destructor will have the same semantics as the
      // base class's destructor if no fields are added.
      if (isa<CXXDestructorDecl>(MD) && MD->isImplicit())
        continue;
      return RD;
    }
  }

  // Recurse into the unique non-virtual base.
  return LeastDerivedClassWithSameLayout(
      RD->bases_begin()->getType()->getAsCXXRecordDecl());
}

/// Emit a CFI vcall check or a whole-program-vtables type test for a
/// virtual call through \p VTable on class \p RD.
void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
                                                   llvm::Value *VTable,
                                                   SourceLocation Loc) {
  if (SanOpts.has(SanitizerKind::CFIVCall))
    EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc);
  else if (CGM.getCodeGenOpts().WholeProgramVTables &&
           // Don't insert type test assumes if we are forcing public
           // visibility.
           !CGM.AlwaysHasLTOVisibilityPublic(RD)) {
    QualType Ty = QualType(RD->getTypeForDecl(), 0);
    llvm::Metadata *MD = CGM.CreateMetadataIdentifierForType(Ty);
    llvm::Value *TypeId =
        llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);

    // If we already know that the call has hidden LTO visibility, emit
    // @llvm.type.test(). Otherwise emit @llvm.public.type.test(), which WPD
    // will convert to @llvm.type.test() if we assert at link time that we have
    // whole program visibility.
    llvm::Intrinsic::ID IID = CGM.HasHiddenLTOVisibility(RD)
                                  ?
llvm::Intrinsic::type_test2780: llvm::Intrinsic::public_type_test;2781llvm::Value *TypeTest =2782Builder.CreateCall(CGM.getIntrinsic(IID), {VTable, TypeId});2783Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest);2784}2785}27862787void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD,2788llvm::Value *VTable,2789CFITypeCheckKind TCK,2790SourceLocation Loc) {2791if (!SanOpts.has(SanitizerKind::CFICastStrict))2792RD = LeastDerivedClassWithSameLayout(RD);27932794EmitVTablePtrCheck(RD, VTable, TCK, Loc);2795}27962797void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T, Address Derived,2798bool MayBeNull,2799CFITypeCheckKind TCK,2800SourceLocation Loc) {2801if (!getLangOpts().CPlusPlus)2802return;28032804auto *ClassTy = T->getAs<RecordType>();2805if (!ClassTy)2806return;28072808const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(ClassTy->getDecl());28092810if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass())2811return;28122813if (!SanOpts.has(SanitizerKind::CFICastStrict))2814ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl);28152816llvm::BasicBlock *ContBlock = nullptr;28172818if (MayBeNull) {2819llvm::Value *DerivedNotNull =2820Builder.CreateIsNotNull(Derived.emitRawPointer(*this), "cast.nonnull");28212822llvm::BasicBlock *CheckBlock = createBasicBlock("cast.check");2823ContBlock = createBasicBlock("cast.cont");28242825Builder.CreateCondBr(DerivedNotNull, CheckBlock, ContBlock);28262827EmitBlock(CheckBlock);2828}28292830llvm::Value *VTable;2831std::tie(VTable, ClassDecl) =2832CGM.getCXXABI().LoadVTablePtr(*this, Derived, ClassDecl);28332834EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc);28352836if (MayBeNull) {2837Builder.CreateBr(ContBlock);2838EmitBlock(ContBlock);2839}2840}28412842void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,2843llvm::Value *VTable,2844CFITypeCheckKind TCK,2845SourceLocation Loc) {2846if (!CGM.getCodeGenOpts().SanitizeCfiCrossDso 
&&2847!CGM.HasHiddenLTOVisibility(RD))2848return;28492850SanitizerMask M;2851llvm::SanitizerStatKind SSK;2852switch (TCK) {2853case CFITCK_VCall:2854M = SanitizerKind::CFIVCall;2855SSK = llvm::SanStat_CFI_VCall;2856break;2857case CFITCK_NVCall:2858M = SanitizerKind::CFINVCall;2859SSK = llvm::SanStat_CFI_NVCall;2860break;2861case CFITCK_DerivedCast:2862M = SanitizerKind::CFIDerivedCast;2863SSK = llvm::SanStat_CFI_DerivedCast;2864break;2865case CFITCK_UnrelatedCast:2866M = SanitizerKind::CFIUnrelatedCast;2867SSK = llvm::SanStat_CFI_UnrelatedCast;2868break;2869case CFITCK_ICall:2870case CFITCK_NVMFCall:2871case CFITCK_VMFCall:2872llvm_unreachable("unexpected sanitizer kind");2873}28742875std::string TypeName = RD->getQualifiedNameAsString();2876if (getContext().getNoSanitizeList().containsType(M, TypeName))2877return;28782879SanitizerScope SanScope(this);2880EmitSanitizerStatReport(SSK);28812882llvm::Metadata *MD =2883CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));2884llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD);28852886llvm::Value *TypeTest = Builder.CreateCall(2887CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, TypeId});28882889llvm::Constant *StaticData[] = {2890llvm::ConstantInt::get(Int8Ty, TCK),2891EmitCheckSourceLocation(Loc),2892EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)),2893};28942895auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD);2896if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && CrossDsoTypeId) {2897EmitCfiSlowPathCheck(M, TypeTest, CrossDsoTypeId, VTable, StaticData);2898return;2899}29002901if (CGM.getCodeGenOpts().SanitizeTrap.has(M)) {2902EmitTrapCheck(TypeTest, SanitizerHandler::CFICheckFail);2903return;2904}29052906llvm::Value *AllVtables = llvm::MetadataAsValue::get(2907CGM.getLLVMContext(),2908llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));2909llvm::Value *ValidVtable = Builder.CreateCall(2910CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, 
AllVtables});2911EmitCheck(std::make_pair(TypeTest, M), SanitizerHandler::CFICheckFail,2912StaticData, {VTable, ValidVtable});2913}29142915bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) {2916if (!CGM.getCodeGenOpts().WholeProgramVTables ||2917!CGM.HasHiddenLTOVisibility(RD))2918return false;29192920if (CGM.getCodeGenOpts().VirtualFunctionElimination)2921return true;29222923if (!SanOpts.has(SanitizerKind::CFIVCall) ||2924!CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIVCall))2925return false;29262927std::string TypeName = RD->getQualifiedNameAsString();2928return !getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall,2929TypeName);2930}29312932llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad(2933const CXXRecordDecl *RD, llvm::Value *VTable, llvm::Type *VTableTy,2934uint64_t VTableByteOffset) {2935SanitizerScope SanScope(this);29362937EmitSanitizerStatReport(llvm::SanStat_CFI_VCall);29382939llvm::Metadata *MD =2940CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));2941llvm::Value *TypeId = llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);29422943llvm::Value *CheckedLoad = Builder.CreateCall(2944CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),2945{VTable, llvm::ConstantInt::get(Int32Ty, VTableByteOffset), TypeId});2946llvm::Value *CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);29472948std::string TypeName = RD->getQualifiedNameAsString();2949if (SanOpts.has(SanitizerKind::CFIVCall) &&2950!getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall,2951TypeName)) {2952EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIVCall),2953SanitizerHandler::CFICheckFail, {}, {});2954}29552956return Builder.CreateBitCast(Builder.CreateExtractValue(CheckedLoad, 0),2957VTableTy);2958}29592960void CodeGenFunction::EmitForwardingCallToLambda(2961const CXXMethodDecl *callOperator, CallArgList &callArgs,2962const CGFunctionInfo *calleeFnInfo, llvm::Constant *calleePtr) 
{2963// Get the address of the call operator.2964if (!calleeFnInfo)2965calleeFnInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);29662967if (!calleePtr)2968calleePtr =2969CGM.GetAddrOfFunction(GlobalDecl(callOperator),2970CGM.getTypes().GetFunctionType(*calleeFnInfo));29712972// Prepare the return slot.2973const FunctionProtoType *FPT =2974callOperator->getType()->castAs<FunctionProtoType>();2975QualType resultType = FPT->getReturnType();2976ReturnValueSlot returnSlot;2977if (!resultType->isVoidType() &&2978calleeFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect &&2979!hasScalarEvaluationKind(calleeFnInfo->getReturnType()))2980returnSlot =2981ReturnValueSlot(ReturnValue, resultType.isVolatileQualified(),2982/*IsUnused=*/false, /*IsExternallyDestructed=*/true);29832984// We don't need to separately arrange the call arguments because2985// the call can't be variadic anyway --- it's impossible to forward2986// variadic arguments.29872988// Now emit our call.2989auto callee = CGCallee::forDirect(calleePtr, GlobalDecl(callOperator));2990RValue RV = EmitCall(*calleeFnInfo, callee, returnSlot, callArgs);29912992// If necessary, copy the returned value into the slot.2993if (!resultType->isVoidType() && returnSlot.isNull()) {2994if (getLangOpts().ObjCAutoRefCount && resultType->isObjCRetainableType()) {2995RV = RValue::get(EmitARCRetainAutoreleasedReturnValue(RV.getScalarVal()));2996}2997EmitReturnOfRValue(RV, resultType);2998} else2999EmitBranchThroughCleanup(ReturnBlock);3000}30013002void CodeGenFunction::EmitLambdaBlockInvokeBody() {3003const BlockDecl *BD = BlockInfo->getBlockDecl();3004const VarDecl *variable = BD->capture_begin()->getVariable();3005const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();3006const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();30073008if (CallOp->isVariadic()) {3009// FIXME: Making this work correctly is nasty because it requires either3010// cloning the body of the call operator or 
making the call operator3011// forward.3012CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function");3013return;3014}30153016// Start building arguments for forwarding call3017CallArgList CallArgs;30183019QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda));3020Address ThisPtr = GetAddrOfBlockDecl(variable);3021CallArgs.add(RValue::get(getAsNaturalPointerTo(ThisPtr, ThisType)), ThisType);30223023// Add the rest of the parameters.3024for (auto *param : BD->parameters())3025EmitDelegateCallArg(CallArgs, param, param->getBeginLoc());30263027assert(!Lambda->isGenericLambda() &&3028"generic lambda interconversion to block not implemented");3029EmitForwardingCallToLambda(CallOp, CallArgs);3030}30313032void CodeGenFunction::EmitLambdaStaticInvokeBody(const CXXMethodDecl *MD) {3033if (MD->isVariadic()) {3034// FIXME: Making this work correctly is nasty because it requires either3035// cloning the body of the call operator or making the call operator3036// forward.3037CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");3038return;3039}30403041const CXXRecordDecl *Lambda = MD->getParent();30423043// Start building arguments for forwarding call3044CallArgList CallArgs;30453046QualType LambdaType = getContext().getRecordType(Lambda);3047QualType ThisType = getContext().getPointerType(LambdaType);3048Address ThisPtr = CreateMemTemp(LambdaType, "unused.capture");3049CallArgs.add(RValue::get(ThisPtr.emitRawPointer(*this)), ThisType);30503051EmitLambdaDelegatingInvokeBody(MD, CallArgs);3052}30533054void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD,3055CallArgList &CallArgs) {3056// Add the rest of the forwarded parameters.3057for (auto *Param : MD->parameters())3058EmitDelegateCallArg(CallArgs, Param, Param->getBeginLoc());30593060const CXXRecordDecl *Lambda = MD->getParent();3061const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();3062// For a generic lambda, find the corresponding 
call operator specialization3063// to which the call to the static-invoker shall be forwarded.3064if (Lambda->isGenericLambda()) {3065assert(MD->isFunctionTemplateSpecialization());3066const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs();3067FunctionTemplateDecl *CallOpTemplate = CallOp->getDescribedFunctionTemplate();3068void *InsertPos = nullptr;3069FunctionDecl *CorrespondingCallOpSpecialization =3070CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos);3071assert(CorrespondingCallOpSpecialization);3072CallOp = cast<CXXMethodDecl>(CorrespondingCallOpSpecialization);3073}30743075// Special lambda forwarding when there are inalloca parameters.3076if (hasInAllocaArg(MD)) {3077const CGFunctionInfo *ImplFnInfo = nullptr;3078llvm::Function *ImplFn = nullptr;3079EmitLambdaInAllocaImplFn(CallOp, &ImplFnInfo, &ImplFn);30803081EmitForwardingCallToLambda(CallOp, CallArgs, ImplFnInfo, ImplFn);3082return;3083}30843085EmitForwardingCallToLambda(CallOp, CallArgs);3086}30873088void CodeGenFunction::EmitLambdaInAllocaCallOpBody(const CXXMethodDecl *MD) {3089if (MD->isVariadic()) {3090// FIXME: Making this work correctly is nasty because it requires either3091// cloning the body of the call operator or making the call operator forward.3092CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");3093return;3094}30953096// Forward %this argument.3097CallArgList CallArgs;3098QualType LambdaType = getContext().getRecordType(MD->getParent());3099QualType ThisType = getContext().getPointerType(LambdaType);3100llvm::Value *ThisArg = CurFn->getArg(0);3101CallArgs.add(RValue::get(ThisArg), ThisType);31023103EmitLambdaDelegatingInvokeBody(MD, CallArgs);3104}31053106void CodeGenFunction::EmitLambdaInAllocaImplFn(3107const CXXMethodDecl *CallOp, const CGFunctionInfo **ImplFnInfo,3108llvm::Function **ImplFn) {3109const CGFunctionInfo &FnInfo =3110CGM.getTypes().arrangeCXXMethodDeclaration(CallOp);3111llvm::Function *CallOpFn 
=3112cast<llvm::Function>(CGM.GetAddrOfFunction(GlobalDecl(CallOp)));31133114// Emit function containing the original call op body. __invoke will delegate3115// to this function.3116SmallVector<CanQualType, 4> ArgTypes;3117for (auto I = FnInfo.arg_begin(); I != FnInfo.arg_end(); ++I)3118ArgTypes.push_back(I->type);3119*ImplFnInfo = &CGM.getTypes().arrangeLLVMFunctionInfo(3120FnInfo.getReturnType(), FnInfoOpts::IsDelegateCall, ArgTypes,3121FnInfo.getExtInfo(), {}, FnInfo.getRequiredArgs());31223123// Create mangled name as if this was a method named __impl. If for some3124// reason the name doesn't look as expected then just tack __impl to the3125// front.3126// TODO: Use the name mangler to produce the right name instead of using3127// string replacement.3128StringRef CallOpName = CallOpFn->getName();3129std::string ImplName;3130if (size_t Pos = CallOpName.find_first_of("<lambda"))3131ImplName = ("?__impl@" + CallOpName.drop_front(Pos)).str();3132else3133ImplName = ("__impl" + CallOpName).str();31343135llvm::Function *Fn = CallOpFn->getParent()->getFunction(ImplName);3136if (!Fn) {3137Fn = llvm::Function::Create(CGM.getTypes().GetFunctionType(**ImplFnInfo),3138llvm::GlobalValue::InternalLinkage, ImplName,3139CGM.getModule());3140CGM.SetInternalFunctionAttributes(CallOp, Fn, **ImplFnInfo);31413142const GlobalDecl &GD = GlobalDecl(CallOp);3143const auto *D = cast<FunctionDecl>(GD.getDecl());3144CodeGenFunction(CGM).GenerateCode(GD, Fn, **ImplFnInfo);3145CGM.SetLLVMFunctionAttributesForDefinition(D, Fn);3146}3147*ImplFn = Fn;3148}314931503151