// Source: llvm/lib/Target/BPF/BPFPreserveStaticOffset.cpp
//===------ BPFPreserveStaticOffset.cpp -----------------------------------===//1//2// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.3// See https://llvm.org/LICENSE.txt for license information.4// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception5//6//===----------------------------------------------------------------------===//7//8// TLDR: replaces llvm.preserve.static.offset + GEP + load / store9// with llvm.bpf.getelementptr.and.load / store10//11// This file implements BPFPreserveStaticOffsetPass transformation.12// This transformation address two BPF verifier specific issues:13//14// (a) Access to the fields of some structural types is allowed only15// using load and store instructions with static immediate offsets.16//17// Examples of such types are `struct __sk_buff` and `struct18// bpf_sock_ops`. This is so because offsets of the fields of19// these structures do not match real offsets in the running20// kernel. During BPF program load LDX and STX instructions21// referring to the fields of these types are rewritten so that22// offsets match real offsets. For this rewrite to happen field23// offsets have to be encoded as immediate operands of the24// instructions.25//26// See kernel/bpf/verifier.c:convert_ctx_access function in the27// Linux kernel source tree for details.28//29// (b) Pointers to context parameters of BPF programs must not be30// modified before access.31//32// During BPF program verification a tag PTR_TO_CTX is tracked for33// register values. In case if register with such tag is modified34// BPF program is not allowed to read or write memory using this35// register. 
See kernel/bpf/verifier.c:check_mem_access function36// in the Linux kernel source tree for details.37//38// The following sequence of the IR instructions:39//40// %x = getelementptr %ptr, %constant_offset41// %y = load %x42//43// Is translated as a single machine instruction:44//45// LDW %ptr, %constant_offset46//47// In order for cases (a) and (b) to work the sequence %x-%y above has48// to be preserved by the IR passes.49//50// However, several optimization passes might sink `load` instruction51// or hoist `getelementptr` instruction so that the instructions are52// no longer in sequence. Examples of such passes are:53// SimplifyCFGPass, InstCombinePass, GVNPass.54// After such modification the verifier would reject the BPF program.55//56// To avoid this issue the patterns like (load/store (getelementptr ...))57// are replaced by calls to BPF specific intrinsic functions:58// - llvm.bpf.getelementptr.and.load59// - llvm.bpf.getelementptr.and.store60//61// These calls are lowered back to (load/store (getelementptr ...))62// by BPFCheckAndAdjustIR pass right before the translation from IR to63// machine instructions.64//65// The transformation is split into the following steps:66// - When IR is generated from AST the calls to intrinsic function67// llvm.preserve.static.offset are inserted.68// - BPFPreserveStaticOffsetPass is executed as early as possible69// with AllowPatial set to true, this handles marked GEP chains70// with constant offsets.71// - BPFPreserveStaticOffsetPass is executed at ScalarOptimizerLateEPCallback72// with AllowPatial set to false, this handles marked GEP chains73// with offsets that became constant after loop unrolling, e.g.74// to handle the following code:75//76// struct context { int x[4]; } __attribute__((preserve_static_offset));77//78// struct context *ctx = ...;79// #pragma clang loop unroll(full)80// for (int i = 0; i < 4; ++i)81// foo(ctx->x[i]);82//83// The early BPFPreserveStaticOffsetPass run is necessary to allow84// 
additional GVN / CSE opportunities after functions inlining.85// The relative order of optimization applied to function:86// - early stage (1)87// - ...88// - function inlining (2)89// - ...90// - loop unrolling91// - ...92// - ScalarOptimizerLateEPCallback (3)93//94// When function A is inlined into function B all optimizations for A95// are already done, while some passes remain for B. In case if96// BPFPreserveStaticOffsetPass is done at (3) but not done at (1)97// the code after (2) would contain a mix of98// (load (gep %p)) and (get.and.load %p) usages:99// - the (load (gep %p)) would come from the calling function;100// - the (get.and.load %p) would come from the callee function.101// Thus clobbering CSE / GVN passes done after inlining.102103#include "BPF.h"104#include "BPFCORE.h"105#include "llvm/ADT/SmallPtrSet.h"106#include "llvm/ADT/SmallVector.h"107#include "llvm/IR/Argument.h"108#include "llvm/IR/Attributes.h"109#include "llvm/IR/BasicBlock.h"110#include "llvm/IR/Constants.h"111#include "llvm/IR/DebugInfoMetadata.h"112#include "llvm/IR/DiagnosticInfo.h"113#include "llvm/IR/IRBuilder.h"114#include "llvm/IR/InstIterator.h"115#include "llvm/IR/Instructions.h"116#include "llvm/IR/Intrinsics.h"117#include "llvm/IR/IntrinsicsBPF.h"118#include "llvm/IR/Module.h"119#include "llvm/Support/Debug.h"120#include "llvm/Support/ErrorHandling.h"121122#define DEBUG_TYPE "bpf-preserve-static-offset"123124using namespace llvm;125126static const unsigned GepAndLoadFirstIdxArg = 6;127static const unsigned GepAndStoreFirstIdxArg = 7;128129static bool isIntrinsicCall(Value *I, Intrinsic::ID Id) {130if (auto *Call = dyn_cast<CallInst>(I))131if (Function *Func = Call->getCalledFunction())132return Func->getIntrinsicID() == Id;133return false;134}135136static bool isPreserveStaticOffsetCall(Value *I) {137return isIntrinsicCall(I, Intrinsic::preserve_static_offset);138}139140static CallInst *isGEPAndLoad(Value *I) {141if (isIntrinsicCall(I, 
Intrinsic::bpf_getelementptr_and_load))142return cast<CallInst>(I);143return nullptr;144}145146static CallInst *isGEPAndStore(Value *I) {147if (isIntrinsicCall(I, Intrinsic::bpf_getelementptr_and_store))148return cast<CallInst>(I);149return nullptr;150}151152template <class T = Instruction>153static DILocation *mergeDILocations(SmallVector<T *> &Insns) {154DILocation *Merged = (*Insns.begin())->getDebugLoc();155for (T *I : Insns)156Merged = DILocation::getMergedLocation(Merged, I->getDebugLoc());157return Merged;158}159160static CallInst *makeIntrinsicCall(Module *M,161Intrinsic::BPFIntrinsics Intrinsic,162ArrayRef<Type *> Types,163ArrayRef<Value *> Args) {164165Function *Fn = Intrinsic::getDeclaration(M, Intrinsic, Types);166return CallInst::Create(Fn, Args);167}168169static void setParamElementType(CallInst *Call, unsigned ArgNo, Type *Type) {170LLVMContext &C = Call->getContext();171Call->addParamAttr(ArgNo, Attribute::get(C, Attribute::ElementType, Type));172}173174static void setParamReadNone(CallInst *Call, unsigned ArgNo) {175LLVMContext &C = Call->getContext();176Call->addParamAttr(ArgNo, Attribute::get(C, Attribute::ReadNone));177}178179static void setParamReadOnly(CallInst *Call, unsigned ArgNo) {180LLVMContext &C = Call->getContext();181Call->addParamAttr(ArgNo, Attribute::get(C, Attribute::ReadOnly));182}183184static void setParamWriteOnly(CallInst *Call, unsigned ArgNo) {185LLVMContext &C = Call->getContext();186Call->addParamAttr(ArgNo, Attribute::get(C, Attribute::WriteOnly));187}188189namespace {190struct GEPChainInfo {191bool InBounds;192Type *SourceElementType;193SmallVector<Value *> Indices;194SmallVector<GetElementPtrInst *> Members;195196GEPChainInfo() { reset(); }197198void reset() {199InBounds = true;200SourceElementType = nullptr;201Indices.clear();202Members.clear();203}204};205} // Anonymous namespace206207template <class T = std::disjunction<LoadInst, StoreInst>>208static void fillCommonArgs(LLVMContext &C, SmallVector<Value *> 
&Args,209GEPChainInfo &GEP, T *Insn) {210Type *Int8Ty = Type::getInt8Ty(C);211Type *Int1Ty = Type::getInt1Ty(C);212// Implementation of Align guarantees that ShiftValue < 64213unsigned AlignShiftValue = Log2_64(Insn->getAlign().value());214Args.push_back(GEP.Members[0]->getPointerOperand());215Args.push_back(ConstantInt::get(Int1Ty, Insn->isVolatile()));216Args.push_back(ConstantInt::get(Int8Ty, (unsigned)Insn->getOrdering()));217Args.push_back(ConstantInt::get(Int8Ty, (unsigned)Insn->getSyncScopeID()));218Args.push_back(ConstantInt::get(Int8Ty, AlignShiftValue));219Args.push_back(ConstantInt::get(Int1Ty, GEP.InBounds));220Args.append(GEP.Indices.begin(), GEP.Indices.end());221}222223static Instruction *makeGEPAndLoad(Module *M, GEPChainInfo &GEP,224LoadInst *Load) {225SmallVector<Value *> Args;226fillCommonArgs(M->getContext(), Args, GEP, Load);227CallInst *Call = makeIntrinsicCall(M, Intrinsic::bpf_getelementptr_and_load,228{Load->getType()}, Args);229setParamElementType(Call, 0, GEP.SourceElementType);230Call->applyMergedLocation(mergeDILocations(GEP.Members), Load->getDebugLoc());231Call->setName((*GEP.Members.rbegin())->getName());232if (Load->isUnordered()) {233Call->setOnlyReadsMemory();234Call->setOnlyAccessesArgMemory();235setParamReadOnly(Call, 0);236}237for (unsigned I = GepAndLoadFirstIdxArg; I < Args.size(); ++I)238Call->addParamAttr(I, Attribute::ImmArg);239Call->setAAMetadata(Load->getAAMetadata());240return Call;241}242243static Instruction *makeGEPAndStore(Module *M, GEPChainInfo &GEP,244StoreInst *Store) {245SmallVector<Value *> Args;246Args.push_back(Store->getValueOperand());247fillCommonArgs(M->getContext(), Args, GEP, Store);248CallInst *Call =249makeIntrinsicCall(M, Intrinsic::bpf_getelementptr_and_store,250{Store->getValueOperand()->getType()}, Args);251setParamElementType(Call, 1, GEP.SourceElementType);252if (Store->getValueOperand()->getType()->isPointerTy())253setParamReadNone(Call, 
0);254Call->applyMergedLocation(mergeDILocations(GEP.Members),255Store->getDebugLoc());256if (Store->isUnordered()) {257Call->setOnlyWritesMemory();258Call->setOnlyAccessesArgMemory();259setParamWriteOnly(Call, 1);260}261for (unsigned I = GepAndStoreFirstIdxArg; I < Args.size(); ++I)262Call->addParamAttr(I, Attribute::ImmArg);263Call->setAAMetadata(Store->getAAMetadata());264return Call;265}266267static unsigned getOperandAsUnsigned(CallInst *Call, unsigned ArgNo) {268if (auto *Int = dyn_cast<ConstantInt>(Call->getOperand(ArgNo)))269return Int->getValue().getZExtValue();270std::string Report;271raw_string_ostream ReportS(Report);272ReportS << "Expecting ConstantInt as argument #" << ArgNo << " of " << *Call273<< "\n";274report_fatal_error(StringRef(Report));275}276277static GetElementPtrInst *reconstructGEP(CallInst *Call, int Delta) {278SmallVector<Value *> Indices;279Indices.append(Call->data_operands_begin() + 6 + Delta,280Call->data_operands_end());281Type *GEPPointeeType = Call->getParamElementType(Delta);282auto *GEP =283GetElementPtrInst::Create(GEPPointeeType, Call->getOperand(Delta),284ArrayRef<Value *>(Indices), Call->getName());285GEP->setIsInBounds(getOperandAsUnsigned(Call, 5 + Delta));286return GEP;287}288289template <class T = std::disjunction<LoadInst, StoreInst>>290static void reconstructCommon(CallInst *Call, GetElementPtrInst *GEP, T *Insn,291int Delta) {292Insn->setVolatile(getOperandAsUnsigned(Call, 1 + Delta));293Insn->setOrdering((AtomicOrdering)getOperandAsUnsigned(Call, 2 + Delta));294Insn->setSyncScopeID(getOperandAsUnsigned(Call, 3 + Delta));295unsigned AlignShiftValue = getOperandAsUnsigned(Call, 4 + Delta);296Insn->setAlignment(Align(1ULL << AlignShiftValue));297GEP->setDebugLoc(Call->getDebugLoc());298Insn->setDebugLoc(Call->getDebugLoc());299Insn->setAAMetadata(Call->getAAMetadata());300}301302std::pair<GetElementPtrInst *, LoadInst *>303BPFPreserveStaticOffsetPass::reconstructLoad(CallInst *Call) {304GetElementPtrInst *GEP = 
reconstructGEP(Call, 0);305Type *ReturnType = Call->getFunctionType()->getReturnType();306auto *Load = new LoadInst(ReturnType, GEP, "",307/* These would be set in reconstructCommon */308false, Align(1));309reconstructCommon(Call, GEP, Load, 0);310return std::pair{GEP, Load};311}312313std::pair<GetElementPtrInst *, StoreInst *>314BPFPreserveStaticOffsetPass::reconstructStore(CallInst *Call) {315GetElementPtrInst *GEP = reconstructGEP(Call, 1);316auto *Store = new StoreInst(Call->getOperand(0), GEP,317/* These would be set in reconstructCommon */318false, Align(1));319reconstructCommon(Call, GEP, Store, 1);320return std::pair{GEP, Store};321}322323static bool isZero(Value *V) {324auto *CI = dyn_cast<ConstantInt>(V);325return CI && CI->isZero();326}327328// Given a chain of GEP instructions collect information necessary to329// merge this chain as a single GEP instruction of form:330// getelementptr %<type>, ptr %p, i32 0, <field_idx1>, <field_idx2>, ...331static bool foldGEPChainAsStructAccess(SmallVector<GetElementPtrInst *> &GEPs,332GEPChainInfo &Info) {333if (GEPs.empty())334return false;335336if (!all_of(GEPs, [=](GetElementPtrInst *GEP) {337return GEP->hasAllConstantIndices();338}))339return false;340341GetElementPtrInst *First = GEPs[0];342Info.InBounds = First->isInBounds();343Info.SourceElementType = First->getSourceElementType();344Type *ResultElementType = First->getResultElementType();345Info.Indices.append(First->idx_begin(), First->idx_end());346Info.Members.push_back(First);347348for (auto *Iter = GEPs.begin() + 1; Iter != GEPs.end(); ++Iter) {349GetElementPtrInst *GEP = *Iter;350if (!isZero(*GEP->idx_begin())) {351Info.reset();352return false;353}354if (!GEP->getSourceElementType() ||355GEP->getSourceElementType() != ResultElementType) {356Info.reset();357return false;358}359Info.InBounds &= GEP->isInBounds();360Info.Indices.append(GEP->idx_begin() + 1, GEP->idx_end());361Info.Members.push_back(GEP);362ResultElementType = 
GEP->getResultElementType();363}364365return true;366}367368// Given a chain of GEP instructions collect information necessary to369// merge this chain as a single GEP instruction of form:370// getelementptr i8, ptr %p, i64 %offset371static bool foldGEPChainAsU8Access(SmallVector<GetElementPtrInst *> &GEPs,372GEPChainInfo &Info) {373if (GEPs.empty())374return false;375376GetElementPtrInst *First = GEPs[0];377const DataLayout &DL = First->getDataLayout();378LLVMContext &C = First->getContext();379Type *PtrTy = First->getType()->getScalarType();380APInt Offset(DL.getIndexTypeSizeInBits(PtrTy), 0);381for (GetElementPtrInst *GEP : GEPs) {382if (!GEP->accumulateConstantOffset(DL, Offset)) {383Info.reset();384return false;385}386Info.InBounds &= GEP->isInBounds();387Info.Members.push_back(GEP);388}389Info.SourceElementType = Type::getInt8Ty(C);390Info.Indices.push_back(ConstantInt::get(C, Offset));391392return true;393}394395static void reportNonStaticGEPChain(Instruction *Insn) {396auto Msg = DiagnosticInfoUnsupported(397*Insn->getFunction(),398Twine("Non-constant offset in access to a field of a type marked "399"with preserve_static_offset might be rejected by BPF verifier")400.concat(Insn->getDebugLoc()401? 
""402: " (pass -g option to get exact location)"),403Insn->getDebugLoc(), DS_Warning);404Insn->getContext().diagnose(Msg);405}406407static bool allZeroIndices(SmallVector<GetElementPtrInst *> &GEPs) {408return GEPs.empty() || all_of(GEPs, [=](GetElementPtrInst *GEP) {409return GEP->hasAllZeroIndices();410});411}412413static bool tryToReplaceWithGEPBuiltin(Instruction *LoadOrStoreTemplate,414SmallVector<GetElementPtrInst *> &GEPs,415Instruction *InsnToReplace) {416GEPChainInfo GEPChain;417if (!foldGEPChainAsStructAccess(GEPs, GEPChain) &&418!foldGEPChainAsU8Access(GEPs, GEPChain)) {419return false;420}421Module *M = InsnToReplace->getModule();422if (auto *Load = dyn_cast<LoadInst>(LoadOrStoreTemplate)) {423Instruction *Replacement = makeGEPAndLoad(M, GEPChain, Load);424Replacement->insertBefore(InsnToReplace);425InsnToReplace->replaceAllUsesWith(Replacement);426}427if (auto *Store = dyn_cast<StoreInst>(LoadOrStoreTemplate)) {428Instruction *Replacement = makeGEPAndStore(M, GEPChain, Store);429Replacement->insertBefore(InsnToReplace);430}431return true;432}433434// Check if U->getPointerOperand() == I435static bool isPointerOperand(Value *I, User *U) {436if (auto *L = dyn_cast<LoadInst>(U))437return L->getPointerOperand() == I;438if (auto *S = dyn_cast<StoreInst>(U))439return S->getPointerOperand() == I;440if (auto *GEP = dyn_cast<GetElementPtrInst>(U))441return GEP->getPointerOperand() == I;442if (auto *Call = isGEPAndLoad(U))443return Call->getArgOperand(0) == I;444if (auto *Call = isGEPAndStore(U))445return Call->getArgOperand(1) == I;446return false;447}448449static bool isInlineableCall(User *U) {450if (auto *Call = dyn_cast<CallInst>(U))451return Call->hasFnAttr(Attribute::InlineHint);452return false;453}454455static void rewriteAccessChain(Instruction *Insn,456SmallVector<GetElementPtrInst *> &GEPs,457SmallVector<Instruction *> &Visited,458bool AllowPatial, bool &StillUsed);459460static void rewriteUses(Instruction *Insn,461SmallVector<GetElementPtrInst *> 
&GEPs,462SmallVector<Instruction *> &Visited, bool AllowPatial,463bool &StillUsed) {464for (User *U : Insn->users()) {465auto *UI = dyn_cast<Instruction>(U);466if (UI && (isPointerOperand(Insn, UI) || isPreserveStaticOffsetCall(UI) ||467isInlineableCall(UI)))468rewriteAccessChain(UI, GEPs, Visited, AllowPatial, StillUsed);469else470LLVM_DEBUG({471llvm::dbgs() << "unsupported usage in BPFPreserveStaticOffsetPass:\n";472llvm::dbgs() << " Insn: " << *Insn << "\n";473llvm::dbgs() << " User: " << *U << "\n";474});475}476}477478// A DFS traversal of GEP chain trees starting from Root.479//480// Recursion descends through GEP instructions and481// llvm.preserve.static.offset calls. Recursion stops at any other482// instruction. If load or store instruction is reached it is replaced483// by a call to `llvm.bpf.getelementptr.and.load` or484// `llvm.bpf.getelementptr.and.store` intrinsic.485// If `llvm.bpf.getelementptr.and.load/store` is reached the accumulated486// GEPs are merged into the intrinsic call.487// If nested calls to `llvm.preserve.static.offset` are encountered these488// calls are marked for deletion.489//490// Parameters description:491// - Insn - current position in the tree492// - GEPs - GEP instructions for the current branch493// - Visited - a list of visited instructions in DFS order,494// order is important for unused instruction deletion.495// - AllowPartial - when true GEP chains that can't be folded are496// not reported, otherwise diagnostic message is show for such chains.497// - StillUsed - set to true if one of the GEP chains could not be498// folded, makes sense when AllowPartial is false, means that root499// preserve.static.offset call is still in use and should remain500// until the next run of this pass.501static void rewriteAccessChain(Instruction *Insn,502SmallVector<GetElementPtrInst *> &GEPs,503SmallVector<Instruction *> &Visited,504bool AllowPatial, bool &StillUsed) {505auto MarkAndTraverseUses = [&]() 
{506Visited.push_back(Insn);507rewriteUses(Insn, GEPs, Visited, AllowPatial, StillUsed);508};509auto TryToReplace = [&](Instruction *LoadOrStore) {510// Do nothing for (preserve.static.offset (load/store ..)) or for511// GEPs with zero indices. Such constructs lead to zero offset and512// are simplified by other passes.513if (allZeroIndices(GEPs))514return;515if (tryToReplaceWithGEPBuiltin(LoadOrStore, GEPs, Insn)) {516Visited.push_back(Insn);517return;518}519if (!AllowPatial)520reportNonStaticGEPChain(Insn);521StillUsed = true;522};523if (isa<LoadInst>(Insn) || isa<StoreInst>(Insn)) {524TryToReplace(Insn);525} else if (isGEPAndLoad(Insn)) {526auto [GEP, Load] =527BPFPreserveStaticOffsetPass::reconstructLoad(cast<CallInst>(Insn));528GEPs.push_back(GEP);529TryToReplace(Load);530GEPs.pop_back();531delete Load;532delete GEP;533} else if (isGEPAndStore(Insn)) {534// This case can't be merged with the above because535// `delete Load` / `delete Store` wants a concrete type,536// destructor of Instruction is protected.537auto [GEP, Store] =538BPFPreserveStaticOffsetPass::reconstructStore(cast<CallInst>(Insn));539GEPs.push_back(GEP);540TryToReplace(Store);541GEPs.pop_back();542delete Store;543delete GEP;544} else if (auto *GEP = dyn_cast<GetElementPtrInst>(Insn)) {545GEPs.push_back(GEP);546MarkAndTraverseUses();547GEPs.pop_back();548} else if (isPreserveStaticOffsetCall(Insn)) {549MarkAndTraverseUses();550} else if (isInlineableCall(Insn)) {551// Preserve preserve.static.offset call for parameters of552// functions that might be inlined. 
These would be removed on a553// second pass after inlining.554// Might happen when a pointer to a preserve_static_offset555// structure is passed as parameter of a function that would be556// inlined inside a loop that would be unrolled.557if (AllowPatial)558StillUsed = true;559} else {560SmallString<128> Buf;561raw_svector_ostream BufStream(Buf);562BufStream << *Insn;563report_fatal_error(564Twine("Unexpected rewriteAccessChain Insn = ").concat(Buf));565}566}567568static void removeMarkerCall(Instruction *Marker) {569Marker->replaceAllUsesWith(Marker->getOperand(0));570Marker->eraseFromParent();571}572573static bool rewriteAccessChain(Instruction *Marker, bool AllowPatial,574SmallPtrSetImpl<Instruction *> &RemovedMarkers) {575SmallVector<GetElementPtrInst *> GEPs;576SmallVector<Instruction *> Visited;577bool StillUsed = false;578rewriteUses(Marker, GEPs, Visited, AllowPatial, StillUsed);579// Check if Visited instructions could be removed, iterate in580// reverse to unblock instructions higher in the chain.581for (auto V = Visited.rbegin(); V != Visited.rend(); ++V) {582if (isPreserveStaticOffsetCall(*V)) {583removeMarkerCall(*V);584RemovedMarkers.insert(*V);585} else if ((*V)->use_empty()) {586(*V)->eraseFromParent();587}588}589return StillUsed;590}591592static std::vector<Instruction *>593collectPreserveStaticOffsetCalls(Function &F) {594std::vector<Instruction *> Calls;595for (Instruction &Insn : instructions(F))596if (isPreserveStaticOffsetCall(&Insn))597Calls.push_back(&Insn);598return Calls;599}600601bool isPreserveArrayIndex(Value *V) {602return isIntrinsicCall(V, Intrinsic::preserve_array_access_index);603}604605bool isPreserveStructIndex(Value *V) {606return isIntrinsicCall(V, Intrinsic::preserve_struct_access_index);607}608609bool isPreserveUnionIndex(Value *V) {610return isIntrinsicCall(V, Intrinsic::preserve_union_access_index);611}612613static void removePAICalls(Instruction *Marker) {614auto IsPointerOperand = [](Value *Op, User *U) {615if (auto 
*GEP = dyn_cast<GetElementPtrInst>(U))616return GEP->getPointerOperand() == Op;617if (isPreserveStaticOffsetCall(U) || isPreserveArrayIndex(U) ||618isPreserveStructIndex(U) || isPreserveUnionIndex(U))619return cast<CallInst>(U)->getArgOperand(0) == Op;620return false;621};622623SmallVector<Value *, 32> WorkList;624WorkList.push_back(Marker);625do {626Value *V = WorkList.pop_back_val();627for (User *U : V->users())628if (IsPointerOperand(V, U))629WorkList.push_back(U);630auto *Call = dyn_cast<CallInst>(V);631if (!Call)632continue;633if (isPreserveArrayIndex(V))634BPFCoreSharedInfo::removeArrayAccessCall(Call);635else if (isPreserveStructIndex(V))636BPFCoreSharedInfo::removeStructAccessCall(Call);637else if (isPreserveUnionIndex(V))638BPFCoreSharedInfo::removeUnionAccessCall(Call);639} while (!WorkList.empty());640}641642// Look for sequences:643// - llvm.preserve.static.offset -> getelementptr... -> load644// - llvm.preserve.static.offset -> getelementptr... -> store645// And replace those with calls to intrinsics:646// - llvm.bpf.getelementptr.and.load647// - llvm.bpf.getelementptr.and.store648static bool rewriteFunction(Function &F, bool AllowPartial) {649LLVM_DEBUG(dbgs() << "********** BPFPreserveStaticOffsetPass (AllowPartial="650<< AllowPartial << ") ************\n");651652auto MarkerCalls = collectPreserveStaticOffsetCalls(F);653SmallPtrSet<Instruction *, 16> RemovedMarkers;654655LLVM_DEBUG(dbgs() << "There are " << MarkerCalls.size()656<< " preserve.static.offset calls\n");657658if (MarkerCalls.empty())659return false;660661for (auto *Call : MarkerCalls)662removePAICalls(Call);663664for (auto *Call : MarkerCalls) {665if (RemovedMarkers.contains(Call))666continue;667bool StillUsed = rewriteAccessChain(Call, AllowPartial, RemovedMarkers);668if (!StillUsed || !AllowPartial)669removeMarkerCall(Call);670}671672return true;673}674675PreservedAnalyses676llvm::BPFPreserveStaticOffsetPass::run(Function &F,677FunctionAnalysisManager &AM) {678return rewriteFunction(F, 
AllowPartial) ? PreservedAnalyses::none()679: PreservedAnalyses::all();680}681682683