Path: blob/main/cranelift/codegen/src/isa/pulley_shared/lower/isle.rs
//! ISLE integration glue code for Pulley lowering.

// Pull in the ISLE generated code.
pub mod generated_code;
use generated_code::MInst;
use inst::InstAndKind;

// Types that the generated ISLE code uses via `use super::*`.
use crate::ir::{condcodes::*, immediates::*, types::*, *};
use crate::isa::CallConv;
use crate::isa::pulley_shared::{
    inst::{
        FReg, OperandSize, PulleyCall, ReturnCallInfo, VReg, WritableFReg, WritableVReg,
        WritableXReg, XReg,
    },
    lower::{Cond, regs},
    *,
};
use crate::machinst::{
    CallArgList, CallInfo, CallRetList, MachInst, Reg, VCodeConstant, VCodeConstantData,
    abi::{ArgPair, RetPair, StackAMode},
    isle::*,
};
use alloc::boxed::Box;
use pulley_interpreter::U6;
use regalloc2::PReg;
use smallvec::SmallVec;

type Unit = ();
type VecArgPair = Vec<ArgPair>;
type VecRetPair = Vec<RetPair>;
type BoxCallInfo = Box<CallInfo<PulleyCall>>;
type BoxCallIndInfo = Box<CallInfo<XReg>>;
type BoxCallIndirectHostInfo = Box<CallInfo<ExternalName>>;
type BoxReturnCallInfo = Box<ReturnCallInfo<ExternalName>>;
type BoxReturnCallIndInfo = Box<ReturnCallInfo<XReg>>;
type BoxExternalName = Box<ExternalName>;
type UpperXRegSet = pulley_interpreter::UpperRegSet<pulley_interpreter::XReg>;
type PcRelOffset = pulley_interpreter::PcRelOffset;

#[expect(
    unused_imports,
    reason = "used on other backends, used here to suppress warning elsewhere"
)]
use crate::machinst::isle::UnwindInst as _;

pub(crate) struct PulleyIsleContext<'a, 'b, I, B>
where
    I: VCodeInst,
    B: LowerBackend,
{
    pub lower_ctx: &'a mut Lower<'b, I>,
    pub backend: &'a B,
}

impl<'a, 'b, P> PulleyIsleContext<'a, 'b, InstAndKind<P>, PulleyBackend<P>>
where
    P: PulleyTargetKind,
{
    fn new(lower_ctx: &'a mut Lower<'b, InstAndKind<P>>, backend: &'a PulleyBackend<P>) -> Self {
        Self { lower_ctx, backend }
    }

    pub(crate) fn dfg(&self) -> &crate::ir::DataFlowGraph {
        &self.lower_ctx.f.dfg
    }
}

impl<P> generated_code::Context for PulleyIsleContext<'_, '_, InstAndKind<P>, PulleyBackend<P>>
where
    P: PulleyTargetKind,
{
    crate::isle_lower_prelude_methods!(InstAndKind<P>);

    fn gen_call_info(
        &mut self,
        sig: Sig,
        name: ExternalName,
        mut uses: CallArgList,
        defs: CallRetList,
        try_call_info: Option<TryCallInfo>,
        patchable: bool,
    ) -> BoxCallInfo {
        let stack_ret_space = self.lower_ctx.sigs()[sig].sized_stack_ret_space();
        let stack_arg_space = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_outgoing_args_size(stack_ret_space + stack_arg_space);
        let call_conv = self.lower_ctx.sigs()[sig].call_conv();

        // The first four integer arguments to a call can be handled via
        // special pulley call instructions. Sort `uses` by physical
        // register, then take out x0-x3 if they're present and move them
        // from `uses` to `dest.args` to be handled differently during
        // register allocation.
        //
        // We don't perform this optimization for callsites with the
        // PreserveAll ABI because argument registers are not
        // clobbered on those ISAs.
        let mut args = SmallVec::new();
        uses.sort_by_key(|arg| arg.preg);
        if call_conv != CallConv::PreserveAll {
            uses.retain(|arg| {
                if arg.preg != regs::x0()
                    && arg.preg != regs::x1()
                    && arg.preg != regs::x2()
                    && arg.preg != regs::x3()
                {
                    return true;
                }
                args.push(XReg::new(arg.vreg).unwrap());
                false
            });
        }
        let dest = PulleyCall { name, args };
        Box::new(
            self.lower_ctx
                .gen_call_info(sig, dest, uses, defs, try_call_info, patchable),
        )
    }

    fn gen_call_ind_info(
        &mut self,
        sig: Sig,
        dest: Reg,
        uses: CallArgList,
        defs: CallRetList,
        try_call_info: Option<TryCallInfo>,
    ) -> BoxCallIndInfo {
        let stack_ret_space = self.lower_ctx.sigs()[sig].sized_stack_ret_space();
        let stack_arg_space = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_outgoing_args_size(stack_ret_space + stack_arg_space);

        let dest = XReg::new(dest).unwrap();
        Box::new(
            self.lower_ctx
                .gen_call_info(sig, dest, uses, defs, try_call_info, false),
        )
    }

    fn gen_call_host_info(
        &mut self,
        sig: Sig,
        dest: ExternalName,
        uses: CallArgList,
        defs: CallRetList,
        try_call_info: Option<TryCallInfo>,
    ) -> BoxCallIndirectHostInfo {
        let stack_ret_space = self.lower_ctx.sigs()[sig].sized_stack_ret_space();
        let stack_arg_space = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_outgoing_args_size(stack_ret_space + stack_arg_space);

        Box::new(
            self.lower_ctx
                .gen_call_info(sig, dest, uses, defs, try_call_info, false),
        )
    }

    fn gen_return_call_info(
        &mut self,
        sig: Sig,
        dest: ExternalName,
        uses: CallArgList,
    ) -> BoxReturnCallInfo {
        let new_stack_arg_size = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_tail_args_size(new_stack_arg_size);

        Box::new(ReturnCallInfo {
            dest,
            uses,
            new_stack_arg_size,
        })
    }

    fn gen_return_call_ind_info(
        &mut self,
        sig: Sig,
        dest: Reg,
        uses: CallArgList,
    ) -> BoxReturnCallIndInfo {
        let new_stack_arg_size = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_tail_args_size(new_stack_arg_size);

        Box::new(ReturnCallInfo {
            dest: XReg::new(dest).unwrap(),
            uses,
            new_stack_arg_size,
        })
    }

    fn vreg_new(&mut self, r: Reg) -> VReg {
        VReg::new(r).unwrap()
    }
    fn writable_vreg_new(&mut self, r: WritableReg) -> WritableVReg {
        r.map(|wr| VReg::new(wr).unwrap())
    }
    fn writable_vreg_to_vreg(&mut self, arg0: WritableVReg) -> VReg {
        arg0.to_reg()
    }
    fn writable_vreg_to_writable_reg(&mut self, arg0: WritableVReg) -> WritableReg {
        arg0.map(|vr| vr.to_reg())
    }
    fn vreg_to_reg(&mut self, arg0: VReg) -> Reg {
        *arg0
    }
    fn xreg_new(&mut self, r: Reg) -> XReg {
        XReg::new(r).unwrap()
    }
    fn writable_xreg_new(&mut self, r: WritableReg) -> WritableXReg {
        r.map(|wr| XReg::new(wr).unwrap())
    }
    fn writable_xreg_to_xreg(&mut self, arg0: WritableXReg) -> XReg {
        arg0.to_reg()
    }
    fn writable_xreg_to_writable_reg(&mut self, arg0: WritableXReg) -> WritableReg {
        arg0.map(|xr| xr.to_reg())
    }
    fn xreg_to_reg(&mut self, arg0: XReg) -> Reg {
        *arg0
    }
    fn freg_new(&mut self, r: Reg) -> FReg {
        FReg::new(r).unwrap()
    }
    fn writable_freg_new(&mut self, r: WritableReg) -> WritableFReg {
        r.map(|wr| FReg::new(wr).unwrap())
    }
    fn writable_freg_to_freg(&mut self, arg0: WritableFReg) -> FReg {
        arg0.to_reg()
    }
    fn writable_freg_to_writable_reg(&mut self, arg0: WritableFReg) -> WritableReg {
        arg0.map(|fr| fr.to_reg())
    }
    fn freg_to_reg(&mut self, arg0: FReg) -> Reg {
        *arg0
    }

    #[inline]
    fn emit(&mut self, arg0: &MInst) -> Unit {
        self.lower_ctx.emit(arg0.clone().into());
    }

    fn sp_reg(&mut self) -> XReg {
        XReg::new(regs::stack_reg()).unwrap()
    }

    fn cond_invert(&mut self, cond: &Cond) -> Cond {
        cond.invert()
    }

    fn u6_from_u8(&mut self, imm: u8) -> Option<U6> {
        U6::new(imm)
    }

    fn endianness(&mut self, flags: MemFlags) -> Endianness {
        flags.endianness(self.backend.isa_flags.endianness())
    }

    fn is_native_endianness(&mut self, endianness: &Endianness) -> bool {
        *endianness == self.backend.isa_flags.endianness()
    }

    fn pointer_width(&mut self) -> PointerWidth {
        P::pointer_width()
    }

    fn memflags_nontrapping(&mut self, flags: MemFlags) -> bool {
        flags.trap_code().is_none()
    }

    fn memflags_is_wasm(&mut self, flags: MemFlags) -> bool {
        flags.trap_code() == Some(TrapCode::HEAP_OUT_OF_BOUNDS)
            && self.endianness(flags) == Endianness::Little
    }

    fn g32_offset(
        &mut self,
        load_offset: i32,
        load_ty: Type,
        bound_check_offset: u64,
    ) -> Option<u16> {
        // NB: for more docs on this see the ISLE definition.
        let load_offset = u64::try_from(load_offset).ok()?;
        let load_bytes = u64::from(load_ty.bytes());
        if bound_check_offset != load_offset + load_bytes {
            return None;
        }
        u16::try_from(load_offset).ok()
    }
}

/// The main entry point for lowering with ISLE.
pub(crate) fn lower<P>(
    lower_ctx: &mut Lower<InstAndKind<P>>,
    backend: &PulleyBackend<P>,
    inst: Inst,
) -> Option<InstOutput>
where
    P: PulleyTargetKind,
{
    // TODO: reuse the ISLE context across lowerings so we can reuse its
    // internal heap allocations.
    let mut isle_ctx = PulleyIsleContext::new(lower_ctx, backend);
    generated_code::constructor_lower(&mut isle_ctx, inst)
}

/// The main entry point for branch lowering with ISLE.
pub(crate) fn lower_branch<P>(
    lower_ctx: &mut Lower<InstAndKind<P>>,
    backend: &PulleyBackend<P>,
    branch: Inst,
    targets: &[MachLabel],
) -> Option<()>
where
    P: PulleyTargetKind,
{
    // TODO: reuse the ISLE context across lowerings so we can reuse its
    // internal heap allocations.
    let mut isle_ctx = PulleyIsleContext::new(lower_ctx, backend);
    generated_code::constructor_lower_branch(&mut isle_ctx, branch, targets)
}
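
// A minimal sketch of how the two entry points above are typically wired
// into the backend's `LowerBackend` implementation (for example from
// `lower/mod.rs`). The exact trait surface and module path shown here are
// assumptions for illustration only, not taken from this file:
//
//     impl<P> LowerBackend for PulleyBackend<P>
//     where
//         P: PulleyTargetKind,
//     {
//         type MInst = InstAndKind<P>;
//
//         fn lower(&self, ctx: &mut Lower<InstAndKind<P>>, inst: Inst) -> Option<InstOutput> {
//             // Delegate to the ISLE-driven `lower` entry point above.
//             isle::lower(ctx, self, inst)
//         }
//
//         fn lower_branch(
//             &self,
//             ctx: &mut Lower<InstAndKind<P>>,
//             branch: Inst,
//             targets: &[MachLabel],
//         ) -> Option<()> {
//             // Delegate to the ISLE-driven `lower_branch` entry point above.
//             isle::lower_branch(ctx, self, branch, targets)
//         }
//     }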