Path: blob/main/cranelift/codegen/src/isa/pulley_shared/lower/isle.rs
//! ISLE integration glue code for Pulley lowering.

// Pull in the ISLE generated code.
pub mod generated_code;
use generated_code::MInst;
use inst::InstAndKind;

// Types that the generated ISLE code uses via `use super::*`.
use crate::ir::{condcodes::*, immediates::*, types::*, *};
use crate::isa::pulley_shared::{
    inst::{
        FReg, OperandSize, PulleyCall, ReturnCallInfo, VReg, WritableFReg, WritableVReg,
        WritableXReg, XReg,
    },
    lower::{Cond, regs},
    *,
};
use crate::machinst::{
    CallArgList, CallInfo, CallRetList, MachInst, Reg, VCodeConstant, VCodeConstantData,
    abi::{ArgPair, RetPair, StackAMode},
    isle::*,
};
use alloc::boxed::Box;
use pulley_interpreter::U6;
use regalloc2::PReg;
use smallvec::SmallVec;

type Unit = ();
type VecArgPair = Vec<ArgPair>;
type VecRetPair = Vec<RetPair>;
type BoxCallInfo = Box<CallInfo<PulleyCall>>;
type BoxCallIndInfo = Box<CallInfo<XReg>>;
type BoxCallIndirectHostInfo = Box<CallInfo<ExternalName>>;
type BoxReturnCallInfo = Box<ReturnCallInfo<ExternalName>>;
type BoxReturnCallIndInfo = Box<ReturnCallInfo<XReg>>;
type BoxExternalName = Box<ExternalName>;
type UpperXRegSet = pulley_interpreter::UpperRegSet<pulley_interpreter::XReg>;
type PcRelOffset = pulley_interpreter::PcRelOffset;

#[expect(
    unused_imports,
    reason = "used on other backends, used here to suppress warning elsewhere"
)]
use crate::machinst::isle::UnwindInst as _;

pub(crate) struct PulleyIsleContext<'a, 'b, I, B>
where
    I: VCodeInst,
    B: LowerBackend,
{
    pub lower_ctx: &'a mut Lower<'b, I>,
    pub backend: &'a B,
}

impl<'a, 'b, P> PulleyIsleContext<'a, 'b, InstAndKind<P>, PulleyBackend<P>>
where
    P: PulleyTargetKind,
{
    fn new(lower_ctx: &'a mut Lower<'b, InstAndKind<P>>, backend: &'a PulleyBackend<P>) -> Self {
        Self { lower_ctx, backend }
    }

    pub(crate) fn dfg(&self) -> &crate::ir::DataFlowGraph {
        &self.lower_ctx.f.dfg
    }
}

impl<P> generated_code::Context for PulleyIsleContext<'_, '_, InstAndKind<P>, PulleyBackend<P>>
where
    P: PulleyTargetKind,
{
    crate::isle_lower_prelude_methods!(InstAndKind<P>);

    fn gen_call_info(
        &mut self,
        sig: Sig,
        name: ExternalName,
        mut uses: CallArgList,
        defs: CallRetList,
        try_call_info: Option<TryCallInfo>,
    ) -> BoxCallInfo {
        let stack_ret_space = self.lower_ctx.sigs()[sig].sized_stack_ret_space();
        let stack_arg_space = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_outgoing_args_size(stack_ret_space + stack_arg_space);

        // The first four integer arguments to a call can be handled via
        // special pulley call instructions. Assert here that
        // `uses` is sorted in order and then take out x0-x3 if
        // they're present and move them from `uses` to
        // `dest.args` to be handled differently during register
        // allocation.
        let mut args = SmallVec::new();
        uses.sort_by_key(|arg| arg.preg);
        uses.retain(|arg| {
            if arg.preg != regs::x0()
                && arg.preg != regs::x1()
                && arg.preg != regs::x2()
                && arg.preg != regs::x3()
            {
                return true;
            }
            args.push(XReg::new(arg.vreg).unwrap());
            false
        });
        let dest = PulleyCall { name, args };
        Box::new(
            self.lower_ctx
                .gen_call_info(sig, dest, uses, defs, try_call_info),
        )
    }

    fn gen_call_ind_info(
        &mut self,
        sig: Sig,
        dest: Reg,
        uses: CallArgList,
        defs: CallRetList,
        try_call_info: Option<TryCallInfo>,
    ) -> BoxCallIndInfo {
        let stack_ret_space = self.lower_ctx.sigs()[sig].sized_stack_ret_space();
        let stack_arg_space = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_outgoing_args_size(stack_ret_space + stack_arg_space);

        let dest = XReg::new(dest).unwrap();
        Box::new(
            self.lower_ctx
                .gen_call_info(sig, dest, uses, defs, try_call_info),
        )
    }

    fn gen_call_host_info(
        &mut self,
        sig: Sig,
        dest: ExternalName,
        uses: CallArgList,
        defs: CallRetList,
        try_call_info: Option<TryCallInfo>,
    ) -> BoxCallIndirectHostInfo {
        let stack_ret_space = self.lower_ctx.sigs()[sig].sized_stack_ret_space();
        let stack_arg_space = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_outgoing_args_size(stack_ret_space + stack_arg_space);

        Box::new(
            self.lower_ctx
                .gen_call_info(sig, dest, uses, defs, try_call_info),
        )
    }

    fn gen_return_call_info(
        &mut self,
        sig: Sig,
        dest: ExternalName,
        uses: CallArgList,
    ) -> BoxReturnCallInfo {
        let new_stack_arg_size = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_tail_args_size(new_stack_arg_size);

        Box::new(ReturnCallInfo {
            dest,
            uses,
            new_stack_arg_size,
        })
    }

    fn gen_return_call_ind_info(
        &mut self,
        sig: Sig,
        dest: Reg,
        uses: CallArgList,
    ) -> BoxReturnCallIndInfo {
        let new_stack_arg_size = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_tail_args_size(new_stack_arg_size);

        Box::new(ReturnCallInfo {
            dest: XReg::new(dest).unwrap(),
            uses,
            new_stack_arg_size,
        })
    }

    fn vreg_new(&mut self, r: Reg) -> VReg {
        VReg::new(r).unwrap()
    }
    fn writable_vreg_new(&mut self, r: WritableReg) -> WritableVReg {
        r.map(|wr| VReg::new(wr).unwrap())
    }
    fn writable_vreg_to_vreg(&mut self, arg0: WritableVReg) -> VReg {
        arg0.to_reg()
    }
    fn writable_vreg_to_writable_reg(&mut self, arg0: WritableVReg) -> WritableReg {
        arg0.map(|vr| vr.to_reg())
    }
    fn vreg_to_reg(&mut self, arg0: VReg) -> Reg {
        *arg0
    }
    fn xreg_new(&mut self, r: Reg) -> XReg {
        XReg::new(r).unwrap()
    }
    fn writable_xreg_new(&mut self, r: WritableReg) -> WritableXReg {
        r.map(|wr| XReg::new(wr).unwrap())
    }
    fn writable_xreg_to_xreg(&mut self, arg0: WritableXReg) -> XReg {
        arg0.to_reg()
    }
    fn writable_xreg_to_writable_reg(&mut self, arg0: WritableXReg) -> WritableReg {
        arg0.map(|xr| xr.to_reg())
    }
    fn xreg_to_reg(&mut self, arg0: XReg) -> Reg {
        *arg0
    }
    fn freg_new(&mut self, r: Reg) -> FReg {
        FReg::new(r).unwrap()
    }
    fn writable_freg_new(&mut self, r: WritableReg) -> WritableFReg {
        r.map(|wr| FReg::new(wr).unwrap())
    }
    fn writable_freg_to_freg(&mut self, arg0: WritableFReg) -> FReg {
        arg0.to_reg()
    }
    fn writable_freg_to_writable_reg(&mut self, arg0: WritableFReg) -> WritableReg {
        arg0.map(|fr| fr.to_reg())
    }
    fn freg_to_reg(&mut self, arg0: FReg) -> Reg {
        *arg0
    }

    #[inline]
    fn emit(&mut self, arg0: &MInst) -> Unit {
        self.lower_ctx.emit(arg0.clone().into());
    }

    fn sp_reg(&mut self) -> XReg {
        XReg::new(regs::stack_reg()).unwrap()
    }

    fn cond_invert(&mut self, cond: &Cond) -> Cond {
        cond.invert()
    }

    fn u6_from_u8(&mut self, imm: u8) -> Option<U6> {
        U6::new(imm)
    }

    fn endianness(&mut self, flags: MemFlags) -> Endianness {
        flags.endianness(self.backend.isa_flags.endianness())
    }

    fn is_native_endianness(&mut self, endianness: &Endianness) -> bool {
        *endianness == self.backend.isa_flags.endianness()
    }

    fn pointer_width(&mut self) -> PointerWidth {
        P::pointer_width()
    }

    fn memflags_nontrapping(&mut self, flags: MemFlags) -> bool {
        flags.trap_code().is_none()
    }

    fn memflags_is_wasm(&mut self, flags: MemFlags) -> bool {
        flags.trap_code() == Some(TrapCode::HEAP_OUT_OF_BOUNDS)
            && self.endianness(flags) == Endianness::Little
    }

    fn g32_offset(
        &mut self,
        load_offset: i32,
        load_ty: Type,
        bound_check_offset: u64,
    ) -> Option<u16> {
        // NB: for more docs on this see the ISLE definition.
        let load_offset = u64::try_from(load_offset).ok()?;
        let load_bytes = u64::from(load_ty.bytes());
        if bound_check_offset != load_offset + load_bytes {
            return None;
        }
        u16::try_from(load_offset).ok()
    }
}

/// The main entry point for lowering with ISLE.
pub(crate) fn lower<P>(
    lower_ctx: &mut Lower<InstAndKind<P>>,
    backend: &PulleyBackend<P>,
    inst: Inst,
) -> Option<InstOutput>
where
    P: PulleyTargetKind,
{
    // TODO: reuse the ISLE context across lowerings so we can reuse its
    // internal heap allocations.
    let mut isle_ctx = PulleyIsleContext::new(lower_ctx, backend);
    generated_code::constructor_lower(&mut isle_ctx, inst)
}

/// The main entry point for branch lowering with ISLE.
pub(crate) fn lower_branch<P>(
    lower_ctx: &mut Lower<InstAndKind<P>>,
    backend: &PulleyBackend<P>,
    branch: Inst,
    targets: &[MachLabel],
) -> Option<()>
where
    P: PulleyTargetKind,
{
    // TODO: reuse the ISLE context across lowerings so we can reuse its
    // internal heap allocations.
    let mut isle_ctx = PulleyIsleContext::new(lower_ctx, backend);
    generated_code::constructor_lower_branch(&mut isle_ctx, branch, targets)
}
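
// A minimal, self-contained sketch of the `g32_offset` rule above, shown as a
// worked example of the arithmetic: a bound check is accepted only when it
// covers exactly the loaded bytes and the load offset fits in a `u16`. The
// helper `g32_offset_rule` and the module name are hypothetical; they mirror
// the check without needing a lowering context and use only core conversions.
#[cfg(test)]
mod g32_offset_sketch {
    /// Mirrors the check in `g32_offset`: `bound_check_offset` must equal the
    /// load offset plus the number of bytes accessed, and the load offset
    /// itself must be non-negative and fit in a `u16`.
    fn g32_offset_rule(
        load_offset: i32,
        load_bytes: u64,
        bound_check_offset: u64,
    ) -> Option<u16> {
        let load_offset = u64::try_from(load_offset).ok()?;
        if bound_check_offset != load_offset + load_bytes {
            return None;
        }
        u16::try_from(load_offset).ok()
    }

    #[test]
    fn bound_check_must_cover_exactly_the_access() {
        // A 4-byte load at offset 8 needs its bound check at offset 12.
        assert_eq!(g32_offset_rule(8, 4, 12), Some(8));
        // A bound check that does not cover exactly the accessed bytes is
        // rejected, as is a negative load offset.
        assert_eq!(g32_offset_rule(8, 4, 16), None);
        assert_eq!(g32_offset_rule(-4, 4, 0), None);
    }
}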