// cranelift/codegen/src/machinst/isle.rs
use crate::ir::{BlockCall, Value, ValueList};
use alloc::boxed::Box;
use alloc::vec::Vec;
use smallvec::SmallVec;

pub use super::MachLabel;
use super::RetPair;
pub use crate::ir::{condcodes::CondCode, *};
pub use crate::isa::{TargetIsa, unwind::UnwindInst};
pub use crate::machinst::{
    ABIArg, ABIArgSlot, ABIMachineSpec, InputSourceInst, Lower, LowerBackend, RealReg, Reg,
    RelocDistance, Sig, TryCallInfo, VCodeInst, Writable,
};
pub use crate::settings::{StackSwitchModel, TlsModel};

// Type aliases used by the ISLE-generated code; ISLE has no generics, so each
// concrete instantiation it needs is given a flat name here.
pub type Unit = ();
pub type ValueSlice = (ValueList, usize);
pub type ValueArray2 = [Value; 2];
pub type ValueArray3 = [Value; 3];
pub type BlockArray2 = [BlockCall; 2];
pub type WritableReg = Writable<Reg>;
pub type VecRetPair = Vec<RetPair>;
pub type VecMask = Vec<u8>;
pub type ValueRegs = crate::machinst::ValueRegs<Reg>;
pub type WritableValueRegs = crate::machinst::ValueRegs<WritableReg>;
pub type ValueRegsVec = SmallVec<[ValueRegs; 2]>;
pub type InstOutput = SmallVec<[ValueRegs; 2]>;
pub type BoxExternalName = Box<ExternalName>;
pub type MachLabelSlice = [MachLabel];
pub type BoxVecMachLabel = Box<Vec<MachLabel>>;
pub type OptionTryCallInfo = Option<TryCallInfo>;

/// Helper macro to define methods in `prelude.isle` within `impl Context for
/// ...` for each backend. These methods are shared amongst all backends.
#[macro_export]
#[doc(hidden)]
macro_rules! isle_lower_prelude_methods {
    () => {
        crate::isle_lower_prelude_methods!(MInst);
    };
    ($inst:ty) => {
        crate::isle_common_prelude_methods!();

        /// Look up the CLIF type of `val`.
        #[inline]
        fn value_type(&mut self, val: Value) -> Type {
            self.lower_ctx.dfg().value_type(val)
        }

        /// Wrap a single register as a `ValueRegs`.
        #[inline]
        fn value_reg(&mut self, reg: Reg) -> ValueRegs {
            ValueRegs::one(reg)
        }

        /// Pair two registers as a `ValueRegs`.
        #[inline]
        fn value_regs(&mut self, r1: Reg, r2: Reg) -> ValueRegs {
            ValueRegs::two(r1, r2)
        }

        #[inline]
        fn writable_value_regs(&mut self, r1: WritableReg, r2: WritableReg) -> WritableValueRegs {
            WritableValueRegs::two(r1, r2)
        }

        #[inline]
        fn writable_value_reg(&mut self, r: WritableReg) -> WritableValueRegs {
            WritableValueRegs::one(r)
        }

        #[inline]
        fn value_regs_invalid(&mut self) -> ValueRegs {
            ValueRegs::invalid()
        }

        /// An empty `InstOutput` (for instructions producing no results).
        #[inline]
        fn output_none(&mut self) -> InstOutput {
            smallvec::smallvec![]
        }

        /// An `InstOutput` with a single result.
        #[inline]
        fn output(&mut self, regs: ValueRegs) -> InstOutput {
            smallvec::smallvec![regs]
        }

        /// An `InstOutput` with two results.
        #[inline]
        fn output_pair(&mut self, r1: ValueRegs, r2: ValueRegs) -> InstOutput {
            smallvec::smallvec![r1, r2]
        }

        #[inline]
        fn output_vec(&mut self, output: &ValueRegsVec) -> InstOutput {
            output.clone()
        }

        /// Allocate a fresh temporary virtual register of type `ty`.
        /// Panics if `ty` needs more than one register.
        #[inline]
        fn temp_writable_reg(&mut self, ty: Type) -> WritableReg {
            let value_regs = self.lower_ctx.alloc_tmp(ty);
            value_regs.only_reg().unwrap()
        }

        #[inline]
        fn is_valid_reg(&mut self, reg: Reg) -> bool {
            use crate::machinst::valueregs::InvalidSentinel;
            !reg.is_invalid_sentinel()
        }

        #[inline]
        fn invalid_reg(&mut self) -> Reg {
            use crate::machinst::valueregs::InvalidSentinel;
            Reg::invalid_sentinel()
        }

        #[inline]
        fn mark_value_used(&mut self, val: Value) {
            self.lower_ctx.increment_lowered_uses(val);
        }

        /// Put `val` into a single register; panics if `val` needs more
        /// than one register (use `put_in_regs` for multi-register values).
        #[inline]
        fn put_in_reg(&mut self, val: Value) -> Reg {
            self.put_in_regs(val).only_reg().unwrap()
        }

        #[inline]
        fn put_in_regs(&mut self, val: Value) -> ValueRegs {
            self.lower_ctx.put_value_in_regs(val)
        }

        /// Put every value in the slice into registers, in order.
        #[inline]
        fn put_in_regs_vec(&mut self, (list, off): ValueSlice) -> ValueRegsVec {
            (off..list.len(&self.lower_ctx.dfg().value_lists))
                .map(|ix| {
                    let val = list.get(ix, &self.lower_ctx.dfg().value_lists).unwrap();
                    self.put_in_regs(val)
                })
                .collect()
        }

        #[inline]
        fn ensure_in_vreg(&mut self, reg: Reg, ty: Type) -> Reg {
            self.lower_ctx.ensure_in_vreg(reg, ty)
        }

        #[inline]
        fn value_regs_get(&mut self, regs: ValueRegs, i: usize) -> Reg {
            regs.regs()[i]
        }

        #[inline]
        fn value_regs_len(&mut self, regs: ValueRegs) -> usize {
            regs.regs().len()
        }

        /// View a `ValueList` as a `ValueSlice` starting at offset 0.
        #[inline]
        fn value_list_slice(&mut self, list: ValueList) -> ValueSlice {
            (list, 0)
        }

        /// `Some(())` iff the slice has no remaining elements.
        #[inline]
        fn value_slice_empty(&mut self, slice: ValueSlice) -> Option<()> {
            let (list, off) = slice;
            if off >= list.len(&self.lower_ctx.dfg().value_lists) {
                Some(())
            } else {
                None
            }
        }

        /// Split the slice into (head value, tail slice), if non-empty.
        #[inline]
        fn value_slice_unwrap(&mut self, slice: ValueSlice) -> Option<(Value, ValueSlice)> {
            let (list, off) = slice;
            if let Some(val) = list.get(off, &self.lower_ctx.dfg().value_lists) {
                Some((val, (list, off + 1)))
            } else {
                None
            }
        }

        #[inline]
        fn value_slice_len(&mut self, slice: ValueSlice) -> usize {
            let (list, off) = slice;
            list.len(&self.lower_ctx.dfg().value_lists) - off
        }

        #[inline]
        fn value_slice_get(&mut self, slice: ValueSlice, idx: usize) -> Value {
            let (list, off) = slice;
            list.get(off + idx, &self.lower_ctx.dfg().value_lists)
                .unwrap()
        }

        #[inline]
        fn writable_reg_to_reg(&mut self, r: WritableReg) -> Reg {
            r.to_reg()
        }

        #[inline]
        fn inst_results(&mut self, inst: Inst) -> ValueSlice {
            (self.lower_ctx.dfg().inst_results_list(inst), 0)
        }

        #[inline]
        fn first_result(&mut self, inst: Inst) -> Option<Value> {
            self.lower_ctx.dfg().inst_results(inst).first().copied()
        }

        #[inline]
        fn inst_data_value(&mut self, inst: Inst) -> InstructionData {
            self.lower_ctx.dfg().insts[inst]
        }

        /// If `val` is defined by an `iconst`, return its immediate
        /// sign-extended from the constant's type width to 64 bits.
        #[inline]
        fn i64_from_iconst(&mut self, val: Value) -> Option<i64> {
            let inst = self.def_inst(val)?;
            let constant = match self.lower_ctx.data(inst) {
                InstructionData::UnaryImm {
                    opcode: Opcode::Iconst,
                    imm,
                } => imm.bits(),
                _ => return None,
            };
            let ty = self.lower_ctx.output_ty(inst, 0);
            let shift_amt = core::cmp::max(0, 64 - self.ty_bits(ty));
            // Shift left then arithmetic-shift right to sign-extend from
            // the narrow type's width.
            Some((constant << shift_amt) >> shift_amt)
        }

        /// `Some(value)` if `value` is a constant whose bits are all zero:
        /// a zero immediate, a zero float constant, an all-zero
        /// `vconst`/`f128const`, or a `splat` of any of those.
        fn zero_value(&mut self, value: Value) -> Option<Value> {
            let insn = self.def_inst(value);
            if insn.is_some() {
                let insn = insn.unwrap();
                let inst_data = self.lower_ctx.data(insn);
                match inst_data {
                    InstructionData::Unary {
                        opcode: Opcode::Splat,
                        arg,
                    } => {
                        // A splat is zero iff its operand is zero; recurse.
                        let arg = arg.clone();
                        return self.zero_value(arg);
                    }
                    InstructionData::UnaryConst {
                        opcode: Opcode::Vconst | Opcode::F128const,
                        constant_handle,
                    } => {
                        let constant_data =
                            self.lower_ctx.get_constant_data(*constant_handle).clone();
                        if constant_data.into_vec().iter().any(|&x| x != 0) {
                            return None;
                        } else {
                            return Some(value);
                        }
                    }
                    InstructionData::UnaryImm { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    InstructionData::UnaryIeee16 { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    InstructionData::UnaryIeee32 { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    InstructionData::UnaryIeee64 { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    _ => None,
                }
            } else {
                None
            }
        }

        #[inline]
        fn tls_model(&mut self, _: Type) -> TlsModel {
            self.backend.flags().tls_model()
        }

        #[inline]
        fn tls_model_is_elf_gd(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::ElfGd {
                Some(())
            } else {
                None
            }
        }

        #[inline]
        fn tls_model_is_macho(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::Macho {
                Some(())
            } else {
                None
            }
        }

        #[inline]
        fn tls_model_is_coff(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::Coff {
                Some(())
            } else {
                None
            }
        }

        #[inline]
        fn preserve_frame_pointers(&mut self) -> Option<()> {
            if self.backend.flags().preserve_frame_pointers() {
                Some(())
            } else {
                None
            }
        }

        #[inline]
        fn stack_switch_model(&mut self) -> Option<StackSwitchModel> {
            Some(self.backend.flags().stack_switch_model())
        }

        /// Decompose an external function reference into its signature,
        /// name, relocation distance (Near iff colocated), and whether it
        /// is patchable.
        #[inline]
        fn func_ref_data(
            &mut self,
            func_ref: FuncRef,
        ) -> (SigRef, ExternalName, RelocDistance, bool) {
            let funcdata = &self.lower_ctx.dfg().ext_funcs[func_ref];
            let reloc_distance = if funcdata.colocated {
                RelocDistance::Near
            } else {
                RelocDistance::Far
            };
            (
                funcdata.signature,
                funcdata.name.clone(),
                reloc_distance,
                funcdata.patchable,
            )
        }

        #[inline]
        fn exception_sig(&mut self, et: ExceptionTable) -> SigRef {
            self.lower_ctx.dfg().exception_tables[et].signature()
        }

        #[inline]
        fn box_external_name(&mut self, extname: ExternalName) -> BoxExternalName {
            Box::new(extname)
        }

        #[inline]
        fn symbol_value_data(
            &mut self,
            global_value: GlobalValue,
        ) -> Option<(ExternalName, RelocDistance, i64)> {
            let (name, reloc, offset) = self.lower_ctx.symbol_value_data(global_value)?;
            Some((name.clone(), reloc, offset))
        }

        /// Read a 16-byte immediate as a little-endian `u128`; `None` if
        /// the immediate is not exactly 16 bytes.
        #[inline]
        fn u128_from_immediate(&mut self, imm: Immediate) -> Option<u128> {
            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some(u128::from_le_bytes(bytes.try_into().ok()?))
        }

        #[inline]
        fn vconst_from_immediate(&mut self, imm: Immediate) -> Option<VCodeConstant> {
            Some(self.lower_ctx.use_constant(VCodeConstantData::Generated(
                self.lower_ctx.get_immediate_data(imm).clone(),
            )))
        }

        #[inline]
        fn vec_mask_from_immediate(&mut self, imm: Immediate) -> Option<VecMask> {
            let data = self.lower_ctx.get_immediate_data(imm);
            if data.len() == 16 {
                Some(Vec::from(data.as_slice()))
            } else {
                None
            }
        }

        #[inline]
        fn u64_from_constant(&mut self, constant: Constant) -> Option<u64> {
            let bytes = self.lower_ctx.get_constant_data(constant).as_slice();
            Some(u64::from_le_bytes(bytes.try_into().ok()?))
        }

        #[inline]
        fn u128_from_constant(&mut self, constant: Constant) -> Option<u128> {
            let bytes = self.lower_ctx.get_constant_data(constant).as_slice();
            Some(u128::from_le_bytes(bytes.try_into().ok()?))
        }

        /// Intern `value` in the constant pool as 8 little-endian bytes.
        #[inline]
        fn emit_u64_le_const(&mut self, value: u64) -> VCodeConstant {
            let data = VCodeConstantData::U64(value.to_le_bytes());
            self.lower_ctx.use_constant(data)
        }

        /// Intern `value` in the constant pool as 8 big-endian bytes.
        #[inline]
        fn emit_u64_be_const(&mut self, value: u64) -> VCodeConstant {
            let data = VCodeConstantData::U64(value.to_be_bytes());
            self.lower_ctx.use_constant(data)
        }

        #[inline]
        fn emit_u128_le_const(&mut self, value: u128) -> VCodeConstant {
            let data = VCodeConstantData::Generated(value.to_le_bytes().as_slice().into());
            self.lower_ctx.use_constant(data)
        }

        #[inline]
        fn emit_u128_be_const(&mut self, value: u128) -> VCodeConstant {
            let data = VCodeConstantData::Generated(value.to_be_bytes().as_slice().into());
            self.lower_ctx.use_constant(data)
        }

        #[inline]
        fn const_to_vconst(&mut self, constant: Constant) -> VCodeConstant {
            self.lower_ctx.use_constant(VCodeConstantData::Pool(
                constant,
                self.lower_ctx.get_constant_data(constant).clone(),
            ))
        }

        fn only_writable_reg(&mut self, regs: WritableValueRegs) -> Option<WritableReg> {
            regs.only_reg()
        }

        fn writable_regs_get(&mut self, regs: WritableValueRegs, idx: usize) -> WritableReg {
            regs.regs()[idx]
        }

        fn abi_sig(&mut self, sig_ref: SigRef) -> Sig {
            self.lower_ctx.sigs().abi_sig_for_sig_ref(sig_ref)
        }

        fn abi_num_args(&mut self, abi: Sig) -> usize {
            self.lower_ctx.sigs().num_args(abi)
        }

        fn abi_get_arg(&mut self, abi: Sig, idx: usize) -> ABIArg {
            self.lower_ctx.sigs().get_arg(abi, idx)
        }

        fn abi_num_rets(&mut self, abi: Sig) -> usize {
            self.lower_ctx.sigs().num_rets(abi)
        }

        fn abi_get_ret(&mut self, abi: Sig, idx: usize) -> ABIArg {
            self.lower_ctx.sigs().get_ret(abi, idx)
        }

        fn abi_ret_arg(&mut self, abi: Sig) -> Option<ABIArg> {
            self.lower_ctx.sigs().get_ret_arg(abi)
        }

        /// `Some(())` iff the signature has no return-area pointer arg;
        /// the inverse of `abi_ret_arg`.
        fn abi_no_ret_arg(&mut self, abi: Sig) -> Option<()> {
            if let Some(_) = self.lower_ctx.sigs().get_ret_arg(abi) {
                None
            } else {
                Some(())
            }
        }

        fn abi_arg_only_slot(&mut self, arg: &ABIArg) -> Option<ABIArgSlot> {
            match arg {
                &ABIArg::Slots { ref slots, .. } => {
                    if slots.len() == 1 {
                        Some(slots[0])
                    } else {
                        None
                    }
                }
                _ => None,
            }
        }

        fn abi_arg_implicit_pointer(&mut self, arg: &ABIArg) -> Option<(ABIArgSlot, i64, Type)> {
            match arg {
                &ABIArg::ImplicitPtrArg {
                    pointer,
                    offset,
                    ty,
                    ..
                } => Some((pointer, offset, ty)),
                _ => None,
            }
        }

        fn abi_unwrap_ret_area_ptr(&mut self) -> Reg {
            self.lower_ctx.abi().ret_area_ptr().unwrap()
        }

        /// Emit an instruction computing the address of `stack_slot` +
        /// `offset` into `dst`. Panics if `offset` is negative.
        fn abi_stackslot_addr(
            &mut self,
            dst: WritableReg,
            stack_slot: StackSlot,
            offset: Offset32,
        ) -> MInst {
            let offset = u32::try_from(i32::from(offset)).unwrap();
            self.lower_ctx
                .abi()
                .sized_stackslot_addr(stack_slot, offset, dst)
                .into()
        }

        fn abi_stackslot_offset_into_slot_region(
            &mut self,
            stack_slot: StackSlot,
            offset1: Offset32,
            offset2: Offset32,
        ) -> i32 {
            let offset1 = i32::from(offset1);
            let offset2 = i32::from(offset2);
            i32::try_from(self.lower_ctx.abi().sized_stackslot_offset(stack_slot))
                .expect("Stack slot region cannot be larger than 2GiB")
                .checked_add(offset1)
                .expect("Stack slot region cannot be larger than 2GiB")
                .checked_add(offset2)
                .expect("Stack slot region cannot be larger than 2GiB")
        }

        fn abi_dynamic_stackslot_addr(
            &mut self,
            dst: WritableReg,
            stack_slot: DynamicStackSlot,
        ) -> MInst {
            assert!(
                self.lower_ctx
                    .abi()
                    .dynamic_stackslot_offsets()
                    .is_valid(stack_slot)
            );
            self.lower_ctx
                .abi()
                .dynamic_stackslot_addr(stack_slot, dst)
                .into()
        }

        fn real_reg_to_reg(&mut self, reg: RealReg) -> Reg {
            Reg::from(reg)
        }

        fn real_reg_to_writable_reg(&mut self, reg: RealReg) -> WritableReg {
            Writable::from_reg(Reg::from(reg))
        }

        /// `Some(inst)` if `val`'s defining instruction has a unique use
        /// here and can therefore be sunk into its consumer.
        fn is_sinkable_inst(&mut self, val: Value) -> Option<Inst> {
            let input = self.lower_ctx.get_value_as_source_or_const(val);

            if let InputSourceInst::UniqueUse(inst, _) = input.inst {
                Some(inst)
            } else {
                None
            }
        }

        #[inline]
        fn sink_inst(&mut self, inst: Inst) {
            self.lower_ctx.sink_inst(inst);
        }

        /// If `value` is defined by a `uextend`, return its operand;
        /// otherwise return `value` itself.
        #[inline]
        fn maybe_uextend(&mut self, value: Value) -> Option<Value> {
            if let Some(def_inst) = self.def_inst(value) {
                if let InstructionData::Unary {
                    opcode: Opcode::Uextend,
                    arg,
                } = self.lower_ctx.data(def_inst)
                {
                    return Some(*arg);
                }
            }

            Some(value)
        }

        /// Narrow an `Imm64` to `u8` if it fits.
        #[inline]
        fn uimm8(&mut self, x: Imm64) -> Option<u8> {
            let x64: i64 = x.into();
            let x8: u8 = x64.try_into().ok()?;
            Some(x8)
        }

        #[inline]
        fn preg_to_reg(&mut self, preg: PReg) -> Reg {
            preg.into()
        }

        #[inline]
        fn gen_move(&mut self, ty: Type, dst: WritableReg, src: Reg) -> MInst {
            <$inst>::gen_move(dst, src, ty).into()
        }

        /// Generate the return instruction.
        fn gen_return(&mut self, rets: &ValueRegsVec) {
            self.lower_ctx.gen_return(rets);
        }

        fn gen_call_output(&mut self, sig_ref: SigRef) -> ValueRegsVec {
            self.lower_ctx.gen_call_output_from_sig_ref(sig_ref)
        }

        fn gen_call_args(&mut self, sig: Sig, inputs: &ValueRegsVec) -> CallArgList {
            self.lower_ctx.gen_call_args(sig, inputs)
        }

        fn gen_return_call_args(&mut self, sig: Sig, inputs: &ValueRegsVec) -> CallArgList {
            self.lower_ctx.gen_return_call_args(sig, inputs)
        }

        fn gen_call_rets(&mut self, sig: Sig, outputs: &ValueRegsVec) -> CallRetList {
            self.lower_ctx.gen_call_rets(sig, &outputs)
        }

        fn gen_try_call_rets(&mut self, sig: Sig) -> CallRetList {
            self.lower_ctx.gen_try_call_rets(sig)
        }

        fn gen_patchable_call_rets(&mut self) -> CallRetList {
            smallvec::smallvec![]
        }

        fn try_call_none(&mut self) -> OptionTryCallInfo {
            None
        }

        /// Build `TryCallInfo` by pairing the exception-table items of
        /// `et` with the successor `labels`. Tag/Default items each
        /// consume one label; Context items consume none. The final
        /// remaining label is the normal-return continuation; panics if
        /// the label count does not match.
        fn try_call_info(
            &mut self,
            et: ExceptionTable,
            labels: &MachLabelSlice,
        ) -> OptionTryCallInfo {
            let mut exception_handlers = vec![];
            let mut labels = labels.iter().cloned();
            for item in self.lower_ctx.dfg().exception_tables[et].clone().items() {
                match item {
                    crate::ir::ExceptionTableItem::Tag(tag, _) => {
                        exception_handlers.push(crate::machinst::abi::TryCallHandler::Tag(
                            tag,
                            labels.next().unwrap(),
                        ));
                    }
                    crate::ir::ExceptionTableItem::Default(_) => {
                        exception_handlers.push(crate::machinst::abi::TryCallHandler::Default(
                            labels.next().unwrap(),
                        ));
                    }
                    crate::ir::ExceptionTableItem::Context(ctx) => {
                        let reg = self.put_in_reg(ctx);
                        exception_handlers.push(crate::machinst::abi::TryCallHandler::Context(reg));
                    }
                }
            }

            let continuation = labels.next().unwrap();
            assert_eq!(labels.next(), None);

            let exception_handlers = exception_handlers.into_boxed_slice();

            Some(TryCallInfo {
                continuation,
                exception_handlers,
            })
        }

        /// Same as `shuffle32_from_imm`, but for 64-bit lane shuffles.
        fn shuffle64_from_imm(&mut self, imm: Immediate) -> Option<(u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;

            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(8, &bytes[0..8])?,
                shuffle_imm_as_le_lane_idx(8, &bytes[8..16])?,
            ))
        }

        /// Attempts to interpret the shuffle immediate `imm` as a shuffle of
        /// 32-bit lanes, returning four integers, each of which is less than 8,
        /// which represents a permutation of 32-bit lanes as specified by
        /// `imm`.
        ///
        /// For example the shuffle immediate
        ///
        /// `0 1 2 3 8 9 10 11 16 17 18 19 24 25 26 27`
        ///
        /// would return `Some((0, 2, 4, 6))`.
        fn shuffle32_from_imm(&mut self, imm: Immediate) -> Option<(u8, u8, u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;

            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(4, &bytes[0..4])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[4..8])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[8..12])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[12..16])?,
            ))
        }

        /// Same as `shuffle32_from_imm`, but for 16-bit lane shuffles.
        fn shuffle16_from_imm(
            &mut self,
            imm: Immediate,
        ) -> Option<(u8, u8, u8, u8, u8, u8, u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;
            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(2, &bytes[0..2])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[2..4])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[4..6])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[6..8])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[8..10])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[10..12])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[12..14])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[14..16])?,
            ))
        }

        /// Mask `val` to `ty`'s width and return it unless it is 0 or -1
        /// (the divisors that need special handling in division lowering).
        fn safe_divisor_from_imm64(&mut self, ty: Type, val: Imm64) -> Option<u64> {
            // All-ones mask for `ty`'s width (which is also -1 in that width).
            let minus_one = if ty.bytes() == 8 {
                -1
            } else {
                (1 << (ty.bytes() * 8)) - 1
            };
            let bits = val.bits() & minus_one;
            if bits == 0 || bits == minus_one {
                None
            } else {
                Some(bits as u64)
            }
        }

        fn single_target(&mut self, targets: &MachLabelSlice) -> Option<MachLabel> {
            if targets.len() == 1 {
                Some(targets[0])
            } else {
                None
            }
        }

        fn two_targets(&mut self, targets: &MachLabelSlice) -> Option<(MachLabel, MachLabel)> {
            if targets.len() == 2 {
                Some((targets[0], targets[1]))
            } else {
                None
            }
        }

        /// Split jump-table targets into (default label, table labels).
        /// `targets[0]` is the default; the rest are the table entries.
        fn jump_table_targets(
            &mut self,
            targets: &MachLabelSlice,
        ) -> Option<(MachLabel, BoxVecMachLabel)> {
            use alloc::boxed::Box;
            if targets.is_empty() {
                return None;
            }

            let default_label = targets[0];
            let jt_targets = Box::new(targets[1..].to_vec());
            Some((default_label, jt_targets))
        }

        fn jump_table_size(&mut self, targets: &BoxVecMachLabel) -> u32 {
            targets.len() as u32
        }

        /// Attach a range fact (`min..=max` within `bits` bits) to `reg`
        /// and return the same register for chaining.
        fn add_range_fact(&mut self, reg: Reg, bits: u16, min: u64, max: u64) -> Reg {
            self.lower_ctx.add_range_fact(reg, bits, min, max);
            reg
        }

        fn value_is_unused(&mut self, val: Value) -> bool {
            self.lower_ctx.value_is_unused(val)
        }

        fn block_exn_successor_label(&mut self, block: &Block, exn_succ: u64) -> MachLabel {
            // The first N successors are the exceptional edges, and
            // the normal return is last; so the `exn_succ`'th
            // exceptional edge is just the `exn_succ`'th edge overall.
            let succ = usize::try_from(exn_succ).unwrap();
            self.lower_ctx.block_successor_label(*block, succ)
        }
    };
}

/// Returns the `size`-byte lane referred to by the shuffle immediate specified
/// in `bytes`.
///
/// This helper is used by `shuffleNN_from_imm` above and is used to interpret a
/// byte-based shuffle as a higher-level shuffle of bigger lanes. This will see
/// if the `bytes` specified, which must have `size` length, specifies a lane in
/// vectors aligned to a `size`-byte boundary.
///
/// Returns `None` if `bytes` doesn't specify a `size`-byte lane aligned
/// appropriately, or returns `Some(n)` where `n` is the index of the lane being
/// shuffled.
pub fn shuffle_imm_as_le_lane_idx(size: u8, bytes: &[u8]) -> Option<u8> {
    assert_eq!(bytes.len(), usize::from(size));

    // The first index in `bytes` must be aligned to a `size` boundary for the
    // bytes to be a valid specifier for a lane of `size` bytes.
    if bytes[0] % size != 0 {
        return None;
    }

    // Afterwards the bytes must all be one larger than the prior to specify a
    // contiguous sequence of bytes that's being shuffled. Basically `bytes`
    // must refer to the entire `size`-byte lane, in little-endian order.
    for i in 0..size - 1 {
        let idx = usize::from(i);
        if bytes[idx] + 1 != bytes[idx + 1] {
            return None;
        }
    }

    // All of the `bytes` are in-order, meaning that this is a valid shuffle
    // immediate to specify a lane of `size` bytes. The index, when viewed as
    // `size`-byte immediates, will be the first byte divided by the byte size.
    Some(bytes[0] / size)
}

/// This structure is used to implement the ISLE-generated `Context` trait and
/// internally has a temporary reference to a machinst `LowerCtx`.
pub(crate) struct IsleContext<'a, 'b, I, B>
where
    I: VCodeInst,
    B: LowerBackend,
{
    pub lower_ctx: &'a mut Lower<'b, I>,
    pub backend: &'a B,
}

impl<I, B> IsleContext<'_, '_, I, B>
where
    I: VCodeInst,
    B: LowerBackend,
{
    /// Shorthand for the function's data-flow graph.
    pub(crate) fn dfg(&self) -> &crate::ir::DataFlowGraph {
        &self.lower_ctx.f.dfg
    }
}