GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/cranelift/codegen/src/machinst/isle.rs
use crate::ir::{BlockCall, Value, ValueList};
use alloc::boxed::Box;
use alloc::vec::Vec;
use smallvec::SmallVec;

pub use super::MachLabel;
use super::RetPair;
pub use crate::ir::{condcodes::CondCode, *};
pub use crate::isa::{TargetIsa, unwind::UnwindInst};
pub use crate::machinst::{
    ABIArg, ABIArgSlot, ABIMachineSpec, InputSourceInst, Lower, LowerBackend, RealReg, Reg,
    RelocDistance, Sig, TryCallInfo, VCodeInst, Writable,
};
pub use crate::settings::{StackSwitchModel, TlsModel};

pub type Unit = ();
pub type ValueSlice = (ValueList, usize);
pub type ValueArray2 = [Value; 2];
pub type ValueArray3 = [Value; 3];
pub type BlockArray2 = [BlockCall; 2];
pub type WritableReg = Writable<Reg>;
pub type VecRetPair = Vec<RetPair>;
pub type VecMask = Vec<u8>;
pub type ValueRegs = crate::machinst::ValueRegs<Reg>;
pub type WritableValueRegs = crate::machinst::ValueRegs<WritableReg>;
pub type ValueRegsVec = SmallVec<[ValueRegs; 2]>;
pub type InstOutput = SmallVec<[ValueRegs; 2]>;
pub type BoxExternalName = Box<ExternalName>;
pub type MachLabelSlice = [MachLabel];
pub type BoxVecMachLabel = Box<Vec<MachLabel>>;
pub type OptionTryCallInfo = Option<TryCallInfo>;

/// Helper macro to define methods in `prelude.isle` within `impl Context for
/// ...` for each backend. These methods are shared amongst all backends.
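///
/// A backend expands this inside its ISLE glue, within the `impl Context for
/// IsleContext<...>` block for that backend. A sketch (the `MyBackend` name
/// is a placeholder, not part of this file):
///
/// ```ignore
/// impl generated_code::Context for IsleContext<'_, '_, MInst, MyBackend> {
///     crate::isle_lower_prelude_methods!();
///
///     // ... backend-specific `Context` methods follow ...
/// }
/// ```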
#[macro_export]
#[doc(hidden)]
macro_rules! isle_lower_prelude_methods {
    () => {
        crate::isle_lower_prelude_methods!(MInst);
    };
    ($inst:ty) => {
        crate::isle_common_prelude_methods!();

        #[inline]
        fn value_type(&mut self, val: Value) -> Type {
            self.lower_ctx.dfg().value_type(val)
        }

        #[inline]
        fn value_reg(&mut self, reg: Reg) -> ValueRegs {
            ValueRegs::one(reg)
        }

        #[inline]
        fn value_regs(&mut self, r1: Reg, r2: Reg) -> ValueRegs {
            ValueRegs::two(r1, r2)
        }

        #[inline]
        fn writable_value_regs(&mut self, r1: WritableReg, r2: WritableReg) -> WritableValueRegs {
            WritableValueRegs::two(r1, r2)
        }

        #[inline]
        fn writable_value_reg(&mut self, r: WritableReg) -> WritableValueRegs {
            WritableValueRegs::one(r)
        }

        #[inline]
        fn value_regs_invalid(&mut self) -> ValueRegs {
            ValueRegs::invalid()
        }

        #[inline]
        fn output_none(&mut self) -> InstOutput {
            smallvec::smallvec![]
        }

        #[inline]
        fn output(&mut self, regs: ValueRegs) -> InstOutput {
            smallvec::smallvec![regs]
        }

        #[inline]
        fn output_pair(&mut self, r1: ValueRegs, r2: ValueRegs) -> InstOutput {
            smallvec::smallvec![r1, r2]
        }

        #[inline]
        fn output_vec(&mut self, output: &ValueRegsVec) -> InstOutput {
            output.clone()
        }

        #[inline]
        fn temp_writable_reg(&mut self, ty: Type) -> WritableReg {
            let value_regs = self.lower_ctx.alloc_tmp(ty);
            value_regs.only_reg().unwrap()
        }

        #[inline]
        fn is_valid_reg(&mut self, reg: Reg) -> bool {
            use crate::machinst::valueregs::InvalidSentinel;
            !reg.is_invalid_sentinel()
        }

        #[inline]
        fn invalid_reg(&mut self) -> Reg {
            use crate::machinst::valueregs::InvalidSentinel;
            Reg::invalid_sentinel()
        }

        #[inline]
        fn mark_value_used(&mut self, val: Value) {
            self.lower_ctx.increment_lowered_uses(val);
        }

        #[inline]
        fn put_in_reg(&mut self, val: Value) -> Reg {
            self.put_in_regs(val).only_reg().unwrap()
        }

        #[inline]
        fn put_in_regs(&mut self, val: Value) -> ValueRegs {
            self.lower_ctx.put_value_in_regs(val)
        }

        #[inline]
        fn put_in_regs_vec(&mut self, (list, off): ValueSlice) -> ValueRegsVec {
            (off..list.len(&self.lower_ctx.dfg().value_lists))
                .map(|ix| {
                    let val = list.get(ix, &self.lower_ctx.dfg().value_lists).unwrap();
                    self.put_in_regs(val)
                })
                .collect()
        }

        #[inline]
        fn ensure_in_vreg(&mut self, reg: Reg, ty: Type) -> Reg {
            self.lower_ctx.ensure_in_vreg(reg, ty)
        }

        #[inline]
        fn value_regs_get(&mut self, regs: ValueRegs, i: usize) -> Reg {
            regs.regs()[i]
        }

        #[inline]
        fn value_regs_len(&mut self, regs: ValueRegs) -> usize {
            regs.regs().len()
        }

        #[inline]
        fn value_list_slice(&mut self, list: ValueList) -> ValueSlice {
            (list, 0)
        }

        #[inline]
        fn value_slice_empty(&mut self, slice: ValueSlice) -> Option<()> {
            let (list, off) = slice;
            if off >= list.len(&self.lower_ctx.dfg().value_lists) {
                Some(())
            } else {
                None
            }
        }

        #[inline]
        fn value_slice_unwrap(&mut self, slice: ValueSlice) -> Option<(Value, ValueSlice)> {
            let (list, off) = slice;
            if let Some(val) = list.get(off, &self.lower_ctx.dfg().value_lists) {
                Some((val, (list, off + 1)))
            } else {
                None
            }
        }

        #[inline]
        fn value_slice_len(&mut self, slice: ValueSlice) -> usize {
            let (list, off) = slice;
            list.len(&self.lower_ctx.dfg().value_lists) - off
        }

        #[inline]
        fn value_slice_get(&mut self, slice: ValueSlice, idx: usize) -> Value {
            let (list, off) = slice;
            list.get(off + idx, &self.lower_ctx.dfg().value_lists)
                .unwrap()
        }

        #[inline]
        fn writable_reg_to_reg(&mut self, r: WritableReg) -> Reg {
            r.to_reg()
        }

        #[inline]
        fn inst_results(&mut self, inst: Inst) -> ValueSlice {
            (self.lower_ctx.dfg().inst_results_list(inst), 0)
        }

        #[inline]
        fn first_result(&mut self, inst: Inst) -> Option<Value> {
            self.lower_ctx.dfg().inst_results(inst).first().copied()
        }

        #[inline]
        fn inst_data_value(&mut self, inst: Inst) -> InstructionData {
            self.lower_ctx.dfg().insts[inst]
        }

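        /// If `val` is defined by an `iconst`, returns its immediate
        /// sign-extended from the width of the constant's type to 64 bits.
        /// Worked example: for `iconst.i8 0xff`, `ty_bits` is 8, so
        /// `shift_amt` is 56, and `(0xff << 56) >> 56` (an arithmetic shift
        /// over `i64`) yields `Some(-1)`.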
        #[inline]
        fn i64_from_iconst(&mut self, val: Value) -> Option<i64> {
            let inst = self.def_inst(val)?;
            let constant = match self.lower_ctx.data(inst) {
                InstructionData::UnaryImm {
                    opcode: Opcode::Iconst,
                    imm,
                } => imm.bits(),
                _ => return None,
            };
            let ty = self.lower_ctx.output_ty(inst, 0);
            let shift_amt = core::cmp::max(0, 64 - self.ty_bits(ty));
            Some((constant << shift_amt) >> shift_amt)
        }

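        /// Returns `value` if it is known to be all-zero bits: an `iconst 0`,
        /// a zero float constant, an all-zero `vconst`/`f128const`, or a
        /// `splat` of any of these. Otherwise returns `None`.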
        fn zero_value(&mut self, value: Value) -> Option<Value> {
            let insn = self.def_inst(value)?;
            let inst_data = self.lower_ctx.data(insn);
            match inst_data {
                InstructionData::Unary {
                    opcode: Opcode::Splat,
                    arg,
                } => {
                    let arg = *arg;
                    self.zero_value(arg)
                }
                InstructionData::UnaryConst {
                    opcode: Opcode::Vconst | Opcode::F128const,
                    constant_handle,
                } => {
                    let constant_data =
                        self.lower_ctx.get_constant_data(*constant_handle).clone();
                    if constant_data.into_vec().iter().any(|&x| x != 0) {
                        None
                    } else {
                        Some(value)
                    }
                }
                InstructionData::UnaryImm { imm, .. } => {
                    if imm.bits() == 0 { Some(value) } else { None }
                }
                InstructionData::UnaryIeee16 { imm, .. } => {
                    if imm.bits() == 0 { Some(value) } else { None }
                }
                InstructionData::UnaryIeee32 { imm, .. } => {
                    if imm.bits() == 0 { Some(value) } else { None }
                }
                InstructionData::UnaryIeee64 { imm, .. } => {
                    if imm.bits() == 0 { Some(value) } else { None }
                }
                _ => None,
            }
        }

        #[inline]
        fn tls_model(&mut self, _: Type) -> TlsModel {
            self.backend.flags().tls_model()
        }

        #[inline]
        fn tls_model_is_elf_gd(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::ElfGd {
                Some(())
            } else {
                None
            }
        }

        #[inline]
        fn tls_model_is_macho(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::Macho {
                Some(())
            } else {
                None
            }
        }

        #[inline]
        fn tls_model_is_coff(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::Coff {
                Some(())
            } else {
                None
            }
        }

        #[inline]
        fn preserve_frame_pointers(&mut self) -> Option<()> {
            if self.backend.flags().preserve_frame_pointers() {
                Some(())
            } else {
                None
            }
        }

        #[inline]
        fn stack_switch_model(&mut self) -> Option<StackSwitchModel> {
            Some(self.backend.flags().stack_switch_model())
        }

        #[inline]
        fn func_ref_data(
            &mut self,
            func_ref: FuncRef,
        ) -> (SigRef, ExternalName, RelocDistance, bool) {
            let funcdata = &self.lower_ctx.dfg().ext_funcs[func_ref];
            let reloc_distance = if funcdata.colocated {
                RelocDistance::Near
            } else {
                RelocDistance::Far
            };
            (
                funcdata.signature,
                funcdata.name.clone(),
                reloc_distance,
                funcdata.patchable,
            )
        }

        #[inline]
        fn exception_sig(&mut self, et: ExceptionTable) -> SigRef {
            self.lower_ctx.dfg().exception_tables[et].signature()
        }

        #[inline]
        fn box_external_name(&mut self, extname: ExternalName) -> BoxExternalName {
            Box::new(extname)
        }

        #[inline]
        fn symbol_value_data(
            &mut self,
            global_value: GlobalValue,
        ) -> Option<(ExternalName, RelocDistance, i64)> {
            let (name, reloc, offset) = self.lower_ctx.symbol_value_data(global_value)?;
            Some((name.clone(), reloc, offset))
        }

        #[inline]
        fn u128_from_immediate(&mut self, imm: Immediate) -> Option<u128> {
            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some(u128::from_le_bytes(bytes.try_into().ok()?))
        }

        #[inline]
        fn vconst_from_immediate(&mut self, imm: Immediate) -> Option<VCodeConstant> {
            Some(self.lower_ctx.use_constant(VCodeConstantData::Generated(
                self.lower_ctx.get_immediate_data(imm).clone(),
            )))
        }

        #[inline]
        fn vec_mask_from_immediate(&mut self, imm: Immediate) -> Option<VecMask> {
            let data = self.lower_ctx.get_immediate_data(imm);
            if data.len() == 16 {
                Some(Vec::from(data.as_slice()))
            } else {
                None
            }
        }

        #[inline]
        fn u64_from_constant(&mut self, constant: Constant) -> Option<u64> {
            let bytes = self.lower_ctx.get_constant_data(constant).as_slice();
            Some(u64::from_le_bytes(bytes.try_into().ok()?))
        }

        #[inline]
        fn u128_from_constant(&mut self, constant: Constant) -> Option<u128> {
            let bytes = self.lower_ctx.get_constant_data(constant).as_slice();
            Some(u128::from_le_bytes(bytes.try_into().ok()?))
        }

        #[inline]
        fn emit_u64_le_const(&mut self, value: u64) -> VCodeConstant {
            let data = VCodeConstantData::U64(value.to_le_bytes());
            self.lower_ctx.use_constant(data)
        }

        #[inline]
        fn emit_u64_be_const(&mut self, value: u64) -> VCodeConstant {
            let data = VCodeConstantData::U64(value.to_be_bytes());
            self.lower_ctx.use_constant(data)
        }

        #[inline]
        fn emit_u128_le_const(&mut self, value: u128) -> VCodeConstant {
            let data = VCodeConstantData::Generated(value.to_le_bytes().as_slice().into());
            self.lower_ctx.use_constant(data)
        }

        #[inline]
        fn emit_u128_be_const(&mut self, value: u128) -> VCodeConstant {
            let data = VCodeConstantData::Generated(value.to_be_bytes().as_slice().into());
            self.lower_ctx.use_constant(data)
        }

        #[inline]
        fn const_to_vconst(&mut self, constant: Constant) -> VCodeConstant {
            self.lower_ctx.use_constant(VCodeConstantData::Pool(
                constant,
                self.lower_ctx.get_constant_data(constant).clone(),
            ))
        }

        fn only_writable_reg(&mut self, regs: WritableValueRegs) -> Option<WritableReg> {
            regs.only_reg()
        }

        fn writable_regs_get(&mut self, regs: WritableValueRegs, idx: usize) -> WritableReg {
            regs.regs()[idx]
        }

        fn abi_sig(&mut self, sig_ref: SigRef) -> Sig {
            self.lower_ctx.sigs().abi_sig_for_sig_ref(sig_ref)
        }

        fn abi_num_args(&mut self, abi: Sig) -> usize {
            self.lower_ctx.sigs().num_args(abi)
        }

        fn abi_get_arg(&mut self, abi: Sig, idx: usize) -> ABIArg {
            self.lower_ctx.sigs().get_arg(abi, idx)
        }

        fn abi_num_rets(&mut self, abi: Sig) -> usize {
            self.lower_ctx.sigs().num_rets(abi)
        }

        fn abi_get_ret(&mut self, abi: Sig, idx: usize) -> ABIArg {
            self.lower_ctx.sigs().get_ret(abi, idx)
        }

        fn abi_ret_arg(&mut self, abi: Sig) -> Option<ABIArg> {
            self.lower_ctx.sigs().get_ret_arg(abi)
        }

        fn abi_no_ret_arg(&mut self, abi: Sig) -> Option<()> {
            if self.lower_ctx.sigs().get_ret_arg(abi).is_some() {
                None
            } else {
                Some(())
            }
        }

        fn abi_arg_only_slot(&mut self, arg: &ABIArg) -> Option<ABIArgSlot> {
            match arg {
                &ABIArg::Slots { ref slots, .. } => {
                    if slots.len() == 1 {
                        Some(slots[0])
                    } else {
                        None
                    }
                }
                _ => None,
            }
        }

        fn abi_arg_implicit_pointer(&mut self, arg: &ABIArg) -> Option<(ABIArgSlot, i64, Type)> {
            match arg {
                &ABIArg::ImplicitPtrArg {
                    pointer,
                    offset,
                    ty,
                    ..
                } => Some((pointer, offset, ty)),
                _ => None,
            }
        }

        fn abi_unwrap_ret_area_ptr(&mut self) -> Reg {
            self.lower_ctx.abi().ret_area_ptr().unwrap()
        }

        fn abi_stackslot_addr(
            &mut self,
            dst: WritableReg,
            stack_slot: StackSlot,
            offset: Offset32,
        ) -> MInst {
            let offset = u32::try_from(i32::from(offset)).unwrap();
            self.lower_ctx
                .abi()
                .sized_stackslot_addr(stack_slot, offset, dst)
                .into()
        }

        fn abi_stackslot_offset_into_slot_region(
            &mut self,
            stack_slot: StackSlot,
            offset1: Offset32,
            offset2: Offset32,
        ) -> i32 {
            let offset1 = i32::from(offset1);
            let offset2 = i32::from(offset2);
            i32::try_from(self.lower_ctx.abi().sized_stackslot_offset(stack_slot))
                .expect("Stack slot region cannot be larger than 2GiB")
                .checked_add(offset1)
                .expect("Stack slot region cannot be larger than 2GiB")
                .checked_add(offset2)
                .expect("Stack slot region cannot be larger than 2GiB")
        }

        fn abi_dynamic_stackslot_addr(
            &mut self,
            dst: WritableReg,
            stack_slot: DynamicStackSlot,
        ) -> MInst {
            assert!(
                self.lower_ctx
                    .abi()
                    .dynamic_stackslot_offsets()
                    .is_valid(stack_slot)
            );
            self.lower_ctx
                .abi()
                .dynamic_stackslot_addr(stack_slot, dst)
                .into()
        }

        fn real_reg_to_reg(&mut self, reg: RealReg) -> Reg {
            Reg::from(reg)
        }

        fn real_reg_to_writable_reg(&mut self, reg: RealReg) -> WritableReg {
            Writable::from_reg(Reg::from(reg))
        }

        fn is_sinkable_inst(&mut self, val: Value) -> Option<Inst> {
            let input = self.lower_ctx.get_value_as_source_or_const(val);

            if let InputSourceInst::UniqueUse(inst, _) = input.inst {
                Some(inst)
            } else {
                None
            }
        }

        #[inline]
        fn sink_inst(&mut self, inst: Inst) {
            self.lower_ctx.sink_inst(inst);
        }

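        /// Looks through a `uextend`: if `value` is defined by one, returns
        /// its argument; otherwise returns `value` itself. Note that this
        /// never returns `None`.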
        #[inline]
        fn maybe_uextend(&mut self, value: Value) -> Option<Value> {
            if let Some(def_inst) = self.def_inst(value) {
                if let InstructionData::Unary {
                    opcode: Opcode::Uextend,
                    arg,
                } = self.lower_ctx.data(def_inst)
                {
                    return Some(*arg);
                }
            }

            Some(value)
        }

        #[inline]
        fn uimm8(&mut self, x: Imm64) -> Option<u8> {
            let x64: i64 = x.into();
            let x8: u8 = x64.try_into().ok()?;
            Some(x8)
        }

        #[inline]
        fn preg_to_reg(&mut self, preg: PReg) -> Reg {
            preg.into()
        }

        #[inline]
        fn gen_move(&mut self, ty: Type, dst: WritableReg, src: Reg) -> MInst {
            <$inst>::gen_move(dst, src, ty).into()
        }

        /// Generate the return instruction.
        fn gen_return(&mut self, rets: &ValueRegsVec) {
            self.lower_ctx.gen_return(rets);
        }

        fn gen_call_output(&mut self, sig_ref: SigRef) -> ValueRegsVec {
            self.lower_ctx.gen_call_output_from_sig_ref(sig_ref)
        }

        fn gen_call_args(&mut self, sig: Sig, inputs: &ValueRegsVec) -> CallArgList {
            self.lower_ctx.gen_call_args(sig, inputs)
        }

        fn gen_return_call_args(&mut self, sig: Sig, inputs: &ValueRegsVec) -> CallArgList {
            self.lower_ctx.gen_return_call_args(sig, inputs)
        }

        fn gen_call_rets(&mut self, sig: Sig, outputs: &ValueRegsVec) -> CallRetList {
            self.lower_ctx.gen_call_rets(sig, outputs)
        }

        fn gen_try_call_rets(&mut self, sig: Sig) -> CallRetList {
            self.lower_ctx.gen_try_call_rets(sig)
        }

        fn gen_patchable_call_rets(&mut self) -> CallRetList {
            smallvec::smallvec![]
        }

        fn try_call_none(&mut self) -> OptionTryCallInfo {
            None
        }

        fn try_call_info(
            &mut self,
            et: ExceptionTable,
            labels: &MachLabelSlice,
        ) -> OptionTryCallInfo {
            let mut exception_handlers = vec![];
            let mut labels = labels.iter().cloned();
            for item in self.lower_ctx.dfg().exception_tables[et].clone().items() {
                match item {
                    crate::ir::ExceptionTableItem::Tag(tag, _) => {
                        exception_handlers.push(crate::machinst::abi::TryCallHandler::Tag(
                            tag,
                            labels.next().unwrap(),
                        ));
                    }
                    crate::ir::ExceptionTableItem::Default(_) => {
                        exception_handlers.push(crate::machinst::abi::TryCallHandler::Default(
                            labels.next().unwrap(),
                        ));
                    }
                    crate::ir::ExceptionTableItem::Context(ctx) => {
                        let reg = self.put_in_reg(ctx);
                        exception_handlers.push(crate::machinst::abi::TryCallHandler::Context(reg));
                    }
                }
            }

            // The handler items above consumed one label each; the single
            // remaining label is the normal-return continuation.
            let continuation = labels.next().unwrap();
            assert_eq!(labels.next(), None);

            let exception_handlers = exception_handlers.into_boxed_slice();

            Some(TryCallInfo {
                continuation,
                exception_handlers,
            })
        }

        /// Same as `shuffle32_from_imm`, but for 64-bit lane shuffles.
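        ///
        /// For example the shuffle immediate
        ///
        /// `0 1 2 3 4 5 6 7 24 25 26 27 28 29 30 31`
        ///
        /// selects 64-bit lanes 0 and 3, so it would return `Some((0, 3))`.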
        fn shuffle64_from_imm(&mut self, imm: Immediate) -> Option<(u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;

            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(8, &bytes[0..8])?,
                shuffle_imm_as_le_lane_idx(8, &bytes[8..16])?,
            ))
        }

        /// Attempts to interpret the shuffle immediate `imm` as a shuffle of
        /// 32-bit lanes, returning four integers, each less than 8, which
        /// together represent the permutation of 32-bit lanes specified by
        /// `imm`.
        ///
        /// For example the shuffle immediate
        ///
        /// `0 1 2 3 8 9 10 11 16 17 18 19 24 25 26 27`
        ///
        /// would return `Some((0, 2, 4, 6))`.
        fn shuffle32_from_imm(&mut self, imm: Immediate) -> Option<(u8, u8, u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;

            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(4, &bytes[0..4])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[4..8])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[8..12])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[12..16])?,
            ))
        }

        /// Same as `shuffle32_from_imm`, but for 16-bit lane shuffles.
        fn shuffle16_from_imm(
            &mut self,
            imm: Immediate,
        ) -> Option<(u8, u8, u8, u8, u8, u8, u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;
            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(2, &bytes[0..2])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[2..4])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[4..6])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[6..8])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[8..10])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[10..12])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[12..14])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[14..16])?,
            ))
        }

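        /// Masks `val` to the width of `ty` and returns the result, unless
        /// the masked value is 0 or all-ones (i.e. -1): the two divisor
        /// values that division lowering must special-case (division by
        /// zero traps, and `INT_MIN / -1` overflows).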
        fn safe_divisor_from_imm64(&mut self, ty: Type, val: Imm64) -> Option<u64> {
            let minus_one = if ty.bytes() == 8 {
                -1
            } else {
                (1 << (ty.bytes() * 8)) - 1
            };
            let bits = val.bits() & minus_one;
            if bits == 0 || bits == minus_one {
                None
            } else {
                Some(bits as u64)
            }
        }

        fn single_target(&mut self, targets: &MachLabelSlice) -> Option<MachLabel> {
            if targets.len() == 1 {
                Some(targets[0])
            } else {
                None
            }
        }

        fn two_targets(&mut self, targets: &MachLabelSlice) -> Option<(MachLabel, MachLabel)> {
            if targets.len() == 2 {
                Some((targets[0], targets[1]))
            } else {
                None
            }
        }

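        /// Splits a branch-target list into the default label (the first
        /// entry) and the boxed list of jump-table targets (the rest).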
        fn jump_table_targets(
            &mut self,
            targets: &MachLabelSlice,
        ) -> Option<(MachLabel, BoxVecMachLabel)> {
            use alloc::boxed::Box;
            if targets.is_empty() {
                return None;
            }

            let default_label = targets[0];
            let jt_targets = Box::new(targets[1..].to_vec());
            Some((default_label, jt_targets))
        }

        fn jump_table_size(&mut self, targets: &BoxVecMachLabel) -> u32 {
            targets.len() as u32
        }

        fn add_range_fact(&mut self, reg: Reg, bits: u16, min: u64, max: u64) -> Reg {
            self.lower_ctx.add_range_fact(reg, bits, min, max);
            reg
        }

        fn value_is_unused(&mut self, val: Value) -> bool {
            self.lower_ctx.value_is_unused(val)
        }

        fn block_exn_successor_label(&mut self, block: &Block, exn_succ: u64) -> MachLabel {
            // The first N successors are the exceptional edges, and
            // the normal return is last; so the `exn_succ`'th
            // exceptional edge is just the `exn_succ`'th edge overall.
            let succ = usize::try_from(exn_succ).unwrap();
            self.lower_ctx.block_successor_label(*block, succ)
        }
    };
}

/// Returns the `size`-byte lane referred to by the shuffle immediate specified
/// in `bytes`.
///
/// This helper is used by `shuffleNN_from_imm` above to interpret a byte-based
/// shuffle as a higher-level shuffle of bigger lanes. It checks whether
/// `bytes`, which must have length `size`, specifies a lane aligned to a
/// `size`-byte boundary.
///
/// Returns `None` if `bytes` doesn't specify a `size`-byte lane aligned
/// appropriately, or returns `Some(n)` where `n` is the index of the lane being
/// shuffled.
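///
/// For example, with 4-byte lanes:
///
/// ```ignore
/// // Bytes [4, 5, 6, 7] name the second 4-byte lane, aligned and contiguous:
/// assert_eq!(shuffle_imm_as_le_lane_idx(4, &[4, 5, 6, 7]), Some(1));
/// // Bytes [5, 6, 7, 8] straddle a lane boundary, so no lane index exists:
/// assert_eq!(shuffle_imm_as_le_lane_idx(4, &[5, 6, 7, 8]), None);
/// ```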
pub fn shuffle_imm_as_le_lane_idx(size: u8, bytes: &[u8]) -> Option<u8> {
    assert_eq!(bytes.len(), usize::from(size));

    // The first index in `bytes` must be aligned to a `size` boundary for the
    // bytes to be a valid specifier for a lane of `size` bytes.
    if bytes[0] % size != 0 {
        return None;
    }

    // Afterwards each byte must be one larger than the prior, so that `bytes`
    // specifies a contiguous sequence: it must refer to the entire
    // `size`-byte lane, in little-endian order.
    for i in 0..size - 1 {
        let idx = usize::from(i);
        if bytes[idx] + 1 != bytes[idx + 1] {
            return None;
        }
    }

    // All of the `bytes` are in-order, meaning that this is a valid shuffle
    // immediate to specify a lane of `size` bytes. The index, when viewed as
    // `size`-byte immediates, will be the first byte divided by the byte size.
    Some(bytes[0] / size)
}

/// This structure is used to implement the ISLE-generated `Context` trait and
/// internally has a temporary reference to a machinst `LowerCtx`.
pub(crate) struct IsleContext<'a, 'b, I, B>
where
    I: VCodeInst,
    B: LowerBackend,
{
    pub lower_ctx: &'a mut Lower<'b, I>,
    pub backend: &'a B,
}

impl<I, B> IsleContext<'_, '_, I, B>
where
    I: VCodeInst,
    B: LowerBackend,
{
    pub(crate) fn dfg(&self) -> &crate::ir::DataFlowGraph {
        &self.lower_ctx.f.dfg
    }
}
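// Usage sketch (hypothetical glue, not part of this file): a backend
// constructs an `IsleContext` around its `Lower` state and hands it to the
// ISLE-generated entry point, along the lines of:
//
//     let mut isle_ctx = IsleContext { lower_ctx, backend };
//     generated_code::constructor_lower(&mut isle_ctx, inst)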