Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bytecodealliance
GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/cranelift/codegen/src/isa/pulley_shared/inst/mod.rs
3088 views
1
//! This module defines Pulley-specific machine instruction types.
2
3
use core::marker::PhantomData;
4
5
use crate::binemit::{Addend, CodeOffset, Reloc};
6
use crate::ir::types::{self, F32, F64, I8, I8X16, I16, I32, I64, I128};
7
use crate::ir::{self, MemFlags, Type};
8
use crate::isa::FunctionAlignment;
9
use crate::isa::pulley_shared::abi::PulleyMachineDeps;
10
use crate::{CodegenError, CodegenResult, settings};
11
use crate::{machinst::*, trace};
12
use alloc::string::{String, ToString};
13
use alloc::vec;
14
use alloc::vec::Vec;
15
use regalloc2::RegClass;
16
use smallvec::SmallVec;
17
18
pub mod regs;
19
pub use self::regs::*;
20
pub mod args;
21
pub use self::args::*;
22
pub mod emit;
23
pub use self::emit::*;
24
25
//=============================================================================
26
// Instructions (top level): definition
27
28
pub use crate::isa::pulley_shared::lower::isle::generated_code::MInst as Inst;
29
pub use crate::isa::pulley_shared::lower::isle::generated_code::RawInst;
30
31
impl From<RawInst> for Inst {
32
fn from(raw: RawInst) -> Inst {
33
Inst::Raw { raw }
34
}
35
}
36
37
use super::PulleyTargetKind;
38
39
// Build-time-generated helpers for the `RawInst` variants: the included file
// is produced into `OUT_DIR` by the crate's build process and is kept in its
// own module so generated names don't collide with hand-written items here.
mod generated {
    use super::*;
    use crate::isa::pulley_shared::lower::isle::generated_code::RawInst;

    include!(concat!(env!("OUT_DIR"), "/pulley_inst_gen.rs"));
}
45
46
/// Out-of-line data for return-calls, to keep the size of `Inst` down.
///
/// Generic over `T`, the representation of the call destination (e.g. a
/// symbolic name for direct return-calls or a register for indirect ones).
#[derive(Clone, Debug)]
pub struct ReturnCallInfo<T> {
    /// Where this call is going.
    pub dest: T,

    /// The size of the argument area for this return-call, potentially smaller
    /// than that of the caller, but never larger.
    pub new_stack_arg_size: u32,

    /// The in-register arguments and their constraints.
    pub uses: CallArgList,
}
59
60
impl Inst {
61
/// Generic constructor for a load (zero-extending where appropriate).
62
pub fn gen_load(dst: Writable<Reg>, mem: Amode, ty: Type, flags: MemFlags) -> Inst {
63
if ty.is_vector() {
64
assert_eq!(ty.bytes(), 16);
65
Inst::VLoad {
66
dst: dst.map(|r| VReg::new(r).unwrap()),
67
mem,
68
ty,
69
flags,
70
}
71
} else if ty.is_int() {
72
assert!(ty.bytes() <= 8);
73
Inst::XLoad {
74
dst: dst.map(|r| XReg::new(r).unwrap()),
75
mem,
76
ty,
77
flags,
78
}
79
} else {
80
Inst::FLoad {
81
dst: dst.map(|r| FReg::new(r).unwrap()),
82
mem,
83
ty,
84
flags,
85
}
86
}
87
}
88
89
/// Generic constructor for a store.
90
pub fn gen_store(mem: Amode, from_reg: Reg, ty: Type, flags: MemFlags) -> Inst {
91
if ty.is_vector() {
92
assert_eq!(ty.bytes(), 16);
93
Inst::VStore {
94
mem,
95
src: VReg::new(from_reg).unwrap(),
96
ty,
97
flags,
98
}
99
} else if ty.is_int() {
100
assert!(ty.bytes() <= 8);
101
Inst::XStore {
102
mem,
103
src: XReg::new(from_reg).unwrap(),
104
ty,
105
flags,
106
}
107
} else {
108
Inst::FStore {
109
mem,
110
src: FReg::new(from_reg).unwrap(),
111
ty,
112
flags,
113
}
114
}
115
}
116
}
117
118
/// Report every register operand of `inst` to the register allocator via
/// `collector`, classifying each as a use, a def, a fixed-register
/// use/def, or a clobber. The order of `collector` calls within each arm
/// is significant to operand numbering, so this match is kept flat and
/// explicit per instruction variant.
fn pulley_get_operands(inst: &mut Inst, collector: &mut impl OperandVisitor) {
    match inst {
        Inst::Args { args } => {
            // Incoming arguments: each vreg is defined in its fixed ABI preg.
            for ArgPair { vreg, preg } in args {
                collector.reg_fixed_def(vreg, *preg);
            }
        }
        Inst::Rets { rets } => {
            // Return values: each vreg must be in its fixed ABI preg.
            for RetPair { vreg, preg } in rets {
                collector.reg_fixed_use(vreg, *preg);
            }
        }

        Inst::DummyUse { reg } => {
            collector.reg_use(reg);
        }

        Inst::Nop => {}

        Inst::TrapIf { cond, code: _ } => {
            cond.get_operands(collector);
        }

        Inst::GetSpecial { dst, reg } => {
            collector.reg_def(dst);
            // Note that this is explicitly ignored as this is only used for
            // special registers that don't participate in register allocation
            // such as the stack pointer, frame pointer, etc.
            assert!(reg.is_special());
        }

        Inst::LoadExtNameNear { dst, .. } | Inst::LoadExtNameFar { dst, .. } => {
            collector.reg_def(dst);
        }

        Inst::Call { info } => {
            let CallInfo {
                uses,
                defs,
                dest,
                try_call_info,
                clobbers,
                ..
            } = &mut **info;

            // Pulley supports having the first few integer arguments in any
            // register, so flag that with `reg_use` here.
            let PulleyCall { args, .. } = dest;
            for arg in args {
                collector.reg_use(arg);
            }

            // Remaining arguments (and return values) are all in fixed
            // registers according to Pulley's ABI, however.
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(*clobbers);
            if let Some(try_call_info) = try_call_info {
                try_call_info.collect_operands(collector);
            }
        }
        Inst::IndirectCallHost { info } => {
            // Same shape as `Inst::Call` below minus the flexible leading
            // arguments: everything is in fixed ABI registers.
            let CallInfo {
                uses,
                defs,
                try_call_info,
                clobbers,
                ..
            } = &mut **info;
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(*clobbers);
            if let Some(try_call_info) = try_call_info {
                try_call_info.collect_operands(collector);
            }
        }
        Inst::IndirectCall { info } => {
            // The callee register is a plain use, reported before the fixed
            // argument constraints below.
            collector.reg_use(&mut info.dest);
            let CallInfo {
                uses,
                defs,
                try_call_info,
                clobbers,
                ..
            } = &mut **info;
            for CallArgPair { vreg, preg } in uses {
                collector.reg_fixed_use(vreg, *preg);
            }
            for CallRetPair { vreg, location } in defs {
                match location {
                    RetLocation::Reg(preg, ..) => collector.reg_fixed_def(vreg, *preg),
                    RetLocation::Stack(..) => collector.any_def(vreg),
                }
            }
            collector.reg_clobbers(*clobbers);
            if let Some(try_call_info) = try_call_info {
                try_call_info.collect_operands(collector);
            }
        }
        Inst::ReturnCall { info } => {
            for CallArgPair { vreg, preg } in &mut info.uses {
                collector.reg_fixed_use(vreg, *preg);
            }
        }
        Inst::ReturnIndirectCall { info } => {
            // Use a fixed location of where to store the value to
            // return-call-to. Using a fixed location prevents this register
            // from being allocated to a callee-saved register which will get
            // clobbered during the register restores just before the
            // return-call.
            //
            // Also note that `x15` is specifically the last caller-saved
            // register and, at this time, the only non-argument caller-saved
            // register. This register allocation constraint is why it's not an
            // argument register.
            collector.reg_fixed_use(&mut info.dest, regs::x15());

            for CallArgPair { vreg, preg } in &mut info.uses {
                collector.reg_fixed_use(vreg, *preg);
            }
        }

        Inst::Jump { .. } => {}

        Inst::BrIf {
            cond,
            taken: _,
            not_taken: _,
        } => {
            cond.get_operands(collector);
        }

        Inst::LoadAddr { dst, mem } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        // Loads: def the destination, then report the amode's registers.
        Inst::XLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        // Stores: report the amode's registers, then use the source.
        Inst::XStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::FLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::FStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::VLoad {
            dst,
            mem,
            ty: _,
            flags: _,
        } => {
            collector.reg_def(dst);
            mem.get_operands(collector);
        }

        Inst::VStore {
            mem,
            src,
            ty: _,
            flags: _,
        } => {
            mem.get_operands(collector);
            collector.reg_use(src);
        }

        Inst::BrTable { idx, .. } => {
            collector.reg_use(idx);
        }

        // All generated `RawInst`s delegate to generated operand collection.
        Inst::Raw { raw } => generated::get_operands(raw, collector),

        Inst::EmitIsland { .. } => {}

        Inst::LabelAddress { dst, label: _ } => {
            collector.reg_def(dst);
        }

        Inst::SequencePoint { .. } => {}
    }
}
344
345
/// A newtype over a Pulley instruction that also carries a phantom type
/// parameter describing whether we are targeting 32- or 64-bit Pulley bytecode.
///
/// Implements `Deref`, `DerefMut`, and `From`/`Into` for `Inst` to allow for
/// seamless conversion between `Inst` and `InstAndKind`.
#[derive(Clone, Debug)]
pub struct InstAndKind<P>
where
    P: PulleyTargetKind,
{
    // The wrapped instruction itself.
    inst: Inst,
    // Zero-sized marker carrying the 32- vs 64-bit target parameter.
    kind: PhantomData<P>,
}
358
359
impl<P> From<Inst> for InstAndKind<P>
360
where
361
P: PulleyTargetKind,
362
{
363
fn from(inst: Inst) -> Self {
364
Self {
365
inst,
366
kind: PhantomData,
367
}
368
}
369
}
370
371
impl<P> From<RawInst> for InstAndKind<P>
372
where
373
P: PulleyTargetKind,
374
{
375
fn from(inst: RawInst) -> Self {
376
Self {
377
inst: inst.into(),
378
kind: PhantomData,
379
}
380
}
381
}
382
383
impl<P> From<InstAndKind<P>> for Inst
384
where
385
P: PulleyTargetKind,
386
{
387
fn from(inst: InstAndKind<P>) -> Self {
388
inst.inst
389
}
390
}
391
392
// Smart-pointer-style access so `InstAndKind` can be used anywhere an
// `&Inst` is expected without explicit unwrapping.
impl<P> core::ops::Deref for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    type Target = Inst;

    fn deref(&self) -> &Self::Target {
        &self.inst
    }
}
402
403
// Mutable counterpart of the `Deref` impl above.
impl<P> core::ops::DerefMut for InstAndKind<P>
where
    P: PulleyTargetKind,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inst
    }
}
411
412
impl<P> MachInst for InstAndKind<P>
413
where
414
P: PulleyTargetKind,
415
{
416
type LabelUse = LabelUse;
417
type ABIMachineSpec = PulleyMachineDeps<P>;
418
419
const TRAP_OPCODE: &'static [u8] = TRAP_OPCODE;
420
421
fn gen_dummy_use(reg: Reg) -> Self {
422
Inst::DummyUse { reg }.into()
423
}
424
425
fn canonical_type_for_rc(rc: RegClass) -> Type {
426
match rc {
427
regalloc2::RegClass::Int => I64,
428
regalloc2::RegClass::Float => F64,
429
regalloc2::RegClass::Vector => I8X16,
430
}
431
}
432
433
fn is_safepoint(&self) -> bool {
434
match self.inst {
435
Inst::Raw {
436
raw: RawInst::Trap { .. },
437
}
438
| Inst::Call { .. }
439
| Inst::IndirectCall { .. }
440
| Inst::IndirectCallHost { .. } => true,
441
_ => false,
442
}
443
}
444
445
fn get_operands(&mut self, collector: &mut impl OperandVisitor) {
446
pulley_get_operands(self, collector);
447
}
448
449
fn is_move(&self) -> Option<(Writable<Reg>, Reg)> {
450
match self.inst {
451
Inst::Raw {
452
raw: RawInst::Xmov { dst, src },
453
} => Some((Writable::from_reg(*dst.to_reg()), *src)),
454
_ => None,
455
}
456
}
457
458
fn is_included_in_clobbers(&self) -> bool {
459
!self.is_args()
460
}
461
462
fn is_trap(&self) -> bool {
463
match self.inst {
464
Inst::Raw {
465
raw: RawInst::Trap { .. },
466
} => true,
467
_ => false,
468
}
469
}
470
471
fn is_args(&self) -> bool {
472
match self.inst {
473
Inst::Args { .. } => true,
474
_ => false,
475
}
476
}
477
478
fn is_term(&self) -> MachTerminator {
479
match &self.inst {
480
Inst::Raw {
481
raw: RawInst::Ret { .. },
482
}
483
| Inst::Rets { .. } => MachTerminator::Ret,
484
Inst::Jump { .. } => MachTerminator::Branch,
485
Inst::BrIf { .. } => MachTerminator::Branch,
486
Inst::BrTable { .. } => MachTerminator::Branch,
487
Inst::ReturnCall { .. } | Inst::ReturnIndirectCall { .. } => MachTerminator::RetCall,
488
Inst::Call { info } if info.try_call_info.is_some() => MachTerminator::Branch,
489
Inst::IndirectCall { info } if info.try_call_info.is_some() => MachTerminator::Branch,
490
Inst::IndirectCallHost { info } if info.try_call_info.is_some() => {
491
MachTerminator::Branch
492
}
493
_ => MachTerminator::None,
494
}
495
}
496
497
fn is_mem_access(&self) -> bool {
498
todo!()
499
}
500
501
fn call_type(&self) -> CallType {
502
match &self.inst {
503
Inst::Call { .. } | Inst::IndirectCall { .. } | Inst::IndirectCallHost { .. } => {
504
CallType::Regular
505
}
506
507
Inst::ReturnCall { .. } | Inst::ReturnIndirectCall { .. } => CallType::TailCall,
508
509
_ => CallType::None,
510
}
511
}
512
513
fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Self {
514
match ty {
515
ir::types::I8 | ir::types::I16 | ir::types::I32 | ir::types::I64 => RawInst::Xmov {
516
dst: WritableXReg::try_from(to_reg).unwrap(),
517
src: XReg::new(from_reg).unwrap(),
518
}
519
.into(),
520
ir::types::F32 | ir::types::F64 => RawInst::Fmov {
521
dst: WritableFReg::try_from(to_reg).unwrap(),
522
src: FReg::new(from_reg).unwrap(),
523
}
524
.into(),
525
_ if ty.is_vector() => RawInst::Vmov {
526
dst: WritableVReg::try_from(to_reg).unwrap(),
527
src: VReg::new(from_reg).unwrap(),
528
}
529
.into(),
530
_ => panic!("don't know how to generate a move for type {ty}"),
531
}
532
}
533
534
fn gen_nop(_preferred_size: usize) -> Self {
535
todo!()
536
}
537
538
fn gen_nop_units() -> Vec<Vec<u8>> {
539
let mut bytes = vec![];
540
let nop = pulley_interpreter::op::Nop {};
541
nop.encode(&mut bytes);
542
// NOP needs to be a 1-byte opcode so it can be used to
543
// overwrite a callsite of any length.
544
assert_eq!(bytes.len(), 1);
545
vec![bytes]
546
}
547
548
fn rc_for_type(ty: Type) -> CodegenResult<(&'static [RegClass], &'static [Type])> {
549
match ty {
550
I8 => Ok((&[RegClass::Int], &[I8])),
551
I16 => Ok((&[RegClass::Int], &[I16])),
552
I32 => Ok((&[RegClass::Int], &[I32])),
553
I64 => Ok((&[RegClass::Int], &[I64])),
554
F32 => Ok((&[RegClass::Float], &[F32])),
555
F64 => Ok((&[RegClass::Float], &[F64])),
556
I128 => Ok((&[RegClass::Int, RegClass::Int], &[I64, I64])),
557
_ if ty.is_vector() => {
558
debug_assert!(ty.bits() <= 512);
559
560
// Here we only need to return a SIMD type with the same size as `ty`.
561
// We use these types for spills and reloads, so prefer types with lanes <= 31
562
// since that fits in the immediate field of `vsetivli`.
563
const SIMD_TYPES: [[Type; 1]; 6] = [
564
[types::I8X2],
565
[types::I8X4],
566
[types::I8X8],
567
[types::I8X16],
568
[types::I16X16],
569
[types::I32X16],
570
];
571
let idx = (ty.bytes().ilog2() - 1) as usize;
572
let ty = &SIMD_TYPES[idx][..];
573
574
Ok((&[RegClass::Vector], ty))
575
}
576
_ => Err(CodegenError::Unsupported(format!(
577
"Unexpected SSA-value type: {ty}"
578
))),
579
}
580
}
581
582
fn gen_jump(label: MachLabel) -> Self {
583
Inst::Jump { label }.into()
584
}
585
586
fn worst_case_size() -> CodeOffset {
587
// `VShuffle { dst, src1, src2, imm }` is 22 bytes:
588
// 3-byte opcode
589
// dst, src1, src2
590
// 16-byte immediate
591
22
592
}
593
594
fn ref_type_regclass(_settings: &settings::Flags) -> RegClass {
595
RegClass::Int
596
}
597
598
fn function_alignment() -> FunctionAlignment {
599
FunctionAlignment {
600
minimum: 1,
601
preferred: 1,
602
}
603
}
604
}
605
606
// The byte sequence of Pulley's `trap` instruction: the `ExtendedOp`
// opcode byte followed by the 16-bit `Trap` extended opcode in
// little-endian order (`>> 0` is kept for visual symmetry with `>> 8`).
// Kept in sync with the interpreter's encoder by `test_trap_encoding`.
const TRAP_OPCODE: &'static [u8] = &[
    pulley_interpreter::opcode::Opcode::ExtendedOp as u8,
    ((pulley_interpreter::opcode::ExtendedOpcode::Trap as u16) >> 0) as u8,
    ((pulley_interpreter::opcode::ExtendedOpcode::Trap as u16) >> 8) as u8,
];
611
612
// Guard against the hand-written `TRAP_OPCODE` bytes drifting out of sync
// with the `pulley_interpreter` encoder.
#[test]
fn test_trap_encoding() {
    let mut dst = alloc::vec::Vec::new();
    pulley_interpreter::encode::trap(&mut dst);
    assert_eq!(dst, TRAP_OPCODE);
}
618
619
//=============================================================================
620
// Pretty-printing of instructions.
621
622
pub fn reg_name(reg: Reg) -> String {
623
match reg.to_real_reg() {
624
Some(real) => {
625
let n = real.hw_enc();
626
match (real.class(), n) {
627
(RegClass::Int, 63) => format!("sp"),
628
(RegClass::Int, 62) => format!("lr"),
629
(RegClass::Int, 61) => format!("fp"),
630
(RegClass::Int, 60) => format!("tmp0"),
631
(RegClass::Int, 59) => format!("tmp1"),
632
633
(RegClass::Int, _) => format!("x{n}"),
634
(RegClass::Float, _) => format!("f{n}"),
635
(RegClass::Vector, _) => format!("v{n}"),
636
}
637
}
638
None => {
639
format!("{reg:?}")
640
}
641
}
642
}
643
644
fn pretty_print_try_call(info: &TryCallInfo) -> String {
645
format!(
646
"; jump {:?}; catch [{}]",
647
info.continuation,
648
info.pretty_print_dests()
649
)
650
}
651
652
impl Inst {
    /// Render this instruction as human-readable pseudo-assembly for
    /// disassembly listings. The exact output strings are load-bearing for
    /// golden-output tests, so edit with care.
    //
    // NOTE(review): the load arms print `// flags ={flags}` (no space after
    // `=`) while the store arms print `// flags = {flags}` — looks like an
    // unintended inconsistency, but fixing it would change test-visible
    // output; confirm against the filetests before normalizing.
    fn print_with_state<P>(&self, _state: &mut EmitState<P>) -> String
    where
        P: PulleyTargetKind,
    {
        use core::fmt::Write;

        let format_reg = |reg: Reg| -> String { reg_name(reg) };

        match self {
            Inst::Args { args } => {
                let mut s = "args".to_string();
                for arg in args {
                    let preg = format_reg(arg.preg);
                    let def = format_reg(arg.vreg.to_reg());
                    write!(&mut s, " {def}={preg}").unwrap();
                }
                s
            }
            Inst::Rets { rets } => {
                let mut s = "rets".to_string();
                for ret in rets {
                    let preg = format_reg(ret.preg);
                    let vreg = format_reg(ret.vreg);
                    write!(&mut s, " {vreg}={preg}").unwrap();
                }
                s
            }

            Inst::DummyUse { reg } => {
                let reg = format_reg(*reg);
                format!("dummy_use {reg}")
            }

            Inst::TrapIf { cond, code } => {
                format!("trap_{cond} // code = {code:?}")
            }

            Inst::Nop => format!("nop"),

            Inst::GetSpecial { dst, reg } => {
                let dst = format_reg(*dst.to_reg());
                let reg = format_reg(**reg);
                // Special-register reads print as a plain move.
                format!("xmov {dst}, {reg}")
            }

            Inst::LoadExtNameNear { dst, name, offset } => {
                let dst = format_reg(*dst.to_reg());
                format!("{dst} = load_ext_name_near {name:?}, {offset}")
            }

            Inst::LoadExtNameFar { dst, name, offset } => {
                let dst = format_reg(*dst.to_reg());
                format!("{dst} = load_ext_name_far {name:?}, {offset}")
            }

            Inst::Call { info } => {
                let try_call = info
                    .try_call_info
                    .as_ref()
                    .map(|tci| pretty_print_try_call(tci))
                    .unwrap_or_default();
                format!("call {info:?}{try_call}")
            }

            Inst::IndirectCall { info } => {
                let callee = format_reg(*info.dest);
                let try_call = info
                    .try_call_info
                    .as_ref()
                    .map(|tci| pretty_print_try_call(tci))
                    .unwrap_or_default();
                format!("indirect_call {callee}, {info:?}{try_call}")
            }

            Inst::ReturnCall { info } => {
                format!("return_call {info:?}")
            }

            Inst::ReturnIndirectCall { info } => {
                let callee = format_reg(*info.dest);
                format!("return_indirect_call {callee}, {info:?}")
            }

            Inst::IndirectCallHost { info } => {
                let try_call = info
                    .try_call_info
                    .as_ref()
                    .map(|tci| pretty_print_try_call(tci))
                    .unwrap_or_default();
                format!("indirect_call_host {info:?}{try_call}")
            }

            Inst::Jump { label } => format!("jump {}", label.to_string()),

            Inst::BrIf {
                cond,
                taken,
                not_taken,
            } => {
                let taken = taken.to_string();
                let not_taken = not_taken.to_string();
                format!("br_{cond}, {taken}; jump {not_taken}")
            }

            Inst::LoadAddr { dst, mem } => {
                let dst = format_reg(*dst.to_reg());
                let mem = mem.to_string();
                format!("{dst} = load_addr {mem}")
            }

            Inst::XLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = xload{ty} {mem} // flags ={flags}")
            }

            Inst::XStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("xstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::FLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = fload{ty} {mem} // flags ={flags}")
            }

            Inst::FStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("fstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::VLoad {
                dst,
                mem,
                ty,
                flags,
            } => {
                let dst = format_reg(*dst.to_reg());
                let ty = ty.bits();
                let mem = mem.to_string();
                format!("{dst} = vload{ty} {mem} // flags ={flags}")
            }

            Inst::VStore {
                mem,
                src,
                ty,
                flags,
            } => {
                let ty = ty.bits();
                let mem = mem.to_string();
                let src = format_reg(**src);
                format!("vstore{ty} {mem}, {src} // flags = {flags}")
            }

            Inst::BrTable {
                idx,
                default,
                targets,
            } => {
                let idx = format_reg(**idx);
                format!("br_table {idx} {default:?} {targets:?}")
            }
            // Generated instructions carry their own printer.
            Inst::Raw { raw } => generated::print(raw),

            Inst::EmitIsland { space_needed } => format!("emit_island {space_needed}"),

            Inst::LabelAddress { dst, label } => {
                let dst = format_reg(dst.to_reg().to_reg());
                format!("label_address {dst}, {label:?}")
            }

            Inst::SequencePoint {} => {
                format!("sequence_point")
            }
        }
    }
}
858
859
/// Different forms of label references for different instruction formats.
///
/// Pulley currently has a single form; the enum exists so new forms can be
/// added without changing the `MachInstLabelUse` plumbing.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LabelUse {
    /// A PC-relative `jump`/`call`/etc... instruction with an `i32` relative
    /// target.
    ///
    /// The relative distance to the destination is added to the 4 bytes at the
    /// label site.
    PcRel,
}
869
870
impl MachInstLabelUse for LabelUse {
871
/// Alignment for veneer code. Pulley instructions don't require any
872
/// particular alignment.
873
const ALIGN: CodeOffset = 1;
874
875
/// Maximum PC-relative range (positive), inclusive.
876
fn max_pos_range(self) -> CodeOffset {
877
match self {
878
Self::PcRel => 0x7fff_ffff,
879
}
880
}
881
882
/// Maximum PC-relative range (negative).
883
fn max_neg_range(self) -> CodeOffset {
884
match self {
885
Self::PcRel => 0x8000_0000,
886
}
887
}
888
889
/// Size of window into code needed to do the patch.
890
fn patch_size(self) -> CodeOffset {
891
match self {
892
Self::PcRel => 4,
893
}
894
}
895
896
/// Perform the patch.
897
fn patch(self, buffer: &mut [u8], use_offset: CodeOffset, label_offset: CodeOffset) {
898
let use_relative = (label_offset as i64) - (use_offset as i64);
899
debug_assert!(use_relative <= self.max_pos_range() as i64);
900
debug_assert!(use_relative >= -(self.max_neg_range() as i64));
901
let pc_rel = i32::try_from(use_relative).unwrap() as u32;
902
match self {
903
Self::PcRel => {
904
let buf: &mut [u8; 4] = buffer.try_into().unwrap();
905
let addend = u32::from_le_bytes(*buf);
906
trace!(
907
"patching label use @ {use_offset:#x} \
908
to label {label_offset:#x} via \
909
PC-relative offset {pc_rel:#x} \
910
adding in {addend:#x}"
911
);
912
let value = pc_rel.wrapping_add(addend);
913
*buf = value.to_le_bytes();
914
}
915
}
916
}
917
918
/// Is a veneer supported for this label reference type?
919
fn supports_veneer(self) -> bool {
920
match self {
921
Self::PcRel => false,
922
}
923
}
924
925
/// How large is the veneer, if supported?
926
fn veneer_size(self) -> CodeOffset {
927
match self {
928
Self::PcRel => 0,
929
}
930
}
931
932
fn worst_case_veneer_size() -> CodeOffset {
933
0
934
}
935
936
/// Generate a veneer into the buffer, given that this veneer is at `veneer_offset`, and return
937
/// an offset and label-use for the veneer's use of the original label.
938
fn generate_veneer(
939
self,
940
_buffer: &mut [u8],
941
_veneer_offset: CodeOffset,
942
) -> (CodeOffset, LabelUse) {
943
match self {
944
Self::PcRel => panic!("veneer not supported for {self:?}"),
945
}
946
}
947
948
fn from_reloc(reloc: Reloc, addend: Addend) -> Option<LabelUse> {
949
match (reloc, addend) {
950
(Reloc::PulleyPcRel, 0) => Some(LabelUse::PcRel),
951
_ => None,
952
}
953
}
954
}
955
956