Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bytecodealliance
GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/cranelift/codegen/src/machinst/mod.rs
3050 views
//! This module exposes the machine-specific backend definition pieces.
//!
//! The MachInst infrastructure is the compiler backend, from CLIF
//! (ir::Function) to machine code. The purpose of this infrastructure is, at a
//! high level, to do instruction selection/lowering (to machine instructions),
//! register allocation, and then perform all the fixups to branches, constant
//! data references, etc., needed to actually generate machine code.
//!
//! The container for machine instructions, at various stages of construction,
//! is the `VCode` struct. We refer to a sequence of machine instructions organized
//! into basic blocks as "vcode". This is short for "virtual-register code".
//!
//! The compilation pipeline, from an `ir::Function` (already optimized as much as
//! you like by machine-independent optimization passes) onward, is as follows.
//!
//! ```plain
//!
//!     ir::Function                (SSA IR, machine-independent opcodes)
//!         |
//!         |  [lower]
//!         |
//!     VCode<arch_backend::Inst>   (machine instructions:
//!         |                        - mostly virtual registers.
//!         |                        - cond branches in two-target form.
//!         |                        - branch targets are block indices.
//!         |                        - in-memory constants held by insns,
//!         |                          with unknown offsets.
//!         |                        - critical edges (actually all edges)
//!         |                          are split.)
//!         |
//!         | [regalloc --> `regalloc2::Output`; VCode is unchanged]
//!         |
//!         | [binary emission via MachBuffer]
//!         |
//!     Vec<u8>                     (machine code:
//!         |                        - two-dest branches resolved via
//!         |                          streaming branch resolution/simplification.
//!         |                        - regalloc `Allocation` results used directly
//!         |                          by instruction emission code.
//!         |                        - prologue and epilogue(s) built and emitted
//!         |                          directly during emission.
//!         |                        - SP-relative offsets resolved by tracking
//!         |                          EmitState.)
//!
//! ```
use crate::binemit::{Addend, CodeInfo, CodeOffset, Reloc};
48
use crate::ir::{
49
self, DynamicStackSlot, RelSourceLoc, StackSlot, Type, function::FunctionParameters,
50
};
51
use crate::isa::FunctionAlignment;
52
use crate::result::CodegenResult;
53
use crate::settings;
54
use crate::settings::Flags;
55
use crate::value_label::ValueLabelsRanges;
56
use alloc::string::String;
57
use alloc::vec::Vec;
58
use core::fmt::Debug;
59
use cranelift_control::ControlPlane;
60
use cranelift_entity::PrimaryMap;
61
use regalloc2::VReg;
62
use smallvec::{SmallVec, smallvec};
63
64
#[cfg(feature = "enable-serde")]
65
use serde_derive::{Deserialize, Serialize};
66
67
#[macro_use]
68
pub mod isle;
69
70
pub mod lower;
71
pub use lower::*;
72
pub mod vcode;
73
pub use vcode::*;
74
pub mod compile;
75
pub use compile::*;
76
pub mod blockorder;
77
pub use blockorder::*;
78
pub mod abi;
79
pub use abi::*;
80
pub mod buffer;
81
pub use buffer::*;
82
pub mod helpers;
83
pub use helpers::*;
84
pub mod valueregs;
85
pub use reg::*;
86
pub use valueregs::*;
87
pub mod pcc;
88
pub mod reg;
89
90
/// A machine instruction.
pub trait MachInst: Clone + Debug {
    /// The ABI machine spec for this `MachInst`.
    type ABIMachineSpec: ABIMachineSpec<I = Self>;

    /// Return the registers referenced by this machine instruction along with
    /// the modes of reference (use, def, modify).
    fn get_operands(&mut self, collector: &mut impl OperandVisitor);

    /// If this is a simple move, return the (source, destination) tuple of registers.
    ///
    /// NOTE(review): the `Writable<Reg>` element of the returned tuple is the
    /// writable (destination) register, so the tuple order appears to be
    /// (destination, source) despite the wording above — confirm against the
    /// backend implementations.
    fn is_move(&self) -> Option<(Writable<Reg>, Reg)>;

    /// Is this a terminator (branch or ret)? If so, return its type
    /// (ret/uncond/cond) and target if applicable.
    fn is_term(&self) -> MachTerminator;

    /// Is this an unconditional trap?
    fn is_trap(&self) -> bool;

    /// Is this an "args" pseudoinst?
    fn is_args(&self) -> bool;

    /// Classify the type of call instruction this is.
    ///
    /// This enables more granular function type analysis and optimization.
    /// Returns `CallType::None` for non-call instructions, `CallType::Regular`
    /// for normal calls that return to the caller, and `CallType::TailCall`
    /// for tail calls that don't return to the caller.
    fn call_type(&self) -> CallType;

    /// Should this instruction's clobber-list be included in the
    /// clobber-set?
    fn is_included_in_clobbers(&self) -> bool;

    /// Does this instruction access memory?
    fn is_mem_access(&self) -> bool;

    /// Generate a move.
    fn gen_move(to_reg: Writable<Reg>, from_reg: Reg, ty: Type) -> Self;

    /// Generate a dummy instruction that will keep a value alive but
    /// has no other purpose.
    fn gen_dummy_use(reg: Reg) -> Self;

    /// Determine register class(es) to store the given Cranelift type, and the
    /// Cranelift type actually stored in the underlying register(s). May return
    /// an error if the type isn't supported by this backend.
    ///
    /// If the type requires multiple registers, then the list of registers is
    /// returned in little-endian order.
    ///
    /// Note that the type actually stored in the register(s) may differ in the
    /// case that a value is split across registers: for example, on a 32-bit
    /// target, an I64 may be stored in two registers, each of which holds an
    /// I32. The actually-stored types are used only to inform the backend when
    /// generating spills and reloads for individual registers.
    fn rc_for_type(ty: Type) -> CodegenResult<(&'static [RegClass], &'static [Type])>;

    /// Get an appropriate type that can fully hold a value in a given
    /// register class. This may not be the only type that maps to
    /// that class, but when used with `gen_move()` or the ABI trait's
    /// load/spill constructors, it should produce instruction(s) that
    /// move the entire register contents.
    fn canonical_type_for_rc(rc: RegClass) -> Type;

    /// Generate a jump to another target. Used during lowering of
    /// control flow.
    fn gen_jump(target: MachLabel) -> Self;

    /// Generate a store of an immediate 64-bit integer to a register. Used by
    /// the control plane to generate random instructions.
    ///
    /// The default implementation returns `None`, i.e. "not supported by this
    /// backend".
    fn gen_imm_u64(_value: u64, _dst: Writable<Reg>) -> Option<Self> {
        None
    }

    /// Generate a store of an immediate 64-bit float to a register. Used by
    /// the control plane to generate random instructions. The tmp register may
    /// be used by architectures which don't support writing immediate values to
    /// floating point registers directly.
    ///
    /// The default implementation returns an empty sequence, i.e. "not
    /// supported by this backend".
    fn gen_imm_f64(_value: f64, _tmp: Writable<Reg>, _dst: Writable<Reg>) -> SmallVec<[Self; 2]> {
        SmallVec::new()
    }

    /// Generate a NOP. The `preferred_size` parameter allows the caller to
    /// request a NOP of that size, or as close to it as possible. The machine
    /// backend may return a NOP whose binary encoding is smaller than the
    /// preferred size, but must not return a NOP that is larger. However,
    /// the instruction must have a nonzero size if preferred_size is nonzero.
    fn gen_nop(preferred_size: usize) -> Self;

    /// The various kinds of NOP, with size, sorted in ascending-size
    /// order.
    fn gen_nop_units() -> Vec<Vec<u8>>;

    /// Align a basic block offset (from start of function). By default, no
    /// alignment occurs.
    fn align_basic_block(offset: CodeOffset) -> CodeOffset {
        offset
    }

    /// What is the worst-case instruction size emitted by this instruction type?
    fn worst_case_size() -> CodeOffset;

    /// What is the register class used for reference types (GC-observable pointers)? Can
    /// be dependent on compilation flags.
    fn ref_type_regclass(_flags: &Flags) -> RegClass;

    /// Is this a safepoint?
    fn is_safepoint(&self) -> bool;

    /// Generate an instruction that must appear at the beginning of a basic
    /// block, if any. Note that the return value must not be subject to
    /// register allocation.
    ///
    /// The default implementation returns `None`, i.e. "no block-start
    /// instruction needed".
    fn gen_block_start(
        _is_indirect_branch_target: bool,
        _is_forward_edge_cfi_enabled: bool,
    ) -> Option<Self> {
        None
    }

    /// Returns a description of the alignment required for functions for this
    /// architecture.
    fn function_alignment() -> FunctionAlignment;

    /// Is this a low-level, one-way branch, not meant for use in a
    /// VCode body? These instructions are meant to be used only when
    /// directly emitted, i.e. when `MachInst` is used as an assembler
    /// library.
    ///
    /// Defaults to `false`; only special assembler-only branches should
    /// override this.
    fn is_low_level_branch(&self) -> bool {
        false
    }

    /// A label-use kind: a type that describes the types of label references that
    /// can occur in an instruction.
    type LabelUse: MachInstLabelUse;

    /// Byte representation of a trap opcode which is inserted by `MachBuffer`
    /// during its `defer_trap` method.
    const TRAP_OPCODE: &'static [u8];
}
230
231
/// A descriptor of a label reference (use) in an instruction set.
pub trait MachInstLabelUse: Clone + Copy + Debug + Eq {
    /// Required alignment for any veneer. Usually the required instruction
    /// alignment (e.g., 4 for a RISC with 32-bit instructions, or 1 for x86).
    const ALIGN: CodeOffset;

    /// What is the maximum PC-relative range (positive)? E.g., if `1024`, a
    /// label-reference fixup at offset `x` is valid if the label resolves to `x
    /// + 1024`.
    fn max_pos_range(self) -> CodeOffset;
    /// What is the maximum PC-relative range (negative)? This is the absolute
    /// value; i.e., if `1024`, then a label-reference fixup at offset `x` is
    /// valid if the label resolves to `x - 1024`.
    fn max_neg_range(self) -> CodeOffset;
    /// What is the size of code-buffer slice this label-use needs to patch in
    /// the label's value?
    fn patch_size(self) -> CodeOffset;
    /// Perform a code-patch, given the offset into the buffer of this label use
    /// and the offset into the buffer of the label's definition.
    /// It is guaranteed that, given `delta = offset - label_offset`, we will
    /// have `delta >= -self.max_neg_range()` and `delta <=
    /// self.max_pos_range()`.
    ///
    /// NOTE(review): the original comment bounded `offset` rather than `delta`;
    /// the displacement is clearly the quantity being constrained, so it is
    /// stated as `delta` here — confirm against backend implementations.
    fn patch(self, buffer: &mut [u8], use_offset: CodeOffset, label_offset: CodeOffset);
    /// Can the label-use be patched to a veneer that supports a longer range?
    /// Usually valid for jumps (a short-range jump can jump to a longer-range
    /// jump), but not for e.g. constant pool references, because the constant
    /// load would require different code (one more level of indirection).
    fn supports_veneer(self) -> bool;
    /// How many bytes are needed for a veneer?
    fn veneer_size(self) -> CodeOffset;
    /// What's the largest possible veneer that may be generated?
    fn worst_case_veneer_size() -> CodeOffset;
    /// Generate a veneer. The given code-buffer slice is `self.veneer_size()`
    /// bytes long at offset `veneer_offset` in the buffer. The original
    /// label-use will be patched to refer to this veneer's offset. A new
    /// (offset, LabelUse) is returned that allows the veneer to use the actual
    /// label. For veneers to work properly, it is expected that the new veneer
    /// has a larger range; on most platforms this probably means either a
    /// "long-range jump" (e.g., on ARM, the 26-bit form), or if already at that
    /// stage, a jump that supports a full 32-bit range, for example.
    fn generate_veneer(self, buffer: &mut [u8], veneer_offset: CodeOffset) -> (CodeOffset, Self);

    /// Returns the corresponding label-use for the relocation specified.
    ///
    /// This returns `None` if the relocation doesn't have a corresponding
    /// representation for the target architecture.
    fn from_reloc(reloc: Reloc, addend: Addend) -> Option<Self>;
}
279
280
/// Classification of call instruction types for granular analysis.
///
/// Produced per-instruction by [`MachInst::call_type`] and accumulated into a
/// per-function [`FunctionCalls`] summary.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CallType {
    /// Not a call instruction.
    None,
    /// Regular call that returns to the caller.
    Regular,
    /// Tail call that doesn't return to the caller.
    TailCall,
}
290
291
/// Function classification based on call patterns.
///
/// This enum classifies functions based on their calling behavior to enable
/// targeted optimizations. Functions are categorized as:
/// - `None`: No calls at all (can use simplified calling conventions)
/// - `TailOnly`: Only tail calls (may skip frame setup in some cases)
/// - `Regular`: Has regular calls (requires full calling convention support)
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub enum FunctionCalls {
    /// Function makes no calls at all.
    #[default]
    None,
    /// Function only makes tail calls (no regular calls).
    TailOnly,
    /// Function makes at least one regular call (may also have tail calls).
    Regular,
}
308
309
impl FunctionCalls {
310
/// Update the function classification based on a new call instruction.
311
///
312
/// This method implements the merge logic for accumulating call patterns:
313
/// - Any regular call makes the function Regular
314
/// - Tail calls upgrade None to TailOnly
315
/// - Regular always stays Regular
316
pub fn update(&mut self, call_type: CallType) {
317
*self = match (*self, call_type) {
318
// No call instruction - state unchanged
319
(current, CallType::None) => current,
320
// Regular call always results in Regular classification
321
(_, CallType::Regular) => FunctionCalls::Regular,
322
// Tail call: None becomes TailOnly, others unchanged
323
(FunctionCalls::None, CallType::TailCall) => FunctionCalls::TailOnly,
324
(current, CallType::TailCall) => current,
325
};
326
}
327
}
328
329
/// Describes a block terminator (not call) in the VCode.
///
/// Actual targets are not included: the single-source-of-truth for
/// those is the VCode itself, which holds, for each block, successors
/// and outgoing branch args per successor.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum MachTerminator {
    /// Not a terminator.
    None,
    /// A return instruction.
    Ret,
    /// A tail call.
    RetCall,
    /// A branch.
    Branch,
}
345
346
/// A trait describing the ability to encode a MachInst into binary machine code.
pub trait MachInstEmit: MachInst {
    /// Persistent state carried across `emit` invocations.
    type State: MachInstEmitState<Self>;

    /// Constant information used in `emit` invocations.
    type Info;

    /// Emit the instruction.
    fn emit(&self, code: &mut MachBuffer<Self>, info: &Self::Info, state: &mut Self::State);

    /// Pretty-print the instruction.
    fn pretty_print_inst(&self, state: &mut Self::State) -> String;
}
360
361
/// A trait describing the emission state carried between MachInsts when
/// emitting a function body.
pub trait MachInstEmitState<I: VCodeInst>: Default + Clone + Debug {
    /// Create a new emission state given the ABI object.
    fn new(abi: &Callee<I::ABIMachineSpec>, ctrl_plane: ControlPlane) -> Self;

    /// Update the emission state before emitting an instruction that is a
    /// safepoint.
    fn pre_safepoint(&mut self, user_stack_map: Option<ir::UserStackMap>);

    /// The emission state holds ownership of a control plane, so it doesn't
    /// have to be passed around explicitly too much. `ctrl_plane_mut` may
    /// be used if temporary access to the control plane is needed by some
    /// other function that doesn't have access to the emission state.
    fn ctrl_plane_mut(&mut self) -> &mut ControlPlane;

    /// Used to continue using a control plane after the emission state is
    /// not needed anymore.
    fn take_ctrl_plane(self) -> ControlPlane;

    /// A hook that triggers when first emitting a new block.
    /// It is guaranteed to be called before any instructions are emitted.
    ///
    /// The default implementation does nothing.
    fn on_new_block(&mut self) {}

    /// The [`FrameLayout`] for the function currently being compiled.
    fn frame_layout(&self) -> &FrameLayout;
}
388
389
/// The result of a `MachBackend::compile_function()` call. Contains machine
/// code (as bytes) and a disassembly, if requested.
///
/// Parameterized over the compile phase `T` so the same container can hold
/// either a parameter-independent stencil or finalized code (see the
/// `CompiledCodeStencil` and `CompiledCode` aliases below).
#[derive(PartialEq, Debug, Clone)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct CompiledCodeBase<T: CompilePhase> {
    /// Machine code.
    pub buffer: MachBufferFinalized<T>,
    /// Disassembly, if requested.
    pub vcode: Option<String>,
    /// Debug info: value labels to registers/stackslots at code offsets.
    pub value_labels_ranges: ValueLabelsRanges,
    /// Basic-block layout info: block start offsets.
    ///
    /// This info is generated only if the `machine_code_cfg_info`
    /// flag is set.
    pub bb_starts: Vec<CodeOffset>,
    /// Basic-block layout info: block edges. Each edge is `(from,
    /// to)`, where `from` and `to` are basic-block start offsets of
    /// the respective blocks.
    ///
    /// This info is generated only if the `machine_code_cfg_info`
    /// flag is set.
    pub bb_edges: Vec<(CodeOffset, CodeOffset)>,
}
413
414
impl CompiledCodeStencil {
415
/// Apply function parameters to finalize a stencil into its final form.
416
pub fn apply_params(self, params: &FunctionParameters) -> CompiledCode {
417
CompiledCode {
418
buffer: self.buffer.apply_base_srcloc(params.base_srcloc()),
419
vcode: self.vcode,
420
value_labels_ranges: self.value_labels_ranges,
421
bb_starts: self.bb_starts,
422
bb_edges: self.bb_edges,
423
}
424
}
425
}
426
427
impl<T: CompilePhase> CompiledCodeBase<T> {
    /// Get a `CodeInfo` describing section sizes from this compilation result.
    pub fn code_info(&self) -> CodeInfo {
        CodeInfo {
            total_size: self.buffer.total_size(),
        }
    }

    /// Returns a reference to the machine code generated for this function compilation.
    pub fn code_buffer(&self) -> &[u8] {
        self.buffer.data()
    }

    /// Get the disassembly of the buffer, using the given capstone context.
    ///
    /// Walks the machine code one basic-block region at a time so that every
    /// block gets a `blockN:` label, disassembles each region with capstone,
    /// and annotates each instruction with any relocation, trap, or patchable
    /// call site that falls inside it.
    #[cfg(feature = "disas")]
    pub fn disassemble(
        &self,
        params: Option<&crate::ir::function::FunctionParameters>,
        cs: &capstone::Capstone,
    ) -> Result<String, anyhow::Error> {
        use core::fmt::Write;

        let mut buf = String::new();

        let relocs = self.buffer.relocs();
        let traps = self.buffer.traps();
        // Peekable so we can check the next patchable call site without
        // consuming it; sites are consumed in order as instructions pass them.
        let mut patchables = self.buffer.patchable_call_sites().peekable();

        // Normalize the block starts to include an initial block of offset 0.
        let mut block_starts = Vec::new();
        if self.bb_starts.first().copied() != Some(0) {
            block_starts.push(0);
        }
        block_starts.extend_from_slice(&self.bb_starts);
        // Sentinel end offset so the zip below yields a (start, end) pair for
        // the final block too.
        block_starts.push(self.buffer.data().len() as u32);

        // Iterate over block regions, to ensure that we always produce block labels
        for (n, (&start, &end)) in block_starts
            .iter()
            .zip(block_starts.iter().skip(1))
            .enumerate()
        {
            writeln!(buf, "block{n}: ; offset 0x{start:x}")?;

            let buffer = &self.buffer.data()[start as usize..end as usize];
            let insns = cs.disasm_all(buffer, start as u64).map_err(map_caperr)?;
            for i in insns.iter() {
                write!(buf, " ")?;

                let op_str = i.op_str().unwrap_or("");
                if let Some(s) = i.mnemonic() {
                    write!(buf, "{s}")?;
                    if !op_str.is_empty() {
                        write!(buf, " ")?;
                    }
                }

                write!(buf, "{op_str}")?;

                // Byte range [i.address(), end) covered by this instruction;
                // used to attribute relocs/traps to the instruction they land in.
                let end = i.address() + i.bytes().len() as u64;
                let contains = |off| i.address() <= off && off < end;

                for reloc in relocs.iter().filter(|reloc| contains(reloc.offset as u64)) {
                    write!(
                        buf,
                        " ; reloc_external {} {} {}",
                        reloc.kind,
                        reloc.target.display(params),
                        reloc.addend,
                    )?;
                }

                if let Some(trap) = traps.iter().find(|trap| contains(trap.offset as u64)) {
                    write!(buf, " ; trap: {}", trap.code)?;
                }

                // A patchable call site is keyed by its return address, i.e.
                // the end of the call instruction.
                if let Some(patchable) = patchables.peek()
                    && patchable.ret_addr == end as u32
                {
                    write!(
                        buf,
                        " ; patchable call: NOP out last {} bytes",
                        patchable.len
                    )?;
                    patchables.next();
                }

                writeln!(buf)?;
            }
        }

        return Ok(buf);

        // Capstone errors don't implement `std::error::Error` in a way anyhow
        // can absorb directly, so stringify them.
        fn map_caperr(err: capstone::Error) -> anyhow::Error {
            anyhow::format_err!("{err}")
        }
    }
}
525
526
/// Result of compiling a `FunctionStencil`, before applying `FunctionParameters` onto it.
///
/// Only used internally, in a transient manner, for the incremental compilation cache.
pub type CompiledCodeStencil = CompiledCodeBase<Stencil>;

/// `CompiledCode` in its final form (i.e. after `FunctionParameters` have been applied), ready for
/// consumption.
pub type CompiledCode = CompiledCodeBase<Final>;
534
535
impl CompiledCode {
    /// If available, return information about the code layout in the
    /// final machine code: the offsets (in bytes) of each basic-block
    /// start, and all basic-block edges.
    ///
    /// Offsets are widened from `CodeOffset` (u32) to `usize` for the caller's
    /// convenience. Both vectors are empty unless `machine_code_cfg_info`
    /// was enabled — see the field docs on `CompiledCodeBase`.
    pub fn get_code_bb_layout(&self) -> (Vec<usize>, Vec<(usize, usize)>) {
        (
            self.bb_starts.iter().map(|&off| off as usize).collect(),
            self.bb_edges
                .iter()
                .map(|&(from, to)| (from as usize, to as usize))
                .collect(),
        )
    }

    /// Creates unwind information for the function.
    ///
    /// Returns `None` if the function has no unwind information.
    #[cfg(feature = "unwind")]
    pub fn create_unwind_info(
        &self,
        isa: &dyn crate::isa::TargetIsa,
    ) -> CodegenResult<Option<crate::isa::unwind::UnwindInfo>> {
        use crate::isa::unwind::UnwindInfoKind;
        // Pick the unwind format from the target OS: Windows structured
        // unwind info on Windows, System V (DWARF CFI) everywhere else.
        let unwind_info_kind = match isa.triple().operating_system {
            target_lexicon::OperatingSystem::Windows => UnwindInfoKind::Windows,
            _ => UnwindInfoKind::SystemV,
        };
        self.create_unwind_info_of_kind(isa, unwind_info_kind)
    }

    /// Creates unwind information for the function using the supplied
    /// "kind". Supports cross-OS (but not cross-arch) generation.
    ///
    /// Returns `None` if the function has no unwind information.
    #[cfg(feature = "unwind")]
    pub fn create_unwind_info_of_kind(
        &self,
        isa: &dyn crate::isa::TargetIsa,
        unwind_info_kind: crate::isa::unwind::UnwindInfoKind,
    ) -> CodegenResult<Option<crate::isa::unwind::UnwindInfo>> {
        isa.emit_unwind_info(self, unwind_info_kind)
    }
}
578
579
/// An object that can be used to create the text section of an executable.
///
/// This primarily handles resolving relative relocations at
/// text-section-assembly time rather than at load/link time. This
/// architecture-specific logic is sort of like a linker, but only for one
/// object file at a time.
pub trait TextSectionBuilder {
    /// Appends `data` to the text section with the `align` specified.
    ///
    /// If `labeled` is `true` then this also binds the appended data to the
    /// `n`th label for how many times this has been called with `labeled:
    /// true`. The label target can be passed as the `target` argument to
    /// `resolve_reloc`.
    ///
    /// This function returns the offset at which the data was placed in the
    /// text section.
    fn append(
        &mut self,
        labeled: bool,
        data: &[u8],
        align: u32,
        ctrl_plane: &mut ControlPlane,
    ) -> u64;

    /// Attempts to resolve a relocation for this function.
    ///
    /// The `offset` is the offset of the relocation, within the text section.
    /// The `reloc` is the kind of relocation.
    /// The `addend` is the value to add to the relocation.
    /// The `target` is the labeled function that is the target of this
    /// relocation.
    ///
    /// Labeled functions are created with the `append` function above by
    /// setting the `labeled` parameter to `true`.
    ///
    /// If this builder does not know how to handle `reloc` then this function
    /// will return `false`. Otherwise this function will return `true` and this
    /// relocation will be resolved in the final bytes returned by `finish`.
    fn resolve_reloc(&mut self, offset: u64, reloc: Reloc, addend: Addend, target: usize) -> bool;

    /// A debug-only option which is used to force the use of veneers.
    ///
    /// NOTE(review): the original comment was truncated ("used to for"); the
    /// intent is inferred from the method name — confirm against the
    /// implementations of this trait.
    fn force_veneers(&mut self);

    /// Write the `data` provided at `offset`, for example when resolving a
    /// relocation.
    fn write(&mut self, offset: u64, data: &[u8]);

    /// Completes this text section, filling out any final details, and returns
    /// the bytes of the text section.
    fn finish(&mut self, ctrl_plane: &mut ControlPlane) -> Vec<u8>;
}
630
631