Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bytecodealliance
GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/cranelift/frontend/src/frontend.rs
1692 views
1
//! A frontend for building Cranelift IR from other languages.
2
use crate::ssa::{SSABuilder, SideEffects};
3
use crate::variable::Variable;
4
use alloc::vec::Vec;
5
use core::fmt::{self, Debug};
6
use cranelift_codegen::cursor::{Cursor, CursorPosition, FuncCursor};
7
use cranelift_codegen::entity::{EntityRef, EntitySet, PrimaryMap, SecondaryMap};
8
use cranelift_codegen::ir;
9
use cranelift_codegen::ir::condcodes::IntCC;
10
use cranelift_codegen::ir::{
11
AbiParam, Block, DataFlowGraph, DynamicStackSlot, DynamicStackSlotData, ExtFuncData,
12
ExternalName, FuncRef, Function, GlobalValue, GlobalValueData, Inst, InstBuilder,
13
InstBuilderBase, InstructionData, JumpTable, JumpTableData, LibCall, MemFlags, RelSourceLoc,
14
SigRef, Signature, StackSlot, StackSlotData, Type, Value, ValueLabel, ValueLabelAssignments,
15
ValueLabelStart, types,
16
};
17
use cranelift_codegen::isa::TargetFrontendConfig;
18
use cranelift_codegen::packed_option::PackedOption;
19
use cranelift_codegen::traversals::Dfs;
20
use smallvec::SmallVec;
21
22
mod safepoints;
23
24
/// Structure used for translating a series of functions into Cranelift IR.
///
/// In order to reduce memory reallocations when compiling multiple functions,
/// [`FunctionBuilderContext`] holds various data structures which are cleared between
/// functions, rather than dropped, preserving the underlying allocations.
#[derive(Default)]
pub struct FunctionBuilderContext {
    // Incremental SSA-construction state (variable definitions, block
    // predecessors, and sealing status).
    ssa: SSABuilder,
    // Fill status of every block created so far; see `BlockStatus`.
    status: SecondaryMap<Block, BlockStatus>,
    // Declared type of each user variable.
    variables: PrimaryMap<Variable, Type>,
    // Variables all of whose values must be included in stack maps.
    stack_map_vars: EntitySet<Variable>,
    // Individual values that must be included in stack maps.
    stack_map_values: EntitySet<Value>,
    // Reusable state for the safepoint-spilling pass run during `finalize`.
    safepoints: safepoints::SafepointSpiller,
}
38
39
/// Temporary object used to build a single Cranelift IR [`Function`].
pub struct FunctionBuilder<'a> {
    /// The function currently being built.
    /// This field is public so the function can be re-borrowed.
    pub func: &'a mut Function,

    /// Source location to assign to all new instructions.
    srcloc: ir::SourceLoc,

    // Borrowed, reusable construction state; cleared (not dropped) by `finalize`.
    func_ctx: &'a mut FunctionBuilderContext,
    // The block currently receiving instructions, if any; set by `switch_to_block`.
    position: PackedOption<Block>,
}
51
52
/// Fill status of a [`Block`], used to enforce that instructions are only
/// appended to blocks that have not yet been terminated.
#[derive(Clone, Default, Eq, PartialEq)]
enum BlockStatus {
    /// No instructions have been added.
    #[default]
    Empty,
    /// Some instructions have been added, but no terminator.
    Partial,
    /// A terminator has been added; no further instructions may be added.
    Filled,
}
62
63
impl FunctionBuilderContext {
64
/// Creates a [`FunctionBuilderContext`] structure. The structure is automatically cleared after
65
/// each [`FunctionBuilder`] completes translating a function.
66
pub fn new() -> Self {
67
Self::default()
68
}
69
70
fn clear(&mut self) {
71
let FunctionBuilderContext {
72
ssa,
73
status,
74
variables,
75
stack_map_vars,
76
stack_map_values,
77
safepoints,
78
} = self;
79
ssa.clear();
80
status.clear();
81
variables.clear();
82
stack_map_values.clear();
83
stack_map_vars.clear();
84
safepoints.clear();
85
}
86
87
fn is_empty(&self) -> bool {
88
self.ssa.is_empty() && self.status.is_empty() && self.variables.is_empty()
89
}
90
}
91
92
/// Implementation of the [`InstBuilder`] that has
/// one convenience method per Cranelift IR instruction.
pub struct FuncInstBuilder<'short, 'long: 'short> {
    // Builder whose state (layout, DFG, SSA) the built instruction updates.
    builder: &'short mut FunctionBuilder<'long>,
    // Block the instruction is appended to; captured when `ins()` is called.
    block: Block,
}
98
99
impl<'short, 'long> FuncInstBuilder<'short, 'long> {
100
fn new(builder: &'short mut FunctionBuilder<'long>, block: Block) -> Self {
101
Self { builder, block }
102
}
103
}
104
105
impl<'short, 'long> InstBuilderBase<'short> for FuncInstBuilder<'short, 'long> {
    fn data_flow_graph(&self) -> &DataFlowGraph {
        &self.builder.func.dfg
    }

    fn data_flow_graph_mut(&mut self) -> &mut DataFlowGraph {
        &mut self.builder.func.dfg
    }

    // This implementation is richer than `InsertBuilder` because we use the data of the
    // instruction being inserted to add related info to the DFG and the SSA building system,
    // and perform debug sanity checks.
    fn build(self, data: InstructionData, ctrl_typevar: Type) -> (Inst, &'short mut DataFlowGraph) {
        // We only insert the Block in the layout when an instruction is added to it
        self.builder.ensure_inserted_block();

        let inst = self.builder.func.dfg.make_inst(data);
        self.builder.func.dfg.make_inst_results(inst, ctrl_typevar);
        self.builder.func.layout.append_inst(inst, self.block);
        if !self.builder.srcloc.is_default() {
            self.builder.func.set_srcloc(inst, self.builder.srcloc);
        }

        // Record `inst` as a predecessor of every block it may branch to, so
        // that the SSA builder knows the full predecessor set by the time each
        // destination block is sealed.
        match &self.builder.func.dfg.insts[inst] {
            ir::InstructionData::Jump {
                destination: dest, ..
            } => {
                // If the user has supplied jump arguments we must adapt the arguments of
                // the destination block
                let block = dest.block(&self.builder.func.dfg.value_lists);
                self.builder.declare_successor(block, inst);
            }

            ir::InstructionData::Brif {
                blocks: [branch_then, branch_else],
                ..
            } => {
                let block_then = branch_then.block(&self.builder.func.dfg.value_lists);
                let block_else = branch_else.block(&self.builder.func.dfg.value_lists);

                self.builder.declare_successor(block_then, inst);
                // Declare each distinct successor only once.
                if block_then != block_else {
                    self.builder.declare_successor(block_else, inst);
                }
            }

            ir::InstructionData::BranchTable { table, .. } => {
                let pool = &self.builder.func.dfg.value_lists;

                // Unlike most other jumps/branches and like try_call,
                // jump tables are capable of having the same successor appear
                // multiple times, so we must deduplicate.
                let mut unique = EntitySet::<Block>::new();
                for dest_block in self
                    .builder
                    .func
                    .stencil
                    .dfg
                    .jump_tables
                    .get(*table)
                    .expect("you are referencing an undeclared jump table")
                    .all_branches()
                {
                    let block = dest_block.block(pool);
                    if !unique.insert(block) {
                        continue;
                    }

                    // Call `declare_block_predecessor` instead of `declare_successor` for
                    // avoiding the borrow checker.
                    self.builder
                        .func_ctx
                        .ssa
                        .declare_block_predecessor(block, inst);
                }
            }

            ir::InstructionData::TryCall { exception, .. }
            | ir::InstructionData::TryCallIndirect { exception, .. } => {
                let pool = &self.builder.func.dfg.value_lists;

                // Unlike most other jumps/branches and like br_table,
                // exception tables are capable of having the same successor
                // appear multiple times, so we must deduplicate.
                let mut unique = EntitySet::<Block>::new();
                for dest_block in self
                    .builder
                    .func
                    .stencil
                    .dfg
                    .exception_tables
                    .get(*exception)
                    .expect("you are referencing an undeclared exception table")
                    .all_branches()
                {
                    let block = dest_block.block(pool);
                    if !unique.insert(block) {
                        continue;
                    }

                    // Call `declare_block_predecessor` instead of `declare_successor` for
                    // avoiding the borrow checker.
                    self.builder
                        .func_ctx
                        .ssa
                        .declare_block_predecessor(block, inst);
                }
            }

            // Sanity check: every branching instruction kind must be handled
            // above, otherwise its successors would be silently ignored.
            inst => assert!(!inst.opcode().is_branch()),
        }

        // A terminator closes the block: no further instructions may be
        // appended to it after this point.
        if data.opcode().is_terminator() {
            self.builder.fill_current_block()
        }
        (inst, &mut self.builder.func.dfg)
    }
}
223
224
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
/// An error encountered when calling [`FunctionBuilder::try_use_var`].
pub enum UseVariableError {
    /// The variable was used (via `use_var`/`try_use_var`) before it was
    /// declared with [`FunctionBuilder::declare_var`].
    UsedBeforeDeclared(Variable),
}
229
230
impl fmt::Display for UseVariableError {
231
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
232
match self {
233
UseVariableError::UsedBeforeDeclared(variable) => {
234
write!(
235
f,
236
"variable {} was used before it was defined",
237
variable.index()
238
)?;
239
}
240
}
241
Ok(())
242
}
243
}
244
245
impl std::error::Error for UseVariableError {}
246
247
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
/// An error encountered when defining the initial value of a variable.
///
/// Returned by [`FunctionBuilder::try_def_var`].
pub enum DefVariableError {
    /// The variable was instantiated with a value of the wrong type.
    ///
    /// note: to obtain the type of the value, you can call
    /// [`cranelift_codegen::ir::dfg::DataFlowGraph::value_type`] (using the
    /// `FunctionBuilder.func.dfg` field)
    TypeMismatch(Variable, Value),
    /// The value was defined (in a call to [`FunctionBuilder::def_var`]) before
    /// it was declared (in a call to [`FunctionBuilder::declare_var`]).
    DefinedBeforeDeclared(Variable),
}
260
261
impl fmt::Display for DefVariableError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            DefVariableError::TypeMismatch(variable, value) => {
                // NOTE: the literal below intentionally spans several source
                // lines; the embedded newlines are part of the message.
                write!(
                    f,
                    "the types of variable {} and value {} are not the same.
The `Value` supplied to `def_var` must be of the same type as
the variable was declared to be of in `declare_var`.",
                    variable.index(),
                    value.as_u32()
                )?;
            }
            DefVariableError::DefinedBeforeDeclared(variable) => {
                write!(
                    f,
                    "the value of variable {} was declared before it was defined",
                    variable.index()
                )?;
            }
        }
        Ok(())
    }
}
285
286
/// This module allows you to create a function in Cranelift IR in a straightforward way, hiding
/// all the complexity of its internal representation.
///
/// The module is parametrized by one type which is the representation of variables in your
/// origin language. It offers a way to conveniently append instructions to your program flow.
/// You are responsible for splitting your instruction flow into extended blocks (declared with
/// [`create_block`](Self::create_block)) whose properties are:
///
/// - branch and jump instructions can only point at the top of extended blocks;
/// - the last instruction of each block is a terminator instruction which has no natural successor,
///   and those instructions can only appear at the end of extended blocks.
///
/// The parameters of Cranelift IR instructions are Cranelift IR values, which can only be created
/// as results of other Cranelift IR instructions. To be able to create variables redefined multiple
/// times in your program, use the [`def_var`](Self::def_var) and [`use_var`](Self::use_var) command,
/// that will maintain the correspondence between your variables and Cranelift IR SSA values.
///
/// The first block for which you call [`switch_to_block`](Self::switch_to_block) will be assumed to
/// be the beginning of the function.
///
/// At creation, a [`FunctionBuilder`] instance borrows an already allocated `Function` which it
/// modifies with the information stored in the mutable borrowed
/// [`FunctionBuilderContext`]. The function passed in argument should be newly created with
/// [`Function::with_name_signature()`], whereas the [`FunctionBuilderContext`] can be kept as is
/// between two function translations.
///
/// # Errors
///
/// The functions below will panic in debug mode whenever you try to modify the Cranelift IR
/// function in a way that violates the coherence of the code. For instance: switching to a new
/// [`Block`] when you haven't filled the current one with a terminator instruction, inserting a
/// return instruction with arguments that don't match the function's signature.
impl<'a> FunctionBuilder<'a> {
    /// Creates a new [`FunctionBuilder`] structure that will operate on a [`Function`] using a
    /// [`FunctionBuilderContext`].
    pub fn new(func: &'a mut Function, func_ctx: &'a mut FunctionBuilderContext) -> Self {
        // The context must have been cleared by the previous builder's
        // `finalize`; building on stale state would corrupt SSA construction.
        debug_assert!(func_ctx.is_empty());
        Self {
            func,
            srcloc: Default::default(),
            func_ctx,
            position: Default::default(),
        }
    }

    /// Get the block that this builder is currently at.
    pub fn current_block(&self) -> Option<Block> {
        self.position.expand()
    }

    /// Set the source location that should be assigned to all new instructions.
    pub fn set_srcloc(&mut self, srcloc: ir::SourceLoc) {
        self.srcloc = srcloc;
    }

    /// Creates a new [`Block`] and returns its reference.
    pub fn create_block(&mut self) -> Block {
        let block = self.func.dfg.make_block();
        // The SSA builder must know about every block before predecessors or
        // variable uses in it can be recorded.
        self.func_ctx.ssa.declare_block(block);
        block
    }

    /// Mark a block as "cold".
    ///
    /// This will try to move it out of the ordinary path of execution
    /// when lowered to machine code.
    pub fn set_cold_block(&mut self, block: Block) {
        self.func.layout.set_cold(block);
    }

    /// Insert `block` in the layout *after* the existing block `after`.
    pub fn insert_block_after(&mut self, block: Block, after: Block) {
        self.func.layout.insert_block_after(block, after);
    }

    /// After the call to this function, new instructions will be inserted into the designated
    /// block, in the order they are declared. You must declare the types of the [`Block`] arguments
    /// you will use here.
    ///
    /// When inserting the terminator instruction (which doesn't have a fallthrough to its immediate
    /// successor), the block will be declared filled and it will not be possible to append
    /// instructions to it.
    pub fn switch_to_block(&mut self, block: Block) {
        log::trace!("switch to {block:?}");

        // First we check that the previous block has been filled.
        debug_assert!(
            self.position.is_none()
                || self.is_unreachable()
                || self.is_pristine(self.position.unwrap())
                || self.is_filled(self.position.unwrap()),
            "you have to fill your block before switching"
        );
        // We cannot switch to a filled block
        debug_assert!(
            !self.is_filled(block),
            "you cannot switch to a block which is already filled"
        );

        // Then we change the cursor position.
        self.position = PackedOption::from(block);
    }

    /// Declares that all the predecessors of this block are known.
    ///
    /// Function to call with `block` as soon as the last branch instruction to `block` has been
    /// created. Forgetting to call this method on every block will cause inconsistencies in the
    /// produced functions.
    pub fn seal_block(&mut self, block: Block) {
        // Sealing may resolve pending block parameters, which can create new
        // instructions; those side effects are applied here.
        let side_effects = self.func_ctx.ssa.seal_block(block, self.func);
        self.handle_ssa_side_effects(side_effects);
    }

    /// Effectively calls [seal_block](Self::seal_block) on all unsealed blocks in the function.
    ///
    /// It's more efficient to seal [`Block`]s as soon as possible, during
    /// translation, but for frontends where this is impractical to do, this
    /// function can be used at the end of translating all blocks to ensure
    /// that everything is sealed.
    pub fn seal_all_blocks(&mut self) {
        let side_effects = self.func_ctx.ssa.seal_all_blocks(self.func);
        self.handle_ssa_side_effects(side_effects);
    }

    /// Declares the type of a variable.
    ///
    /// This allows the variable to be defined and used later (by calling
    /// [`FunctionBuilder::def_var`] and [`FunctionBuilder::use_var`]
    /// respectively).
    pub fn declare_var(&mut self, ty: Type) -> Variable {
        self.func_ctx.variables.push(ty)
    }

    /// Declare that all uses of the given variable must be included in stack
    /// map metadata.
    ///
    /// All values that are uses of this variable will be spilled to the stack
    /// before each safepoint and their location on the stack included in stack
    /// maps. Stack maps allow the garbage collector to identify the on-stack GC
    /// roots.
    ///
    /// This does not affect any pre-existing uses of the variable.
    ///
    /// # Panics
    ///
    /// Panics if the variable's type is larger than 16 bytes or if this
    /// variable has not been declared yet.
    pub fn declare_var_needs_stack_map(&mut self, var: Variable) {
        log::trace!("declare_var_needs_stack_map({var:?})");
        let ty = self.func_ctx.variables[var];
        assert!(ty != types::INVALID);
        // Mirrors the size restriction in `declare_value_needs_stack_map`.
        assert!(ty.bytes() <= 16);
        self.func_ctx.stack_map_vars.insert(var);
    }

    /// Returns the Cranelift IR necessary to use a previously defined user
    /// variable, returning an error if this is not possible.
    pub fn try_use_var(&mut self, var: Variable) -> Result<Value, UseVariableError> {
        // Assert that we're about to add instructions to this block using the definition of the
        // given variable. ssa.use_var is the only part of this crate which can add block parameters
        // behind the caller's back. If we disallow calling append_block_param as soon as use_var is
        // called, then we enforce a strict separation between user parameters and SSA parameters.
        self.ensure_inserted_block();

        let (val, side_effects) = {
            let ty = *self
                .func_ctx
                .variables
                .get(var)
                .ok_or(UseVariableError::UsedBeforeDeclared(var))?;
            debug_assert_ne!(
                ty,
                types::INVALID,
                "variable {var:?} is used but its type has not been declared"
            );
            self.func_ctx
                .ssa
                .use_var(self.func, var, ty, self.position.unwrap())
        };
        self.handle_ssa_side_effects(side_effects);

        Ok(val)
    }

    /// Returns the Cranelift IR value corresponding to the utilization at the current program
    /// position of a previously defined user variable.
    pub fn use_var(&mut self, var: Variable) -> Value {
        self.try_use_var(var).unwrap_or_else(|_| {
            panic!("variable {var:?} is used but its type has not been declared")
        })
    }

    /// Registers a new definition of a user variable. This function will return
    /// an error if the value supplied does not match the type the variable was
    /// declared to have.
    pub fn try_def_var(&mut self, var: Variable, val: Value) -> Result<(), DefVariableError> {
        log::trace!("try_def_var: {var:?} = {val:?}");

        let var_ty = *self
            .func_ctx
            .variables
            .get(var)
            .ok_or(DefVariableError::DefinedBeforeDeclared(var))?;
        if var_ty != self.func.dfg.value_type(val) {
            return Err(DefVariableError::TypeMismatch(var, val));
        }

        self.func_ctx.ssa.def_var(var, val, self.position.unwrap());
        Ok(())
    }

    /// Register a new definition of a user variable. The type of the value must be
    /// the same as the type registered for the variable.
    pub fn def_var(&mut self, var: Variable, val: Value) {
        self.try_def_var(var, val)
            .unwrap_or_else(|error| match error {
                DefVariableError::TypeMismatch(var, val) => {
                    panic!("declared type of variable {var:?} doesn't match type of value {val}");
                }
                DefVariableError::DefinedBeforeDeclared(var) => {
                    panic!("variable {var:?} is used but its type has not been declared");
                }
            })
    }

    /// Set label for [`Value`]
    ///
    /// This will not do anything unless
    /// [`func.dfg.collect_debug_info`](DataFlowGraph::collect_debug_info) is called first.
    pub fn set_val_label(&mut self, val: Value, label: ValueLabel) {
        // `values_labels` is `Some` only when debug-info collection is enabled.
        if let Some(values_labels) = self.func.stencil.dfg.values_labels.as_mut() {
            use alloc::collections::btree_map::Entry;

            let start = ValueLabelStart {
                from: RelSourceLoc::from_base_offset(self.func.params.base_srcloc(), self.srcloc),
                label,
            };

            match values_labels.entry(val) {
                Entry::Occupied(mut e) => match e.get_mut() {
                    ValueLabelAssignments::Starts(starts) => starts.push(start),
                    _ => panic!("Unexpected ValueLabelAssignments at this stage"),
                },
                Entry::Vacant(e) => {
                    e.insert(ValueLabelAssignments::Starts(vec![start]));
                }
            }
        }
    }

    /// Declare that the given value is a GC reference that requires inclusion
    /// in a stack map when it is live across GC safepoints.
    ///
    /// At the current moment, values that need inclusion in stack maps are
    /// spilled before safepoints, but they are not reloaded afterwards. This
    /// means that moving GCs are not yet supported, however the intention is to
    /// add this support in the near future.
    ///
    /// # Panics
    ///
    /// Panics if `val` is larger than 16 bytes.
    pub fn declare_value_needs_stack_map(&mut self, val: Value) {
        log::trace!("declare_value_needs_stack_map({val:?})");

        // We rely on these properties in `insert_safepoint_spills`.
        let size = self.func.dfg.value_type(val).bytes();
        assert!(size <= 16);
        assert!(size.is_power_of_two());

        self.func_ctx.stack_map_values.insert(val);
    }

    /// Creates a jump table in the function, to be used by [`br_table`](InstBuilder::br_table) instructions.
    pub fn create_jump_table(&mut self, data: JumpTableData) -> JumpTable {
        self.func.create_jump_table(data)
    }

    /// Creates a sized stack slot in the function, to be used by [`stack_load`](InstBuilder::stack_load),
    /// [`stack_store`](InstBuilder::stack_store) and [`stack_addr`](InstBuilder::stack_addr) instructions.
    pub fn create_sized_stack_slot(&mut self, data: StackSlotData) -> StackSlot {
        self.func.create_sized_stack_slot(data)
    }

    /// Creates a dynamic stack slot in the function, to be used by
    /// [`dynamic_stack_load`](InstBuilder::dynamic_stack_load),
    /// [`dynamic_stack_store`](InstBuilder::dynamic_stack_store) and
    /// [`dynamic_stack_addr`](InstBuilder::dynamic_stack_addr) instructions.
    pub fn create_dynamic_stack_slot(&mut self, data: DynamicStackSlotData) -> DynamicStackSlot {
        self.func.create_dynamic_stack_slot(data)
    }

    /// Adds a signature which can later be used to declare an external function import.
    pub fn import_signature(&mut self, signature: Signature) -> SigRef {
        self.func.import_signature(signature)
    }

    /// Declare an external function import.
    pub fn import_function(&mut self, data: ExtFuncData) -> FuncRef {
        self.func.import_function(data)
    }

    /// Declares a global value accessible to the function.
    pub fn create_global_value(&mut self, data: GlobalValueData) -> GlobalValue {
        self.func.create_global_value(data)
    }

    /// Returns an object with the [`InstBuilder`]
    /// trait that allows to conveniently append an instruction to the current [`Block`] being built.
    pub fn ins<'short>(&'short mut self) -> FuncInstBuilder<'short, 'a> {
        let block = self
            .position
            .expect("Please call switch_to_block before inserting instructions");
        FuncInstBuilder::new(self, block)
    }

    /// Make sure that the current block is inserted in the layout.
    pub fn ensure_inserted_block(&mut self) {
        let block = self.position.unwrap();
        if self.is_pristine(block) {
            // Lazily insert the block into the layout on its first instruction.
            if !self.func.layout.is_block_inserted(block) {
                self.func.layout.append_block(block);
            }
            self.func_ctx.status[block] = BlockStatus::Partial;
        } else {
            debug_assert!(
                !self.is_filled(block),
                "you cannot add an instruction to a block already filled"
            );
        }
    }

    /// Returns a [`FuncCursor`] pointed at the current position ready for inserting instructions.
    ///
    /// This can be used to insert SSA code that doesn't need to access locals and that doesn't
    /// need to know about [`FunctionBuilder`] at all.
    pub fn cursor(&mut self) -> FuncCursor<'_> {
        self.ensure_inserted_block();
        FuncCursor::new(self.func)
            .with_srcloc(self.srcloc)
            .at_bottom(self.position.unwrap())
    }

    /// Append parameters to the given [`Block`] corresponding to the function
    /// parameters. This can be used to set up the block parameters for the
    /// entry block.
    pub fn append_block_params_for_function_params(&mut self, block: Block) {
        debug_assert!(
            !self.func_ctx.ssa.has_any_predecessors(block),
            "block parameters for function parameters should only be added to the entry block"
        );

        // These parameters count as "user" parameters here because they aren't
        // inserted by the SSABuilder.
        debug_assert!(
            self.is_pristine(block),
            "You can't add block parameters after adding any instruction"
        );

        for argtyp in &self.func.stencil.signature.params {
            self.func
                .stencil
                .dfg
                .append_block_param(block, argtyp.value_type);
        }
    }

    /// Append parameters to the given [`Block`] corresponding to the function
    /// return values. This can be used to set up the block parameters for a
    /// function exit block.
    pub fn append_block_params_for_function_returns(&mut self, block: Block) {
        // These parameters count as "user" parameters here because they aren't
        // inserted by the SSABuilder.
        debug_assert!(
            self.is_pristine(block),
            "You can't add block parameters after adding any instruction"
        );

        for argtyp in &self.func.stencil.signature.returns {
            self.func
                .stencil
                .dfg
                .append_block_param(block, argtyp.value_type);
        }
    }

    /// Declare that translation of the current function is complete.
    ///
    /// This resets the state of the [`FunctionBuilderContext`] in preparation to
    /// be used for another function.
    pub fn finalize(mut self) {
        // Check that all the `Block`s are filled and sealed.
        #[cfg(debug_assertions)]
        {
            for block in self.func_ctx.status.keys() {
                if !self.is_pristine(block) {
                    assert!(
                        self.func_ctx.ssa.is_sealed(block),
                        "FunctionBuilder finalized, but block {block} is not sealed",
                    );
                    assert!(
                        self.is_filled(block),
                        "FunctionBuilder finalized, but block {block} is not filled",
                    );
                }
            }
        }

        // In debug mode, check that all blocks are valid basic blocks.
        #[cfg(debug_assertions)]
        {
            // Iterate manually to provide more helpful error messages.
            for block in self.func_ctx.status.keys() {
                if let Err((inst, msg)) = self.func.is_block_basic(block) {
                    let inst_str = self.func.dfg.display_inst(inst);
                    panic!("{block} failed basic block invariants on {inst_str}: {msg}");
                }
            }
        }

        // Propagate the needs-stack-map bit from variables to each of their
        // associated values.
        for var in self.func_ctx.stack_map_vars.iter() {
            for val in self.func_ctx.ssa.values_for_var(var) {
                log::trace!("propagating needs-stack-map from {var:?} to {val:?}");
                debug_assert_eq!(self.func.dfg.value_type(val), self.func_ctx.variables[var]);
                self.func_ctx.stack_map_values.insert(val);
            }
        }

        // If we have any values that need inclusion in stack maps, then we need
        // to run our pass to spill those values to the stack at safepoints and
        // generate stack maps.
        if !self.func_ctx.stack_map_values.is_empty() {
            self.func_ctx
                .safepoints
                .run(&mut self.func, &self.func_ctx.stack_map_values);
        }

        // Clear the state (but preserve the allocated buffers) in preparation
        // for translating another function.
        self.func_ctx.clear();
    }
}
729
730
/// All the functions documented in the previous block are write-only and help you build valid
/// Cranelift IR functions via multiple debug asserts. However, you might need to improve the
/// performance of your translation or perform more complex transformations to your Cranelift IR
/// function. The functions below help you inspect the function you're creating and modify it
/// in ways that can be unsafe if used incorrectly.
impl<'a> FunctionBuilder<'a> {
    /// Retrieves all the parameters for a [`Block`] currently inferred from the jump instructions
    /// inserted that target it and the SSA construction.
    pub fn block_params(&self, block: Block) -> &[Value] {
        self.func.dfg.block_params(block)
    }

    /// Retrieves the signature with reference `sigref` previously added with
    /// [`import_signature`](Self::import_signature).
    pub fn signature(&self, sigref: SigRef) -> Option<&Signature> {
        self.func.dfg.signatures.get(sigref)
    }

    /// Creates a parameter for a specific [`Block`] by appending it to the list of already existing
    /// parameters.
    ///
    /// **Note:** this function has to be called at the creation of the `Block` before adding
    /// instructions to it, otherwise this could interfere with SSA construction.
    pub fn append_block_param(&mut self, block: Block, ty: Type) -> Value {
        debug_assert!(
            self.is_pristine(block),
            "You can't add block parameters after adding any instruction"
        );
        self.func.dfg.append_block_param(block, ty)
    }

    /// Returns the result values of an instruction.
    pub fn inst_results(&self, inst: Inst) -> &[Value] {
        self.func.dfg.inst_results(inst)
    }

    /// Changes the destination of a jump instruction after creation.
    ///
    /// **Note:** You are responsible for maintaining the coherence with the arguments of
    /// other jump instructions.
    pub fn change_jump_destination(&mut self, inst: Inst, old_block: Block, new_block: Block) {
        let dfg = &mut self.func.dfg;
        for block in
            dfg.insts[inst].branch_destination_mut(&mut dfg.jump_tables, &mut dfg.exception_tables)
        {
            if block.block(&dfg.value_lists) == old_block {
                // Keep the SSA predecessor lists in sync with the rewritten
                // destination: `inst` stops being a predecessor of `old_block`
                // and becomes one of `new_block`.
                self.func_ctx.ssa.remove_block_predecessor(old_block, inst);
                block.set_block(new_block, &mut dfg.value_lists);
                self.func_ctx.ssa.declare_block_predecessor(new_block, inst);
            }
        }
    }

    /// Returns `true` if and only if the current [`Block`] is sealed and has no predecessors declared.
    ///
    /// The entry block of a function is never unreachable.
    pub fn is_unreachable(&self) -> bool {
        let is_entry = match self.func.layout.entry_block() {
            None => false,
            Some(entry) => self.position.unwrap() == entry,
        };
        // A sealed block with no predecessors can never gain one later, so it
        // is known to be unreachable (unless it is the entry block).
        !is_entry
            && self.func_ctx.ssa.is_sealed(self.position.unwrap())
            && !self
                .func_ctx
                .ssa
                .has_any_predecessors(self.position.unwrap())
    }

    /// Returns `true` if and only if no instructions have been added since the last call to
    /// [`switch_to_block`](Self::switch_to_block).
    fn is_pristine(&self, block: Block) -> bool {
        self.func_ctx.status[block] == BlockStatus::Empty
    }

    /// Returns `true` if and only if a terminator instruction has been inserted since the
    /// last call to [`switch_to_block`](Self::switch_to_block).
    fn is_filled(&self, block: Block) -> bool {
        self.func_ctx.status[block] == BlockStatus::Filled
    }
}
811
812
/// Helper functions
813
impl<'a> FunctionBuilder<'a> {
814
/// Calls libc.memcpy
815
///
816
/// Copies the `size` bytes from `src` to `dest`, assumes that `src + size`
817
/// won't overlap onto `dest`. If `dest` and `src` overlap, the behavior is
818
/// undefined. Applications in which `dest` and `src` might overlap should
819
/// use `call_memmove` instead.
820
pub fn call_memcpy(
821
&mut self,
822
config: TargetFrontendConfig,
823
dest: Value,
824
src: Value,
825
size: Value,
826
) {
827
let pointer_type = config.pointer_type();
828
let signature = {
829
let mut s = Signature::new(config.default_call_conv);
830
s.params.push(AbiParam::new(pointer_type));
831
s.params.push(AbiParam::new(pointer_type));
832
s.params.push(AbiParam::new(pointer_type));
833
s.returns.push(AbiParam::new(pointer_type));
834
self.import_signature(s)
835
};
836
837
let libc_memcpy = self.import_function(ExtFuncData {
838
name: ExternalName::LibCall(LibCall::Memcpy),
839
signature,
840
colocated: false,
841
});
842
843
self.ins().call(libc_memcpy, &[dest, src, size]);
844
}
845
846
/// Optimised memcpy or memmove for small copies.
///
/// # Codegen safety
///
/// The following properties must hold to prevent UB:
///
/// * `src_align` and `dest_align` are an upper-bound on the alignment of `src` respectively `dest`.
/// * If `non_overlapping` is true, then this must be correct.
pub fn emit_small_memory_copy(
    &mut self,
    config: TargetFrontendConfig,
    dest: Value,
    src: Value,
    size: u64,
    dest_align: u8,
    src_align: u8,
    non_overlapping: bool,
    mut flags: MemFlags,
) {
    // Maximum number of load/store pairs to emit inline before falling back
    // to a libcall. Currently the result of guess work, not actual profiling.
    const THRESHOLD: u64 = 4;

    if size == 0 {
        return;
    }

    // Widest power-of-two access width that evenly divides `size`.
    let access_size = greatest_divisible_power_of_two(size);
    assert!(
        access_size.is_power_of_two(),
        "`size` is not a power of two"
    );
    assert!(
        access_size >= u64::from(::core::cmp::min(src_align, dest_align)),
        "`size` is smaller than `dest` and `src`'s alignment value."
    );

    // Cap the access width at 8 bytes (I64), the widest integer load/store
    // used here.
    let (access_size, int_type) = if access_size <= 8 {
        (access_size, Type::int((access_size * 8) as u16).unwrap())
    } else {
        (8, types::I64)
    };

    let load_and_store_amount = size / access_size;

    // Too many inline accesses: emit a call to memcpy/memmove instead.
    if load_and_store_amount > THRESHOLD {
        let size_value = self.ins().iconst(config.pointer_type(), size as i64);
        if non_overlapping {
            self.call_memcpy(config, dest, src, size_value);
        } else {
            self.call_memmove(config, dest, src, size_value);
        }
        return;
    }

    // Mark accesses aligned only when both pointers are known to be at least
    // as aligned as the access width.
    if u64::from(src_align) >= access_size && u64::from(dest_align) >= access_size {
        flags.set_aligned();
    }

    // Load all of the memory first. This is necessary in case `dest` overlaps.
    // It can also improve performance a bit.
    let registers: smallvec::SmallVec<[_; THRESHOLD as usize]> = (0..load_and_store_amount)
        .map(|i| {
            let offset = (access_size * i) as i32;
            (self.ins().load(int_type, flags, src, offset), offset)
        })
        .collect();

    for (value, offset) in registers {
        self.ins().store(flags, value, dest, offset);
    }
}
917
918
/// Calls libc.memset
919
///
920
/// Writes `size` bytes of i8 value `ch` to memory starting at `buffer`.
921
pub fn call_memset(
922
&mut self,
923
config: TargetFrontendConfig,
924
buffer: Value,
925
ch: Value,
926
size: Value,
927
) {
928
let pointer_type = config.pointer_type();
929
let signature = {
930
let mut s = Signature::new(config.default_call_conv);
931
s.params.push(AbiParam::new(pointer_type));
932
s.params.push(AbiParam::new(types::I32));
933
s.params.push(AbiParam::new(pointer_type));
934
s.returns.push(AbiParam::new(pointer_type));
935
self.import_signature(s)
936
};
937
938
let libc_memset = self.import_function(ExtFuncData {
939
name: ExternalName::LibCall(LibCall::Memset),
940
signature,
941
colocated: false,
942
});
943
944
let ch = self.ins().uextend(types::I32, ch);
945
self.ins().call(libc_memset, &[buffer, ch, size]);
946
}
947
948
    /// Calls libc.memset
    ///
    /// Writes `size` bytes of value `ch` to memory starting at `buffer`.
    /// Small fills are emitted inline as a short sequence of stores of a
    /// byte-replicated constant; larger fills call out to `memset`.
    pub fn emit_small_memset(
        &mut self,
        config: TargetFrontendConfig,
        buffer: Value,
        ch: u8,
        size: u64,
        buffer_align: u8,
        mut flags: MemFlags,
    ) {
        // Currently the result of guess work, not actual profiling.
        const THRESHOLD: u64 = 4;

        // Nothing to do for an empty fill.
        if size == 0 {
            return;
        }

        // Widest power-of-two access that divides `size` evenly.
        let access_size = greatest_divisible_power_of_two(size);
        assert!(
            access_size.is_power_of_two(),
            "`size` is not a power of two"
        );
        assert!(
            access_size >= u64::from(buffer_align),
            "`size` is smaller than `dest` and `src`'s alignment value."
        );

        // Clamp the access width to 64 bits.
        let (access_size, int_type) = if access_size <= 8 {
            (access_size, Type::int((access_size * 8) as u16).unwrap())
        } else {
            (8, types::I64)
        };

        let load_and_store_amount = size / access_size;

        if load_and_store_amount > THRESHOLD {
            // Too many stores: fall back to the libcall.
            let ch = self.ins().iconst(types::I8, i64::from(ch));
            let size = self.ins().iconst(config.pointer_type(), size as i64);
            self.call_memset(config, buffer, ch, size);
        } else {
            if u64::from(buffer_align) >= access_size {
                flags.set_aligned();
            }

            // Replicate the fill byte across every byte of the store width
            // (e.g. 0xAB -> 0xABABABAB for an i32 store). For ch == 0xFF the
            // i64 product is exactly u64::MAX, so this cannot overflow.
            let ch = u64::from(ch);
            let raw_value = if int_type == types::I64 {
                ch * 0x0101010101010101_u64
            } else if int_type == types::I32 {
                ch * 0x01010101_u64
            } else if int_type == types::I16 {
                (ch << 8) | ch
            } else {
                assert_eq!(int_type, types::I8);
                ch
            };

            let value = self.ins().iconst(int_type, raw_value as i64);
            for i in 0..load_and_store_amount {
                let offset = (access_size * i) as i32;
                self.ins().store(flags, value, buffer, offset);
            }
        }
    }
1013
1014
/// Calls libc.memmove
1015
///
1016
/// Copies `size` bytes from memory starting at `source` to memory starting
1017
/// at `dest`. `source` is always read before writing to `dest`.
1018
pub fn call_memmove(
1019
&mut self,
1020
config: TargetFrontendConfig,
1021
dest: Value,
1022
source: Value,
1023
size: Value,
1024
) {
1025
let pointer_type = config.pointer_type();
1026
let signature = {
1027
let mut s = Signature::new(config.default_call_conv);
1028
s.params.push(AbiParam::new(pointer_type));
1029
s.params.push(AbiParam::new(pointer_type));
1030
s.params.push(AbiParam::new(pointer_type));
1031
s.returns.push(AbiParam::new(pointer_type));
1032
self.import_signature(s)
1033
};
1034
1035
let libc_memmove = self.import_function(ExtFuncData {
1036
name: ExternalName::LibCall(LibCall::Memmove),
1037
signature,
1038
colocated: false,
1039
});
1040
1041
self.ins().call(libc_memmove, &[dest, source, size]);
1042
}
1043
1044
/// Calls libc.memcmp
1045
///
1046
/// Compares `size` bytes from memory starting at `left` to memory starting
1047
/// at `right`. Returns `0` if all `n` bytes are equal. If the first difference
1048
/// is at offset `i`, returns a positive integer if `ugt(left[i], right[i])`
1049
/// and a negative integer if `ult(left[i], right[i])`.
1050
///
1051
/// Returns a C `int`, which is currently always [`types::I32`].
1052
pub fn call_memcmp(
1053
&mut self,
1054
config: TargetFrontendConfig,
1055
left: Value,
1056
right: Value,
1057
size: Value,
1058
) -> Value {
1059
let pointer_type = config.pointer_type();
1060
let signature = {
1061
let mut s = Signature::new(config.default_call_conv);
1062
s.params.reserve(3);
1063
s.params.push(AbiParam::new(pointer_type));
1064
s.params.push(AbiParam::new(pointer_type));
1065
s.params.push(AbiParam::new(pointer_type));
1066
s.returns.push(AbiParam::new(types::I32));
1067
self.import_signature(s)
1068
};
1069
1070
let libc_memcmp = self.import_function(ExtFuncData {
1071
name: ExternalName::LibCall(LibCall::Memcmp),
1072
signature,
1073
colocated: false,
1074
});
1075
1076
let call = self.ins().call(libc_memcmp, &[left, right, size]);
1077
self.func.dfg.first_result(call)
1078
}
1079
1080
    /// Optimised [`Self::call_memcmp`] for small copies.
    ///
    /// This implements the byte slice comparison `int_cc(left[..size], right[..size])`.
    ///
    /// `left_align` and `right_align` are the statically-known alignments of the
    /// `left` and `right` pointers respectively. These are used to know whether
    /// to mark `load`s as aligned. It's always fine to pass `1` for these, but
    /// passing something higher than the true alignment may trap or otherwise
    /// misbehave as described in [`MemFlags::aligned`].
    ///
    /// Note that `memcmp` is a *big-endian* and *unsigned* comparison.
    /// As such, this panics when called with `IntCC::Signed*`.
    pub fn emit_small_memory_compare(
        &mut self,
        config: TargetFrontendConfig,
        int_cc: IntCC,
        left: Value,
        right: Value,
        size: u64,
        left_align: std::num::NonZeroU8,
        right_align: std::num::NonZeroU8,
        flags: MemFlags,
    ) -> Value {
        use IntCC::*;
        // `zero_cc` is the *signed* condition applied to memcmp's result
        // compared against zero; `empty_imm` is the answer for a zero-length
        // comparison (two empty slices are equal).
        let (zero_cc, empty_imm) = match int_cc {
            // Equality is sign-agnostic.
            Equal => (Equal, 1),
            NotEqual => (NotEqual, 0),

            UnsignedLessThan => (SignedLessThan, 0),
            UnsignedGreaterThanOrEqual => (SignedGreaterThanOrEqual, 1),
            UnsignedGreaterThan => (SignedGreaterThan, 0),
            UnsignedLessThanOrEqual => (SignedLessThanOrEqual, 1),

            SignedLessThan
            | SignedGreaterThanOrEqual
            | SignedGreaterThan
            | SignedLessThanOrEqual => {
                panic!("Signed comparison {int_cc} not supported by memcmp")
            }
        };

        // Empty slices compare equal without touching memory.
        if size == 0 {
            return self.ins().iconst(types::I8, empty_imm);
        }

        // Future work could consider expanding this to handle more-complex scenarios.
        if let Some(small_type) = size.try_into().ok().and_then(Type::int_with_byte_size) {
            if let Equal | NotEqual = zero_cc {
                // (In)equality of a single integer-sized chunk is endianness-
                // insensitive, so one load per side plus an icmp suffices.
                let mut left_flags = flags;
                if size == left_align.get() as u64 {
                    left_flags.set_aligned();
                }
                let mut right_flags = flags;
                if size == right_align.get() as u64 {
                    right_flags.set_aligned();
                }
                let left_val = self.ins().load(small_type, left_flags, left, 0);
                let right_val = self.ins().load(small_type, right_flags, right, 0);
                return self.ins().icmp(int_cc, left_val, right_val);
            } else if small_type == types::I8 {
                // Once the big-endian loads from wasmtime#2492 are implemented in
                // the backends, we could easily handle comparisons for more sizes here.
                // But for now, just handle single bytes where we don't need to worry.

                // A single byte is always "aligned".
                let mut aligned_flags = flags;
                aligned_flags.set_aligned();
                let left_val = self.ins().load(small_type, aligned_flags, left, 0);
                let right_val = self.ins().load(small_type, aligned_flags, right, 0);
                return self.ins().icmp(int_cc, left_val, right_val);
            }
        }

        // General case: call memcmp and compare its C-int result against 0
        // using the signed counterpart of the requested condition.
        let pointer_type = config.pointer_type();
        let size = self.ins().iconst(pointer_type, size as i64);
        let cmp = self.call_memcmp(config, left, right, size);
        self.ins().icmp_imm(zero_cc, cmp, 0)
    }
1158
}
1159
1160
/// Returns the greatest power of two that divides `size` evenly, i.e. the
/// value of `size`'s lowest set bit (e.g. 48 -> 16, 24 -> 8, 25 -> 1).
///
/// Returns 0 when `size` is 0.
fn greatest_divisible_power_of_two(size: u64) -> u64 {
    // Isolate the lowest set bit with `x & -x`, computed entirely in `u64`.
    // The previous round-trip through `i64` panicked in debug builds for
    // `size == 1 << 63` (negating `i64::MIN` overflows); `wrapping_neg`
    // yields the identical two's-complement bit pattern without that trap.
    size & size.wrapping_neg()
}
1163
1164
// Helper functions
1165
impl<'a> FunctionBuilder<'a> {
1166
/// A Block is 'filled' when a terminator instruction is present.
1167
fn fill_current_block(&mut self) {
1168
self.func_ctx.status[self.position.unwrap()] = BlockStatus::Filled;
1169
}
1170
1171
fn declare_successor(&mut self, dest_block: Block, jump_inst: Inst) {
1172
self.func_ctx
1173
.ssa
1174
.declare_block_predecessor(dest_block, jump_inst);
1175
}
1176
1177
fn handle_ssa_side_effects(&mut self, side_effects: SideEffects) {
1178
let SideEffects {
1179
instructions_added_to_blocks,
1180
} = side_effects;
1181
1182
for modified_block in instructions_added_to_blocks {
1183
if self.is_pristine(modified_block) {
1184
self.func_ctx.status[modified_block] = BlockStatus::Partial;
1185
}
1186
}
1187
}
1188
}
1189
1190
#[cfg(test)]
mod tests {
    use super::greatest_divisible_power_of_two;
    use crate::Variable;
    use crate::frontend::{
        DefVariableError, FunctionBuilder, FunctionBuilderContext, UseVariableError,
    };
    use alloc::string::ToString;
    use cranelift_codegen::ir::condcodes::IntCC;
    use cranelift_codegen::ir::{
        AbiParam, BlockCall, ExceptionTableData, ExtFuncData, ExternalName, Function, InstBuilder,
        MemFlags, Signature, UserExternalName, UserFuncName, Value, types::*,
    };
    use cranelift_codegen::isa::{CallConv, TargetFrontendConfig, TargetIsa};
    use cranelift_codegen::settings;
    use cranelift_codegen::verifier::verify_function;
    use target_lexicon::PointerWidth;

    // Builds a small function with a loop (block1 <-> block3) and several
    // variables, exercising both eager per-block sealing and lazy
    // seal-everything-at-the-end, then runs the IR verifier on the result.
    fn sample_function(lazy_seal: bool) {
        let mut sig = Signature::new(CallConv::SystemV);
        sig.returns.push(AbiParam::new(I32));
        sig.params.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let block1 = builder.create_block();
            let block2 = builder.create_block();
            let block3 = builder.create_block();
            let x = builder.declare_var(I32);
            let y = builder.declare_var(I32);
            let z = builder.declare_var(I32);

            builder.append_block_params_for_function_params(block0);

            builder.switch_to_block(block0);
            if !lazy_seal {
                builder.seal_block(block0);
            }
            {
                let tmp = builder.block_params(block0)[0]; // the first function parameter
                builder.def_var(x, tmp);
            }
            {
                let tmp = builder.ins().iconst(I32, 2);
                builder.def_var(y, tmp);
            }
            {
                let arg1 = builder.use_var(x);
                let arg2 = builder.use_var(y);
                let tmp = builder.ins().iadd(arg1, arg2);
                builder.def_var(z, tmp);
            }
            builder.ins().jump(block1, &[]);

            builder.switch_to_block(block1);
            {
                let arg1 = builder.use_var(y);
                let arg2 = builder.use_var(z);
                let tmp = builder.ins().iadd(arg1, arg2);
                builder.def_var(z, tmp);
            }
            {
                let arg = builder.use_var(y);
                builder.ins().brif(arg, block3, &[], block2, &[]);
            }

            builder.switch_to_block(block2);
            if !lazy_seal {
                builder.seal_block(block2);
            }
            {
                let arg1 = builder.use_var(z);
                let arg2 = builder.use_var(x);
                let tmp = builder.ins().isub(arg1, arg2);
                builder.def_var(z, tmp);
            }
            {
                let arg = builder.use_var(y);
                builder.ins().return_(&[arg]);
            }

            builder.switch_to_block(block3);
            if !lazy_seal {
                builder.seal_block(block3);
            }

            {
                let arg1 = builder.use_var(y);
                let arg2 = builder.use_var(x);
                let tmp = builder.ins().isub(arg1, arg2);
                builder.def_var(y, tmp);
            }
            builder.ins().jump(block1, &[]);
            // block1 has a back-edge from block3, so it can only be sealed
            // once that predecessor has been declared.
            if !lazy_seal {
                builder.seal_block(block1);
            }

            if lazy_seal {
                builder.seal_all_blocks();
            }

            builder.finalize();
        }

        let flags = settings::Flags::new(settings::builder());
        // println!("{}", func.display(None));
        if let Err(errors) = verify_function(&func, &flags) {
            panic!("{}\n{}", func.display(), errors)
        }
    }

    #[test]
    fn sample() {
        sample_function(false)
    }

    #[test]
    fn sample_with_lazy_seal() {
        sample_function(true)
    }

    // Asserts that the printed IR of `func` matches `expected_ir`, modulo
    // leading/trailing whitespace of the whole string.
    #[track_caller]
    fn check(func: &Function, expected_ir: &str) {
        let expected_ir = expected_ir.trim();
        let actual_ir = func.display().to_string();
        let actual_ir = actual_ir.trim();
        assert!(
            expected_ir == actual_ir,
            "Expected:\n{expected_ir}\nGot:\n{actual_ir}"
        );
    }

    /// Helper function to construct a fixed frontend configuration.
    fn systemv_frontend_config() -> TargetFrontendConfig {
        TargetFrontendConfig {
            default_call_conv: CallConv::SystemV,
            pointer_width: PointerWidth::U64,
            page_size_align_log2: 12,
        }
    }

    #[test]
    fn memcpy() {
        let frontend_config = systemv_frontend_config();
        let mut sig = Signature::new(frontend_config.default_call_conv);
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let x = builder.declare_var(frontend_config.pointer_type());
            let y = builder.declare_var(frontend_config.pointer_type());
            let _z = builder.declare_var(I32);

            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let src = builder.use_var(x);
            let dest = builder.use_var(y);
            let size = builder.use_var(y);
            builder.call_memcpy(frontend_config, dest, src, size);
            builder.ins().return_(&[size]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
    sig0 = (i64, i64, i64) -> i64 system_v
    fn0 = %Memcpy sig0

block0:
    v4 = iconst.i64 0
    v1 -> v4
    v3 = iconst.i64 0
    v0 -> v3
    v2 = call fn0(v1, v0, v1)  ; v1 = 0, v0 = 0, v1 = 0
    return v1  ; v1 = 0
}
",
        );
    }

    #[test]
    fn small_memcpy() {
        let frontend_config = systemv_frontend_config();
        let mut sig = Signature::new(frontend_config.default_call_conv);
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let x = builder.declare_var(frontend_config.pointer_type());
            let y = builder.declare_var(frontend_config.pointer_type());

            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let src = builder.use_var(x);
            let dest = builder.use_var(y);
            // 8 bytes at 8-byte alignment: expected to become one aligned
            // i64 load + store rather than a libcall.
            let size = 8;
            builder.emit_small_memory_copy(
                frontend_config,
                dest,
                src,
                size,
                8,
                8,
                true,
                MemFlags::new(),
            );
            builder.ins().return_(&[dest]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
block0:
    v4 = iconst.i64 0
    v1 -> v4
    v3 = iconst.i64 0
    v0 -> v3
    v2 = load.i64 aligned v0  ; v0 = 0
    store aligned v2, v1  ; v1 = 0
    return v1  ; v1 = 0
}
",
        );
    }

    #[test]
    fn not_so_small_memcpy() {
        let frontend_config = systemv_frontend_config();
        let mut sig = Signature::new(frontend_config.default_call_conv);
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let x = builder.declare_var(frontend_config.pointer_type());
            let y = builder.declare_var(frontend_config.pointer_type());
            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let src = builder.use_var(x);
            let dest = builder.use_var(y);
            // 8192 bytes is over the inline threshold: expect a memcpy call.
            let size = 8192;
            builder.emit_small_memory_copy(
                frontend_config,
                dest,
                src,
                size,
                8,
                8,
                true,
                MemFlags::new(),
            );
            builder.ins().return_(&[dest]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
    sig0 = (i64, i64, i64) -> i64 system_v
    fn0 = %Memcpy sig0

block0:
    v5 = iconst.i64 0
    v1 -> v5
    v4 = iconst.i64 0
    v0 -> v4
    v2 = iconst.i64 8192
    v3 = call fn0(v1, v0, v2)  ; v1 = 0, v0 = 0, v2 = 8192
    return v1  ; v1 = 0
}
",
        );
    }

    #[test]
    fn small_memset() {
        let frontend_config = systemv_frontend_config();
        let mut sig = Signature::new(frontend_config.default_call_conv);
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let y = builder.declare_var(frontend_config.pointer_type());
            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let dest = builder.use_var(y);
            // 8-byte fill of 0x01: expect one store of the replicated constant.
            let size = 8;
            builder.emit_small_memset(frontend_config, dest, 1, size, 8, MemFlags::new());
            builder.ins().return_(&[dest]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
block0:
    v2 = iconst.i64 0
    v0 -> v2
    v1 = iconst.i64 0x0101_0101_0101_0101
    store aligned v1, v0  ; v1 = 0x0101_0101_0101_0101, v0 = 0
    return v0  ; v0 = 0
}
",
        );
    }

    #[test]
    fn not_so_small_memset() {
        let frontend_config = systemv_frontend_config();
        let mut sig = Signature::new(frontend_config.default_call_conv);
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let y = builder.declare_var(frontend_config.pointer_type());
            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let dest = builder.use_var(y);
            // 8192 bytes exceeds the inline threshold: expect a memset call.
            let size = 8192;
            builder.emit_small_memset(frontend_config, dest, 1, size, 8, MemFlags::new());
            builder.ins().return_(&[dest]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
    sig0 = (i64, i32, i64) -> i64 system_v
    fn0 = %Memset sig0

block0:
    v5 = iconst.i64 0
    v0 -> v5
    v1 = iconst.i8 1
    v2 = iconst.i64 8192
    v3 = uextend.i32 v1  ; v1 = 1
    v4 = call fn0(v0, v3, v2)  ; v0 = 0, v2 = 8192
    return v0  ; v0 = 0
}
",
        );
    }

    #[test]
    fn memcmp() {
        use core::str::FromStr;
        use cranelift_codegen::isa;

        let shared_builder = settings::builder();
        let shared_flags = settings::Flags::new(shared_builder);

        let triple =
            ::target_lexicon::Triple::from_str("x86_64").expect("Couldn't create x86_64 triple");

        let target = isa::lookup(triple)
            .ok()
            .map(|b| b.finish(shared_flags))
            .expect("This test requires x86_64 support.")
            .expect("Should be able to create backend with default flags");

        let mut sig = Signature::new(target.default_call_conv());
        sig.returns.push(AbiParam::new(I32));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let x = builder.declare_var(target.pointer_type());
            let y = builder.declare_var(target.pointer_type());
            let z = builder.declare_var(target.pointer_type());
            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let left = builder.use_var(x);
            let right = builder.use_var(y);
            let size = builder.use_var(z);
            let cmp = builder.call_memcmp(target.frontend_config(), left, right, size);
            builder.ins().return_(&[cmp]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i32 system_v {
    sig0 = (i64, i64, i64) -> i32 system_v
    fn0 = %Memcmp sig0

block0:
    v6 = iconst.i64 0
    v2 -> v6
    v5 = iconst.i64 0
    v1 -> v5
    v4 = iconst.i64 0
    v0 -> v4
    v3 = call fn0(v0, v1, v2)  ; v0 = 0, v1 = 0, v2 = 0
    return v3
}
",
        );
    }

    #[test]
    fn small_memcmp_zero_size() {
        let align_eight = std::num::NonZeroU8::new(8).unwrap();
        // A zero-size uge comparison folds to the constant "true".
        small_memcmp_helper(
            "
block0:
    v4 = iconst.i64 0
    v1 -> v4
    v3 = iconst.i64 0
    v0 -> v3
    v2 = iconst.i8 1
    return v2  ; v2 = 1",
            |builder, target, x, y| {
                builder.emit_small_memory_compare(
                    target.frontend_config(),
                    IntCC::UnsignedGreaterThanOrEqual,
                    x,
                    y,
                    0,
                    align_eight,
                    align_eight,
                    MemFlags::new(),
                )
            },
        );
    }

    #[test]
    fn small_memcmp_byte_ugt() {
        let align_one = std::num::NonZeroU8::new(1).unwrap();
        // Single-byte ordered comparison: inline loads + icmp, no libcall.
        small_memcmp_helper(
            "
block0:
    v6 = iconst.i64 0
    v1 -> v6
    v5 = iconst.i64 0
    v0 -> v5
    v2 = load.i8 aligned v0  ; v0 = 0
    v3 = load.i8 aligned v1  ; v1 = 0
    v4 = icmp ugt v2, v3
    return v4",
            |builder, target, x, y| {
                builder.emit_small_memory_compare(
                    target.frontend_config(),
                    IntCC::UnsignedGreaterThan,
                    x,
                    y,
                    1,
                    align_one,
                    align_one,
                    MemFlags::new(),
                )
            },
        );
    }

    #[test]
    fn small_memcmp_aligned_eq() {
        let align_four = std::num::NonZeroU8::new(4).unwrap();
        // 4-byte equality at 4-byte alignment: aligned i32 loads + icmp eq.
        small_memcmp_helper(
            "
block0:
    v6 = iconst.i64 0
    v1 -> v6
    v5 = iconst.i64 0
    v0 -> v5
    v2 = load.i32 aligned v0  ; v0 = 0
    v3 = load.i32 aligned v1  ; v1 = 0
    v4 = icmp eq v2, v3
    return v4",
            |builder, target, x, y| {
                builder.emit_small_memory_compare(
                    target.frontend_config(),
                    IntCC::Equal,
                    x,
                    y,
                    4,
                    align_four,
                    align_four,
                    MemFlags::new(),
                )
            },
        );
    }

    #[test]
    fn small_memcmp_ipv6_ne() {
        let align_two = std::num::NonZeroU8::new(2).unwrap();
        // 16-byte inequality at only 2-byte alignment: unaligned i128 loads.
        small_memcmp_helper(
            "
block0:
    v6 = iconst.i64 0
    v1 -> v6
    v5 = iconst.i64 0
    v0 -> v5
    v2 = load.i128 v0  ; v0 = 0
    v3 = load.i128 v1  ; v1 = 0
    v4 = icmp ne v2, v3
    return v4",
            |builder, target, x, y| {
                builder.emit_small_memory_compare(
                    target.frontend_config(),
                    IntCC::NotEqual,
                    x,
                    y,
                    16,
                    align_two,
                    align_two,
                    MemFlags::new(),
                )
            },
        );
    }

    #[test]
    fn small_memcmp_odd_size_uge() {
        let one = std::num::NonZeroU8::new(1).unwrap();
        // 3 bytes is not an integer width: falls back to a memcmp libcall
        // with a signed comparison of the result against zero.
        small_memcmp_helper(
            "
    sig0 = (i64, i64, i64) -> i32 system_v
    fn0 = %Memcmp sig0

block0:
    v6 = iconst.i64 0
    v1 -> v6
    v5 = iconst.i64 0
    v0 -> v5
    v2 = iconst.i64 3
    v3 = call fn0(v0, v1, v2)  ; v0 = 0, v1 = 0, v2 = 3
    v4 = icmp_imm sge v3, 0
    return v4",
            |builder, target, x, y| {
                builder.emit_small_memory_compare(
                    target.frontend_config(),
                    IntCC::UnsignedGreaterThanOrEqual,
                    x,
                    y,
                    3,
                    one,
                    one,
                    MemFlags::new(),
                )
            },
        );
    }

    // Shared scaffolding for the small_memcmp_* tests: builds a function
    // with two pointer variables, lets `f` emit the comparison, and checks
    // the printed IR against `expected`.
    fn small_memcmp_helper(
        expected: &str,
        f: impl FnOnce(&mut FunctionBuilder, &dyn TargetIsa, Value, Value) -> Value,
    ) {
        use core::str::FromStr;
        use cranelift_codegen::isa;

        let shared_builder = settings::builder();
        let shared_flags = settings::Flags::new(shared_builder);

        let triple =
            ::target_lexicon::Triple::from_str("x86_64").expect("Couldn't create x86_64 triple");

        let target = isa::lookup(triple)
            .ok()
            .map(|b| b.finish(shared_flags))
            .expect("This test requires x86_64 support.")
            .expect("Should be able to create backend with default flags");

        let mut sig = Signature::new(target.default_call_conv());
        sig.returns.push(AbiParam::new(I8));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let x = builder.declare_var(target.pointer_type());
            let y = builder.declare_var(target.pointer_type());
            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            let left = builder.use_var(x);
            let right = builder.use_var(y);
            let ret = f(&mut builder, &*target, left, right);
            builder.ins().return_(&[ret]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            &format!("function %sample() -> i8 system_v {{{expected}\n}}\n"),
        );
    }

    #[test]
    fn undef_vector_vars() {
        let mut sig = Signature::new(CallConv::SystemV);
        sig.returns.push(AbiParam::new(I8X16));
        sig.returns.push(AbiParam::new(I8X16));
        sig.returns.push(AbiParam::new(F32X4));

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            let a = builder.declare_var(I8X16);
            let b = builder.declare_var(I8X16);
            let c = builder.declare_var(F32X4);
            builder.switch_to_block(block0);

            // Using vector variables before any def: expect zero-initialized
            // vconst/splat values to be materialized.
            let a = builder.use_var(a);
            let b = builder.use_var(b);
            let c = builder.use_var(c);
            builder.ins().return_(&[a, b, c]);

            builder.seal_all_blocks();
            builder.finalize();
        }

        check(
            &func,
            "function %sample() -> i8x16, i8x16, f32x4 system_v {
    const0 = 0x00000000000000000000000000000000

block0:
    v5 = f32const 0.0
    v6 = splat.f32x4 v5  ; v5 = 0.0
    v2 -> v6
    v4 = vconst.i8x16 const0
    v1 -> v4
    v3 = vconst.i8x16 const0
    v0 -> v3
    return v0, v1, v2  ; v0 = const0, v1 = const0
}
",
        );
    }

    #[test]
    fn test_greatest_divisible_power_of_two() {
        assert_eq!(64, greatest_divisible_power_of_two(64));
        assert_eq!(16, greatest_divisible_power_of_two(48));
        assert_eq!(8, greatest_divisible_power_of_two(24));
        assert_eq!(1, greatest_divisible_power_of_two(25));
    }

    #[test]
    fn try_use_var() {
        let sig = Signature::new(CallConv::SystemV);

        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);
        {
            let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

            let block0 = builder.create_block();
            builder.append_block_params_for_function_params(block0);
            builder.switch_to_block(block0);

            // Using or defining a variable that was never declared must
            // report an error rather than panic.
            assert_eq!(
                builder.try_use_var(Variable::from_u32(0)),
                Err(UseVariableError::UsedBeforeDeclared(Variable::from_u32(0)))
            );

            let value = builder.ins().iconst(cranelift_codegen::ir::types::I32, 0);

            assert_eq!(
                builder.try_def_var(Variable::from_u32(0), value),
                Err(DefVariableError::DefinedBeforeDeclared(Variable::from_u32(
                    0
                )))
            );
        }
    }

    #[test]
    fn test_builder_with_iconst_and_negative_constant() {
        let sig = Signature::new(CallConv::SystemV);
        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);

        let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

        let block0 = builder.create_block();
        builder.switch_to_block(block0);
        builder.ins().iconst(I32, -1);
        builder.ins().return_(&[]);

        builder.seal_all_blocks();
        builder.finalize();

        let flags = cranelift_codegen::settings::Flags::new(cranelift_codegen::settings::builder());
        let ctx = cranelift_codegen::Context::for_function(func);
        ctx.verify(&flags).expect("should be valid");

        check(
            &ctx.func,
            "function %sample() system_v {
block0:
    v0 = iconst.i32 -1
    return
}",
        );
    }

    #[test]
    fn try_call() {
        let mut sig = Signature::new(CallConv::SystemV);
        sig.params.push(AbiParam::new(I8));
        sig.returns.push(AbiParam::new(I32));
        let mut fn_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(UserFuncName::testcase("sample"), sig);

        let sig0 = func.import_signature(Signature::new(CallConv::SystemV));
        let name = func.declare_imported_user_function(UserExternalName::new(0, 0));
        let fn0 = func.import_function(ExtFuncData {
            name: ExternalName::User(name),
            signature: sig0,
            colocated: false,
        });

        let mut builder = FunctionBuilder::new(&mut func, &mut fn_ctx);

        let block0 = builder.create_block();
        let block1 = builder.create_block();
        let block2 = builder.create_block();
        let block3 = builder.create_block();

        let my_var = builder.declare_var(I32);

        builder.switch_to_block(block0);
        let branch_val = builder.append_block_param(block0, I8);
        builder.ins().brif(branch_val, block1, &[], block2, &[]);

        builder.switch_to_block(block1);
        let one = builder.ins().iconst(I32, 1);
        builder.def_var(my_var, one);

        // `try_call` is a block terminator: both arms converge on block3,
        // which receives the live value of `my_var` as a block param.
        let normal_return = BlockCall::new(block3, [], &mut builder.func.dfg.value_lists);
        let exception_table = builder
            .func
            .dfg
            .exception_tables
            .push(ExceptionTableData::new(sig0, normal_return, []));
        builder.ins().try_call(fn0, &[], exception_table);

        builder.switch_to_block(block2);
        let two = builder.ins().iconst(I32, 2);
        builder.def_var(my_var, two);

        let normal_return = BlockCall::new(block3, [], &mut builder.func.dfg.value_lists);
        let exception_table = builder
            .func
            .dfg
            .exception_tables
            .push(ExceptionTableData::new(sig0, normal_return, []));
        builder.ins().try_call(fn0, &[], exception_table);

        builder.switch_to_block(block3);
        let ret_val = builder.use_var(my_var);
        builder.ins().return_(&[ret_val]);

        builder.seal_all_blocks();
        builder.finalize();

        let flags = cranelift_codegen::settings::Flags::new(cranelift_codegen::settings::builder());
        let ctx = cranelift_codegen::Context::for_function(func);
        ctx.verify(&flags).expect("should be valid");

        check(
            &ctx.func,
            "function %sample(i8) -> i32 system_v {
    sig0 = () system_v
    fn0 = u0:0 sig0

block0(v0: i8):
    brif v0, block1, block2

block1:
    v1 = iconst.i32 1
    try_call fn0(), sig0, block3(v1), []  ; v1 = 1

block2:
    v2 = iconst.i32 2
    try_call fn0(), sig0, block3(v2), []  ; v2 = 2

block3(v3: i32):
    return v3
}",
        );
    }
}
}