GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/crates/environ/src/builtin.rs
/// Helper macro to iterate over all builtin functions and their signatures.
#[macro_export]
macro_rules! foreach_builtin_function {
    ($mac:ident) => {
        $mac! {
            // Returns an index for wasm's `memory.grow` builtin function.
            memory_grow(vmctx: vmctx, delta: u64, index: u32) -> pointer;
            // Returns an index for wasm's `table.copy` when both tables are locally
            // defined.
            table_copy(vmctx: vmctx, dst_index: u32, src_index: u32, dst: u64, src: u64, len: u64) -> bool;
            // Returns an index for wasm's `table.init`.
            table_init(vmctx: vmctx, table: u32, elem: u32, dst: u64, src: u64, len: u64) -> bool;
            // Returns an index for wasm's `elem.drop`.
            elem_drop(vmctx: vmctx, elem: u32);
            // Returns an index for wasm's `memory.copy` instruction.
            memory_copy(vmctx: vmctx, dst_index: u32, dst: u64, src_index: u32, src: u64, len: u64) -> bool;
            // Returns an index for wasm's `memory.fill` instruction.
            memory_fill(vmctx: vmctx, memory: u32, dst: u64, val: u32, len: u64) -> bool;
            // Returns an index for wasm's `memory.init` instruction.
            memory_init(vmctx: vmctx, memory: u32, data: u32, dst: u64, src: u32, len: u32) -> bool;
            // Returns a value for wasm's `ref.func` instruction.
            ref_func(vmctx: vmctx, func: u32) -> pointer;
            // Returns an index for wasm's `data.drop` instruction.
            data_drop(vmctx: vmctx, data: u32);
            // Returns a table entry after lazily initializing it.
            table_get_lazy_init_func_ref(vmctx: vmctx, table: u32, index: u64) -> pointer;
            // Returns an index for Wasm's `table.grow` instruction for `funcref`s.
            table_grow_func_ref(vmctx: vmctx, table: u32, delta: u64, init: pointer) -> pointer;
            // Returns an index for Wasm's `table.fill` instruction for `funcref`s.
            table_fill_func_ref(vmctx: vmctx, table: u32, dst: u64, val: pointer, len: u64) -> bool;
            // Returns an index for wasm's `memory.atomic.notify` instruction.
            #[cfg(feature = "threads")]
            memory_atomic_notify(vmctx: vmctx, memory: u32, addr: u64, count: u32) -> u64;
            // Returns an index for wasm's `memory.atomic.wait32` instruction.
            #[cfg(feature = "threads")]
            memory_atomic_wait32(vmctx: vmctx, memory: u32, addr: u64, expected: u32, timeout: u64) -> u64;
            // Returns an index for wasm's `memory.atomic.wait64` instruction.
            #[cfg(feature = "threads")]
            memory_atomic_wait64(vmctx: vmctx, memory: u32, addr: u64, expected: u64, timeout: u64) -> u64;
            // Invoked when fuel has run out while executing a function.
            out_of_gas(vmctx: vmctx) -> bool;
            // Invoked when we reach a new epoch.
            #[cfg(target_has_atomic = "64")]
            new_epoch(vmctx: vmctx) -> u64;
            // Invoked before `malloc` returns.
            #[cfg(feature = "wmemcheck")]
            check_malloc(vmctx: vmctx, addr: u32, len: u32) -> bool;
            // Invoked before `free` returns.
            #[cfg(feature = "wmemcheck")]
            check_free(vmctx: vmctx, addr: u32) -> bool;
            // Invoked before a load is executed.
            #[cfg(feature = "wmemcheck")]
            check_load(vmctx: vmctx, num_bytes: u32, addr: u32, offset: u32) -> bool;
            // Invoked before a store is executed.
            #[cfg(feature = "wmemcheck")]
            check_store(vmctx: vmctx, num_bytes: u32, addr: u32, offset: u32) -> bool;
            // Invoked after `malloc` is called.
            #[cfg(feature = "wmemcheck")]
            malloc_start(vmctx: vmctx);
            // Invoked after `free` is called.
            #[cfg(feature = "wmemcheck")]
            free_start(vmctx: vmctx);
            // Invoked when the wasm stack pointer is updated.
            #[cfg(feature = "wmemcheck")]
            update_stack_pointer(vmctx: vmctx, value: u32);
            // Invoked before `memory.grow` is called.
            #[cfg(feature = "wmemcheck")]
            update_mem_size(vmctx: vmctx, num_bytes: u32);

            // Drop a non-stack GC reference (e.g. an overwritten table entry)
            // once it will no longer be used again. (Note: `val` is not of type
            // `reference` because it needn't appear in any stack maps, as it
            // must not be live after this call.)
            #[cfg(feature = "gc-drc")]
            drop_gc_ref(vmctx: vmctx, val: u32);

            // Grow the GC heap by `bytes_needed` bytes.
            //
            // Traps if growing the GC heap fails.
            #[cfg(feature = "gc-null")]
            grow_gc_heap(vmctx: vmctx, bytes_needed: u64) -> bool;

            // Allocate a new, uninitialized GC object and return a reference to
            // it.
            #[cfg(feature = "gc-drc")]
            gc_alloc_raw(
                vmctx: vmctx,
                kind: u32,
                module_interned_type_index: u32,
                size: u32,
                align: u32
            ) -> u32;

            // Intern a `funcref` into the GC heap, returning its
            // `FuncRefTableId`.
            //
            // This libcall may not GC.
            #[cfg(feature = "gc")]
            intern_func_ref_for_gc_heap(
                vmctx: vmctx,
                func_ref: pointer
            ) -> u64;

            // Get the raw `VMFuncRef` pointer associated with a
            // `FuncRefTableId` from an earlier `intern_func_ref_for_gc_heap`
            // call.
            //
            // This libcall may not GC.
            //
            // Passes in the `ModuleInternedTypeIndex` of the funcref's expected
            // type, or `ModuleInternedTypeIndex::reserved_value()` if we are
            // getting the function reference as an untyped `funcref` rather
            // than a typed `(ref $ty)`.
            //
            // TODO: We will want to eventually expose the table directly to
            // Wasm code, so that it doesn't need to make a libcall to go from
            // id to `VMFuncRef`. That will be a little tricky: it will also
            // require updating the pointer to the slab in the `VMContext` (or
            // `VMStoreContext` or wherever we put it) when the slab is
            // resized.
            #[cfg(feature = "gc")]
            get_interned_func_ref(
                vmctx: vmctx,
                func_ref_id: u32,
                module_interned_type_index: u32
            ) -> pointer;

            // Builtin implementation of the `array.new_data` instruction.
            #[cfg(feature = "gc")]
            array_new_data(
                vmctx: vmctx,
                array_interned_type_index: u32,
                data_index: u32,
                data_offset: u32,
                len: u32
            ) -> u32;

            // Builtin implementation of the `array.new_elem` instruction.
            #[cfg(feature = "gc")]
            array_new_elem(
                vmctx: vmctx,
                array_interned_type_index: u32,
                elem_index: u32,
                elem_offset: u32,
                len: u32
            ) -> u32;

            // Builtin implementation of the `array.copy` instruction.
            #[cfg(feature = "gc")]
            array_copy(
                vmctx: vmctx,
                dst_array: u32,
                dst_index: u32,
                src_array: u32,
                src_index: u32,
                len: u32
            ) -> bool;

            // Builtin implementation of the `array.init_data` instruction.
            #[cfg(feature = "gc")]
            array_init_data(
                vmctx: vmctx,
                array_interned_type_index: u32,
                array: u32,
                dst_index: u32,
                data_index: u32,
                data_offset: u32,
                len: u32
            ) -> bool;

            // Builtin implementation of the `array.init_elem` instruction.
            #[cfg(feature = "gc")]
            array_init_elem(
                vmctx: vmctx,
                array_interned_type_index: u32,
                array: u32,
                dst: u32,
                elem_index: u32,
                src: u32,
                len: u32
            ) -> bool;

            // Returns whether `actual_engine_type` is a subtype of
            // `expected_engine_type`.
            #[cfg(feature = "gc")]
            is_subtype(
                vmctx: vmctx,
                actual_engine_type: u32,
                expected_engine_type: u32
            ) -> u32;

            // Returns an index for Wasm's `table.grow` instruction for GC references.
            #[cfg(feature = "gc")]
            table_grow_gc_ref(vmctx: vmctx, table: u32, delta: u64, init: u32) -> pointer;

            // Returns an index for Wasm's `table.fill` instruction for GC references.
            #[cfg(feature = "gc")]
            table_fill_gc_ref(vmctx: vmctx, table: u32, dst: u64, val: u32, len: u64) -> bool;

            // Wasm floating-point routines for when the CPU instructions aren't available.
            ceil_f32(vmctx: vmctx, x: f32) -> f32;
            ceil_f64(vmctx: vmctx, x: f64) -> f64;
            floor_f32(vmctx: vmctx, x: f32) -> f32;
            floor_f64(vmctx: vmctx, x: f64) -> f64;
            trunc_f32(vmctx: vmctx, x: f32) -> f32;
            trunc_f64(vmctx: vmctx, x: f64) -> f64;
            nearest_f32(vmctx: vmctx, x: f32) -> f32;
            nearest_f64(vmctx: vmctx, x: f64) -> f64;
            i8x16_swizzle(vmctx: vmctx, a: i8x16, b: i8x16) -> i8x16;
            i8x16_shuffle(vmctx: vmctx, a: i8x16, b: i8x16, c: i8x16) -> i8x16;
            fma_f32x4(vmctx: vmctx, x: f32x4, y: f32x4, z: f32x4) -> f32x4;
            fma_f64x2(vmctx: vmctx, x: f64x2, y: f64x2, z: f64x2) -> f64x2;

            // Raises an unconditional trap with the specified code.
            //
            // This is used when signals-based traps are disabled, for example on
            // backends where an illegal instruction can't be executed.
            trap(vmctx: vmctx, code: u8);

            // Raises an unconditional trap where the trap information must have
            // been previously filled in.
            raise(vmctx: vmctx);

            // Creates a new continuation from a funcref.
            #[cfg(feature = "stack-switching")]
            cont_new(vmctx: vmctx, r: pointer, param_count: u32, result_count: u32) -> pointer;

            // Returns an index for Wasm's `table.grow` instruction
            // for `contobj`s. Note that the initial
            // `Option<VMContObj>` (i.e., the value to fill the new
            // slots with) is split into two arguments: the underlying
            // continuation reference and the revision count. To
            // denote the continuation being `None`, `init_contref`
            // may be 0.
            #[cfg(feature = "stack-switching")]
            table_grow_cont_obj(vmctx: vmctx, table: u32, delta: u64, init_contref: pointer, init_revision: size) -> pointer;

            // `value_contref` and `value_revision` together encode
            // the `Option<VMContObj>`, as in the previous libcall.
            #[cfg(feature = "stack-switching")]
            table_fill_cont_obj(vmctx: vmctx, table: u32, dst: u64, value_contref: pointer, value_revision: size, len: u64) -> bool;

            // Return the instance ID for a given vmctx.
            #[cfg(feature = "gc")]
            get_instance_id(vmctx: vmctx) -> u32;

            // Throw an exception.
            #[cfg(feature = "gc")]
            throw_ref(vmctx: vmctx, exnref: u32) -> bool;
        }
    };
}
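
// A minimal sketch of how a consumer drives the iterator macro above: pass it
// the name of a macro that accepts the full builtin list (optional attributes,
// a name, a parameter list, and an optional return type per entry). The
// `count_builtins!` macro and `NUM_BUILTINS` constant below are hypothetical
// and for illustration only; the `#[cfg]` attributes are matched but not
// applied, so the count is an upper bound when some features are disabled.
#[cfg(test)]
mod foreach_builtin_example {
    macro_rules! count_builtins {
        (
            $(
                $( #[$attr:meta] )*
                $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
            )*
        ) => {
            // One string per builtin; `.len()` on the array is const-evaluable.
            pub const NUM_BUILTINS: usize = [$( stringify!($name) ),*].len();
        };
    }

    foreach_builtin_function!(count_builtins);

    #[test]
    fn builtin_list_is_not_empty() {
        assert!(NUM_BUILTINS > 0);
    }
}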

/// Helper macro to define a builtin index type such as `BuiltinFunctionIndex`
/// or `ComponentBuiltinFunctionIndex`, using an iterator macro (e.g.
/// `foreach_builtin_function`) to generate the accessor methods.
macro_rules! declare_builtin_index {
    (
        $(#[$attr:meta])*
        pub struct $index_name:ident : $for_each_builtin:ident ;
    ) => {
        $(#[$attr])*
        #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
        pub struct $index_name(u32);

        impl $index_name {
            /// Create a new builtin index from its raw `u32` value.
            pub const fn from_u32(i: u32) -> Self {
                assert!(i < Self::len());
                Self(i)
            }

            /// Return the index as a `u32` number.
            pub const fn index(&self) -> u32 {
                self.0
            }

            $for_each_builtin!(declare_builtin_index_constructors);
        }
    };
}

/// Helper macro used by the above macro.
macro_rules! declare_builtin_index_constructors {
    (
        $(
            $( #[$attr:meta] )*
            $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
        )*
    ) => {
        declare_builtin_index_constructors!(
            @indices;
            0;
            $( $( #[$attr] )* $name; )*
        );

        /// Returns a symbol name for this builtin.
        pub fn name(&self) -> &'static str {
            $(
                if *self == Self::$name() {
                    return stringify!($name);
                }
            )*
            unreachable!()
        }
    };

    // Base case: no more indices to declare, so define the total number of
    // function indices.
    (
        @indices;
        $len:expr;
    ) => {
        /// Returns the total number of builtin functions.
        pub const fn len() -> u32 {
            $len
        }
    };

    // Recursive case: declare the next index, and then keep declaring the rest of
    // the indices.
    (
        @indices;
        $index:expr;
        $( #[$this_attr:meta] )*
        $this_name:ident;
        $(
            $( #[$rest_attr:meta] )*
            $rest_name:ident;
        )*
    ) => {
        #[expect(missing_docs, reason = "macro-generated")]
        pub const fn $this_name() -> Self {
            Self($index)
        }

        declare_builtin_index_constructors!(
            @indices;
            ($index + 1);
            $( $( #[$rest_attr] )* $rest_name; )*
        );
    }
}
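
// A minimal sketch of what the two macros above generate, driven by a tiny,
// hypothetical two-entry iterator (`foreach_demo_builtin!` and
// `DemoBuiltinIndex` exist only for this example). The recursive `@indices`
// rules assign `Self(0)`, `Self((0 + 1))`, ... in declaration order, and the
// base case emits `len()` with the accumulated count.
#[cfg(test)]
mod declare_builtin_index_example {
    macro_rules! foreach_demo_builtin {
        ($mac:ident) => {
            $mac! {
                demo_first(vmctx: vmctx) -> bool;
                demo_second(vmctx: vmctx, len: u64) -> bool;
            }
        };
    }

    declare_builtin_index! {
        /// Index type generated from the two-entry demo list above.
        pub struct DemoBuiltinIndex : foreach_demo_builtin;
    }

    #[test]
    fn demo_indices_follow_declaration_order() {
        assert_eq!(DemoBuiltinIndex::demo_first().index(), 0);
        assert_eq!(DemoBuiltinIndex::demo_second().index(), 1);
        assert_eq!(DemoBuiltinIndex::demo_second().name(), "demo_second");
        assert_eq!(DemoBuiltinIndex::len(), 2);
    }
}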

// Define `struct BuiltinFunctionIndex`
declare_builtin_index! {
    /// An index type for builtin functions.
    pub struct BuiltinFunctionIndex : foreach_builtin_function;
}
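
// The generated API in action: `memory_grow` is the first entry in
// `foreach_builtin_function!`, so it gets index 0, and `from_u32` round-trips
// with `index()`. A small illustrative test (not part of the upstream file):
#[cfg(test)]
mod builtin_index_example {
    use super::BuiltinFunctionIndex;

    #[test]
    fn builtin_indices_round_trip() {
        let grow = BuiltinFunctionIndex::memory_grow();
        assert_eq!(grow.index(), 0);
        assert_eq!(grow.name(), "memory_grow");
        assert_eq!(BuiltinFunctionIndex::from_u32(0), grow);
        assert!(BuiltinFunctionIndex::len() > 0);
    }
}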

/// Return value of [`BuiltinFunctionIndex::trap_sentinel`].
pub enum TrapSentinel {
    /// A falsy or zero value indicates a trap.
    Falsy,
    /// The value `-2` indicates a trap (used for growth-related builtins).
    NegativeTwo,
    /// The value `-1` indicates a trap.
    NegativeOne,
    /// Any negative value indicates a trap.
    Negative,
}

impl BuiltinFunctionIndex {
    /// Describes the return value of this builtin and what represents a trap.
    ///
    /// Libcalls don't raise traps themselves and instead delegate to compilers
    /// to do so. This means that some return values of libcalls indicate that
    /// a trap is happening, and this is represented with sentinel values. This
    /// function returns the description of the sentinel value which indicates
    /// a trap, if any. If `None` is returned from this function then this
    /// builtin cannot generate a trap.
    #[allow(unreachable_code, unused_macro_rules, reason = "macro-generated code")]
    pub fn trap_sentinel(&self) -> Option<TrapSentinel> {
        macro_rules! trap_sentinel {
            (
                $(
                    $( #[$attr:meta] )*
                    $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
                )*
            ) => {{
                $(
                    $(#[$attr])*
                    if *self == BuiltinFunctionIndex::$name() {
                        let mut _ret = None;
                        $(_ret = Some(trap_sentinel!(@get $name $result));)?
                        return _ret;
                    }
                )*

                None
            }};

            // Growth-related functions return -2 as a sentinel.
            (@get memory_grow pointer) => (TrapSentinel::NegativeTwo);
            (@get table_grow_func_ref pointer) => (TrapSentinel::NegativeTwo);
            (@get table_grow_gc_ref pointer) => (TrapSentinel::NegativeTwo);
            (@get table_grow_cont_obj pointer) => (TrapSentinel::NegativeTwo);

            // Atomics-related functions return a negative value to indicate a trap.
            (@get memory_atomic_notify u64) => (TrapSentinel::Negative);
            (@get memory_atomic_wait32 u64) => (TrapSentinel::Negative);
            (@get memory_atomic_wait64 u64) => (TrapSentinel::Negative);

            // GC returns an optional GC ref, encoded as a `u64` with a negative
            // value indicating a trap.
            (@get gc u64) => (TrapSentinel::Negative);

            // GC allocation functions return a u32 which is zero to indicate a
            // trap.
            (@get gc_alloc_raw u32) => (TrapSentinel::Falsy);
            (@get array_new_data u32) => (TrapSentinel::Falsy);
            (@get array_new_elem u32) => (TrapSentinel::Falsy);

            // The final epoch represents a trap.
            (@get new_epoch u64) => (TrapSentinel::NegativeOne);

            // These libcalls can't trap.
            (@get ref_func pointer) => (return None);
            (@get table_get_lazy_init_func_ref pointer) => (return None);
            (@get get_interned_func_ref pointer) => (return None);
            (@get intern_func_ref_for_gc_heap u64) => (return None);
            (@get is_subtype u32) => (return None);
            (@get ceil_f32 f32) => (return None);
            (@get ceil_f64 f64) => (return None);
            (@get floor_f32 f32) => (return None);
            (@get floor_f64 f64) => (return None);
            (@get trunc_f32 f32) => (return None);
            (@get trunc_f64 f64) => (return None);
            (@get nearest_f32 f32) => (return None);
            (@get nearest_f64 f64) => (return None);
            (@get i8x16_swizzle i8x16) => (return None);
            (@get i8x16_shuffle i8x16) => (return None);
            (@get fma_f32x4 f32x4) => (return None);
            (@get fma_f64x2 f64x2) => (return None);

            (@get cont_new pointer) => (TrapSentinel::Negative);

            (@get get_instance_id u32) => (return None);

            // Bool-returning functions use `false` as an indicator of a trap.
            (@get $name:ident bool) => (TrapSentinel::Falsy);

            (@get $name:ident $ret:ident) => (
                compile_error!(concat!("no trap sentinel registered for ", stringify!($name)))
            )
        }

        foreach_builtin_function!(trap_sentinel)
    }
}
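
// A sketch of how a caller might interpret `trap_sentinel` when deciding
// whether to emit a trap check after a libcall returns; the `describe` helper
// below is hypothetical and exists only for illustration.
#[cfg(test)]
mod trap_sentinel_example {
    use super::{BuiltinFunctionIndex, TrapSentinel};

    fn describe(builtin: BuiltinFunctionIndex) -> &'static str {
        match builtin.trap_sentinel() {
            None => "return value never indicates a trap",
            Some(TrapSentinel::Falsy) => "traps when the return value is zero or false",
            Some(TrapSentinel::NegativeOne) => "traps when the return value is -1",
            Some(TrapSentinel::NegativeTwo) => "traps when the return value is -2",
            Some(TrapSentinel::Negative) => "traps when the return value is negative",
        }
    }

    #[test]
    fn sentinels_match_the_builtin_table() {
        // `memory_grow` uses -2 as its failure sentinel; `ref_func` cannot trap.
        assert_eq!(
            describe(BuiltinFunctionIndex::memory_grow()),
            "traps when the return value is -2"
        );
        assert_eq!(
            describe(BuiltinFunctionIndex::ref_func()),
            "return value never indicates a trap"
        );
    }
}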