GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/crates/fiber/src/nostd.rs
//! no_std implementation of fibers.
//!
//! This is a very stripped-down version of the Unix platform support,
//! but without mmap or guard pages, because on no_std systems we do
//! not assume that virtual memory exists.
//!
//! The stack layout is nevertheless the same (modulo the guard page)
//! as on Unix because we share its low-level implementations:
//!
//! ```text
//! 0xB000 +-----------------------+   <- top of stack
//!        | &Cell<RunResult>      |   <- where to store results
//! 0xAff8 +-----------------------+
//!        | *const u8             |   <- last sp to resume from
//! 0xAff0 +-----------------------+   <- 16-byte aligned
//!        |                       |
//!        ~        ...            ~   <- actual native stack space to use
//!        |                       |
//! 0x0000 +-----------------------+
//! ```
//!
//! Here `0xAff8` is filled in temporarily while `resume` is running. The fiber
//! started with 0xB000 as a parameter so it knows how to find this.
//! Additionally `resume` stores state at 0xAff0 to restart execution, and
//! `suspend`, which also has 0xB000 so it can find this, will read that and
//! write its own resumption information into this slot as well.
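//!
//! Concretely, both sides locate the `&Cell<RunResult>` slot the same way:
//! it is the last pointer-sized word below the top of the stack, i.e.
//!
//! ```text
//! result_slot = top_of_stack - size_of::<usize>()   // 0xAff8 above
//! ```
//!
//! which is the arithmetic used by `Fiber::resume` and
//! `Suspend::result_location` below. The 0xAff0 slot (the saved stack
//! pointer) is managed by the stack-switching primitives imported from
//! `crate::stackswitch` (`wasmtime_fiber_init` and `wasmtime_fiber_switch`),
//! not by the Rust code in this module.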

use crate::stackswitch::*;
use crate::{Result, RunResult, RuntimeFiberStack};
use alloc::boxed::Box;
use alloc::{vec, vec::Vec};
use core::cell::Cell;
use core::ops::Range;

// The no_std implementation is infallible in practice, but we use
// `anyhow::Error` here absent any better alternative.
pub type Error = anyhow::Error;

pub struct FiberStack {
    base: BasePtr,
    len: usize,
    /// Backing storage, if owned. Allocated once at startup and then
    /// not reallocated afterward.
    storage: Vec<u8>,
}

struct BasePtr(*mut u8);

unsafe impl Send for BasePtr {}
unsafe impl Sync for BasePtr {}

const STACK_ALIGN: usize = 16;

/// Align a pointer by incrementing it by up to `align - 1` bytes; `align`
/// must be a power of two. The length is reduced by the same amount so that
/// the aligned pointer plus the new length still ends at `ptr + len`.
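///
/// For example (illustrative values): with `ptr = 0x1003`, `len = 0x100`, and
/// `align = 16`, `aligned` becomes `(0x1003 + 15) & !15 = 0x1010` and
/// `new_len` becomes `0x100 - 0xd = 0xf3`, so `aligned + new_len` still ends
/// at `0x1103`, the same endpoint as `ptr + len`.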
fn align_ptr(ptr: *mut u8, len: usize, align: usize) -> (*mut u8, usize) {
    let ptr = ptr as usize;
    let aligned = (ptr + align - 1) & !(align - 1);
    let new_len = len - (aligned - ptr);
    (aligned as *mut u8, new_len)
}

impl FiberStack {
    pub fn new(size: usize, zeroed: bool) -> Result<Self> {
        // Round up the size to at least one page.
        let size = core::cmp::max(4096, size);
        let mut storage = Vec::new();
        storage.reserve_exact(size);
        if zeroed {
            storage.resize(size, 0);
        }
        let (base, len) = align_ptr(storage.as_mut_ptr(), size, STACK_ALIGN);
        Ok(FiberStack {
            storage,
            base: BasePtr(base),
            len,
        })
    }

    pub unsafe fn from_raw_parts(base: *mut u8, guard_size: usize, len: usize) -> Result<Self> {
        Ok(FiberStack {
            storage: vec![],
            base: BasePtr(unsafe { base.offset(isize::try_from(guard_size).unwrap()) }),
            len,
        })
    }

    pub fn is_from_raw_parts(&self) -> bool {
        self.storage.is_empty()
    }

    pub fn from_custom(_custom: Box<dyn RuntimeFiberStack>) -> Result<Self> {
        unimplemented!("Custom fiber stacks not supported in no_std fiber library")
    }

    pub fn top(&self) -> Option<*mut u8> {
        Some(self.base.0.wrapping_byte_add(self.len))
    }

    pub fn range(&self) -> Option<Range<usize>> {
        let base = self.base.0 as usize;
        Some(base..base + self.len)
    }

    pub fn guard_range(&self) -> Option<Range<*mut u8>> {
        None
    }
}

/// A fiber handle; all per-fiber state lives on its `FiberStack`, so this is
/// a zero-sized type.
pub struct Fiber;

/// Passed to the fiber closure to suspend back to the caller; holds the top
/// of the stack so the reserved `&Cell<RunResult>` slot can be located.
pub struct Suspend {
    top_of_stack: *mut u8,
}

// Entry point of a new fiber: reconstructs the boxed closure handed to
// `Fiber::new`, pulls the initial `Resuming` value out of the shared slot,
// and runs the closure.
extern "C" fn fiber_start<F, A, B, C>(arg0: *mut u8, top_of_stack: *mut u8)
where
    F: FnOnce(A, &mut super::Suspend<A, B, C>) -> C,
{
    unsafe {
        let inner = Suspend { top_of_stack };
        let initial = inner.take_resume::<A, B, C>();
        super::Suspend::<A, B, C>::execute(inner, initial, Box::from_raw(arg0.cast::<F>()))
    }
}

impl Fiber {
    pub fn new<F, A, B, C>(stack: &FiberStack, func: F) -> Result<Self>
    where
        F: FnOnce(A, &mut super::Suspend<A, B, C>) -> C,
    {
        // On unsupported platforms `wasmtime_fiber_init` is a panicking shim,
        // so return an error saying the host architecture isn't supported
        // instead.
        if !SUPPORTED_ARCH {
            anyhow::bail!("fibers unsupported on this host architecture");
        }
        unsafe {
            let data = Box::into_raw(Box::new(func)).cast();
            wasmtime_fiber_init(stack.top().unwrap(), fiber_start::<F, A, B, C>, data);
        }

        Ok(Self)
    }

    pub(crate) fn resume<A, B, C>(&self, stack: &FiberStack, result: &Cell<RunResult<A, B, C>>) {
        unsafe {
            // Store where our result is going at the very tip-top of the
            // stack, otherwise known as our reserved slot for this
            // information.
            //
            // In the diagram above this is updating address 0xAff8.
            let addr = stack.top().unwrap().cast::<usize>().offset(-1);
            addr.write(result as *const _ as usize);

            assert!(SUPPORTED_ARCH);
            wasmtime_fiber_switch(stack.top().unwrap());

            // Null this out to help catch use-after-free.
            addr.write(0);
        }
    }

    pub(crate) unsafe fn drop<A, B, C>(&mut self) {}
}

impl Suspend {
    pub(crate) fn switch<A, B, C>(&mut self, result: RunResult<A, B, C>) -> A {
        unsafe {
            // Calculate 0xAff8 and then write to it.
            (*self.result_location::<A, B, C>()).set(result);

            wasmtime_fiber_switch(self.top_of_stack);

            self.take_resume::<A, B, C>()
        }
    }

    pub(crate) fn exit<A, B, C>(&mut self, result: RunResult<A, B, C>) {
        self.switch(result);
        unreachable!();
    }

    // Take the `Resuming` value out of the shared `Cell`, leaving `Executing`
    // in its place while the fiber runs.
    unsafe fn take_resume<A, B, C>(&self) -> A {
        unsafe {
            match (*self.result_location::<A, B, C>()).replace(RunResult::Executing) {
                RunResult::Resuming(val) => val,
                _ => panic!("not in resuming state"),
            }
        }
    }

    // Read back the `&Cell<RunResult>` pointer that `resume` wrote just below
    // the top of the stack (0xAff8 in the diagram).
    unsafe fn result_location<A, B, C>(&self) -> *const Cell<RunResult<A, B, C>> {
        unsafe {
            let ret = self.top_of_stack.cast::<*const u8>().offset(-1).read();
            assert!(!ret.is_null());
            ret.cast()
        }
    }
}
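
// A minimal sketch of the invariants `align_ptr` and `FiberStack::new` rely
// on, written as ordinary unit tests. This assumes the crate is compiled with
// a test harness on a hosted target; the code under test itself only needs
// `core` and `alloc`.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn align_ptr_keeps_the_endpoint() {
        let mut buf = [0u8; 64];
        let len = buf.len();
        let (aligned, new_len) = align_ptr(buf.as_mut_ptr(), len, STACK_ALIGN);
        // The base is aligned up and the length shrinks by the same amount.
        assert_eq!(aligned as usize % STACK_ALIGN, 0);
        assert_eq!(aligned as usize + new_len, buf.as_mut_ptr() as usize + len);
    }

    #[test]
    fn new_stack_is_at_least_one_page() {
        let stack = FiberStack::new(0, true).unwrap();
        let range = stack.range().unwrap();
        // Requests below one page are bumped to 4096 bytes, minus at most
        // `STACK_ALIGN - 1` bytes lost to aligning the base pointer.
        assert!(range.len() >= 4096 - (STACK_ALIGN - 1));
        assert_eq!(range.start % STACK_ALIGN, 0);
    }
}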