GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/crates/fiber/src/nostd.rs
//! no_std implementation of fibers.
//!
//! This is a very stripped-down version of the Unix platform support,
//! but without mmap or guard pages, because on no_std systems we do
//! not assume that virtual memory exists.
//!
//! The stack layout is nevertheless the same (modulo the guard page)
//! as on Unix because we share its low-level implementations:
//!
//! ```text
//! 0xB000 +-----------------------+   <- top of stack
//!        | &Cell<RunResult>      |   <- where to store results
//! 0xAff8 +-----------------------+
//!        | *const u8             |   <- last sp to resume from
//! 0xAff0 +-----------------------+   <- 16-byte aligned
//!        |                       |
//!        ~        ...            ~   <- actual native stack space to use
//!        |                       |
//! 0x0000 +-----------------------+
//! ```
//!
//! Here `0xAff8` is filled in temporarily while `resume` is running. The fiber
//! started with 0xB000 as a parameter so it knows how to find this.
//! Additionally `resume` stores state at 0xAff0 to restart execution, and
//! `suspend`, which has 0xB000 so it can find this, will read that and write
//! its own resumption information into this slot as well.
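
// A minimal sketch (illustrative only, not used by this module) of how the two
// slots in the diagram above are addressed on a 64-bit target, assuming a
// `FiberStack` value bound to `stack`:
//
//     let top: *mut u8 = stack.top().unwrap();
//     let result_slot = top.cast::<usize>().wrapping_offset(-1);    // 0xAff8: &Cell<RunResult>
//     let resume_sp_slot = top.cast::<usize>().wrapping_offset(-2); // 0xAff0: last sp to resume from
//
// The real reads and writes of these slots happen in `Fiber::resume` and
// `Suspend::result_location` below.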

use crate::stackswitch::*;
use crate::{Result, RunResult, RuntimeFiberStack};
use alloc::boxed::Box;
use alloc::{vec, vec::Vec};
use core::cell::Cell;
use core::ops::Range;
use wasmtime_environ::prelude::*;

// The no_std implementation is infallible in practice, but we use
// `wasmtime_environ::error::Error` here absent any better alternative.
pub use wasmtime_environ::error::Error;

pub struct FiberStack {
    base: BasePtr,
    len: usize,
    /// Backing storage, if owned. Allocated once at startup and then
    /// not reallocated afterward.
    storage: Vec<u8>,
}

struct BasePtr(*mut u8);

unsafe impl Send for BasePtr {}
unsafe impl Sync for BasePtr {}

const STACK_ALIGN: usize = 16;

/// Align a pointer by incrementing it up to `align - 1`
/// bytes. `align` must be a power of two. Also updates the length as
/// appropriate so that `ptr + len` points to the same endpoint.
fn align_ptr(ptr: *mut u8, len: usize, align: usize) -> (*mut u8, usize) {
    let ptr = ptr as usize;
    let aligned = (ptr + align - 1) & !(align - 1);
    let new_len = len - (aligned - ptr);
    (aligned as *mut u8, new_len)
}
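
// Worked example (illustrative only): with `ptr = 0x1003`, `len = 0x100`, and
// `align = 16`, `aligned` is (0x1003 + 15) & !15 = 0x1010 and `new_len` is
// 0x100 - 0xD = 0xF3, so `aligned + new_len` still equals `ptr + len` (0x1103).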

impl FiberStack {
    pub fn new(size: usize, zeroed: bool) -> Result<Self> {
        // Round up the size to at least one page.
        let size = core::cmp::max(4096, size);
        let mut storage = Vec::new();
        storage.reserve_exact(size);
        if zeroed {
            storage.resize(size, 0);
        }
        let (base, len) = align_ptr(storage.as_mut_ptr(), size, STACK_ALIGN);
        Ok(FiberStack {
            storage,
            base: BasePtr(base),
            len,
        })
    }

    pub unsafe fn from_raw_parts(base: *mut u8, guard_size: usize, len: usize) -> Result<Self> {
        Ok(FiberStack {
            storage: vec![],
            base: BasePtr(unsafe { base.offset(isize::try_from(guard_size).unwrap()) }),
            len,
        })
    }
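
    // Note on `from_raw_parts` above: `base` is advanced past the caller-provided
    // guard region, so `top()` and `range()` below describe only the usable stack
    // memory. This module never installs a guard page itself, which is why
    // `guard_range()` returns `None`.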

    pub fn is_from_raw_parts(&self) -> bool {
        self.storage.is_empty()
    }

    pub fn from_custom(_custom: Box<dyn RuntimeFiberStack>) -> Result<Self> {
        unimplemented!("Custom fiber stacks not supported in no_std fiber library")
    }

    pub fn top(&self) -> Option<*mut u8> {
        Some(self.base.0.wrapping_byte_add(self.len))
    }

    pub fn range(&self) -> Option<Range<usize>> {
        let base = self.base.0 as usize;
        Some(base..base + self.len)
    }

    pub fn guard_range(&self) -> Option<Range<*mut u8>> {
        None
    }
}

pub struct Fiber;

pub struct Suspend {
    top_of_stack: *mut u8,
}

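// `arg0` below is the raw `Box<F>` pointer that `Fiber::new` hands to
// `wasmtime_fiber_init` as its data argument, and `top_of_stack` is the
// 0xB000-style pointer from the module docs, which is how the new fiber
// locates its reserved slots.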
extern "C" fn fiber_start<F, A, B, C>(arg0: *mut u8, top_of_stack: *mut u8)
119
where
120
F: FnOnce(A, &mut super::Suspend<A, B, C>) -> C,
121
{
122
unsafe {
123
let inner = Suspend { top_of_stack };
124
let initial = inner.take_resume::<A, B, C>();
125
super::Suspend::<A, B, C>::execute(inner, initial, Box::from_raw(arg0.cast::<F>()))
126
}
127
}

impl Fiber {
    pub fn new<F, A, B, C>(stack: &FiberStack, func: F) -> Result<Self>
    where
        F: FnOnce(A, &mut super::Suspend<A, B, C>) -> C,
    {
        // On unsupported platforms `wasmtime_fiber_init` is a panicking shim so
        // return an error saying the host architecture isn't supported instead.
        if !SUPPORTED_ARCH {
            bail!("fibers unsupported on this host architecture");
        }
        unsafe {
            let data = Box::into_raw(Box::new(func)).cast();
            wasmtime_fiber_init(stack.top().unwrap(), fiber_start::<F, A, B, C>, data);
        }

        Ok(Self)
    }

    pub(crate) fn resume<A, B, C>(&self, stack: &FiberStack, result: &Cell<RunResult<A, B, C>>) {
        unsafe {
            // Store where our result is going at the very tip-top of the
            // stack, otherwise known as our reserved slot for this information.
            //
            // In the diagram above this is updating address 0xAff8
            let addr = stack.top().unwrap().cast::<usize>().offset(-1);
            addr.write(result as *const _ as usize);

            assert!(SUPPORTED_ARCH);
            wasmtime_fiber_switch(stack.top().unwrap());

            // null this out to help catch use-after-free
            addr.write(0);
        }
    }

    pub(crate) unsafe fn drop<A, B, C>(&mut self) {}
}
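
// The handshake between the two sides, as implemented above and below:
// `Fiber::resume` publishes a pointer to the caller's `Cell<RunResult>` in the
// 0xAff8 slot and calls `wasmtime_fiber_switch`; on the fiber side,
// `Suspend::switch` locates that cell via `result_location`, stores its
// `RunResult` into it, and switches back, while `take_resume` reads the
// `Resuming` value out of the same cell when the fiber is next resumed.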

impl Suspend {
    pub(crate) fn switch<A, B, C>(&mut self, result: RunResult<A, B, C>) -> A {
        unsafe {
            // Calculate 0xAff8 and then write to it
            (*self.result_location::<A, B, C>()).set(result);

            wasmtime_fiber_switch(self.top_of_stack);

            self.take_resume::<A, B, C>()
        }
    }

    pub(crate) fn exit<A, B, C>(&mut self, result: RunResult<A, B, C>) {
        self.switch(result);
        unreachable!();
    }

    unsafe fn take_resume<A, B, C>(&self) -> A {
        unsafe {
            match (*self.result_location::<A, B, C>()).replace(RunResult::Executing) {
                RunResult::Resuming(val) => val,
                _ => panic!("not in resuming state"),
            }
        }
    }

    unsafe fn result_location<A, B, C>(&self) -> *const Cell<RunResult<A, B, C>> {
        unsafe {
            let ret = self.top_of_stack.cast::<*const u8>().offset(-1).read();
            assert!(!ret.is_null());
            ret.cast()
        }
    }
}