Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bytecodealliance
GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/crates/environ/src/component/vmcomponent_offsets.rs
1692 views
1
// Currently the `VMComponentContext` allocation by field looks like this:
2
//
3
// struct VMComponentContext {
4
// magic: u32,
5
// builtins: &'static VMComponentBuiltins,
6
//      vm_store_context: *const VMStoreContext,
7
// flags: [VMGlobalDefinition; component.num_runtime_component_instances],
8
// trampoline_func_refs: [VMFuncRef; component.num_trampolines],
9
// lowerings: [VMLowering; component.num_lowerings],
10
// memories: [*mut VMMemoryDefinition; component.num_runtime_memories],
11
// tables: [VMTable; component.num_runtime_tables],
12
//      reallocs: [*mut VMFuncRef; component.num_runtime_reallocs],
//      callbacks: [*mut VMFuncRef; component.num_runtime_callbacks],
13
// post_returns: [*mut VMFuncRef; component.num_runtime_post_returns],
14
// resource_destructors: [*mut VMFuncRef; component.num_resources],
15
// }
16
17
use crate::PtrSize;
18
use crate::component::*;
19
20
/// Equivalent of `VMCONTEXT_MAGIC` except for components.
///
/// This is stored at the start of all `VMComponentContext` structures and
/// double-checked on `VMComponentContext::from_opaque`.
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

// NOTE(review): the `FLAG_*` constants below are `i32` — presumably because
// each instance's flags are stored as a core-wasm `i32` global in the `flags`
// area of the context (see `size_of_vmglobal_definition` in `new`); confirm
// against the runtime's flag accessors.

/// Flag for the `VMComponentContext::flags` field which corresponds to the
/// canonical ABI flag `may_leave`
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;

/// Flag for the `VMComponentContext::flags` field which corresponds to the
/// canonical ABI flag `may_enter`
pub const FLAG_MAY_ENTER: i32 = 1 << 1;

/// Flag for the `VMComponentContext::flags` field which is set whenever a
/// function is called to indicate that `post_return` must be called next.
pub const FLAG_NEEDS_POST_RETURN: i32 = 1 << 2;
37
38
/// Runtime offsets within a `VMComponentContext` for a specific component.
///
/// The public fields are per-component counts copied out of `Component`; the
/// private `u32` fields are byte offsets from the start of the
/// `VMComponentContext` allocation, precomputed by `VMComponentOffsets::new`.
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The host pointer size
    pub ptr: P,

    /// The number of lowered functions this component will be creating.
    pub num_lowerings: u32,
    /// The number of memories which are recorded in this component for options.
    pub num_runtime_memories: u32,
    /// The number of tables which are recorded in this component for options.
    pub num_runtime_tables: u32,
    /// The number of reallocs which are recorded in this component for options.
    pub num_runtime_reallocs: u32,
    /// The number of callbacks which are recorded in this component for options.
    pub num_runtime_callbacks: u32,
    /// The number of post-returns which are recorded in this component for options.
    pub num_runtime_post_returns: u32,
    /// Number of component instances internally in the component (always at
    /// least 1).
    pub num_runtime_component_instances: u32,
    /// Number of cranelift-compiled trampolines required for this component.
    pub num_trampolines: u32,
    /// Number of resources within a component which need destructors stored.
    pub num_resources: u32,

    // Precalculated byte offsets of the various member fields, filled in by
    // `new`. Each is relative to the base of the context allocation; `size`
    // is the total size, in bytes, of the whole allocation.
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    trampoline_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    size: u32,
}
79
80
/// Rounds `offset` up to the next multiple of `align`.
///
/// `align` must be a power of two; an `offset` that is already aligned is
/// returned unchanged.
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    let mask = align - 1;
    (offset + mask) & !mask
}
85
86
impl<P: PtrSize> VMComponentOffsets<P> {
    /// Creates a new set of offsets for the `component` specified configured
    /// additionally for the `ptr` size specified.
    ///
    /// All arithmetic below is checked (`checked_add`/`checked_mul`) so a
    /// pathological component panics rather than silently wrapping offsets.
    pub fn new(ptr: P, component: &Component) -> Self {
        // Start with all offsets at zero; they're assigned in layout order by
        // the `fields!` invocation below.
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories,
            num_runtime_tables: component.num_runtime_tables,
            num_runtime_reallocs: component.num_runtime_reallocs,
            num_runtime_callbacks: component.num_runtime_callbacks,
            num_runtime_post_returns: component.num_runtime_post_returns,
            num_runtime_component_instances: component.num_runtime_component_instances,
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            num_resources: component.num_resources,
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            trampoline_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        // Convenience functions for checked addition and multiplication.
        // As side effect this reduces binary size by using only a single
        // `#[track_caller]` location for each function instead of one for
        // each individual invocation.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        // Running byte offset within the `VMComponentContext` allocation.
        let mut next_field_offset = 0;

        // Tiny layout DSL: `size(field) = n` records the current offset into
        // `ret.field` and advances by `n`; `align(n)` rounds the current
        // offset up to an `n`-byte boundary.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        // The order here defines the in-memory layout documented at the top
        // of this file; reordering entries changes the ABI of the context.
        // NOTE(review): the `align(16)` before `flags` presumably matches the
        // alignment requirement of `VMGlobalDefinition` — confirm against its
        // definition.
        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable_import()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

        // This is required by the implementation of
        // `VMComponentContext::from_opaque`. If this value changes then this
        // location needs to be updated.
        assert_eq!(ret.magic, 0);

        return ret;
    }

    /// The size, in bytes, of the host pointer.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// The offset of the `magic` field.
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// The offset of the `builtins` field.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

    /// The offset of the `flags` field for the component instance `index`.
    ///
    /// Panics if `index` is out of range for this component.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `vm_store_context` field.
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

    /// The offset of the `trampoline_func_refs` field.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// The offset of `VMFuncRef` for the `index` specified.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `lowerings` field.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// The offset of the `VMLowering` for the `index` specified.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        // Each `VMLowering` is two pointers wide (see `lowering_size`).
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// The offset of the `callee` for the `index` specified.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// The offset of the `data` for the `index` specified.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// The size of the `VMLowering` type
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// The offset of the `callee` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// The offset of the `data` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_memories` field
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// The offset of the `*mut VMMemoryDefinition` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_tables` field
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

    /// The offset of the table for the runtime index provided.
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// Return the size of `VMTableImport`, used here to hold the pointers to
    /// the `VMTableDefinition` and `VMContext`.
    ///
    /// NOTE(review): this reserves three pointer-sized slots even though the
    /// sentence above names only two pointers — presumably `VMTableImport`
    /// carries a third pointer-sized field; confirm against its definition.
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }

    /// The offset of the base of the `runtime_reallocs` field
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_callbacks` field
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_post_returns` field
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `resource_destructors` field
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Return the size of the `VMComponentContext` allocation.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}
351
352