// crates/environ/src/component/vmcomponent_offsets.rs
// Currently the `VMComponentContext` allocation by field looks like this:
//
// struct VMComponentContext {
//      magic: u32,
//      builtins: &'static VMComponentBuiltins,
//      vm_store_context: *const VMStoreContext,
//      flags: [VMGlobalDefinition; component.num_runtime_component_instances],
//      task_may_block: VMGlobalDefinition, // logically a u32 flag, but sized
//                                          // as a global definition below
//      trampoline_func_refs: [VMFuncRef; component.num_trampolines],
//      unsafe_intrinsics: [VMFuncRef; component.num_unsafe_intrinsics],
//      lowerings: [VMLowering; component.num_lowerings],
//      memories: [*mut VMMemoryDefinition; component.num_runtime_memories],
//      tables: [VMTable; component.num_runtime_tables],
//      reallocs: [*mut VMFuncRef; component.num_runtime_reallocs],
//      callbacks: [*mut VMFuncRef; component.num_runtime_callbacks],
//      post_returns: [*mut VMFuncRef; component.num_runtime_post_returns],
//      resource_destructors: [*mut VMFuncRef; component.num_resources],
// }

use crate::PtrSize;
use crate::component::*;

/// Equivalent of `VMCONTEXT_MAGIC` except for components.
///
/// This is stored at the start of all `VMComponentContext` structures and
/// double-checked on `VMComponentContext::from_opaque`.
pub const VMCOMPONENT_MAGIC: u32 = u32::from_le_bytes(*b"comp");

/// Flag for the `VMComponentContext::flags` field which corresponds to the
/// canonical ABI flag `may_leave`
pub const FLAG_MAY_LEAVE: i32 = 1 << 0;

/// Runtime offsets within a `VMComponentContext` for a specific component.
///
/// All offsets are in bytes from the start of the `VMComponentContext`
/// allocation and are precomputed once in [`VMComponentOffsets::new`].
#[derive(Debug, Clone, Copy)]
pub struct VMComponentOffsets<P> {
    /// The host pointer size
    pub ptr: P,

    /// The number of lowered functions this component will be creating.
    pub num_lowerings: u32,
    /// The number of memories which are recorded in this component for options.
    pub num_runtime_memories: u32,
    /// The number of tables which are recorded in this component for options.
    pub num_runtime_tables: u32,
    /// The number of reallocs which are recorded in this component for options.
    pub num_runtime_reallocs: u32,
    /// The number of callbacks which are recorded in this component for options.
    pub num_runtime_callbacks: u32,
    /// The number of post-returns which are recorded in this component for options.
    pub num_runtime_post_returns: u32,
    /// Number of component instances internally in the component (always at
    /// least 1).
    pub num_runtime_component_instances: u32,
    /// Number of cranelift-compiled trampolines required for this component.
    pub num_trampolines: u32,
    /// Number of `VMFuncRef`s for unsafe intrinsics within this component's
    /// context.
    pub num_unsafe_intrinsics: u32,
    /// Number of resources within a component which need destructors stored.
    pub num_resources: u32,

    // precalculated offsets of various member fields, in the order the fields
    // are laid out in the vmctx (see the struct diagram at the top of this
    // file and the `fields!` invocation in `new`)
    magic: u32,
    builtins: u32,
    vm_store_context: u32,
    flags: u32,
    task_may_block: u32,
    trampoline_func_refs: u32,
    intrinsic_func_refs: u32,
    lowerings: u32,
    memories: u32,
    tables: u32,
    reallocs: u32,
    callbacks: u32,
    post_returns: u32,
    resource_destructors: u32,
    // total size of the allocation, in bytes
    size: u32,
}

/// Rounds `offset` up to the next multiple of `align`.
///
/// `align` must be a power of two (asserted) so the round-up can be done with
/// a mask instead of a division.
#[inline]
fn align(offset: u32, align: u32) -> u32 {
    assert!(align.is_power_of_two());
    (offset + (align - 1)) & !(align - 1)
}

impl<P: PtrSize> VMComponentOffsets<P> {
    /// Creates a new set of offsets for the `component` specified configured
    /// additionally for the `ptr` size specified.
    ///
    /// All member-field offsets (and the total `size`) are computed here by
    /// walking the field layout in declaration order with checked arithmetic,
    /// so any overflow panics at compile-of-component time rather than
    /// producing a bogus layout.
    pub fn new(ptr: P, component: &Component) -> Self {
        // First record the various counts from `component`; the offset fields
        // are initialized to zero and filled in by the `fields!` walk below.
        let mut ret = Self {
            ptr,
            num_lowerings: component.num_lowerings,
            num_runtime_memories: component.num_runtime_memories,
            num_runtime_tables: component.num_runtime_tables,
            num_runtime_reallocs: component.num_runtime_reallocs,
            num_runtime_callbacks: component.num_runtime_callbacks,
            num_runtime_post_returns: component.num_runtime_post_returns,
            num_runtime_component_instances: component.num_runtime_component_instances,
            num_trampolines: component.trampolines.len().try_into().unwrap(),
            num_unsafe_intrinsics: if let Some(i) = component
                .unsafe_intrinsics
                .iter()
                .rposition(|x| x.is_some())
            {
                // Note: We do not currently have an indirection between "the
                // `i`th unsafe intrinsic in the vmctx" and
                // `UnsafeIntrinsic::from_u32(i)`, so therefore if we are
                // compiling in *any* intrinsics, we need to include space for
                // all of them up to the max `i` that is used.
                //
                // We _could_ introduce such an indirection via a map in
                // `Component` like `PrimaryMap<UnsafeIntrinsicIndex,
                // UnsafeIntrinsic>`, and that would allow us to densely pack
                // intrinsics in the vmctx. However we do not do that today
                // because there are very few unsafe intrinsics, and we do not
                // see that changing anytime soon, so we aren't wasting much
                // space.
                u32::try_from(i + 1).unwrap()
            } else {
                0
            },
            num_resources: component.num_resources,
            magic: 0,
            builtins: 0,
            vm_store_context: 0,
            flags: 0,
            task_may_block: 0,
            trampoline_func_refs: 0,
            intrinsic_func_refs: 0,
            lowerings: 0,
            memories: 0,
            tables: 0,
            reallocs: 0,
            callbacks: 0,
            post_returns: 0,
            resource_destructors: 0,
            size: 0,
        };

        // Convenience functions for checked addition and multiplication.
        // As side effect this reduces binary size by using only a single
        // `#[track_caller]` location for each function instead of one for
        // each individual invocation.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        let mut next_field_offset = 0;

        // Layout walker: `size(field) = expr` records the current offset into
        // `ret.field` and advances by `expr` bytes (checked); `align(n)` rounds
        // the running offset up to an `n`-byte boundary. The invocation order
        // below therefore *is* the vmctx field layout.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = next_field_offset.checked_add(u32::from($size)).unwrap();
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(magic) = 4u32,
            align(u32::from(ret.ptr.size())),
            size(builtins) = ret.ptr.size(),
            size(vm_store_context) = ret.ptr.size(),
            // `VMGlobalDefinition`s require 16-byte alignment.
            align(16),
            size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
            size(task_may_block) = ret.ptr.size_of_vmglobal_definition(),
            align(u32::from(ret.ptr.size())),
            size(trampoline_func_refs) = cmul(ret.num_trampolines, ret.ptr.size_of_vm_func_ref()),
            size(intrinsic_func_refs) = cmul(ret.num_unsafe_intrinsics, ret.ptr.size_of_vm_func_ref()),
            // a `VMLowering` is two pointers: callee + data (see
            // `lowering_callee_offset`/`lowering_data_offset` below)
            size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
            size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
            size(tables) = cmul(ret.num_runtime_tables, ret.size_of_vmtable_import()),
            size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
            size(callbacks) = cmul(ret.num_runtime_callbacks, ret.ptr.size()),
            size(post_returns) = cmul(ret.num_runtime_post_returns, ret.ptr.size()),
            size(resource_destructors) = cmul(ret.num_resources, ret.ptr.size()),
        }

        ret.size = next_field_offset;

        // This is required by the implementation of
        // `VMComponentContext::from_opaque`. If this value changes then this
        // location needs to be updated.
        assert_eq!(ret.magic, 0);

        return ret;
    }

    /// The size, in bytes, of the host pointer.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// The offset of the `magic` field.
    #[inline]
    pub fn magic(&self) -> u32 {
        self.magic
    }

    /// The offset of the `builtins` field.
    #[inline]
    pub fn builtins(&self) -> u32 {
        self.builtins
    }

    /// The offset of the `flags` field for the component instance `index`.
    #[inline]
    pub fn instance_flags(&self, index: RuntimeComponentInstanceIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_component_instances);
        self.flags + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `task_may_block` field.
    pub fn task_may_block(&self) -> u32 {
        self.task_may_block
    }

    /// The offset of the `vm_store_context` field.
    #[inline]
    pub fn vm_store_context(&self) -> u32 {
        self.vm_store_context
    }

    /// The offset of the `trampoline_func_refs` field.
    #[inline]
    pub fn trampoline_func_refs(&self) -> u32 {
        self.trampoline_func_refs
    }

    /// The offset of `VMFuncRef` for the `index` specified.
    #[inline]
    pub fn trampoline_func_ref(&self, index: TrampolineIndex) -> u32 {
        assert!(index.as_u32() < self.num_trampolines);
        self.trampoline_func_refs() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `unsafe_intrinsic_func_refs` field.
    #[inline]
    pub fn unsafe_intrinsic_func_refs(&self) -> u32 {
        self.intrinsic_func_refs
    }

    /// The offset of the `VMFuncRef` for the `intrinsic` specified.
    #[inline]
    pub fn unsafe_intrinsic_func_ref(&self, intrinsic: UnsafeIntrinsic) -> u32 {
        assert!(intrinsic.index() < self.num_unsafe_intrinsics);
        self.unsafe_intrinsic_func_refs()
            + intrinsic.index() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `lowerings` field.
    #[inline]
    pub fn lowerings(&self) -> u32 {
        self.lowerings
    }

    /// The offset of the `VMLowering` for the `index` specified.
    #[inline]
    pub fn lowering(&self, index: LoweredIndex) -> u32 {
        assert!(index.as_u32() < self.num_lowerings);
        // each `VMLowering` is two pointers wide (callee + data)
        self.lowerings() + index.as_u32() * u32::from(2 * self.ptr.size())
    }

    /// The offset of the `callee` for the `index` specified.
    #[inline]
    pub fn lowering_callee(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_callee_offset()
    }

    /// The offset of the `data` for the `index` specified.
    #[inline]
    pub fn lowering_data(&self, index: LoweredIndex) -> u32 {
        self.lowering(index) + self.lowering_data_offset()
    }

    /// The size of the `VMLowering` type
    #[inline]
    pub fn lowering_size(&self) -> u8 {
        2 * self.ptr.size()
    }

    /// The offset of the `callee` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_callee_offset(&self) -> u32 {
        0
    }

    /// The offset of the `data` field within the `VMLowering` type.
    #[inline]
    pub fn lowering_data_offset(&self) -> u32 {
        u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_memories` field
    #[inline]
    pub fn runtime_memories(&self) -> u32 {
        self.memories
    }

    /// The offset of the `*mut VMMemoryDefinition` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_memory(&self, index: RuntimeMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_memories);
        self.runtime_memories() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_tables` field
    #[inline]
    pub fn runtime_tables(&self) -> u32 {
        self.tables
    }

    /// The offset of the table for the runtime index provided.
    #[inline]
    pub fn runtime_table(&self, index: RuntimeTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_tables);
        self.runtime_tables() + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// Return the size of `VMTableImport`, used here to hold the pointers to
    /// the `VMTableDefinition` and `VMContext`.
    ///
    /// NOTE(review): this reserves *three* pointers per entry even though the
    /// comment above names only two — presumably `VMTableImport` carries a
    /// third word; confirm against its definition before changing this.
    #[inline]
    pub fn size_of_vmtable_import(&self) -> u8 {
        3 * self.pointer_size()
    }

    /// The offset of the base of the `runtime_reallocs` field
    #[inline]
    pub fn runtime_reallocs(&self) -> u32 {
        self.reallocs
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_reallocs);
        self.runtime_reallocs() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_callbacks` field
    #[inline]
    pub fn runtime_callbacks(&self) -> u32 {
        self.callbacks
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_callback(&self, index: RuntimeCallbackIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_callbacks);
        self.runtime_callbacks() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `runtime_post_returns` field
    #[inline]
    pub fn runtime_post_returns(&self) -> u32 {
        self.post_returns
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {
        assert!(index.as_u32() < self.num_runtime_post_returns);
        self.runtime_post_returns() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// The offset of the base of the `resource_destructors` field
    #[inline]
    pub fn resource_destructors(&self) -> u32 {
        self.resource_destructors
    }

    /// The offset of the `*mut VMFuncRef` for the runtime index
    /// provided.
    #[inline]
    pub fn resource_destructor(&self, index: ResourceIndex) -> u32 {
        assert!(index.as_u32() < self.num_resources);
        self.resource_destructors() + index.as_u32() * u32::from(self.ptr.size())
    }

    /// Return the size of the `VMComponentContext` allocation.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }
}