Path: blob/main/crates/cranelift/src/compiler/component.rs
//! Compilation support for the component model.

use crate::{TRAP_CANNOT_LEAVE_COMPONENT, TRAP_INTERNAL_ASSERT, compiler::Compiler};
use cranelift_codegen::ir::condcodes::IntCC;
use cranelift_codegen::ir::{self, InstBuilder, MemFlags, Value};
use cranelift_codegen::isa::{CallConv, TargetIsa};
use cranelift_frontend::FunctionBuilder;
use wasmtime_environ::error::{Result, bail};
use wasmtime_environ::{
    Abi, CompiledFunctionBody, EntityRef, FuncKey, HostCall, PtrSize, TrapSentinel, Tunables,
    WasmFuncType, WasmValType, component::*, fact::PREPARE_CALL_FIXED_PARAMS,
};

struct TrampolineCompiler<'a> {
    compiler: &'a Compiler,
    isa: &'a (dyn TargetIsa + 'static),
    builder: FunctionBuilder<'a>,
    component: &'a Component,
    types: &'a ComponentTypesBuilder,
    offsets: VMComponentOffsets<u8>,
    block0: ir::Block,
    signature: &'a WasmFuncType,
}

/// What host functions can be called, used in `translate_hostcall` below.
enum HostCallee {
    /// Call a host-lowered function specified by this index.
    Lowering(LoweredIndex),
    /// Call a host libcall, specified by this accessor.
    Libcall(GetLibcallFn),
}

type GetLibcallFn =
    fn(&dyn TargetIsa, &mut ir::Function) -> (ir::SigRef, ComponentBuiltinFunctionIndex);

impl From<LoweredIndex> for HostCallee {
    fn from(index: LoweredIndex) -> HostCallee {
        HostCallee::Lowering(index)
    }
}

impl From<GetLibcallFn> for HostCallee {
    fn from(f: GetLibcallFn) -> HostCallee {
        HostCallee::Libcall(f)
    }
}

/// How to interpret the results of a host function.
enum HostResult {
    /// The host function returns the sentinel specified which is interpreted
    /// and translated to the real return value.
    Sentinel(TrapSentinel),

    /// The host function returns a `bool` indicating whether it succeeded or
    /// not.
    ///
    /// After the return value is interpreted the host function also filled in
    /// `ptr` and `len` with wasm return values which need to be returned.
    ///
    /// If `ptr` and `len` are not specified then this must be used with
    /// `WasmArgs::ValRawList` and that ptr/len is used.
    MultiValue {
        /// The base pointer of the `ValRaw` list on the stack.
        ptr: Option<ir::Value>,
        /// The length of the `ValRaw` list on the stack.
        len: Option<ir::Value>,
    },
}

impl From<TrapSentinel> for HostResult {
    fn from(sentinel: TrapSentinel) -> HostResult {
        HostResult::Sentinel(sentinel)
    }
}

/// Different means of passing WebAssembly arguments to host calls.
#[derive(Debug, Copy, Clone)]
enum WasmArgs {
    /// All wasm arguments to the host are passed directly as values, typically
    /// through registers.
    InRegisters,

    /// All wasm arguments to the host are passed indirectly by spilling them
    /// to the stack as a sequence of contiguous `ValRaw`s.
    ValRawList,

    /// The first `n` arguments are passed in registers, but everything after
    /// that is spilled to the stack.
    InRegistersUpTo(usize),
}

impl<'a> TrampolineCompiler<'a> {
    fn new(
        compiler: &'a Compiler,
        func_compiler: &'a mut super::FunctionCompiler<'_>,
        component: &'a Component,
        types: &'a ComponentTypesBuilder,
        signature: &'a WasmFuncType,
    ) -> TrampolineCompiler<'a> {
        let isa = &*compiler.isa;
        let func = ir::Function::with_name_signature(
            ir::UserFuncName::user(0, 0),
            crate::wasm_call_signature(isa, signature, &compiler.tunables),
        );
        let (builder, block0) = func_compiler.builder(func);
        TrampolineCompiler {
            compiler,
            isa,
            builder,
            component,
            types,
            offsets: VMComponentOffsets::new(isa.pointer_bytes(), component),
            block0,
            signature,
        }
    }

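    // Most of the `Trampoline` variants handled below reduce to the same
    // pattern: pick a `host::*` libcall accessor, describe how its return
    // value encodes traps via a `TrapSentinel`, describe how the wasm
    // arguments are forwarded via `WasmArgs`, and prepend any extra scalar
    // arguments (instance indices, type indices, flags) in a closure. The
    // exceptions are string transcoders, lowered imports, and resource drops,
    // which have their own dedicated translation paths.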
    fn translate(&mut self, trampoline: &Trampoline) {
        self.check_may_leave(trampoline);

        match trampoline {
            Trampoline::Transcoder {
                op,
                from,
                from64,
                to,
                to64,
            } => {
                self.translate_transcode(*op, *from, *from64, *to, *to64);
            }
            Trampoline::LowerImport {
                index,
                options,
                lower_ty,
            } => {
                let pointer_type = self.isa.pointer_type();
                self.translate_hostcall(
                    HostCallee::Lowering(*index),
                    HostResult::MultiValue {
                        ptr: None,
                        len: None,
                    },
                    WasmArgs::ValRawList,
                    |me, params| {
                        let vmctx = params[0];
                        params.extend([
                            me.builder.ins().load(
                                pointer_type,
                                MemFlags::trusted(),
                                vmctx,
                                i32::try_from(me.offsets.lowering_data(*index)).unwrap(),
                            ),
                            me.index_value(*lower_ty),
                            me.index_value(*options),
                        ]);
                    },
                );
            }
            Trampoline::ResourceNew { instance, ty } => {
                // Currently this only supports resources represented by `i32`
                assert_eq!(self.signature.params()[0], WasmValType::I32);
                self.translate_libcall(
                    host::resource_new32,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ResourceRep { instance, ty } => {
                // Currently this only supports resources represented by `i32`
                assert_eq!(self.signature.returns()[0], WasmValType::I32);
                self.translate_libcall(
                    host::resource_rep32,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ResourceDrop { instance, ty } => {
                self.translate_resource_drop(*instance, *ty);
            }
            Trampoline::BackpressureInc { instance } => {
                self.translate_libcall(
                    host::backpressure_modify,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I8, 1));
                    },
                );
            }
            Trampoline::BackpressureDec { instance } => {
                self.translate_libcall(
                    host::backpressure_modify,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I8, 0));
                    },
                );
            }
            Trampoline::TaskReturn {
                instance,
                results,
                options,
            } => {
                self.translate_libcall(
                    host::task_return,
                    TrapSentinel::Falsy,
                    WasmArgs::ValRawList,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*results));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::TaskCancel { instance } => {
                self.translate_libcall(
                    host::task_cancel,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::WaitableSetNew { instance } => {
                self.translate_libcall(
                    host::waitable_set_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::WaitableSetWait { instance, options } => {
                self.translate_libcall(
                    host::waitable_set_wait,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::WaitableSetPoll { instance, options } => {
                self.translate_libcall(
                    host::waitable_set_poll,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::WaitableSetDrop { instance } => {
                self.translate_libcall(
                    host::waitable_set_drop,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::WaitableJoin { instance } => {
                self.translate_libcall(
                    host::waitable_join,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::ThreadYield {
                instance,
                cancellable,
            } => {
                self.translate_libcall(
                    host::thread_yield,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(
                            me.builder
                                .ins()
                                .iconst(ir::types::I8, i64::from(*cancellable)),
                        );
                    },
                );
            }
            Trampoline::SubtaskDrop { instance } => {
                self.translate_libcall(
                    host::subtask_drop,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::SubtaskCancel { instance, async_ } => {
                self.translate_libcall(
                    host::subtask_cancel,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::StreamNew { instance, ty } => {
                self.translate_libcall(
                    host::stream_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::StreamRead {
                instance,
                ty,
                options,
            } => {
                if let Some(info) = self.flat_stream_element_info(*ty).cloned() {
                    self.translate_libcall(
                        host::flat_stream_read,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.extend([
                                me.index_value(*instance),
                                me.index_value(*ty),
                                me.index_value(*options),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.size32)),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.align32)),
                            ]);
                        },
                    );
                } else {
                    self.translate_libcall(
                        host::stream_read,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.push(me.index_value(*instance));
                            params.push(me.index_value(*ty));
                            params.push(me.index_value(*options));
                        },
                    );
                }
            }
            Trampoline::StreamWrite {
                instance,
                ty,
                options,
            } => {
                if let Some(info) = self.flat_stream_element_info(*ty).cloned() {
                    self.translate_libcall(
                        host::flat_stream_write,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.extend([
                                me.index_value(*instance),
                                me.index_value(*ty),
                                me.index_value(*options),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.size32)),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.align32)),
                            ]);
                        },
                    );
                } else {
                    self.translate_libcall(
                        host::stream_write,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.push(me.index_value(*instance));
                            params.push(me.index_value(*ty));
                            params.push(me.index_value(*options));
                        },
                    );
                }
            }
            Trampoline::StreamCancelRead {
                instance,
                ty,
                async_,
            } => {
                self.translate_libcall(
                    host::stream_cancel_read,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::StreamCancelWrite {
                instance,
                ty,
                async_,
            } => {
                self.translate_libcall(
                    host::stream_cancel_write,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::StreamDropReadable { instance, ty } => {
                self.translate_libcall(
                    host::stream_drop_readable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::StreamDropWritable { instance, ty } => {
                self.translate_libcall(
                    host::stream_drop_writable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::FutureNew { instance, ty } => {
                self.translate_libcall(
                    host::future_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::FutureRead {
                instance,
                ty,
                options,
            } => {
                self.translate_libcall(
                    host::future_read,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::FutureWrite {
                instance,
                ty,
                options,
            } => {
                self.translate_libcall(
                    host::future_write,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::FutureCancelRead {
                instance,
                ty,
                async_,
            } => {
                self.translate_libcall(
                    host::future_cancel_read,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::FutureCancelWrite {
                instance,
                ty,
                async_,
            } => {
                self.translate_libcall(
                    host::future_cancel_write,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::FutureDropReadable { instance, ty } => {
                self.translate_libcall(
                    host::future_drop_readable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::FutureDropWritable { instance, ty } => {
                self.translate_libcall(
                    host::future_drop_writable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ErrorContextNew {
                instance,
                ty,
                options,
            } => {
                self.translate_libcall(
                    host::error_context_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::ErrorContextDebugMessage {
                instance,
                ty,
                options,
            } => {
                self.translate_libcall(
                    host::error_context_debug_message,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::ErrorContextDrop { instance, ty } => {
                self.translate_libcall(
                    host::error_context_drop,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ResourceTransferOwn => {
                self.translate_libcall(
                    host::resource_transfer_own,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ResourceTransferBorrow => {
                self.translate_libcall(
                    host::resource_transfer_borrow,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::PrepareCall { memory } => {
                self.translate_libcall(
                    host::prepare_call,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegistersUpTo(PREPARE_CALL_FIXED_PARAMS.len()),
                    |me, params| {
                        let vmctx = params[0];
                        params.push(me.load_optional_memory(vmctx, *memory));
                    },
                );
            }
            Trampoline::SyncStartCall { callback } => {
                let pointer_type = self.isa.pointer_type();
                let (values_vec_ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
                    &WasmFuncType::new(
                        Box::new([]),
                        self.signature.returns().iter().copied().collect(),
                    ),
                    &mut self.builder,
                    &[],
                );
                let values_vec_len = self.builder.ins().iconst(pointer_type, i64::from(len));
                self.translate_libcall(
                    host::sync_start,
                    HostResult::MultiValue {
                        ptr: Some(values_vec_ptr),
                        len: Some(values_vec_len),
                    },
                    WasmArgs::InRegisters,
                    |me, params| {
                        let vmctx = params[0];
                        params.push(me.load_callback(vmctx, *callback));
                        params.push(values_vec_ptr);
                        params.push(values_vec_len);
                    },
                );
            }
            Trampoline::AsyncStartCall {
                callback,
                post_return,
            } => {
                self.translate_libcall(
                    host::async_start,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        let vmctx = params[0];
                        params.extend([
                            me.load_callback(vmctx, *callback),
                            me.load_post_return(vmctx, *post_return),
                        ]);
                    },
                );
            }
            Trampoline::FutureTransfer => {
                self.translate_libcall(
                    host::future_transfer,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::StreamTransfer => {
                self.translate_libcall(
                    host::stream_transfer,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ErrorContextTransfer => {
                self.translate_libcall(
                    host::error_context_transfer,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::Trap => {
                self.translate_libcall(
                    host::trap,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::EnterSyncCall => {
                self.translate_libcall(
                    host::enter_sync_call,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ExitSyncCall => {
                self.translate_libcall(
                    host::exit_sync_call,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ContextGet { instance, slot } => {
                self.translate_libcall(
                    host::context_get,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I32, i64::from(*slot)));
                    },
                );
            }
            Trampoline::ContextSet { instance, slot } => {
                self.translate_libcall(
                    host::context_set,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I32, i64::from(*slot)));
                    },
                );
            }
            Trampoline::ThreadIndex => {
                self.translate_libcall(
                    host::thread_index,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ThreadNewIndirect {
                instance,
                start_func_table_idx,
                start_func_ty_idx,
            } => {
                self.translate_libcall(
                    host::thread_new_indirect,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*start_func_table_idx));
                        params.push(me.index_value(*start_func_ty_idx));
                    },
                );
            }
            Trampoline::ThreadSwitchTo {
                instance,
                cancellable,
            } => {
                self.translate_libcall(
                    host::thread_switch_to,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(
                            me.builder
                                .ins()
                                .iconst(ir::types::I8, i64::from(*cancellable)),
                        );
                    },
                );
            }
            Trampoline::ThreadSuspend {
                instance,
                cancellable,
            } => {
                self.translate_libcall(
                    host::thread_suspend,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(
                            me.builder
                                .ins()
                                .iconst(ir::types::I8, i64::from(*cancellable)),
                        );
                    },
                );
            }
            Trampoline::ThreadResumeLater { instance } => {
                self.translate_libcall(
                    host::thread_resume_later,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::ThreadYieldTo {
                instance,
                cancellable,
            } => {
                self.translate_libcall(
                    host::thread_yield_to,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(
                            me.builder
                                .ins()
                                .iconst(ir::types::I8, i64::from(*cancellable)),
                        );
                    },
                );
            }
        }
    }

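    // For example, a `stream<u8>` payload can take the flat `memcpy`-style
    // fast path below, while a `stream<string>` cannot, since strings lower to
    // pointer/length pairs that must go through the general-purpose path.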
    /// Determine whether the specified type can be optimized as a stream
    /// payload by lifting and lowering with a simple `memcpy`.
    ///
    /// Any type containing only "flat", primitive data for which all bit
    /// patterns are valid (i.e. no pointers, handles, bools, or chars) should
    /// qualify for this optimization, but it's also okay to conservatively
    /// return `None` here; the fallback slow path will always work -- it just
    /// won't be as efficient.
    fn flat_stream_element_info(&self, ty: TypeStreamTableIndex) -> Option<&CanonicalAbiInfo> {
        let payload = self.types[self.types[ty].ty].payload;
        match payload {
            None => Some(&CanonicalAbiInfo::ZERO),
            Some(
                // Note that we exclude `Bool` and `Char` from this list because
                // not all bit patterns are valid for those types.
                payload @ (InterfaceType::S8
                | InterfaceType::U8
                | InterfaceType::S16
                | InterfaceType::U16
                | InterfaceType::S32
                | InterfaceType::U32
                | InterfaceType::S64
                | InterfaceType::U64
                | InterfaceType::Float32
                | InterfaceType::Float64),
            ) => Some(self.types.canonical_abi(&payload)),
            // TODO: Recursively check for other "flat" types (i.e. those without pointers or handles),
            // e.g. `record`s, `variant`s, etc. which contain only flat types.
            _ => None,
        }
    }

    /// Helper function to spill the wasm arguments `args` to this function into
    /// a stack-allocated array.
    fn store_wasm_arguments(&mut self, args: &[Value]) -> (Value, Value) {
        let pointer_type = self.isa.pointer_type();

        let (ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
            self.signature,
            &mut self.builder,
            args,
        );
        let len = self.builder.ins().iconst(pointer_type, i64::from(len));
        (ptr, len)
    }

    /// Convenience wrapper around `translate_hostcall` to enable type inference
    /// on the `get_libcall` parameter here.
    fn translate_libcall(
        &mut self,
        get_libcall: GetLibcallFn,
        host_result: impl Into<HostResult>,
        wasm_args: WasmArgs,
        extra_host_args: impl FnOnce(&mut Self, &mut Vec<ir::Value>),
    ) {
        self.translate_hostcall(
            HostCallee::Libcall(get_libcall),
            host_result.into(),
            wasm_args,
            extra_host_args,
        )
    }

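    // Roughly speaking, the code emitted by `translate_hostcall` below has the
    // following shape (a sketch only; the exact instructions depend on the
    // `HostCallee`, `HostResult`, and `WasmArgs` configuration):
    //
    //     params = abi_load_params()        ;; vmctx, caller_vmctx, wasm args
    //     host_args = [vmctx] ++ extra_host_args ++ forwarded wasm args
    //     result = call libcall-or-lowered-fn(host_args)
    //     ;; interpret `result` against the trap sentinel, raising a trap
    //     ;; if the host requested one, then return the wasm results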
    /// Translates an invocation of a host function and interprets the result.
    ///
    /// This is intended to be a relatively narrow waist which most intrinsics
    /// go through. The configuration supported here is:
    ///
    /// * `host_callee` - what's being called, either a libcall or a lowered
    ///   function
    /// * `host_result` - how to interpret the return value to see if it's a
    ///   trap
    /// * `wasm_args` - how to pass wasm args to the host, either in registers
    ///   or on the stack
    /// * `extra_host_args` - a closure used to push extra arguments just before
    ///   the wasm arguments are forwarded.
    fn translate_hostcall(
        &mut self,
        host_callee: HostCallee,
        host_result: impl Into<HostResult>,
        wasm_args: WasmArgs,
        extra_host_args: impl FnOnce(&mut Self, &mut Vec<ir::Value>),
    ) {
        let pointer_type = self.isa.pointer_type();

        // Load all parameters in an ABI-agnostic fashion, of which the
        // `VMComponentContext` will be the first.
        let params = self.abi_load_params();
        let vmctx = params[0];
        let wasm_params = &params[2..];

        // Start building up arguments to the host. The first is always the
        // vmctx. After is whatever `extra_host_args` appends, and then finally
        // is what `WasmArgs` specifies.
        let mut host_args = vec![vmctx];
        extra_host_args(self, &mut host_args);
        let mut val_raw_ptr = None;
        let mut val_raw_len = None;
        match wasm_args {
            // Wasm params are passed through as values themselves.
            WasmArgs::InRegisters => host_args.extend(wasm_params.iter().copied()),

            // Wasm params are spilled and then the ptr/len is passed.
            WasmArgs::ValRawList => {
                let (ptr, len) = self.store_wasm_arguments(wasm_params);
                val_raw_ptr = Some(ptr);
                val_raw_len = Some(len);
                host_args.push(ptr);
                host_args.push(len);
            }

            // A mixture of the above two.
            WasmArgs::InRegistersUpTo(n) => {
                let (values_vec_ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
                    &WasmFuncType::new(
                        self.signature.params().iter().skip(n).copied().collect(),
                        Box::new([]),
                    ),
                    &mut self.builder,
                    &wasm_params[n..],
                );
                let values_vec_len = self.builder.ins().iconst(pointer_type, i64::from(len));

                host_args.extend(wasm_params[..n].iter().copied());
                host_args.push(values_vec_ptr);
                host_args.push(values_vec_len);
            }
        }

        // Next perform the actual invocation of the host with `host_args`.
        let call = match host_callee {
            HostCallee::Libcall(get_libcall) => self.call_libcall(vmctx, get_libcall, &host_args),
            HostCallee::Lowering(index) => {
                // Load host function pointer from the vmcontext and then call that
                // indirect function pointer with the list of arguments.
                let host_fn = self.builder.ins().load(
                    pointer_type,
                    MemFlags::trusted(),
                    vmctx,
                    i32::try_from(self.offsets.lowering_callee(index)).unwrap(),
                );
                let host_sig = {
                    let mut sig = ir::Signature::new(CallConv::triple_default(self.isa.triple()));
                    for param in host_args.iter() {
                        let ty = self.builder.func.dfg.value_type(*param);
                        sig.params.push(ir::AbiParam::new(ty));
                    }
                    // return value is a bool indicating whether a trap was raised or not
                    sig.returns.push(ir::AbiParam::new(ir::types::I8));
                    self.builder.import_signature(sig)
                };
                self.compiler.call_indirect_host(
                    &mut self.builder,
                    HostCall::ComponentLowerImport,
                    host_sig,
                    host_fn,
                    &host_args,
                )
            }
        };

        // Acquire the result of this function (if any) and interpret it
        // according to `host_result`.
        //
        // Note that all match arms here end with `abi_store_results` which
        // accounts for the ABI of this function when storing results.
        let result = self.builder.func.dfg.inst_results(call).get(0).copied();
        let result_ty = result.map(|v| self.builder.func.dfg.value_type(v));
        let expected = self.signature.returns();
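        // How the sentinels are interpreted below: `NegativeOne` means the
        // host returns -1 (all ones) to signal a trap and any other value is
        // the actual result, while `Falsy` means the host returns a boolean
        // where zero signals a trap and there are no wasm results to forward.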
        match host_result.into() {
            HostResult::Sentinel(TrapSentinel::NegativeOne) => {
                assert_eq!(expected.len(), 1);
                let (result, result_ty) = (result.unwrap(), result_ty.unwrap());
                let result = match (result_ty, expected[0]) {
                    (ir::types::I64, WasmValType::I32) => {
                        self.raise_if_negative_one_and_truncate(result)
                    }
                    (ir::types::I64, WasmValType::I64) | (ir::types::I32, WasmValType::I32) => {
                        self.raise_if_negative_one(result)
                    }
                    other => panic!("unsupported NegativeOne combo {other:?}"),
                };
                self.abi_store_results(&[result]);
            }
            HostResult::Sentinel(TrapSentinel::Falsy) => {
                assert_eq!(expected.len(), 0);
                self.raise_if_host_trapped(result.unwrap());
                self.abi_store_results(&[]);
            }
            HostResult::Sentinel(_) => todo!("support additional return types if/when necessary"),

            HostResult::MultiValue { ptr, len } => {
                let ptr = ptr.or(val_raw_ptr).unwrap();
                let len = len.or(val_raw_len).unwrap();
                self.raise_if_host_trapped(result.unwrap());
                let results = self.compiler.load_values_from_array(
                    self.signature.returns(),
                    &mut self.builder,
                    ptr,
                    len,
                );
                self.abi_store_results(&results);
            }
        }
    }

    fn index_value(&mut self, index: impl EntityRef) -> ir::Value {
        self.builder
            .ins()
            .iconst(ir::types::I32, i64::try_from(index.index()).unwrap())
    }

    fn translate_resource_drop(
        &mut self,
        instance: RuntimeComponentInstanceIndex,
        resource: TypeResourceTableIndex,
    ) {
        let args = self.abi_load_params();
        let vmctx = args[0];
        let caller_vmctx = args[1];
        let pointer_type = self.isa.pointer_type();

        // The arguments this shim passes along to the libcall are:
        //
        // * the vmctx
        // * the calling component instance index
        // * a constant value for this `ResourceDrop` intrinsic
        // * the wasm handle index to drop
        let mut host_args = Vec::new();
        host_args.push(vmctx);
        host_args.push(
            self.builder
                .ins()
                .iconst(ir::types::I32, i64::from(instance.as_u32())),
        );
        host_args.push(
            self.builder
                .ins()
                .iconst(ir::types::I32, i64::from(resource.as_u32())),
        );
        host_args.push(args[2]);

        let call = self.call_libcall(vmctx, host::resource_drop, &host_args);

        // Immediately raise a trap if requested by the host
        let should_run_destructor =
            self.raise_if_negative_one(self.builder.func.dfg.inst_results(call)[0]);

        let resource_ty = self.types[resource].unwrap_concrete_ty();
        let resource_def = self
            .component
            .defined_resource_index(resource_ty)
            .map(|idx| {
                self.component
                    .initializers
                    .iter()
                    .filter_map(|i| match i {
                        GlobalInitializer::Resource(r) if r.index == idx => Some(r),
                        _ => None,
                    })
                    .next()
                    .unwrap()
            });
        let has_destructor = match resource_def {
            Some(def) => def.dtor.is_some(),
            None => true,
        };
        // Synthesize the following:
        //
        //     ...
        //     brif should_run_destructor, run_destructor_block, return_block
        //
        //   run_destructor_block:
        //     ;; test may_leave, but only if the component instances
        //     ;; differ
        //     flags = load.i32 vmctx+$instance_flags_offset
        //     masked = band flags, $FLAG_MAY_LEAVE
        //     trapz masked, $TRAP_CANNOT_LEAVE_COMPONENT
        //
        //     ;; set may_block to false, saving the old value to restore
        //     ;; later, but only if the component instances differ and
        //     ;; concurrency is enabled
        //     old_may_block = load.i32 vmctx+$may_block_offset
        //     store 0, vmctx+$may_block_offset
        //
        //     ;; call enter_sync_call, but only if the component instances
        //     ;; differ and concurrency is enabled
        //     ...
        //
        //     ;; ============================================================
        //     ;; this is conditionally emitted based on whether the resource
        //     ;; has a destructor or not, and can be statically omitted
        //     ;; because that information is known at compile time here.
        //     rep = ushr.i64 rep, 1
        //     rep = ireduce.i32 rep
        //     dtor = load.ptr vmctx+$offset
        //     func_addr = load.ptr dtor+$offset
        //     callee_vmctx = load.ptr dtor+$offset
        //     call_indirect func_addr, callee_vmctx, vmctx, rep
        //     ;; ============================================================
        //
        //     ;; restore old value of may_block
        //     store old_may_block, vmctx+$may_block_offset
        //
        //     ;; if needed, call exit_sync_call
        //     ...
        //
        //     ;; if needed, restore the old value of may_block
        //     store old_may_block, vmctx+$may_block_offset
        //
        //     jump return_block
        //
        //   return_block:
        //     return
        //
        // This will decode `should_run_destructor` and run the destructor
        // funcref if one is specified for this resource. Note that not all
        // resources have destructors, hence the null check.
        self.builder.ensure_inserted_block();
        let current_block = self.builder.current_block().unwrap();
        let run_destructor_block = self.builder.create_block();
        self.builder
            .insert_block_after(run_destructor_block, current_block);
        let return_block = self.builder.create_block();
        self.builder
            .insert_block_after(return_block, run_destructor_block);

        self.builder.ins().brif(
            should_run_destructor,
            run_destructor_block,
            &[],
            return_block,
            &[],
        );

        let trusted = ir::MemFlags::trusted().with_readonly();

        self.builder.switch_to_block(run_destructor_block);

        // If this is a component-defined resource, the `may_leave` flag must be
        // checked. Additionally, if concurrency is enabled, the `may_block`
        // field must be updated and `enter_sync_call` called. Note though that
        // all of that may be elided if the resource table resides in the same
        // component instance that defined the resource as the component is
        // calling itself.
        let old_may_block = if let Some(def) = resource_def {
            if self.types[resource].unwrap_concrete_instance() != def.instance {
                self.check_may_leave_instance(self.types[resource].unwrap_concrete_instance());

                if self.compiler.tunables.concurrency_support {
                    // Stash the old value of `may_block` and then set it to false.
                    let old_may_block = self.builder.ins().load(
                        ir::types::I32,
                        trusted,
                        vmctx,
                        i32::try_from(self.offsets.task_may_block()).unwrap(),
                    );
                    let zero = self.builder.ins().iconst(ir::types::I32, i64::from(0));
                    self.builder.ins().store(
                        ir::MemFlags::trusted(),
                        zero,
                        vmctx,
                        i32::try_from(self.offsets.task_may_block()).unwrap(),
                    );

                    // Call `enter_sync_call`
                    //
                    // FIXME: Apply the optimizations described in #12311.
                    let host_args = vec![
                        vmctx,
                        self.builder
                            .ins()
                            .iconst(ir::types::I32, i64::from(instance.as_u32())),
                        self.builder.ins().iconst(ir::types::I32, i64::from(0)),
                        self.builder
                            .ins()
                            .iconst(ir::types::I32, i64::from(def.instance.as_u32())),
                    ];
                    let call = self.call_libcall(vmctx, host::enter_sync_call, &host_args);
                    let result = self.builder.func.dfg.inst_results(call).get(0).copied();
                    self.raise_if_host_trapped(result.unwrap());

                    Some(old_may_block)
                } else {
                    None
                }
            } else {
                None
            }
        } else {
            None
        };

        // Conditionally emit destructor-execution code based on whether we
        // statically know that a destructor exists or not.
        if has_destructor {
            let rep = self.builder.ins().ushr_imm(should_run_destructor, 1);
            let rep = self.builder.ins().ireduce(ir::types::I32, rep);
            let index = self.types[resource].unwrap_concrete_ty();
            // NB: despite the vmcontext storing nullable funcrefs for function
            // pointers we know this is statically never null due to the
            // `has_destructor` check above.
            let dtor_func_ref = self.builder.ins().load(
                pointer_type,
                trusted,
                vmctx,
                i32::try_from(self.offsets.resource_destructor(index)).unwrap(),
            );
            if self.compiler.emit_debug_checks {
                self.builder
                    .ins()
                    .trapz(dtor_func_ref, TRAP_INTERNAL_ASSERT);
            }
            let func_addr = self.builder.ins().load(
                pointer_type,
                trusted,
                dtor_func_ref,
                i32::from(self.offsets.ptr.vm_func_ref_wasm_call()),
            );
            let callee_vmctx = self.builder.ins().load(
                pointer_type,
                trusted,
                dtor_func_ref,
                i32::from(self.offsets.ptr.vm_func_ref_vmctx()),
            );

            let sig = crate::wasm_call_signature(self.isa, self.signature, &self.compiler.tunables);
            let sig_ref = self.builder.import_signature(sig);

            // NB: note that the "caller" vmctx here is the caller of this
            // intrinsic itself, not the `VMComponentContext`. This effectively
            // takes ourselves out of the chain here but that's ok since the
            // caller is only used for store/limits and that same info is
            // stored, but elsewhere, in the component context.
            self.builder.ins().call_indirect(
                sig_ref,
                func_addr,
                &[callee_vmctx, caller_vmctx, rep],
            );
        }

        if let Some(old_may_block) = old_may_block {
            // Call `exit_sync_call`
            //
            // FIXME: Apply the optimizations described in #12311.
            let call = self.call_libcall(vmctx, host::exit_sync_call, &[vmctx]);
            let result = self.builder.func.dfg.inst_results(call).get(0).copied();
            self.raise_if_host_trapped(result.unwrap());

            // Restore the old value of `may_block`
            self.builder.ins().store(
                ir::MemFlags::trusted(),
                old_may_block,
                vmctx,
                i32::try_from(self.offsets.task_may_block()).unwrap(),
            );
        }

        self.builder.ins().jump(return_block, &[]);
        self.builder.seal_block(run_destructor_block);

        self.builder.switch_to_block(return_block);
        self.builder.seal_block(return_block);
        self.abi_store_results(&[]);
    }

    fn load_optional_memory(
        &mut self,
        vmctx: ir::Value,
        memory: Option<RuntimeMemoryIndex>,
    ) -> ir::Value {
        match memory {
            Some(idx) => self.load_memory(vmctx, idx),
            None => self.builder.ins().iconst(self.isa.pointer_type(), 0),
        }
    }

    fn load_memory(&mut self, vmctx: ir::Value, memory: RuntimeMemoryIndex) -> ir::Value {
        self.builder.ins().load(
            self.isa.pointer_type(),
            MemFlags::trusted(),
            vmctx,
            i32::try_from(self.offsets.runtime_memory(memory)).unwrap(),
        )
    }

    fn load_callback(
        &mut self,
        vmctx: ir::Value,
        callback: Option<RuntimeCallbackIndex>,
    ) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        match callback {
            Some(idx) => self.builder.ins().load(
                pointer_type,
                MemFlags::trusted(),
                vmctx,
                i32::try_from(self.offsets.runtime_callback(idx)).unwrap(),
            ),
            None => self.builder.ins().iconst(pointer_type, 0),
        }
    }

    fn load_post_return(
        &mut self,
        vmctx: ir::Value,
        post_return: Option<RuntimePostReturnIndex>,
    ) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        match post_return {
            Some(idx) => self.builder.ins().load(
                pointer_type,
                MemFlags::trusted(),
                vmctx,
                i32::try_from(self.offsets.runtime_post_return(idx)).unwrap(),
            ),
            None => self.builder.ins().iconst(pointer_type, 0),
        }
    }

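    // The builtins array referenced below is a flat array of function
    // pointers, so (illustratively) the pointer for builtin index 3 on a
    // 64-bit host lives at byte offset `3 * 8` from the array's base.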
    /// Loads a host function pointer for a libcall stored at the `offset`
    /// provided in the libcalls array.
    ///
    /// The offset is calculated in the `host` module below.
    fn load_libcall(
        &mut self,
        vmctx: ir::Value,
        index: ComponentBuiltinFunctionIndex,
    ) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        // First load the pointer to the builtins structure which is static
        // per-process.
        let builtins_array = self.builder.ins().load(
            pointer_type,
            MemFlags::trusted().with_readonly(),
            vmctx,
            i32::try_from(self.offsets.builtins()).unwrap(),
        );
        // Next load the function pointer at `offset` and return that.
        self.builder.ins().load(
            pointer_type,
            MemFlags::trusted().with_readonly(),
            builtins_array,
            i32::try_from(index.index() * u32::from(self.offsets.ptr.size())).unwrap(),
        )
    }

    /// Get a function's parameters regardless of the ABI in use.
    ///
    /// This emits code to load the parameters from the array-call ABI's values
    /// vector, if necessary.
    fn abi_load_params(&mut self) -> Vec<ir::Value> {
        self.builder.func.dfg.block_params(self.block0).to_vec()
    }

    /// Emit code to return the given result values, regardless of the ABI in use.
    fn abi_store_results(&mut self, results: &[ir::Value]) {
        self.builder.ins().return_(results);
    }

    fn raise_if_host_trapped(&mut self, succeeded: ir::Value) {
        let caller_vmctx = self.builder.func.dfg.block_params(self.block0)[1];
        self.compiler
            .raise_if_host_trapped(&mut self.builder, caller_vmctx, succeeded);
    }

    fn raise_if_transcode_trapped(&mut self, amount_copied: ir::Value) {
        let pointer_type = self.isa.pointer_type();
        let minus_one = self.builder.ins().iconst(pointer_type, -1);
        let succeeded = self
            .builder
            .ins()
            .icmp(IntCC::NotEqual, amount_copied, minus_one);
        self.raise_if_host_trapped(succeeded);
    }

    fn raise_if_negative_one_and_truncate(&mut self, ret: ir::Value) -> ir::Value {
        let ret = self.raise_if_negative_one(ret);
        self.builder.ins().ireduce(ir::types::I32, ret)
    }

    fn raise_if_negative_one(&mut self, ret: ir::Value) -> ir::Value {
        let result_ty = self.builder.func.dfg.value_type(ret);
        let minus_one = self.builder.ins().iconst(result_ty, -1);
        let succeeded = self.builder.ins().icmp(IntCC::NotEqual, ret, minus_one);
        self.raise_if_host_trapped(succeeded);
        ret
    }

    fn call_libcall(
        &mut self,
        vmctx: ir::Value,
        get_libcall: GetLibcallFn,
        args: &[ir::Value],
    ) -> ir::Inst {
        let (host_sig, index) = get_libcall(self.isa, &mut self.builder.func);
        let host_fn = self.load_libcall(vmctx, index);
        self.compiler
            .call_indirect_host(&mut self.builder, index, host_sig, host_fn, args)
    }

    fn check_may_leave(&mut self, trampoline: &Trampoline) {
        let instance = match trampoline {
            // These intrinsics explicitly do not check the may-leave flag.
            Trampoline::ResourceRep { .. }
            | Trampoline::ThreadIndex
            | Trampoline::BackpressureInc { .. }
            | Trampoline::BackpressureDec { .. }
            | Trampoline::ContextGet { .. }
            | Trampoline::ContextSet { .. } => return,

            // Intrinsics used in adapters generated by FACT that aren't called
            // directly from guest wasm, so no check is needed.
            Trampoline::ResourceTransferOwn
            | Trampoline::ResourceTransferBorrow
            | Trampoline::PrepareCall { .. }
            | Trampoline::SyncStartCall { .. }
            | Trampoline::AsyncStartCall { .. }
            | Trampoline::FutureTransfer
            | Trampoline::StreamTransfer
            | Trampoline::ErrorContextTransfer
            | Trampoline::Trap
            | Trampoline::EnterSyncCall
            | Trampoline::ExitSyncCall
            | Trampoline::Transcoder { .. } => return,

            Trampoline::LowerImport { options, .. } => self.component.options[*options].instance,

            Trampoline::ResourceNew { instance, .. }
            | Trampoline::ResourceDrop { instance, .. }
            | Trampoline::TaskReturn { instance, .. }
            | Trampoline::TaskCancel { instance }
            | Trampoline::WaitableSetNew { instance }
            | Trampoline::WaitableSetWait { instance, .. }
            | Trampoline::WaitableSetPoll { instance, .. }
            | Trampoline::WaitableSetDrop { instance }
            | Trampoline::WaitableJoin { instance }
            | Trampoline::ThreadYield { instance, .. }
            | Trampoline::ThreadSwitchTo { instance, .. }
            | Trampoline::ThreadNewIndirect { instance, .. }
            | Trampoline::ThreadSuspend { instance, .. }
            | Trampoline::ThreadResumeLater { instance }
            | Trampoline::ThreadYieldTo { instance, .. }
            | Trampoline::SubtaskDrop { instance }
            | Trampoline::SubtaskCancel { instance, .. }
            | Trampoline::ErrorContextNew { instance, .. }
            | Trampoline::ErrorContextDebugMessage { instance, .. }
            | Trampoline::ErrorContextDrop { instance, .. }
            | Trampoline::StreamNew { instance, .. }
            | Trampoline::StreamRead { instance, .. }
            | Trampoline::StreamWrite { instance, .. }
            | Trampoline::StreamCancelRead { instance, .. }
            | Trampoline::StreamCancelWrite { instance, .. }
            | Trampoline::StreamDropReadable { instance, .. }
            | Trampoline::StreamDropWritable { instance, .. }
            | Trampoline::FutureNew { instance, .. }
            | Trampoline::FutureRead { instance, .. }
            | Trampoline::FutureWrite { instance, .. }
            | Trampoline::FutureCancelRead { instance, .. }
            | Trampoline::FutureCancelWrite { instance, .. }
            | Trampoline::FutureDropReadable { instance, .. }
            | Trampoline::FutureDropWritable { instance, .. } => *instance,
        };

        self.check_may_leave_instance(instance)
    }

    fn check_may_leave_instance(&mut self, instance: RuntimeComponentInstanceIndex) {
        let vmctx = self.builder.func.dfg.block_params(self.block0)[0];

        let flags = self.builder.ins().load(
            ir::types::I32,
            ir::MemFlags::trusted(),
            vmctx,
            i32::try_from(self.offsets.instance_flags(instance)).unwrap(),
        );
        let may_leave_bit = self
            .builder
            .ins()
            .band_imm(flags, i64::from(FLAG_MAY_LEAVE));
        self.builder
            .ins()
            .trapz(may_leave_bit, TRAP_CANNOT_LEAVE_COMPONENT);
    }
}

impl ComponentCompiler for Compiler {
    fn compile_trampoline(
        &self,
        component: &ComponentTranslation,
        types: &ComponentTypesBuilder,
        key: FuncKey,
        abi: Abi,
        _tunables: &Tunables,
        symbol: &str,
    ) -> Result<CompiledFunctionBody> {
        let (abi2, trampoline_index) = key.unwrap_component_trampoline();
        debug_assert_eq!(abi, abi2);
        let sig = types[component.component.trampolines[trampoline_index]].unwrap_func();

        match abi {
            // Fall through to the trampoline compiler.
            Abi::Wasm => {}

            // Implement the array-abi trampoline in terms of calling the
            // wasm-abi trampoline.
            Abi::Array => {
                let offsets =
                    VMComponentOffsets::new(self.isa.pointer_bytes(), &component.component);
                return Ok(self.array_to_wasm_trampoline(
                    key,
                    FuncKey::ComponentTrampoline(Abi::Wasm, trampoline_index),
                    sig,
                    symbol,
                    offsets.vm_store_context(),
                    wasmtime_environ::component::VMCOMPONENT_MAGIC,
                )?);
            }

            Abi::Patchable => unreachable!(
                "We should not be compiling a patchable-ABI trampoline for a component function"
            ),
        }

        let mut compiler = self.function_compiler();
        let mut c = TrampolineCompiler::new(self, &mut compiler, &component.component, types, sig);

        // If we are crossing the Wasm-to-native boundary, we need to save the
        // exit FP and return address for stack walking purposes. However, we
        // always debug assert that our vmctx is a component context, regardless
        // of whether we are actually crossing that boundary because it should
        // always hold.
        let vmctx = c.builder.block_params(c.block0)[0];
        let pointer_type = self.isa.pointer_type();
        self.debug_assert_vmctx_kind(
            &mut c.builder,
            vmctx,
            wasmtime_environ::component::VMCOMPONENT_MAGIC,
        );
        let vm_store_context = c.builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            vmctx,
            i32::try_from(c.offsets.vm_store_context()).unwrap(),
        );
        super::save_last_wasm_exit_fp_and_pc(
            &mut c.builder,
            pointer_type,
            &c.offsets.ptr,
            vm_store_context,
        );

        c.translate(&component.trampolines[trampoline_index]);
        c.builder.finalize();
        compiler.cx.abi = Some(abi);

        Ok(CompiledFunctionBody {
            code: super::box_dyn_any_compiler_context(Some(compiler.cx)),
            needs_gc_heap: false,
        })
    }

    fn compile_intrinsic(
        &self,
        _tunables: &Tunables,
        component: &ComponentTranslation,
        types: &ComponentTypesBuilder,
        intrinsic: UnsafeIntrinsic,
        abi: Abi,
        symbol: &str,
    ) -> Result<CompiledFunctionBody> {
        let wasm_func_ty = WasmFuncType::new(
            intrinsic.core_params().into(),
            intrinsic.core_results().into(),
        );

        match abi {
            // Fall through to the trampoline compiler.
            Abi::Wasm => {}

            // Implement the array-abi trampoline in terms of calling the
            // wasm-abi trampoline.
            Abi::Array => {
                let offsets =
                    VMComponentOffsets::new(self.isa.pointer_bytes(), &component.component);
                return Ok(self.array_to_wasm_trampoline(
                    FuncKey::UnsafeIntrinsic(abi, intrinsic),
                    FuncKey::UnsafeIntrinsic(Abi::Wasm, intrinsic),
                    &wasm_func_ty,
                    symbol,
                    offsets.vm_store_context(),
                    wasmtime_environ::component::VMCOMPONENT_MAGIC,
                )?);
            }

            Abi::Patchable => {
                unreachable!(
                    "We should not be compiling a patchable trampoline for a component intrinsic"
                )
            }
        }

        let mut compiler = self.function_compiler();
        let mut c = TrampolineCompiler::new(
            self,
            &mut compiler,
            &component.component,
            &types,
            &wasm_func_ty,
        );

        match intrinsic {
            UnsafeIntrinsic::U8NativeLoad
            | UnsafeIntrinsic::U16NativeLoad
            | UnsafeIntrinsic::U32NativeLoad
            | UnsafeIntrinsic::U64NativeLoad => c.translate_load_intrinsic(intrinsic)?,
            UnsafeIntrinsic::U8NativeStore
            | UnsafeIntrinsic::U16NativeStore
            | UnsafeIntrinsic::U32NativeStore
            | UnsafeIntrinsic::U64NativeStore => c.translate_store_intrinsic(intrinsic)?,
            UnsafeIntrinsic::StoreDataAddress => {
                let [callee_vmctx, _caller_vmctx] = *c.abi_load_params() else {
                    unreachable!()
                };
                let pointer_type = self.isa.pointer_type();

                // Load the `*mut VMStoreContext` out of our vmctx.
                let store_ctx = c.builder.ins().load(
                    pointer_type,
                    ir::MemFlags::trusted()
                        .with_readonly()
                        .with_alias_region(Some(ir::AliasRegion::Vmctx))
                        .with_can_move(),
                    callee_vmctx,
                    i32::try_from(c.offsets.vm_store_context()).unwrap(),
                );

                // Load the `*mut T` out of the `VMStoreContext`.
                let data_address = c.builder.ins().load(
                    pointer_type,
                    ir::MemFlags::trusted()
                        .with_readonly()
                        .with_alias_region(Some(ir::AliasRegion::Vmctx))
                        .with_can_move(),
                    store_ctx,
                    i32::from(c.offsets.ptr.vmstore_context_store_data()),
                );

                // Zero-extend the address if we are on a 32-bit architecture.
                let data_address = match pointer_type.bits() {
                    32 => c.builder.ins().uextend(ir::types::I64, data_address),
                    64 => data_address,
                    p => bail!("unsupported architecture: no support for {p}-bit pointers"),
                };

                c.abi_store_results(&[data_address]);
            }
        }

        c.builder.finalize();
        compiler.cx.abi = Some(abi);

        Ok(CompiledFunctionBody {
            code: super::box_dyn_any_compiler_context(Some(compiler.cx)),
            needs_gc_heap: false,
        })
    }
}

macro_rules! unsafe_intrinsic_clif_params_results {
    (
        $(
            $symbol:expr => $variant:ident : $ctor:ident ( $( $param:ident : $param_ty:ident ),* ) $( -> $result_ty:ident )? ;
        )*
    ) => {
        fn unsafe_intrinsic_clif_params(intrinsic: UnsafeIntrinsic) -> &'static [ir::types::Type] {
            match intrinsic {
                $(
                    UnsafeIntrinsic::$variant => &[ $( unsafe_intrinsic_clif_params_results!(@clif_type $param_ty) ),* ],
                )*
            }
        }

        fn unsafe_intrinsic_clif_results(intrinsic: UnsafeIntrinsic) -> &'static [ir::types::Type] {
            match intrinsic {
                $(
                    UnsafeIntrinsic::$variant => &[ $( unsafe_intrinsic_clif_params_results!(@clif_type $result_ty) )? ],
                )*
            }
        }
    };

    (@clif_type u8) => { ir::types::I8 };
    (@clif_type u16) => { ir::types::I16 };
    (@clif_type u32) => { ir::types::I32 };
    (@clif_type u64) => { ir::types::I64 };
}

wasmtime_environ::for_each_unsafe_intrinsic!(unsafe_intrinsic_clif_params_results);

impl TrampolineCompiler<'_> {
    fn translate_transcode(
        &mut self,
        op: Transcode,
        from: RuntimeMemoryIndex,
        from64: bool,
        to: RuntimeMemoryIndex,
        to64: bool,
    ) {
        let pointer_type = self.isa.pointer_type();
        let vmctx = self.builder.func.dfg.block_params(self.block0)[0];

        // Determine the static signature of the host libcall for this transcode
        // operation and additionally calculate the static offset within the
        // transcode libcalls array.
        let get_libcall = match op {
            Transcode::Copy(FixedEncoding::Utf8) => host::utf8_to_utf8,
            Transcode::Copy(FixedEncoding::Utf16) => host::utf16_to_utf16,
            Transcode::Copy(FixedEncoding::Latin1) => host::latin1_to_latin1,
            Transcode::Latin1ToUtf16 => host::latin1_to_utf16,
            Transcode::Latin1ToUtf8 => host::latin1_to_utf8,
            Transcode::Utf16ToCompactProbablyUtf16 => host::utf16_to_compact_probably_utf16,
            Transcode::Utf16ToCompactUtf16 => host::utf16_to_compact_utf16,
            Transcode::Utf16ToLatin1 => host::utf16_to_latin1,
            Transcode::Utf16ToUtf8 => host::utf16_to_utf8,
            Transcode::Utf8ToCompactUtf16 => host::utf8_to_compact_utf16,
            Transcode::Utf8ToLatin1 => host::utf8_to_latin1,
            Transcode::Utf8ToUtf16 => host::utf8_to_utf16,
        };

        // Load the base pointers for the from/to linear memories.
        let from_base = self.load_runtime_memory_base(vmctx, from);
        let to_base = self.load_runtime_memory_base(vmctx, to);

        let mut args = Vec::new();
        args.push(vmctx);

        let uses_retptr = match op {
            Transcode::Utf16ToUtf8
            | Transcode::Latin1ToUtf8
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1 => true,
            _ => false,
        };

        // Most transcoders share roughly the same signature despite doing very
        // different things internally, so most libcalls are lumped together
        // here.
        match op {
            Transcode::Copy(_)
            | Transcode::Latin1ToUtf16
            | Transcode::Utf16ToCompactProbablyUtf16
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1
            | Transcode::Utf8ToUtf16 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
            }

            Transcode::Utf16ToUtf8 | Transcode::Latin1ToUtf8 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
                args.push(self.len_param(3, to64));
            }

            Transcode::Utf8ToCompactUtf16 | Transcode::Utf16ToCompactUtf16 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
                args.push(self.len_param(3, to64));
                args.push(self.len_param(4, to64));
            }
        };
        if uses_retptr {
            let slot = self
                .builder
                .func
                .create_sized_stack_slot(ir::StackSlotData::new(
                    ir::StackSlotKind::ExplicitSlot,
                    pointer_type.bytes(),
                    0,
                ));
            args.push(self.builder.ins().stack_addr(pointer_type, slot, 0));
        }
        let call = self.call_libcall(vmctx, get_libcall, &args);
        let mut results = self.builder.func.dfg.inst_results(call).to_vec();
        if uses_retptr {
            results.push(self.builder.ins().load(
                pointer_type,
                ir::MemFlags::trusted(),
                *args.last().unwrap(),
                0,
            ));
        }
        let mut raw_results = Vec::new();

        // Like the arguments the results are fairly similar across libcalls, so
        // they're lumped into various buckets here.
        match op {
            Transcode::Copy(_) | Transcode::Latin1ToUtf16 => {
                self.raise_if_host_trapped(results[0]);
            }

            Transcode::Utf8ToUtf16
            | Transcode::Utf16ToCompactProbablyUtf16
            | Transcode::Utf8ToCompactUtf16
            | Transcode::Utf16ToCompactUtf16 => {
                self.raise_if_transcode_trapped(results[0]);
                raw_results.push(self.cast_from_pointer(results[0], to64));
            }

            Transcode::Latin1ToUtf8
            | Transcode::Utf16ToUtf8
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1 => {
                self.raise_if_transcode_trapped(results[0]);
                raw_results.push(self.cast_from_pointer(results[0], from64));
                raw_results.push(self.cast_from_pointer(results[1], to64));
            }
        };

        self.builder.ins().return_(&raw_results);
    }

    // Helper function to cast an input parameter to the host pointer type.
    fn len_param(&mut self, param: usize, is64: bool) -> ir::Value {
        let val = self.builder.func.dfg.block_params(self.block0)[2 + param];
        self.cast_to_pointer(val, is64)
    }

    // Helper function to interpret an input parameter as a pointer into
    // linear memory. This will cast the input parameter to the host integer
    // type and then add that value to the base.
    //
    // Note that bounds-checking happens in adapter modules, and this
    // trampoline is simply calling the host libcall.
    fn ptr_param(&mut self, param: usize, is64: bool, base: ir::Value) -> ir::Value {
        let val = self.len_param(param, is64);
        self.builder.ins().iadd(base, val)
    }

    // Helper function to cast a core wasm input to a host pointer type
    // which will go into the host libcall.
    fn cast_to_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        let host64 = pointer_type == ir::types::I64;
        if is64 == host64 {
            val
        } else if !is64 {
            assert!(host64);
            self.builder.ins().uextend(pointer_type, val)
        } else {
            assert!(!host64);
            self.builder.ins().ireduce(pointer_type, val)
        }
    }

    // Helper to cast a host pointer integer type to the destination type.
    fn cast_from_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value {
        let host64 = self.isa.pointer_type() == ir::types::I64;
        if is64 == host64 {
            val
        } else if !is64 {
            assert!(host64);
            self.builder.ins().ireduce(ir::types::I32, val)
        } else {
            assert!(!host64);
            self.builder.ins().uextend(ir::types::I64, val)
        }
    }

    fn load_runtime_memory_base(&mut self, vmctx: ir::Value, mem: RuntimeMemoryIndex) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        let from_vmmemory_definition = self.load_memory(vmctx, mem);
        self.builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            from_vmmemory_definition,
            i32::from(self.offsets.ptr.vmmemory_definition_base()),
        )
    }

    fn translate_load_intrinsic(&mut self, intrinsic: UnsafeIntrinsic) -> Result<()> {
        // Emit code for a native-load intrinsic.
        debug_assert_eq!(intrinsic.core_params(), &[WasmValType::I64]);
        debug_assert_eq!(intrinsic.core_results().len(), 1);

        let wasm_ty = intrinsic.core_results()[0];
        let clif_ty = unsafe_intrinsic_clif_results(intrinsic)[0];

        let [_callee_vmctx, _caller_vmctx, pointer] = *self.abi_load_params() else {
            unreachable!()
        };

        // Truncate the pointer, if necessary.
        debug_assert_eq!(self.builder.func.dfg.value_type(pointer), ir::types::I64);
        let pointer = match self.isa.pointer_bits() {
            32 => self.builder.ins().ireduce(ir::types::I32, pointer),
            64 => pointer,
            p => bail!("unsupported architecture: no support for {p}-bit pointers"),
        };

        // Do the load!
        let mut value = self
            .builder
            .ins()
            .load(clif_ty, ir::MemFlags::trusted(), pointer, 0);

        // Extend the value, if necessary. When implementing the
        // `u8-native-load` intrinsic, for example, we will load a Cranelift
        // value of type `i8` but we need to extend it to an `i32` because
        // Wasm doesn't have an `i8` core value type.
        let wasm_clif_ty = crate::value_type(self.isa, wasm_ty);
        if clif_ty != wasm_clif_ty {
            assert!(clif_ty.bytes() < wasm_clif_ty.bytes());
            // NB: all of our unsafe intrinsics for native loads are
            // unsigned, so we always zero-extend.
            value = self.builder.ins().uextend(wasm_clif_ty, value);
        }

        self.abi_store_results(&[value]);
        Ok(())
    }

    fn translate_store_intrinsic(&mut self, intrinsic: UnsafeIntrinsic) -> Result<()> {
        debug_assert!(intrinsic.core_results().is_empty());
        debug_assert!(matches!(intrinsic.core_params(), [WasmValType::I64, _]));

        let wasm_ty = intrinsic.core_params()[1];
        let clif_ty = unsafe_intrinsic_clif_params(intrinsic)[1];

        let [_callee_vmctx, _caller_vmctx, pointer, mut value] = *self.abi_load_params() else {
            unreachable!()
        };

        // Truncate the pointer, if necessary.
        debug_assert_eq!(self.builder.func.dfg.value_type(pointer), ir::types::I64);
        let pointer = match self.isa.pointer_bits() {
            32 => self.builder.ins().ireduce(ir::types::I32, pointer),
            64 => pointer,
            p => bail!("unsupported architecture: no support for {p}-bit pointers"),
        };

        // Truncate the value, if necessary. For example, with
        // `u8-native-store` we will be given an `i32` from Wasm (because
        // core Wasm does not have an 8-bit integer value type) and we need
        // to reduce that into an `i8`.
        let wasm_ty = crate::value_type(self.isa, wasm_ty);
        if clif_ty != wasm_ty {
            assert!(clif_ty.bytes() < wasm_ty.bytes());
            value = self.builder.ins().ireduce(clif_ty, value);
        }

        // Do the store!
        self.builder
            .ins()
            .store(ir::MemFlags::trusted(), value, pointer, 0);

        self.abi_store_results(&[]);
        Ok(())
    }
}

/// Module with macro-generated contents that will return the signature and
/// offset for each of the host transcoder functions.
///
/// Note that a macro is used here to keep this in sync with the actual
/// transcoder functions themselves which are also defined via a macro.
mod host {
    use cranelift_codegen::ir::{self, AbiParam};
    use cranelift_codegen::isa::{CallConv, TargetIsa};
    use wasmtime_environ::component::ComponentBuiltinFunctionIndex;

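    // For a builtin declared as, say, `resource_drop(vmctx: vmctx, instance: u32,
    // ty: u32, handle: u32) -> u64` (signature shown for illustration only), the
    // macro below generates a `resource_drop` accessor that imports the matching
    // Cranelift signature into `func` and returns it alongside
    // `ComponentBuiltinFunctionIndex::resource_drop()`.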
    macro_rules! define {
        (
            $(
                $( #[$attr:meta] )*
                $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
            )*
        ) => {
            $(
                pub(super) fn $name(isa: &dyn TargetIsa, func: &mut ir::Function) -> (ir::SigRef, ComponentBuiltinFunctionIndex) {
                    let pointer_type = isa.pointer_type();
                    let sig = build_sig(
                        isa,
                        func,
                        &[$( define!(@ty pointer_type $param) ),*],
                        &[$( define!(@ty pointer_type $result) ),*],
                    );

                    return (sig, ComponentBuiltinFunctionIndex::$name())
                }
            )*
        };

        (@ty $ptr:ident size) => ($ptr);
        (@ty $ptr:ident ptr_u8) => ($ptr);
        (@ty $ptr:ident ptr_u16) => ($ptr);
        (@ty $ptr:ident ptr_size) => ($ptr);
        (@ty $ptr:ident bool) => (ir::types::I8);
        (@ty $ptr:ident u8) => (ir::types::I8);
        (@ty $ptr:ident u32) => (ir::types::I32);
        (@ty $ptr:ident u64) => (ir::types::I64);
        (@ty $ptr:ident vmctx) => ($ptr);
    }

    wasmtime_environ::foreach_builtin_component_function!(define);

    fn build_sig(
        isa: &dyn TargetIsa,
        func: &mut ir::Function,
        params: &[ir::Type],
        returns: &[ir::Type],
    ) -> ir::SigRef {
        let mut sig = ir::Signature {
            params: params.iter().map(|ty| AbiParam::new(*ty)).collect(),
            returns: returns.iter().map(|ty| AbiParam::new(*ty)).collect(),
            call_conv: CallConv::triple_default(isa.triple()),
        };

        // Once we're declaring the signature of a host function we must respect
        // the default ABI of the platform which is where argument extension of
        // params/results may come into play.
        let extension = isa.default_argument_extension();
        for arg in sig.params.iter_mut().chain(sig.returns.iter_mut()) {
            if arg.value_type.is_int() {
                arg.extension = extension;
            }
        }
        func.import_signature(sig)
    }
}