Path: blob/main/crates/environ/src/fact/trampoline.rs
3092 views
//! Low-level compilation of an fused adapter function.1//!2//! This module is tasked with the top-level `compile` function which creates a3//! single WebAssembly function which will perform the steps of the fused4//! adapter for an `AdapterData` provided. This is the "meat" of compilation5//! where the validation of the canonical ABI or similar all happens to6//! translate arguments from one module to another.7//!8//! ## Traps and their ordering9//!10//! Currently this compiler is pretty "loose" about the ordering of precisely11//! what trap happens where. The main reason for this is that to core wasm all12//! traps are the same and for fused adapters if a trap happens no intermediate13//! side effects are visible (as designed by the canonical ABI itself). For this14//! it's important to note that some of the precise choices of control flow here15//! can be somewhat arbitrary, an intentional decision.1617use crate::component::{18CanonicalAbiInfo, ComponentTypesBuilder, FLAG_MAY_LEAVE, FixedEncoding as FE, FlatType,19InterfaceType, MAX_FLAT_ASYNC_PARAMS, MAX_FLAT_PARAMS, PREPARE_ASYNC_NO_RESULT,20PREPARE_ASYNC_WITH_RESULT, START_FLAG_ASYNC_CALLEE, StringEncoding, Transcode,21TypeComponentLocalErrorContextTableIndex, TypeEnumIndex, TypeFixedLengthListIndex,22TypeFlagsIndex, TypeFutureTableIndex, TypeListIndex, TypeOptionIndex, TypeRecordIndex,23TypeResourceTableIndex, TypeResultIndex, TypeStreamTableIndex, TypeTupleIndex,24TypeVariantIndex, VariantInfo,25};26use crate::fact::signature::Signature;27use crate::fact::transcode::Transcoder;28use crate::fact::{29AdapterData, Body, Function, FunctionId, Helper, HelperLocation, HelperType,30LinearMemoryOptions, Module, Options,31};32use crate::prelude::*;33use crate::{FuncIndex, GlobalIndex, Trap};34use std::collections::HashMap;35use std::mem;36use std::ops::Range;37use wasm_encoder::{BlockType, Encode, Instruction, Instruction::*, MemArg, ValType};38use wasmtime_component_util::{DiscriminantSize, FlagsSize};3940use 
super::DataModel;4142const MAX_STRING_BYTE_LENGTH: u32 = 1 << 31;43const UTF16_TAG: u32 = 1 << 31;4445/// This value is arbitrarily chosen and should be fine to change at any time,46/// it just seemed like a halfway reasonable starting point.47const INITIAL_FUEL: usize = 1_000;4849struct Compiler<'a, 'b> {50types: &'a ComponentTypesBuilder,51module: &'b mut Module<'a>,52result: FunctionId,5354/// The encoded WebAssembly function body so far, not including locals.55code: Vec<u8>,5657/// Total number of locals generated so far.58nlocals: u32,5960/// Locals partitioned by type which are not currently in use.61free_locals: HashMap<ValType, Vec<u32>>,6263/// A heuristic which is intended to limit the size of a generated function64/// to a certain maximum to avoid generating arbitrarily large functions.65///66/// This fuel counter is decremented each time `translate` is called and67/// when fuel is entirely consumed further translations, if necessary, will68/// be done through calls to other functions in the module. 
This is intended69/// to be a heuristic to split up the main function into theoretically70/// reusable portions.71fuel: usize,7273/// Indicates whether an "enter call" should be emitted in the generated74/// function with a call to `Resource{Enter,Exit}Call` at the beginning and75/// end of the function for tracking of information related to borrowed76/// resources.77emit_resource_call: bool,78}7980pub(super) fn compile(module: &mut Module<'_>, adapter: &AdapterData) {81fn compiler<'a, 'b>(82module: &'b mut Module<'a>,83adapter: &AdapterData,84) -> (Compiler<'a, 'b>, Signature, Signature) {85let lower_sig = module.types.signature(&adapter.lower);86let lift_sig = module.types.signature(&adapter.lift);87let ty = module88.core_types89.function(&lower_sig.params, &lower_sig.results);90let result = module91.funcs92.push(Function::new(Some(adapter.name.clone()), ty));9394// If this type signature contains any borrowed resources then invocations95// of enter/exit call for resource-related metadata tracking must be used.96// It shouldn't matter whether the lower/lift signature is used here as both97// should return the same answer.98let emit_resource_call = module.types.contains_borrow_resource(&adapter.lower);99assert_eq!(100emit_resource_call,101module.types.contains_borrow_resource(&adapter.lift)102);103104(105Compiler::new(106module,107result,108lower_sig.params.len() as u32,109emit_resource_call,110),111lower_sig,112lift_sig,113)114}115116// If the lift and lower instances are equal, or if one is an ancestor of117// the other, we trap unconditionally. 
This ensures that recursive118// reentrance via an adapter is impossible.119if adapter.lift.instance == adapter.lower.instance120|| adapter.lower.ancestors.contains(&adapter.lift.instance)121|| adapter.lift.ancestors.contains(&adapter.lower.instance)122{123let (mut compiler, _, _) = compiler(module, adapter);124compiler.trap(Trap::CannotEnterComponent);125compiler.finish();126return;127}128129// This closure compiles a function to be exported to the host which host to130// lift the parameters from the caller and lower them to the callee.131//132// This allows the host to delay copying the parameters until the callee133// signals readiness by clearing its backpressure flag.134let async_start_adapter = |module: &mut Module| {135let sig = module136.types137.async_start_signature(&adapter.lower, &adapter.lift);138let ty = module.core_types.function(&sig.params, &sig.results);139let result = module.funcs.push(Function::new(140Some(format!("[async-start]{}", adapter.name)),141ty,142));143144Compiler::new(module, result, sig.params.len() as u32, false)145.compile_async_start_adapter(adapter, &sig);146147result148};149150// This closure compiles a function to be exported by the adapter module and151// called by the host to lift the results from the callee and lower them to152// the caller.153//154// Given that async-lifted exports return their results via the155// `task.return` intrinsic, the host will need to copy the results from156// callee to caller when that intrinsic is called rather than when the157// callee task fully completes (which may happen much later).158let async_return_adapter = |module: &mut Module| {159let sig = module160.types161.async_return_signature(&adapter.lower, &adapter.lift);162let ty = module.core_types.function(&sig.params, &sig.results);163let result = module.funcs.push(Function::new(164Some(format!("[async-return]{}", adapter.name)),165ty,166));167168Compiler::new(module, result, sig.params.len() as u32, 
false)169.compile_async_return_adapter(adapter, &sig);170171result172};173174match (adapter.lower.options.async_, adapter.lift.options.async_) {175(false, false) => {176// We can adapt sync->sync case with only minimal use of intrinsics,177// e.g. resource enter and exit calls as needed.178let (compiler, lower_sig, lift_sig) = compiler(module, adapter);179compiler.compile_sync_to_sync_adapter(adapter, &lower_sig, &lift_sig)180}181(true, true) => {182assert!(module.tunables.concurrency_support);183184// In the async->async case, we must compile a couple of helper functions:185//186// - `async-start`: copies the parameters from the caller to the callee187// - `async-return`: copies the result from the callee to the caller188//189// Unlike synchronous calls, the above operations are asynchronous190// and subject to backpressure. If the callee is not yet ready to191// handle a new call, the `async-start` function will not be called192// immediately. Instead, control will return to the caller,193// allowing it to do other work while waiting for this call to make194// progress. 
Once the callee indicates it is ready, `async-start`195// will be called, and sometime later (possibly after various task196// switch events), when the callee has produced a result, it will197// call `async-return` via the `task.return` intrinsic, at which198// point a `STATUS_RETURNED` event will be delivered to the caller.199let start = async_start_adapter(module);200let return_ = async_return_adapter(module);201let (compiler, lower_sig, lift_sig) = compiler(module, adapter);202compiler.compile_async_to_async_adapter(203adapter,204start,205return_,206i32::try_from(lift_sig.params.len()).unwrap(),207&lower_sig,208);209}210(false, true) => {211assert!(module.tunables.concurrency_support);212213// Like the async->async case above, for the sync->async case we214// also need `async-start` and `async-return` helper functions to215// allow the callee to asynchronously "pull" the parameters and216// "push" the results when it is ready.217//218// However, since the caller is using the synchronous ABI, the219// parameters may have been passed via the stack rather than linear220// memory. In that case, we pass them to the host to store in a221// task-local location temporarily in the case of backpressure.222// Similarly, the host will also temporarily store the results that223// the callee provides to `async-return` until it is ready to resume224// the caller.225let start = async_start_adapter(module);226let return_ = async_return_adapter(module);227let (compiler, lower_sig, lift_sig) = compiler(module, adapter);228compiler.compile_sync_to_async_adapter(229adapter,230start,231return_,232i32::try_from(lift_sig.params.len()).unwrap(),233&lower_sig,234);235}236(true, false) => {237assert!(module.tunables.concurrency_support);238239// As with the async->async and sync->async cases above, for the240// async->sync case we use `async-start` and `async-return` helper241// functions. 
Here, those functions allow the host to enforce242// backpressure in the case where the callee instance already has243// another synchronous call in progress, in which case we can't244// start a new one until the current one (and any others already245// waiting in line behind it) has completed.246//247// In the case of backpressure, we'll return control to the caller248// immediately so it can do other work. Later, once the callee is249// ready, the host will call the `async-start` function to retrieve250// the parameters and pass them to the callee. At that point, the251// callee may block on a host call, at which point the host will252// suspend the fiber it is running on and allow the caller (or any253// other ready instance) to run concurrently with the blocked254// callee. Once the callee finally returns, the host will call the255// `async-return` function to write the result to the caller's256// linear memory and deliver a `STATUS_RETURNED` event to the257// caller.258let lift_sig = module.types.signature(&adapter.lift);259let start = async_start_adapter(module);260let return_ = async_return_adapter(module);261let (compiler, lower_sig, ..) = compiler(module, adapter);262compiler.compile_async_to_sync_adapter(263adapter,264start,265return_,266i32::try_from(lift_sig.params.len()).unwrap(),267i32::try_from(lift_sig.results.len()).unwrap(),268&lower_sig,269);270}271}272}273274/// Compiles a helper function as specified by the `Helper` configuration.275///276/// This function is invoked when the translation process runs out of fuel for277/// some prior function which enqueues a helper to get translated later. 
This278/// translation function will perform one type translation as specified by279/// `Helper` which can either be in the stack or memory for each side.280pub(super) fn compile_helper(module: &mut Module<'_>, result: FunctionId, helper: Helper) {281let mut nlocals = 0;282let src_flat;283let src = match helper.src.loc {284// If the source is on the stack then it's specified in the parameters285// to the function, so this creates the flattened representation and286// then lists those as the locals with appropriate types for the source287// values.288HelperLocation::Stack => {289src_flat = module290.types291.flatten_types(&helper.src.opts, usize::MAX, [helper.src.ty])292.unwrap()293.iter()294.enumerate()295.map(|(i, ty)| (i as u32, *ty))296.collect::<Vec<_>>();297nlocals += src_flat.len() as u32;298Source::Stack(Stack {299locals: &src_flat,300opts: &helper.src.opts,301})302}303// If the source is in memory then that's just propagated here as the304// first local is the pointer to the source.305HelperLocation::Memory => {306nlocals += 1;307Source::Memory(Memory {308opts: &helper.src.opts,309addr: TempLocal::new(0, helper.src.opts.data_model.unwrap_memory().ptr()),310offset: 0,311})312}313HelperLocation::StructField | HelperLocation::ArrayElement => todo!("CM+GC"),314};315let dst_flat;316let dst = match helper.dst.loc {317// This is the same as the stack-based source although `Destination` is318// configured slightly differently.319HelperLocation::Stack => {320dst_flat = module321.types322.flatten_types(&helper.dst.opts, usize::MAX, [helper.dst.ty])323.unwrap();324Destination::Stack(&dst_flat, &helper.dst.opts)325}326// This is the same as a memory-based source but note that the address327// of the destination is passed as the final parameter to the function.328HelperLocation::Memory => {329nlocals += 1;330Destination::Memory(Memory {331opts: &helper.dst.opts,332addr: TempLocal::new(333nlocals - 1,334helper.dst.opts.data_model.unwrap_memory().ptr(),335),336offset: 
0,337})338}339HelperLocation::StructField | HelperLocation::ArrayElement => todo!("CM+GC"),340};341let mut compiler = Compiler {342types: module.types,343module,344code: Vec::new(),345nlocals,346free_locals: HashMap::new(),347result,348fuel: INITIAL_FUEL,349// This is a helper function and only the top-level function is350// responsible for emitting these intrinsic calls.351emit_resource_call: false,352};353compiler.translate(&helper.src.ty, &src, &helper.dst.ty, &dst);354compiler.finish();355}356357/// Possible ways that a interface value is represented in the core wasm358/// canonical ABI.359enum Source<'a> {360/// This value is stored on the "stack" in wasm locals.361///362/// This could mean that it's inline from the parameters to the function or363/// that after a function call the results were stored in locals and the364/// locals are the inline results.365Stack(Stack<'a>),366367/// This value is stored in linear memory described by the `Memory`368/// structure.369Memory(Memory<'a>),370371/// This value is stored in a GC struct field described by the `GcStruct`372/// structure.373#[allow(dead_code, reason = "CM+GC is still WIP")]374Struct(GcStruct<'a>),375376/// This value is stored in a GC array element described by the `GcArray`377/// structure.378#[allow(dead_code, reason = "CM+GC is still WIP")]379Array(GcArray<'a>),380}381382/// Same as `Source` but for where values are translated into.383enum Destination<'a> {384/// This value is destined for the WebAssembly stack which means that385/// results are simply pushed as we go along.386///387/// The types listed are the types that are expected to be on the stack at388/// the end of translation.389Stack(&'a [ValType], &'a Options),390391/// This value is to be placed in linear memory described by `Memory`.392Memory(Memory<'a>),393394/// This value is to be placed in a GC struct field described by the395/// `GcStruct` structure.396#[allow(dead_code, reason = "CM+GC is still 
WIP")]397Struct(GcStruct<'a>),398399/// This value is to be placed in a GC array element described by the400/// `GcArray` structure.401#[allow(dead_code, reason = "CM+GC is still WIP")]402Array(GcArray<'a>),403}404405struct Stack<'a> {406/// The locals that comprise a particular value.407///408/// The length of this list represents the flattened list of types that make409/// up the component value. Each list has the index of the local being410/// accessed as well as the type of the local itself.411locals: &'a [(u32, ValType)],412/// The lifting/lowering options for where this stack of values comes from413opts: &'a Options,414}415416/// Representation of where a value is going to be stored in linear memory.417struct Memory<'a> {418/// The lifting/lowering options with memory configuration419opts: &'a Options,420/// The index of the local that contains the base address of where the421/// storage is happening.422addr: TempLocal,423/// A "static" offset that will be baked into wasm instructions for where424/// memory loads/stores happen.425offset: u32,426}427428impl<'a> Memory<'a> {429fn mem_opts(&self) -> &'a LinearMemoryOptions {430self.opts.data_model.unwrap_memory()431}432}433434/// Representation of where a value is coming from or going to in a GC struct.435struct GcStruct<'a> {436opts: &'a Options,437// TODO: more fields to come in the future.438}439440/// Representation of where a value is coming from or going to in a GC array.441struct GcArray<'a> {442opts: &'a Options,443// TODO: more fields to come in the future.444}445446impl<'a, 'b> Compiler<'a, 'b> {447fn new(448module: &'b mut Module<'a>,449result: FunctionId,450nlocals: u32,451emit_resource_call: bool,452) -> Self {453Self {454types: module.types,455module,456result,457code: Vec::new(),458nlocals,459free_locals: HashMap::new(),460fuel: INITIAL_FUEL,461emit_resource_call,462}463}464465/// Compile an adapter function supporting an async-lowered import to an466/// async-lifted export.467///468/// This uses 
a pair of `async-prepare` and `async-start` built-in functions469/// to set up and start a subtask, respectively. `async-prepare` accepts470/// `start` and `return_` functions which copy the parameters and results,471/// respectively; the host will call the former when the callee has cleared472/// its backpressure flag and the latter when the callee has called473/// `task.return`.474fn compile_async_to_async_adapter(475mut self,476adapter: &AdapterData,477start: FunctionId,478return_: FunctionId,479param_count: i32,480lower_sig: &Signature,481) {482let start_call =483self.module484.import_async_start_call(&adapter.name, adapter.lift.options.callback, None);485486self.call_prepare(adapter, start, return_, lower_sig, false);487488// TODO: As an optimization, consider checking the backpressure flag on489// the callee instance and, if it's unset _and_ the callee uses a490// callback, translate the params and call the callee function directly491// here (and make sure `start_call` knows _not_ to call it in that case).492493// We export this function so we can pass a funcref to the host.494//495// TODO: Use a declarative element segment instead of exporting this.496self.module.exports.push((497adapter.callee.as_u32(),498format!("[adapter-callee]{}", adapter.name),499));500501self.instruction(RefFunc(adapter.callee.as_u32()));502self.instruction(I32Const(param_count));503// The result count for an async callee is either one (if there's a504// callback) or zero (if there's no callback). We conservatively use505// one here to ensure the host provides room for the result, if any.506self.instruction(I32Const(1));507self.instruction(I32Const(START_FLAG_ASYNC_CALLEE));508self.instruction(Call(start_call.as_u32()));509510self.finish()511}512513/// Invokes the `prepare_call` builtin with the provided parameters for this514/// adapter.515///516/// This is part of a async lower and/or async lift adapter. This is not517/// used for a sync->sync function call. 
This is done to create the task on518/// the host side of the runtime and such. This will notably invoke a519/// Cranelift builtin which will spill all wasm-level parameters to the520/// stack to handle variadic signatures.521///522/// Note that the `prepare_sync` parameter here configures the523/// `result_count_or_max_if_async` parameter to indicate whether this is a524/// sync or async prepare.525fn call_prepare(526&mut self,527adapter: &AdapterData,528start: FunctionId,529return_: FunctionId,530lower_sig: &Signature,531prepare_sync: bool,532) {533let prepare = self.module.import_prepare_call(534&adapter.name,535&lower_sig.params,536match adapter.lift.options.data_model {537DataModel::Gc {} => todo!("CM+GC"),538DataModel::LinearMemory(LinearMemoryOptions { memory, .. }) => memory,539},540);541542self.flush_code();543self.module.funcs[self.result]544.body545.push(Body::RefFunc(start));546self.module.funcs[self.result]547.body548.push(Body::RefFunc(return_));549self.instruction(I32Const(550i32::try_from(adapter.lower.instance.as_u32()).unwrap(),551));552self.instruction(I32Const(553i32::try_from(adapter.lift.instance.as_u32()).unwrap(),554));555self.instruction(I32Const(556i32::try_from(self.types[adapter.lift.ty].results.as_u32()).unwrap(),557));558self.instruction(I32Const(if self.types[adapter.lift.ty].async_ {5591560} else {5610562}));563self.instruction(I32Const(i32::from(564adapter.lift.options.string_encoding as u8,565)));566567// flag this as a preparation for either an async call or sync call,568// depending on `prepare_sync`569let result_types = &self.types[self.types[adapter.lower.ty].results].types;570if prepare_sync {571self.instruction(I32Const(572i32::try_from(573self.types574.flatten_types(575&adapter.lower.options,576usize::MAX,577result_types.iter().copied(),578)579.map(|v| v.len())580.unwrap_or(usize::try_from(i32::MAX).unwrap()),581)582.unwrap(),583));584} else {585if result_types.len() > 0 
{586self.instruction(I32Const(PREPARE_ASYNC_WITH_RESULT.cast_signed()));587} else {588self.instruction(I32Const(PREPARE_ASYNC_NO_RESULT.cast_signed()));589}590}591592// forward all our own arguments on to the host stub593for index in 0..lower_sig.params.len() {594self.instruction(LocalGet(u32::try_from(index).unwrap()));595}596self.instruction(Call(prepare.as_u32()));597}598599/// Compile an adapter function supporting a sync-lowered import to an600/// async-lifted export.601///602/// This uses a pair of `sync-prepare` and `sync-start` built-in functions603/// to set up and start a subtask, respectively. `sync-prepare` accepts604/// `start` and `return_` functions which copy the parameters and results,605/// respectively; the host will call the former when the callee has cleared606/// its backpressure flag and the latter when the callee has called607/// `task.return`.608fn compile_sync_to_async_adapter(609mut self,610adapter: &AdapterData,611start: FunctionId,612return_: FunctionId,613lift_param_count: i32,614lower_sig: &Signature,615) {616let start_call = self.module.import_sync_start_call(617&adapter.name,618adapter.lift.options.callback,619&lower_sig.results,620);621622self.call_prepare(adapter, start, return_, lower_sig, true);623624// TODO: As an optimization, consider checking the backpressure flag on625// the callee instance and, if it's unset _and_ the callee uses a626// callback, translate the params and call the callee function directly627// here (and make sure `start_call` knows _not_ to call it in that case).628629// We export this function so we can pass a funcref to the host.630//631// TODO: Use a declarative element segment instead of exporting this.632self.module.exports.push((633adapter.callee.as_u32(),634format!("[adapter-callee]{}", adapter.name),635));636637self.instruction(RefFunc(adapter.callee.as_u32()));638self.instruction(I32Const(lift_param_count));639self.instruction(Call(start_call.as_u32()));640641self.finish()642}643644/// Compile an 
adapter function supporting an async-lowered import to a645/// sync-lifted export.646///647/// This uses a pair of `async-prepare` and `async-start` built-in functions648/// to set up and start a subtask, respectively. `async-prepare` accepts649/// `start` and `return_` functions which copy the parameters and results,650/// respectively; the host will call the former when the callee has cleared651/// its backpressure flag and the latter when the callee has returned its652/// result(s).653fn compile_async_to_sync_adapter(654mut self,655adapter: &AdapterData,656start: FunctionId,657return_: FunctionId,658param_count: i32,659result_count: i32,660lower_sig: &Signature,661) {662let start_call =663self.module664.import_async_start_call(&adapter.name, None, adapter.lift.post_return);665666self.call_prepare(adapter, start, return_, lower_sig, false);667668// We export this function so we can pass a funcref to the host.669//670// TODO: Use a declarative element segment instead of exporting this.671self.module.exports.push((672adapter.callee.as_u32(),673format!("[adapter-callee]{}", adapter.name),674));675676self.instruction(RefFunc(adapter.callee.as_u32()));677self.instruction(I32Const(param_count));678self.instruction(I32Const(result_count));679self.instruction(I32Const(0));680self.instruction(Call(start_call.as_u32()));681682self.finish()683}684685/// Compiles a function to be exported to the host which host to lift the686/// parameters from the caller and lower them to the callee.687///688/// This allows the host to delay copying the parameters until the callee689/// signals readiness by clearing its backpressure flag.690fn compile_async_start_adapter(mut self, adapter: &AdapterData, sig: &Signature) {691let param_locals = sig692.params693.iter()694.enumerate()695.map(|(i, ty)| (i as u32, *ty))696.collect::<Vec<_>>();697698self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, false);699self.translate_params(adapter, ¶m_locals);700self.set_flag(adapter.lift.flags, 
FLAG_MAY_LEAVE, true);701702self.finish();703}704705/// Compiles a function to be exported by the adapter module and called by706/// the host to lift the results from the callee and lower them to the707/// caller.708///709/// Given that async-lifted exports return their results via the710/// `task.return` intrinsic, the host will need to copy the results from711/// callee to caller when that intrinsic is called rather than when the712/// callee task fully completes (which may happen much later).713fn compile_async_return_adapter(mut self, adapter: &AdapterData, sig: &Signature) {714let param_locals = sig715.params716.iter()717.enumerate()718.map(|(i, ty)| (i as u32, *ty))719.collect::<Vec<_>>();720721self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, false);722// Note that we pass `param_locals` as _both_ the `param_locals` and723// `result_locals` parameters to `translate_results`. That's because724// the _parameters_ to `task.return` are actually the _results_ that the725// caller is waiting for.726//727// Additionally, the host will append a return728// pointer to the end of that list before calling this adapter's729// `async-return` function if the results exceed `MAX_FLAT_RESULTS` or730// the import is lowered async, in which case `translate_results` will731// use that pointer to store the results.732self.translate_results(adapter, ¶m_locals, ¶m_locals);733self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, true);734735self.finish()736}737738/// Compile an adapter function supporting a sync-lowered import to a739/// sync-lifted export.740///741/// Unlike calls involving async-lowered imports or async-lifted exports,742/// this adapter need not involve host built-ins except possibly for743/// resource bookkeeping.744fn compile_sync_to_sync_adapter(745mut self,746adapter: &AdapterData,747lower_sig: &Signature,748lift_sig: &Signature,749) {750// Check the instance flags required for this trampoline.751//752// This inserts the initial check required by 
`canon_lower` that the753// caller instance can be left and additionally checks the754// flags on the callee if necessary whether it can be entered.755self.trap_if_not_flag(756adapter.lower.flags,757FLAG_MAY_LEAVE,758Trap::CannotLeaveComponent,759);760761let old_task_may_block = if self.module.tunables.concurrency_support {762// Save, clear, and later restore the `may_block` field.763let task_may_block = self.module.import_task_may_block();764let old_task_may_block = if self.types[adapter.lift.ty].async_ {765self.instruction(GlobalGet(task_may_block.as_u32()));766self.instruction(I32Eqz);767self.instruction(If(BlockType::Empty));768self.trap(Trap::CannotBlockSyncTask);769self.instruction(End);770None771} else {772let task_may_block = self.module.import_task_may_block();773self.instruction(GlobalGet(task_may_block.as_u32()));774let old_task_may_block = self.local_set_new_tmp(ValType::I32);775self.instruction(I32Const(0));776self.instruction(GlobalSet(task_may_block.as_u32()));777Some(old_task_may_block)778};779780// Push a task onto the current task stack.781//782// FIXME: Apply the optimizations described in #12311.783784self.instruction(I32Const(785i32::try_from(adapter.lower.instance.as_u32()).unwrap(),786));787self.instruction(I32Const(if self.types[adapter.lift.ty].async_ {7881789} else {7900791}));792self.instruction(I32Const(793i32::try_from(adapter.lift.instance.as_u32()).unwrap(),794));795let enter_sync_call = self.module.import_enter_sync_call();796self.instruction(Call(enter_sync_call.as_u32()));797798old_task_may_block799} else if self.emit_resource_call {800let enter_sync_call = self.module.import_enter_sync_call();801self.instruction(Call(enter_sync_call.as_u32()));802None803} else {804None805};806807// Perform the translation of arguments. Note that `FLAG_MAY_LEAVE` is808// cleared around this invocation for the callee as per the809// `canon_lift` definition in the spec. 
Additionally note that the810// precise ordering of traps here is not required since internal state811// is not visible to either instance and a trap will "lock down" both812// instances to no longer be visible. This means that we're free to813// reorder lifts/lowers and flags and such as is necessary and814// convenient here.815//816// TODO: if translation doesn't actually call any functions in either817// instance then there's no need to set/clear the flag here and that can818// be optimized away.819self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, false);820let param_locals = lower_sig821.params822.iter()823.enumerate()824.map(|(i, ty)| (i as u32, *ty))825.collect::<Vec<_>>();826self.translate_params(adapter, ¶m_locals);827self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, true);828829// With all the arguments on the stack the actual target function is830// now invoked. The core wasm results of the function are then placed831// into locals for result translation afterwards.832self.instruction(Call(adapter.callee.as_u32()));833let mut result_locals = Vec::with_capacity(lift_sig.results.len());834let mut temps = Vec::new();835for ty in lift_sig.results.iter().rev() {836let local = self.local_set_new_tmp(*ty);837result_locals.push((local.idx, *ty));838temps.push(local);839}840result_locals.reverse();841842// Like above during the translation of results the caller cannot be843// left (as we might invoke things like `realloc`). 
        // Again the precise
        // order of everything doesn't matter since intermediate states cannot
        // be witnessed, hence the setting of flags here to encapsulate both
        // liftings and lowerings.
        //
        // TODO: like above the management of the `MAY_LEAVE` flag can probably
        // be elided here for "simple" results.
        self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, false);
        self.translate_results(adapter, &param_locals, &result_locals);
        self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, true);

        // And finally post-return state is handled here once all results/etc
        // are all translated.
        if let Some(func) = adapter.lift.post_return {
            for (result, _) in result_locals.iter() {
                self.instruction(LocalGet(*result));
            }
            self.instruction(Call(func.as_u32()));
        }

        for tmp in temps {
            self.free_temp_local(tmp);
        }

        if self.emit_resource_call || self.module.tunables.concurrency_support {
            // Pop the task we pushed earlier off of the current task stack.
            //
            // FIXME: Apply the optimizations described in #12311.
            let exit_sync_call = self.module.import_exit_sync_call();
            self.instruction(Call(exit_sync_call.as_u32()));
        }

        if self.module.tunables.concurrency_support {
            // Restore old `may_block_field`
            if let Some(old_task_may_block) = old_task_may_block {
                let task_may_block = self.module.import_task_may_block();
                self.instruction(LocalGet(old_task_may_block.idx));
                self.instruction(GlobalSet(task_may_block.as_u32()));
                self.free_temp_local(old_task_may_block);
            }
        }

        self.finish()
    }

    // Translates the core-wasm parameters of the lowered (caller) signature
    // into the parameters expected by the lifted (callee) signature, pushing
    // the callee's core arguments onto the wasm operand stack.
    //
    // `param_locals` is the list of `(local index, type)` pairs for every
    // parameter of the adapter function being generated.
    fn translate_params(&mut self, adapter: &AdapterData, param_locals: &[(u32, ValType)]) {
        let src_tys = self.types[adapter.lower.ty].params;
        let src_tys = self.types[src_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let dst_tys = self.types[adapter.lift.ty].params;
        let dst_tys = self.types[dst_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let lift_opts = &adapter.lift.options;
        let lower_opts = &adapter.lower.options;

        // TODO: handle subtyping
        assert_eq!(src_tys.len(), dst_tys.len());

        // Async lowered functions have a smaller limit on flat parameters, but
        // their destination, a lifted function, does not have a different limit
        // than sync functions.
        let max_flat_params = if adapter.lower.options.async_ {
            MAX_FLAT_ASYNC_PARAMS
        } else {
            MAX_FLAT_PARAMS
        };
        let src_flat =
            self.types
                .flatten_types(lower_opts, max_flat_params, src_tys.iter().copied());
        let dst_flat =
            self.types
                .flatten_types(lift_opts, MAX_FLAT_PARAMS, dst_tys.iter().copied());

        let src = if let Some(flat) = &src_flat {
            Source::Stack(Stack {
                locals: &param_locals[..flat.len()],
                opts: lower_opts,
            })
        } else {
            // If there are too many parameters then that means the parameters
            // are actually a tuple stored in linear memory addressed by the
            // first parameter local.
            let lower_mem_opts = lower_opts.data_model.unwrap_memory();
            let (addr, ty) = param_locals[0];
            assert_eq!(ty, lower_mem_opts.ptr());
            let align = src_tys
                .iter()
                .map(|t| self.types.align(lower_mem_opts, t))
                .max()
                .unwrap_or(1);
            Source::Memory(self.memory_operand(lower_opts, TempLocal::new(addr, ty), align))
        };

        let dst = if let Some(flat) = &dst_flat {
            Destination::Stack(flat, lift_opts)
        } else {
            let abi = CanonicalAbiInfo::record(dst_tys.iter().map(|t| self.types.canonical_abi(t)));
            match lift_opts.data_model {
                DataModel::Gc {} => todo!("CM+GC"),
                DataModel::LinearMemory(LinearMemoryOptions { memory64, .. }) => {
                    let (size, align) = if memory64 {
                        (abi.size64, abi.align64)
                    } else {
                        (abi.size32, abi.align32)
                    };

                    // If there are too many parameters then space is allocated
                    // in the destination module for the parameters via its
                    // `realloc` function.
                    let size = MallocSize::Const(size);
                    Destination::Memory(self.malloc(lift_opts, size, align))
                }
            }
        };

        let srcs = src
            .record_field_srcs(self.types, src_tys.iter().copied())
            .zip(src_tys.iter());
        let dsts = dst
            .record_field_dsts(self.types, dst_tys.iter().copied())
            .zip(dst_tys.iter());
        for ((src, src_ty), (dst, dst_ty)) in srcs.zip(dsts) {
            self.translate(&src_ty, &src, &dst_ty, &dst);
        }

        // If the destination was linear memory instead of the stack then the
        // actual parameter that we're passing is the address of the values
        // stored, so ensure that's happening in the wasm body here.
        if let Destination::Memory(mem) = dst {
            self.instruction(LocalGet(mem.addr.idx));
            self.free_temp_local(mem.addr);
        }
    }

    // Translates the results of the lifted (callee) function, captured in
    // `result_locals`, into the results expected by the lowered (caller)
    // signature, either onto the operand stack or through the caller's
    // return pointer (the last entry of `param_locals`).
    fn translate_results(
        &mut self,
        adapter: &AdapterData,
        param_locals: &[(u32, ValType)],
        result_locals: &[(u32, ValType)],
    ) {
        let src_tys = self.types[adapter.lift.ty].results;
        let src_tys = self.types[src_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let dst_tys = self.types[adapter.lower.ty].results;
        let dst_tys = self.types[dst_tys]
            .types
            .iter()
            .copied()
            .collect::<Vec<_>>();
        let lift_opts = &adapter.lift.options;
        let lower_opts = &adapter.lower.options;

        let src_flat = self
            .types
            .flatten_lifting_types(lift_opts, src_tys.iter().copied());
        let dst_flat = self
            .types
            .flatten_lowering_types(lower_opts, dst_tys.iter().copied());

        let src = if src_flat.is_some() {
            Source::Stack(Stack {
                locals: result_locals,
                opts: lift_opts,
            })
        } else {
            // The original results to read from in this case come from the
            // return value of the function itself. The imported function will
            // return a linear memory address at which the values can be read
            // from.
            let lift_mem_opts = lift_opts.data_model.unwrap_memory();
            let align = src_tys
                .iter()
                .map(|t| self.types.align(lift_mem_opts, t))
                .max()
                .unwrap_or(1);
            assert_eq!(
                result_locals.len(),
                if lower_opts.async_ || lift_opts.async_ {
                    2
                } else {
                    1
                }
            );
            let (addr, ty) = result_locals[0];
            assert_eq!(ty, lift_opts.data_model.unwrap_memory().ptr());
            Source::Memory(self.memory_operand(lift_opts, TempLocal::new(addr, ty), align))
        };

        let dst = if let Some(flat) = &dst_flat {
            Destination::Stack(flat, lower_opts)
        } else {
            // This is slightly different than `translate_params` where the
            // return pointer was provided by the caller of this function
            // meaning the last parameter local is a pointer into linear memory.
            let lower_mem_opts = lower_opts.data_model.unwrap_memory();
            let align = dst_tys
                .iter()
                .map(|t| self.types.align(lower_mem_opts, t))
                .max()
                .unwrap_or(1);
            let (addr, ty) = *param_locals.last().expect("no retptr");
            assert_eq!(ty, lower_opts.data_model.unwrap_memory().ptr());
            Destination::Memory(self.memory_operand(lower_opts, TempLocal::new(addr, ty), align))
        };

        let srcs = src
            .record_field_srcs(self.types, src_tys.iter().copied())
            .zip(src_tys.iter());
        let dsts = dst
            .record_field_dsts(self.types, dst_tys.iter().copied())
            .zip(dst_tys.iter());
        for ((src, src_ty), (dst, dst_ty)) in srcs.zip(dsts) {
            self.translate(&src_ty, &src, &dst_ty, &dst);
        }
    }

    // Translates a single value of type `src_ty` at `src` into a value of
    // type `dst_ty` at `dst`, either inline or — when `self.fuel` is
    // exhausted — by outlining the work into a shared helper function.
    fn translate(
        &mut self,
        src_ty: &InterfaceType,
        src: &Source<'_>,
        dst_ty: &InterfaceType,
        dst: &Destination,
    ) {
        if let Source::Memory(mem) = src {
            self.assert_aligned(src_ty, mem);
        }
        if let Destination::Memory(mem) = dst {
            self.assert_aligned(dst_ty, mem);
        }

        // Calculate a cost heuristic for what the translation of this specific
        // layer of the type is going to incur. The purpose of this cost is that
        // we'll deduct it from `self.fuel` and if no fuel is remaining then
        // translation is outlined into a separate function rather than being
        // translated into this function.
        //
        // The general goal is to avoid creating an exponentially sized function
        // for a linearly sized input (the type section). By outlining helper
        // functions there will ideally be a constant set of helper functions
        // per type (to accommodate in-memory or on-stack transfers as well as
        // src/dst options) which means that each function is at most a certain
        // size and we have a linear number of functions which should guarantee
        // an overall linear size of the output.
        //
        // To implement this the current heuristic is that each layer of
        // translating a type has a cost associated with it and this cost is
        // accounted for in `self.fuel`. Some conversions are considered free as
        // they generate basically as much code as the `call` to the translation
        // function while other are considered proportionally expensive to the
        // size of the type. The hope is that some upper layers are of a type's
        // translation are all inlined into one function but bottom layers end
        // up getting outlined to separate functions. Theoretically, again this
        // is built on hopes and dreams, the outlining can be shared amongst
        // tightly-intertwined type hierarchies which will reduce the size of
        // the output module due to the helpers being used.
        //
        // This heuristic of how to split functions has changed a few times in
        // the past and this isn't necessarily guaranteed to be the final
        // iteration.
        let cost = match src_ty {
            // These types are all quite simple to load/store and equate to
            // basically the same cost of the `call` instruction to call an
            // out-of-line translation function, so give them 0 cost.
            InterfaceType::Bool
            | InterfaceType::U8
            | InterfaceType::S8
            | InterfaceType::U16
            | InterfaceType::S16
            | InterfaceType::U32
            | InterfaceType::S32
            | InterfaceType::U64
            | InterfaceType::S64
            | InterfaceType::Float32
            | InterfaceType::Float64 => 0,

            // This has a small amount of validation associated with it, so
            // give it a cost of 1.
            InterfaceType::Char => 1,

            // This has a fair bit of code behind it depending on the
            // strings/encodings in play, so arbitrarily assign it this cost.
            InterfaceType::String => 40,

            // Iteration of a loop is along the lines of the cost of a string
            // so give it the same cost
            InterfaceType::List(_) => 40,

            InterfaceType::Flags(i) => {
                let count = self.module.types[*i].names.len();
                match FlagsSize::from_count(count) {
                    FlagsSize::Size0 => 0,
                    FlagsSize::Size1 | FlagsSize::Size2 => 1,
                    FlagsSize::Size4Plus(n) => n.into(),
                }
            }

            InterfaceType::Record(i) => self.types[*i].fields.len(),
            InterfaceType::Tuple(i) => self.types[*i].types.len(),
            InterfaceType::Variant(i) => self.types[*i].cases.len(),
            InterfaceType::Enum(i) => self.types[*i].names.len(),

            // 2 cases to consider for each of these variants.
            InterfaceType::Option(_) | InterfaceType::Result(_) => 2,

            // TODO(#6696) - something nonzero, is 1 right?
            InterfaceType::Own(_)
            | InterfaceType::Borrow(_)
            | InterfaceType::Future(_)
            | InterfaceType::Stream(_)
            | InterfaceType::ErrorContext(_) => 1,
            InterfaceType::FixedLengthList(i) => self.types[*i].size as usize,
        };

        match self.fuel.checked_sub(cost) {
            // This function has enough fuel to perform the layer of translation
            // necessary for this type, so the fuel is updated in-place and
            // translation continues. Note that the recursion here is bounded by
            // the static recursion limit for all interface types as imposed
            // during the translation phase.
            Some(n) => {
                self.fuel = n;
                match src_ty {
                    InterfaceType::Bool => self.translate_bool(src, dst_ty, dst),
                    InterfaceType::U8 => self.translate_u8(src, dst_ty, dst),
                    InterfaceType::S8 => self.translate_s8(src, dst_ty, dst),
                    InterfaceType::U16 => self.translate_u16(src, dst_ty, dst),
                    InterfaceType::S16 => self.translate_s16(src, dst_ty, dst),
                    InterfaceType::U32 => self.translate_u32(src, dst_ty, dst),
                    InterfaceType::S32 => self.translate_s32(src, dst_ty, dst),
                    InterfaceType::U64 => self.translate_u64(src, dst_ty, dst),
                    InterfaceType::S64 => self.translate_s64(src, dst_ty, dst),
                    InterfaceType::Float32 => self.translate_f32(src, dst_ty, dst),
                    InterfaceType::Float64 => self.translate_f64(src, dst_ty, dst),
                    InterfaceType::Char => self.translate_char(src, dst_ty, dst),
                    InterfaceType::String => self.translate_string(src, dst_ty, dst),
                    InterfaceType::List(t) => self.translate_list(*t, src, dst_ty, dst),
                    InterfaceType::Record(t) => self.translate_record(*t, src, dst_ty, dst),
                    InterfaceType::Flags(f) => self.translate_flags(*f, src, dst_ty, dst),
                    InterfaceType::Tuple(t) => self.translate_tuple(*t, src, dst_ty, dst),
                    InterfaceType::Variant(v) => self.translate_variant(*v, src, dst_ty, dst),
                    InterfaceType::Enum(t) => self.translate_enum(*t, src, dst_ty, dst),
                    InterfaceType::Option(t) => self.translate_option(*t, src, dst_ty, dst),
                    InterfaceType::Result(t) => self.translate_result(*t, src, dst_ty, dst),
                    InterfaceType::Own(t) => self.translate_own(*t, src, dst_ty, dst),
                    InterfaceType::Borrow(t) => self.translate_borrow(*t, src, dst_ty, dst),
                    InterfaceType::Future(t) => self.translate_future(*t, src, dst_ty, dst),
                    InterfaceType::Stream(t) => self.translate_stream(*t, src, dst_ty, dst),
                    InterfaceType::ErrorContext(t) => {
                        self.translate_error_context(*t, src, dst_ty, dst)
                    }
                    InterfaceType::FixedLengthList(t) => {
                        self.translate_fixed_length_list(*t, src, dst_ty, dst);
                    }
                }
            }

            // This function does not have enough fuel left to perform this
            // layer of translation so the translation is deferred to a helper
            // function. The actual translation here is then done by marshalling
            // the src/dst into the function we're calling and then processing
            // the results.
            None => {
                let src_loc = match src {
                    // If the source is on the stack then `stack_get` is used to
                    // convert everything to the appropriate flat representation
                    // for the source type.
                    Source::Stack(stack) => {
                        for (i, ty) in stack
                            .opts
                            .flat_types(src_ty, self.types)
                            .unwrap()
                            .iter()
                            .enumerate()
                        {
                            let stack = stack.slice(i..i + 1);
                            self.stack_get(&stack, (*ty).into());
                        }
                        HelperLocation::Stack
                    }
                    // If the source is in memory then the pointer is passed
                    // through, but note that the offset must be factored in
                    // here since the translation function will start from
                    // offset 0.
                    Source::Memory(mem) => {
                        self.push_mem_addr(mem);
                        HelperLocation::Memory
                    }
                    Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
                };
                let dst_loc = match dst {
                    Destination::Stack(..) => HelperLocation::Stack,
                    Destination::Memory(mem) => {
                        self.push_mem_addr(mem);
                        HelperLocation::Memory
                    }
                    Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
                };
                // Generate a `FunctionId` corresponding to the `Helper`
                // configuration that is necessary here. This will ideally be a
                // "cache hit" and use a preexisting helper which represents
                // outlining what would otherwise be duplicate code within a
                // function to one function.
                let helper = self.module.translate_helper(Helper {
                    src: HelperType {
                        ty: *src_ty,
                        opts: *src.opts(),
                        loc: src_loc,
                    },
                    dst: HelperType {
                        ty: *dst_ty,
                        opts: *dst.opts(),
                        loc: dst_loc,
                    },
                });
                // Emit a `call` instruction which will get "relocated" to a
                // function index once translation has completely finished.
                self.flush_code();
                self.module.funcs[self.result].body.push(Body::Call(helper));

                // If the destination of the translation was on the stack then
                // the types on the stack need to be optionally converted to
                // different types (e.g. if the result here is part of a variant
                // somewhere else).
                //
                // This translation happens inline here by popping the results
                // into new locals and then using those locals to do a
                // `stack_set`.
                if let Destination::Stack(tys, opts) = dst {
                    let flat = self
                        .types
                        .flatten_types(opts, usize::MAX, [*dst_ty])
                        .unwrap();
                    assert_eq!(flat.len(), tys.len());
                    let locals = flat
                        .iter()
                        .rev()
                        .map(|ty| self.local_set_new_tmp(*ty))
                        .collect::<Vec<_>>();
                    for (ty, local) in tys.iter().zip(locals.into_iter().rev()) {
                        self.instruction(LocalGet(local.idx));
                        self.stack_set(std::slice::from_ref(ty), local.ty);
                        self.free_temp_local(local);
                    }
                }
            }
        }
    }

    // Pushes the effective address of `mem` (base local plus constant offset)
    // onto the wasm operand stack.
    fn push_mem_addr(&mut self, mem: &Memory<'_>) {
        self.instruction(LocalGet(mem.addr.idx));
        if mem.offset != 0 {
            self.ptr_uconst(mem.mem_opts(), mem.offset);
            self.ptr_add(mem.mem_opts());
        }
    }

    fn translate_bool(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::Bool));
        self.push_dst_addr(dst);

        // Booleans are canonicalized to 0 or 1 as they pass through the
        // component boundary, so use a `select` instruction to do so.
        self.instruction(I32Const(1));
        self.instruction(I32Const(0));
        match src {
            Source::Memory(mem) => self.i32_load8u(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        self.instruction(Select);

        match dst {
            Destination::Memory(mem) => self.i32_store8(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_u8(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::U8));
        self.convert_u8_mask(src, dst, 0xff);
    }

    // Loads an unsigned 8-bit value from `src`, masks it with `mask`, and
    // stores it at `dst`. The mask is skipped when the source is a memory
    // load (already zero-extended) and the mask is all-ones.
    fn convert_u8_mask(&mut self, src: &Source<'_>, dst: &Destination<'_>, mask: u8) {
        self.push_dst_addr(dst);
        let mut needs_mask = true;
        match src {
            Source::Memory(mem) => {
                self.i32_load8u(mem);
                needs_mask = mask != 0xff;
            }
            Source::Stack(stack) => {
                self.stack_get(stack, ValType::I32);
            }
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        if needs_mask {
            self.instruction(I32Const(i32::from(mask)));
            self.instruction(I32And);
        }
        match dst {
            Destination::Memory(mem) => self.i32_store8(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_s8(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::S8));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i32_load8s(mem),
            Source::Stack(stack) => {
                self.stack_get(stack, ValType::I32);
                self.instruction(I32Extend8S);
            }
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        match dst {
            Destination::Memory(mem) => self.i32_store8(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_u16(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::U16));
        self.convert_u16_mask(src, dst, 0xffff);
    }

    // 16-bit analogue of `convert_u8_mask`.
    fn convert_u16_mask(&mut self, src: &Source<'_>, dst: &Destination<'_>, mask: u16) {
        self.push_dst_addr(dst);
        let mut needs_mask = true;
        match src {
            Source::Memory(mem) => {
                self.i32_load16u(mem);
                needs_mask = mask != 0xffff;
            }
            Source::Stack(stack) => {
                self.stack_get(stack, ValType::I32);
            }
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        if needs_mask {
            self.instruction(I32Const(i32::from(mask)));
            self.instruction(I32And);
        }
        match dst {
            Destination::Memory(mem) => self.i32_store16(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_s16(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::S16));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i32_load16s(mem),
            Source::Stack(stack) => {
                self.stack_get(stack, ValType::I32);
                self.instruction(I32Extend16S);
            }
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        match dst {
            Destination::Memory(mem) => self.i32_store16(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_u32(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::U32));
        self.convert_u32_mask(src, dst, 0xffffffff)
    }

    // 32-bit analogue of `convert_u8_mask`; masking only happens when `mask`
    // is not all-ones.
    fn convert_u32_mask(&mut self, src: &Source<'_>, dst: &Destination<'_>, mask: u32) {
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i32_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        if mask != 0xffffffff {
            self.instruction(I32Const(mask as i32));
            self.instruction(I32And);
        }
        match dst {
            Destination::Memory(mem) => self.i32_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_s32(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::S32));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i32_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        match dst {
            Destination::Memory(mem) => self.i32_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_u64(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::U64));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i64_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I64),
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        match dst {
            Destination::Memory(mem) => self.i64_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I64),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_s64(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::S64));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.i64_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I64),
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        match dst {
            Destination::Memory(mem) => self.i64_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I64),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_f32(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::Float32));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.f32_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::F32),
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        match dst {
            Destination::Memory(mem) => self.f32_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::F32),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_f64(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        // TODO: subtyping
        assert!(matches!(dst_ty, InterfaceType::Float64));
        self.push_dst_addr(dst);
        match src {
            Source::Memory(mem) => self.f64_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::F64),
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        match dst {
            Destination::Memory(mem) => self.f64_store(mem),
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::F64),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    fn translate_char(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        assert!(matches!(dst_ty, InterfaceType::Char));
        match src {
            Source::Memory(mem) => self.i32_load(mem),
            Source::Stack(stack) => self.stack_get(stack, ValType::I32),
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        let local = self.local_set_new_tmp(ValType::I32);

        // This sequence is copied from the output of LLVM for:
        //
        //      pub extern "C" fn foo(x: u32) -> char {
        //          char::try_from(x)
        //              .unwrap_or_else(|_| std::arch::wasm32::unreachable())
        //      }
        //
        // Apparently this does what's required by the canonical ABI:
        //
        //    def i32_to_char(opts, i):
        //      trap_if(i >= 0x110000)
        //      trap_if(0xD800 <= i <= 0xDFFF)
        //      return chr(i)
        //
        // ... but I don't know how it works other than "well I trust LLVM"
        self.instruction(Block(BlockType::Empty));
        self.instruction(Block(BlockType::Empty));
        self.instruction(LocalGet(local.idx));
        self.instruction(I32Const(0xd800));
        self.instruction(I32Xor);
        self.instruction(I32Const(-0x110000));
        self.instruction(I32Add);
        self.instruction(I32Const(-0x10f800));
        self.instruction(I32LtU);
        self.instruction(BrIf(0));
        self.instruction(LocalGet(local.idx));
        self.instruction(I32Const(0x110000));
        self.instruction(I32Ne);
        self.instruction(BrIf(1));
        self.instruction(End);
        self.trap(Trap::InvalidChar);
        self.instruction(End);

        self.push_dst_addr(dst);
        self.instruction(LocalGet(local.idx));
        match dst {
            Destination::Memory(mem) => {
                self.i32_store(mem);
            }
            Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }

        self.free_temp_local(local);
    }

    // Translates a string value between the source and destination options,
    // dispatching on the src/dst string encodings to the appropriate
    // transcoding helper.
    fn translate_string(&mut self, src: &Source<'_>, dst_ty: &InterfaceType, dst: &Destination) {
        assert!(matches!(dst_ty, InterfaceType::String));
        let src_opts = src.opts();
        let dst_opts = dst.opts();

        let src_mem_opts = match &src_opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };
        let dst_mem_opts = match &dst_opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };

        // Load the pointer/length of this string into temporary locals. These
        // will be referenced a good deal so this just makes it easier to deal
        // with them consistently below rather than trying to reload from memory
        // for example.
        match src {
            Source::Stack(s) => {
                assert_eq!(s.locals.len(), 2);
                self.stack_get(&s.slice(0..1), src_mem_opts.ptr());
                self.stack_get(&s.slice(1..2), src_mem_opts.ptr());
            }
            Source::Memory(mem) => {
                self.ptr_load(mem);
                self.ptr_load(&mem.bump(src_mem_opts.ptr_size().into()));
            }
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        let src_len = self.local_set_new_tmp(src_mem_opts.ptr());
        let src_ptr = self.local_set_new_tmp(src_mem_opts.ptr());
        let src_str = WasmString {
            ptr: src_ptr,
            len: src_len,
            opts: src_opts,
        };

        let dst_str = match src_opts.string_encoding {
            StringEncoding::Utf8 => match dst_opts.string_encoding {
                StringEncoding::Utf8 => self.string_copy(&src_str, FE::Utf8, dst_opts, FE::Utf8),
                StringEncoding::Utf16 => self.string_utf8_to_utf16(&src_str, dst_opts),
                StringEncoding::CompactUtf16 => {
                    self.string_to_compact(&src_str, FE::Utf8, dst_opts)
                }
            },

            StringEncoding::Utf16 => {
                self.verify_aligned(src_mem_opts, src_str.ptr.idx, 2);
                match dst_opts.string_encoding {
                    StringEncoding::Utf8 => {
                        self.string_deflate_to_utf8(&src_str, FE::Utf16, dst_opts)
                    }
                    StringEncoding::Utf16 => {
                        self.string_copy(&src_str, FE::Utf16, dst_opts, FE::Utf16)
                    }
                    StringEncoding::CompactUtf16 => {
                        self.string_to_compact(&src_str, FE::Utf16, dst_opts)
                    }
                }
            }

            StringEncoding::CompactUtf16 => {
                self.verify_aligned(src_mem_opts, src_str.ptr.idx, 2);

                // Test the tag bit to see if this is a utf16 or a latin1 string
                // at runtime...
                self.instruction(LocalGet(src_str.len.idx));
                self.ptr_uconst(src_mem_opts, UTF16_TAG);
                self.ptr_and(src_mem_opts);
                self.ptr_if(src_mem_opts, BlockType::Empty);

                // In the utf16 block unset the upper bit from the length local
                // so further calculations have the right value. Afterwards the
                // string transcode proceeds assuming utf16.
                self.instruction(LocalGet(src_str.len.idx));
                self.ptr_uconst(src_mem_opts, UTF16_TAG);
                self.ptr_xor(src_mem_opts);
                self.instruction(LocalSet(src_str.len.idx));
                let s1 = match dst_opts.string_encoding {
                    StringEncoding::Utf8 => {
                        self.string_deflate_to_utf8(&src_str, FE::Utf16, dst_opts)
                    }
                    StringEncoding::Utf16 => {
                        self.string_copy(&src_str, FE::Utf16, dst_opts, FE::Utf16)
                    }
                    StringEncoding::CompactUtf16 => {
                        self.string_compact_utf16_to_compact(&src_str, dst_opts)
                    }
                };

                self.instruction(Else);

                // In the latin1 block the `src_len` local is already the number
                // of code units, so the string transcoding is all that needs to
                // happen.
                let s2 = match dst_opts.string_encoding {
                    StringEncoding::Utf16 => {
                        self.string_copy(&src_str, FE::Latin1, dst_opts, FE::Utf16)
                    }
                    StringEncoding::Utf8 => {
                        self.string_deflate_to_utf8(&src_str, FE::Latin1, dst_opts)
                    }
                    StringEncoding::CompactUtf16 => {
                        self.string_copy(&src_str, FE::Latin1, dst_opts, FE::Latin1)
                    }
                };
                // Copy the `s2` generated locals into the `s1` locals so that
                // `s1` holds the resulting pointer/length of this transcode on
                // both branches.
                self.instruction(LocalGet(s2.ptr.idx));
                self.instruction(LocalSet(s1.ptr.idx));
                self.instruction(LocalGet(s2.len.idx));
                self.instruction(LocalSet(s1.len.idx));
                self.instruction(End);
                self.free_temp_local(s2.ptr);
                self.free_temp_local(s2.len);
                s1
            }
        };

        // Store the ptr/length in the desired destination
        match dst {
            Destination::Stack(s, _) => {
                self.instruction(LocalGet(dst_str.ptr.idx));
                self.stack_set(&s[..1], dst_mem_opts.ptr());
                self.instruction(LocalGet(dst_str.len.idx));
                self.stack_set(&s[1..], dst_mem_opts.ptr());
            }
            Destination::Memory(mem) => {
                self.instruction(LocalGet(mem.addr.idx));
                self.instruction(LocalGet(dst_str.ptr.idx));
                self.ptr_store(mem);
                self.instruction(LocalGet(mem.addr.idx));
                self.instruction(LocalGet(dst_str.len.idx));
                self.ptr_store(&mem.bump(dst_mem_opts.ptr_size().into()));
            }
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }

        self.free_temp_local(src_str.ptr);
        self.free_temp_local(src_str.len);
        self.free_temp_local(dst_str.ptr);
        self.free_temp_local(dst_str.len);
    }

    // Corresponding function for `store_string_copy` in the spec.
    //
    // This performs a transcoding of the string with a one-pass copy from
    // the `src` encoding to the `dst` encoding. This is only possible for
    // fixed encodings where the first allocation is guaranteed to be an
    // appropriate fit so it's not suitable for all encodings.
    //
    // Imported host transcoding functions here take the src/dst pointers as
    // well as the number of code units in the source (which always matches
    // the number of code units in the destination). There is no return
    // value from the transcode function since the encoding should always
    // work on the first pass.
    fn string_copy<'c>(
        &mut self,
        src: &WasmString<'_>,
        src_enc: FE,
        dst_opts: &'c Options,
        dst_enc: FE,
    ) -> WasmString<'c> {
        assert!(dst_enc.width() >= src_enc.width());
        self.validate_string_length(src, dst_enc);

        let src_mem_opts = {
            match &src.opts.data_model {
                DataModel::Gc {} => todo!("CM+GC"),
                DataModel::LinearMemory(opts) => opts,
            }
        };
        let dst_mem_opts = {
            match &dst_opts.data_model {
                DataModel::Gc {} => todo!("CM+GC"),
                DataModel::LinearMemory(opts) => opts,
            }
        };

        // Calculate the source byte length given the size of each code
        // unit. Note that this shouldn't overflow given
        // `validate_string_length` above.
        let mut src_byte_len_tmp = None;
        let src_byte_len = if src_enc.width() == 1 {
            src.len.idx
        } else {
            assert_eq!(src_enc.width(), 2);
            self.instruction(LocalGet(src.len.idx));
            self.ptr_uconst(src_mem_opts, 1);
            self.ptr_shl(src_mem_opts);
            let tmp = self.local_set_new_tmp(src.opts.data_model.unwrap_memory().ptr());
            let ret = tmp.idx;
            src_byte_len_tmp = Some(tmp);
            ret
        };

        // Convert the source code units length to the destination byte
        // length type.
        self.convert_src_len_to_dst(
            src.len.idx,
            src.opts.data_model.unwrap_memory().ptr(),
            dst_opts.data_model.unwrap_memory().ptr(),
        );
        let dst_len = self.local_tee_new_tmp(dst_opts.data_model.unwrap_memory().ptr());
        if dst_enc.width() > 1 {
            assert_eq!(dst_enc.width(), 2);
            self.ptr_uconst(dst_mem_opts, 1);
            self.ptr_shl(dst_mem_opts);
        }
        let dst_byte_len = self.local_set_new_tmp(dst_opts.data_model.unwrap_memory().ptr());

        // Allocate space in the destination using the calculated byte
        // length.
        let dst = {
            let dst_mem = self.malloc(
                dst_opts,
                MallocSize::Local(dst_byte_len.idx),
                dst_enc.width().into(),
            );
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        // Validate that `src_len + src_ptr` and
        // `dst_mem.addr_local + dst_byte_len` are both in-bounds. This
        // is done by loading the last byte of the string and if that
        // doesn't trap then it's known valid.
        self.validate_string_inbounds(src, src_byte_len);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        // If the validations pass then the host `transcode` intrinsic
        // is invoked. This will either raise a trap or otherwise succeed
        // in which case we're done.
        let op = if src_enc == dst_enc {
            Transcode::Copy(src_enc)
        } else {
            assert_eq!(src_enc, FE::Latin1);
            assert_eq!(dst_enc, FE::Utf16);
            Transcode::Latin1ToUtf16
        };
        let transcode = self.transcoder(src, &dst, op);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(Call(transcode.as_u32()));

        self.free_temp_local(dst_byte_len);
        if let Some(tmp) = src_byte_len_tmp {
            self.free_temp_local(tmp);
        }

        dst
    }
    // Corresponding function for `store_string_to_utf8` in the spec.
    //
    // This translation works by possibly performing a number of
    // reallocations. First a buffer of size input-code-units is used to try
    // to get the transcoding correct on the first try. If that fails the
    // maximum worst-case size is used and then that is resized down if it's
    // too large.
    //
    // The host transcoding function imported here will receive src ptr/len
    // and dst ptr/len and return how many code units were consumed on both
    // sides. The amount of code units consumed in the source dictates which
    // branches are taken in this conversion.
    fn string_deflate_to_utf8<'c>(
        &mut self,
        src: &WasmString<'_>,
        src_enc: FE,
        dst_opts: &'c Options,
    ) -> WasmString<'c> {
        let src_mem_opts = match &src.opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };
        let dst_mem_opts = match &dst_opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };

        self.validate_string_length(src, src_enc);

        // Optimistically assume that the code unit length of the source is
        // all that's needed in the destination.
Perform that allocation1875// here and proceed to transcoding below.1876self.convert_src_len_to_dst(1877src.len.idx,1878src.opts.data_model.unwrap_memory().ptr(),1879dst_opts.data_model.unwrap_memory().ptr(),1880);1881let dst_len = self.local_tee_new_tmp(dst_opts.data_model.unwrap_memory().ptr());1882let dst_byte_len = self.local_set_new_tmp(dst_opts.data_model.unwrap_memory().ptr());18831884let dst = {1885let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 1);1886WasmString {1887ptr: dst_mem.addr,1888len: dst_len,1889opts: dst_opts,1890}1891};18921893// Ensure buffers are all in-bounds1894let mut src_byte_len_tmp = None;1895let src_byte_len = match src_enc {1896FE::Latin1 => src.len.idx,1897FE::Utf16 => {1898self.instruction(LocalGet(src.len.idx));1899self.ptr_uconst(src_mem_opts, 1);1900self.ptr_shl(src_mem_opts);1901let tmp = self.local_set_new_tmp(src.opts.data_model.unwrap_memory().ptr());1902let ret = tmp.idx;1903src_byte_len_tmp = Some(tmp);1904ret1905}1906FE::Utf8 => unreachable!(),1907};1908self.validate_string_inbounds(src, src_byte_len);1909self.validate_string_inbounds(&dst, dst_byte_len.idx);19101911// Perform the initial transcode1912let op = match src_enc {1913FE::Latin1 => Transcode::Latin1ToUtf8,1914FE::Utf16 => Transcode::Utf16ToUtf8,1915FE::Utf8 => unreachable!(),1916};1917let transcode = self.transcoder(src, &dst, op);1918self.instruction(LocalGet(src.ptr.idx));1919self.instruction(LocalGet(src.len.idx));1920self.instruction(LocalGet(dst.ptr.idx));1921self.instruction(LocalGet(dst_byte_len.idx));1922self.instruction(Call(transcode.as_u32()));1923self.instruction(LocalSet(dst.len.idx));1924let src_len_tmp = self.local_set_new_tmp(src.opts.data_model.unwrap_memory().ptr());19251926// Test if the source was entirely transcoded by comparing1927// `src_len_tmp`, the number of code units transcoded from the1928// source, with `src_len`, the original number of code 
units.1929self.instruction(LocalGet(src_len_tmp.idx));1930self.instruction(LocalGet(src.len.idx));1931self.ptr_ne(src_mem_opts);1932self.instruction(If(BlockType::Empty));19331934// Here a worst-case reallocation is performed to grow `dst_mem`.1935// In-line a check is also performed that the worst-case byte size1936// fits within the maximum size of strings.1937self.instruction(LocalGet(dst.ptr.idx)); // old_ptr1938self.instruction(LocalGet(dst_byte_len.idx)); // old_size1939self.ptr_uconst(dst_mem_opts, 1); // align1940let factor = match src_enc {1941FE::Latin1 => 2,1942FE::Utf16 => 3,1943_ => unreachable!(),1944};1945self.validate_string_length_u8(src, factor);1946self.convert_src_len_to_dst(1947src.len.idx,1948src.opts.data_model.unwrap_memory().ptr(),1949dst_opts.data_model.unwrap_memory().ptr(),1950);1951self.ptr_uconst(dst_mem_opts, factor.into());1952self.ptr_mul(dst_mem_opts);1953self.instruction(LocalTee(dst_byte_len.idx));1954self.instruction(Call(dst_mem_opts.realloc.unwrap().as_u32()));1955self.instruction(LocalSet(dst.ptr.idx));19561957// Verify that the destination is still in-bounds1958self.validate_string_inbounds(&dst, dst_byte_len.idx);19591960// Perform another round of transcoding that should be guaranteed1961// to succeed. 
Note that all the parameters here are offset by the1962// results of the first transcoding to only perform the remaining1963// transcode on the final units.1964self.instruction(LocalGet(src.ptr.idx));1965self.instruction(LocalGet(src_len_tmp.idx));1966if let FE::Utf16 = src_enc {1967self.ptr_uconst(src_mem_opts, 1);1968self.ptr_shl(src_mem_opts);1969}1970self.ptr_add(src_mem_opts);1971self.instruction(LocalGet(src.len.idx));1972self.instruction(LocalGet(src_len_tmp.idx));1973self.ptr_sub(src_mem_opts);1974self.instruction(LocalGet(dst.ptr.idx));1975self.instruction(LocalGet(dst.len.idx));1976self.ptr_add(dst_mem_opts);1977self.instruction(LocalGet(dst_byte_len.idx));1978self.instruction(LocalGet(dst.len.idx));1979self.ptr_sub(dst_mem_opts);1980self.instruction(Call(transcode.as_u32()));19811982// Add the second result, the amount of destination units encoded,1983// to `dst_len` so it's an accurate reflection of the final size of1984// the destination buffer.1985self.instruction(LocalGet(dst.len.idx));1986self.ptr_add(dst_mem_opts);1987self.instruction(LocalSet(dst.len.idx));19881989// In debug mode verify the first result consumed the entire string,1990// otherwise simply discard it.1991if self.module.tunables.debug_adapter_modules {1992self.instruction(LocalGet(src.len.idx));1993self.instruction(LocalGet(src_len_tmp.idx));1994self.ptr_sub(src_mem_opts);1995self.ptr_ne(src_mem_opts);1996self.instruction(If(BlockType::Empty));1997self.trap(Trap::DebugAssertStringEncodingFinished);1998self.instruction(End);1999} else {2000self.instruction(Drop);2001}20022003// Perform a downsizing if the worst-case size was too large2004self.instruction(LocalGet(dst.len.idx));2005self.instruction(LocalGet(dst_byte_len.idx));2006self.ptr_ne(dst_mem_opts);2007self.instruction(If(BlockType::Empty));2008self.instruction(LocalGet(dst.ptr.idx)); // old_ptr2009self.instruction(LocalGet(dst_byte_len.idx)); // old_size2010self.ptr_uconst(dst_mem_opts, 1); // 
align2011self.instruction(LocalGet(dst.len.idx)); // new_size2012self.instruction(Call(dst_mem_opts.realloc.unwrap().as_u32()));2013self.instruction(LocalSet(dst.ptr.idx));2014self.instruction(End);20152016// If the first transcode was enough then assert that the returned2017// amount of destination items written equals the byte size.2018if self.module.tunables.debug_adapter_modules {2019self.instruction(Else);20202021self.instruction(LocalGet(dst.len.idx));2022self.instruction(LocalGet(dst_byte_len.idx));2023self.ptr_ne(dst_mem_opts);2024self.instruction(If(BlockType::Empty));2025self.trap(Trap::DebugAssertStringEncodingFinished);2026self.instruction(End);2027}20282029self.instruction(End); // end of "first transcode not enough"20302031self.free_temp_local(src_len_tmp);2032self.free_temp_local(dst_byte_len);2033if let Some(tmp) = src_byte_len_tmp {2034self.free_temp_local(tmp);2035}20362037dst2038}20392040// Corresponds to the `store_utf8_to_utf16` function in the spec.2041//2042// When converting utf-8 to utf-16 a pessimistic allocation is2043// done which is twice the byte length of the utf-8 string.2044// The host then transcodes and returns how many code units were2045// actually used during the transcoding and if it's beneath the2046// pessimistic maximum then the buffer is reallocated down to2047// a smaller amount.2048//2049// The host-imported transcoding function takes the src/dst pointer as2050// well as the code unit size of both the source and destination. 
The2051// destination should always be big enough to hold the result of the2052// transcode and so the result of the host function is how many code2053// units were written to the destination.2054fn string_utf8_to_utf16<'c>(2055&mut self,2056src: &WasmString<'_>,2057dst_opts: &'c Options,2058) -> WasmString<'c> {2059let src_mem_opts = match &src.opts.data_model {2060DataModel::Gc {} => todo!("CM+GC"),2061DataModel::LinearMemory(opts) => opts,2062};2063let dst_mem_opts = match &dst_opts.data_model {2064DataModel::Gc {} => todo!("CM+GC"),2065DataModel::LinearMemory(opts) => opts,2066};20672068self.validate_string_length(src, FE::Utf16);2069self.convert_src_len_to_dst(2070src.len.idx,2071src_mem_opts.ptr(),2072dst_opts.data_model.unwrap_memory().ptr(),2073);2074let dst_len = self.local_tee_new_tmp(dst_opts.data_model.unwrap_memory().ptr());2075self.ptr_uconst(dst_mem_opts, 1);2076self.ptr_shl(dst_mem_opts);2077let dst_byte_len = self.local_set_new_tmp(dst_opts.data_model.unwrap_memory().ptr());2078let dst = {2079let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 2);2080WasmString {2081ptr: dst_mem.addr,2082len: dst_len,2083opts: dst_opts,2084}2085};20862087self.validate_string_inbounds(src, src.len.idx);2088self.validate_string_inbounds(&dst, dst_byte_len.idx);20892090let transcode = self.transcoder(src, &dst, Transcode::Utf8ToUtf16);2091self.instruction(LocalGet(src.ptr.idx));2092self.instruction(LocalGet(src.len.idx));2093self.instruction(LocalGet(dst.ptr.idx));2094self.instruction(Call(transcode.as_u32()));2095self.instruction(LocalSet(dst.len.idx));20962097// If the number of code units returned by transcode is not2098// equal to the original number of code units then2099// the buffer must be shrunk.2100//2101// Note that the byte length of the final allocation we2102// want is twice the code unit length returned by the2103// transcoding function.2104self.convert_src_len_to_dst(src.len.idx, src_mem_opts.ptr(), 
dst_mem_opts.ptr());
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_ne(dst_mem_opts);
        self.instruction(If(BlockType::Empty));
        // Shrink-to-fit realloc: old_ptr / old_size / align(2) /
        // new_size = written code units * 2.
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(LocalGet(dst_byte_len.idx));
        self.ptr_uconst(dst_mem_opts, 2);
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_uconst(dst_mem_opts, 1);
        self.ptr_shl(dst_mem_opts);
        self.instruction(Call(match dst.opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(LinearMemoryOptions { realloc, .. }) => {
                realloc.unwrap().as_u32()
            }
        }));
        self.instruction(LocalSet(dst.ptr.idx));
        self.instruction(End); // end of shrink-to-fit

        self.free_temp_local(dst_byte_len);

        dst
    }

    // Corresponds to `store_probably_utf16_to_latin1_or_utf16` in the spec.
    //
    // This will try to transcode the input utf16 string to utf16 in the
    // destination. If utf16 isn't needed though and latin1 could be used
    // then that's used instead and a reallocation to downsize occurs
    // afterwards.
    //
    // The host transcode function here will take the src/dst pointers as
    // well as src length. The destination byte length is twice the src code
    // unit length. The return value is the tagged length of the returned
    // string. If the upper bit is set then utf16 was used and the
    // conversion is done. If the upper bit is not set then latin1 was used
    // and a downsizing needs to happen.
    fn string_compact_utf16_to_compact<'c>(
        &mut self,
        src: &WasmString<'_>,
        dst_opts: &'c Options,
    ) -> WasmString<'c> {
        let src_mem_opts = match &src.opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };
        let dst_mem_opts = match &dst_opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };

        self.validate_string_length(src, FE::Utf16);
        self.convert_src_len_to_dst(src.len.idx, src_mem_opts.ptr(), dst_mem_opts.ptr());
        let dst_len = self.local_tee_new_tmp(dst_mem_opts.ptr());
        // Destination byte length is twice the source code unit length.
        self.ptr_uconst(dst_mem_opts, 1);
        self.ptr_shl(dst_mem_opts);
        let dst_byte_len = self.local_set_new_tmp(dst_mem_opts.ptr());
        let dst = {
            let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 2);
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        // Convert the destination byte length back into the source's
        // pointer width for the source bounds check below.
        self.convert_src_len_to_dst(
            dst_byte_len.idx,
            dst.opts.data_model.unwrap_memory().ptr(),
            src_mem_opts.ptr(),
        );
        let src_byte_len = self.local_set_new_tmp(src_mem_opts.ptr());

        self.validate_string_inbounds(src, src_byte_len.idx);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        let transcode = self.transcoder(src, &dst, Transcode::Utf16ToCompactProbablyUtf16);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(Call(transcode.as_u32()));
        self.instruction(LocalSet(dst.len.idx));

        // Assert that the untagged code unit length is the same as the
        // source code unit length.
        if self.module.tunables.debug_adapter_modules {
            self.instruction(LocalGet(dst.len.idx));
            self.ptr_uconst(dst_mem_opts, !UTF16_TAG);
            self.ptr_and(dst_mem_opts);
            self.convert_src_len_to_dst(src.len.idx, src_mem_opts.ptr(),
dst_mem_opts.ptr());
            self.ptr_ne(dst_mem_opts);
            self.instruction(If(BlockType::Empty));
            self.trap(Trap::DebugAssertEqualCodeUnits);
            self.instruction(End);
        }

        // If the UTF16_TAG is set then utf16 was used and the destination
        // should be appropriately sized. Bail out of the "is this string
        // empty" block and fall through otherwise to resizing.
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_uconst(dst_mem_opts, UTF16_TAG);
        self.ptr_and(dst_mem_opts);
        self.ptr_br_if(dst_mem_opts, 0);

        // Here `realloc` is used to downsize the string
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst_mem_opts, 2); // align
        self.instruction(LocalGet(dst.len.idx)); // new_size
        self.instruction(Call(dst_mem_opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));

        self.free_temp_local(dst_byte_len);
        self.free_temp_local(src_byte_len);

        dst
    }

    // Corresponds to `store_string_to_latin1_or_utf16` in the spec.
    //
    // This will attempt a first pass of transcoding to latin1 and on
    // failure a larger buffer is allocated for utf16 and then utf16 is
    // encoded in-place into the buffer. After either latin1 or utf16 the
    // buffer is then resized to fit the final string allocation.
    fn string_to_compact<'c>(
        &mut self,
        src: &WasmString<'_>,
        src_enc: FE,
        dst_opts: &'c Options,
    ) -> WasmString<'c> {
        let src_mem_opts = match &src.opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };
        let dst_mem_opts = match &dst_opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };

        self.validate_string_length(src, src_enc);
        self.convert_src_len_to_dst(src.len.idx, src_mem_opts.ptr(), dst_mem_opts.ptr());
        let dst_len = self.local_tee_new_tmp(dst_mem_opts.ptr());
        let dst_byte_len = self.local_set_new_tmp(dst_mem_opts.ptr());
        let dst = {
            let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), 2);
            WasmString {
                ptr: dst_mem.addr,
                len: dst_len,
                opts: dst_opts,
            }
        };

        self.validate_string_inbounds(src, src.len.idx);
        self.validate_string_inbounds(&dst, dst_byte_len.idx);

        // Perform the initial latin1 transcode. This returns the number of
        // source code units consumed and the number of destination code
        // units (bytes) written.
        let (latin1, utf16) = match src_enc {
            FE::Utf8 => (Transcode::Utf8ToLatin1, Transcode::Utf8ToCompactUtf16),
            FE::Utf16 => (Transcode::Utf16ToLatin1, Transcode::Utf16ToCompactUtf16),
            FE::Latin1 => unreachable!(),
        };
        let transcode_latin1 = self.transcoder(src, &dst, latin1);
        let transcode_utf16 = self.transcoder(src, &dst, utf16);
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(dst.ptr.idx));
        self.instruction(Call(transcode_latin1.as_u32()));
        self.instruction(LocalSet(dst.len.idx));
        let src_len_tmp = self.local_set_new_tmp(src_mem_opts.ptr());

        // If the source was entirely consumed then the transcode completed
        // and all that's necessary is to optionally shrink the buffer.
        self.instruction(LocalGet(src_len_tmp.idx));
        self.instruction(LocalGet(src.len.idx));
        self.ptr_eq(src_mem_opts);
        self.instruction(If(BlockType::Empty)); // if latin1-or-utf16 block

        // Test if the original byte length of the allocation is the same as
        // the number of written bytes, and if not then shrink the buffer
        // with a call to `realloc`.
        self.instruction(LocalGet(dst_byte_len.idx));
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_ne(dst_mem_opts);
        self.instruction(If(BlockType::Empty));
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst_mem_opts, 2); // align
        self.instruction(LocalGet(dst.len.idx)); // new_size
        self.instruction(Call(dst_mem_opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));
        self.instruction(End);

        // In this block the latin1 encoding failed. The host transcode
        // returned how many units were consumed from the source and how
        // many bytes were written to the destination. Here the buffer is
        // inflated and sized and the second utf16 intrinsic is invoked to
        // perform the final inflation.
        self.instruction(Else); // else latin1-or-utf16 block

        // For utf8 validate that the inflated size is still within bounds.
        if src_enc.width() == 1 {
            self.validate_string_length_u8(src, 2);
        }

        // Reallocate the buffer with twice the source code units in byte
        // size.
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst_mem_opts, 2); // align
        self.convert_src_len_to_dst(src.len.idx, src_mem_opts.ptr(), dst_mem_opts.ptr());
        self.ptr_uconst(dst_mem_opts, 1);
        self.ptr_shl(dst_mem_opts);
        self.instruction(LocalTee(dst_byte_len.idx));
        self.instruction(Call(dst_mem_opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));

        // Call the host utf16 transcoding function. This will inflate the
        // prior latin1 bytes and then encode the rest of the source string
        // as utf16 into the remaining space in the destination buffer.
        self.instruction(LocalGet(src.ptr.idx));
        self.instruction(LocalGet(src_len_tmp.idx));
        if let FE::Utf16 = src_enc {
            self.ptr_uconst(src_mem_opts, 1);
            self.ptr_shl(src_mem_opts);
        }
        self.ptr_add(src_mem_opts);
        self.instruction(LocalGet(src.len.idx));
        self.instruction(LocalGet(src_len_tmp.idx));
        self.ptr_sub(src_mem_opts);
        self.instruction(LocalGet(dst.ptr.idx));
        self.convert_src_len_to_dst(src.len.idx, src_mem_opts.ptr(), dst_mem_opts.ptr());
        self.instruction(LocalGet(dst.len.idx));
        self.instruction(Call(transcode_utf16.as_u32()));
        self.instruction(LocalSet(dst.len.idx));

        // If the returned number of code units written to the destination
        // is not equal to the size of the allocation then the allocation is
        // resized down to the appropriate size.
        //
        // Note that the byte size desired is `2*dst_len` and the current
        // byte buffer size is `2*src_len` so the `2` factor isn't checked
        // here, just the lengths.
        self.instruction(LocalGet(dst.len.idx));
        self.convert_src_len_to_dst(src.len.idx, src_mem_opts.ptr(), dst_mem_opts.ptr());
        self.ptr_ne(dst_mem_opts);
        self.instruction(If(BlockType::Empty));
        self.instruction(LocalGet(dst.ptr.idx)); // old_ptr
        self.instruction(LocalGet(dst_byte_len.idx)); // old_size
        self.ptr_uconst(dst_mem_opts, 2); // align
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_uconst(dst_mem_opts, 1);
        self.ptr_shl(dst_mem_opts);
        self.instruction(Call(dst_mem_opts.realloc.unwrap().as_u32()));
        self.instruction(LocalSet(dst.ptr.idx));
        self.instruction(End);

        // Tag the returned pointer as utf16
        self.instruction(LocalGet(dst.len.idx));
        self.ptr_uconst(dst_mem_opts, UTF16_TAG);
        self.ptr_or(dst_mem_opts);
        self.instruction(LocalSet(dst.len.idx));

        self.instruction(End); // end latin1-or-utf16 block

        self.free_temp_local(src_len_tmp);
        self.free_temp_local(dst_byte_len);

        dst
    }

    // Validates the source string's length against the byte width of the
    // destination encoding; delegates to `validate_string_length_u8`.
    fn validate_string_length(&mut self, src: &WasmString<'_>, dst: FE) {
        self.validate_string_length_u8(src, dst.width())
    }

    fn validate_string_length_u8(&mut self, s: &WasmString<'_>, dst: u8) {
        let mem_opts = match &s.opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };

        // Check to see if the source byte length is out of bounds in
        // which case a trap is generated.
        self.instruction(LocalGet(s.len.idx));
        let max = MAX_STRING_BYTE_LENGTH / u32::from(dst);
        self.ptr_uconst(mem_opts, max);
        self.ptr_ge_u(mem_opts);
        self.instruction(If(BlockType::Empty));
        self.trap(Trap::StringOutOfBounds);
        self.instruction(End);
    }

    // Imports (or reuses) the host transcoding intrinsic for `op` between
    // the source and destination string's memories.
    fn transcoder(
        &mut self,
        src: &WasmString<'_>,
        dst: &WasmString<'_>,
        op: Transcode,
    ) -> FuncIndex {
        match (src.opts.data_model, dst.opts.data_model) {
            (DataModel::Gc {}, _) |
(_, DataModel::Gc {}) => {
                todo!("CM+GC")
            }
            (
                DataModel::LinearMemory(LinearMemoryOptions {
                    memory64: src64,
                    memory: src_mem,
                    realloc: _,
                }),
                DataModel::LinearMemory(LinearMemoryOptions {
                    memory64: dst64,
                    memory: dst_mem,
                    realloc: _,
                }),
            ) => self.module.import_transcoder(Transcoder {
                from_memory: src_mem.unwrap(),
                from_memory64: src64,
                to_memory: dst_mem.unwrap(),
                to_memory64: dst64,
                op,
            }),
        }
    }

    // Bounds-checks the string `s` (ptr + byte_len) against its linear
    // memory, trapping with `StringOutOfBounds` on failure.
    fn validate_string_inbounds(&mut self, s: &WasmString<'_>, byte_len: u32) {
        match &s.opts.data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => {
                self.validate_memory_inbounds(opts, s.ptr.idx, byte_len, Trap::StringOutOfBounds)
            }
        }
    }

    fn validate_memory_inbounds(
        &mut self,
        opts: &LinearMemoryOptions,
        ptr_local: u32,
        byte_len_local: u32,
        trap: Trap,
    ) {
        let extend_to_64 = |me: &mut Self| {
            if !opts.memory64 {
                me.instruction(I64ExtendI32U);
            }
        };

        // Two nested blocks: branching to depth 0 lands on the trap below
        // (failure), branching to depth 1 skips past it (success).
        self.instruction(Block(BlockType::Empty));
        self.instruction(Block(BlockType::Empty));

        // Calculate the full byte size of memory with `memory.size`. Note that
        // arithmetic here is done always in 64-bits to accommodate 4G memories.
        // Additionally it's assumed that 64-bit memories never fill up
        // entirely.
        self.instruction(MemorySize(opts.memory.unwrap().as_u32()));
        extend_to_64(self);
        self.instruction(I64Const(16));
        self.instruction(I64Shl);

        // Calculate the end address of the string. This is done by adding the
        // base pointer to the byte length. For 32-bit memories there's no need
        // to check for overflow since everything is extended to 64-bit, but for
        // 64-bit memories overflow is checked.
        self.instruction(LocalGet(ptr_local));
        extend_to_64(self);
        self.instruction(LocalGet(byte_len_local));
        extend_to_64(self);
        self.instruction(I64Add);
        if opts.memory64 {
            // Overflow check: if `ptr + len < ptr` the addition wrapped.
            let tmp = self.local_tee_new_tmp(ValType::I64);
            self.instruction(LocalGet(ptr_local));
            self.ptr_lt_u(opts);
            self.instruction(BrIf(0));
            self.instruction(LocalGet(tmp.idx));
            self.free_temp_local(tmp);
        }

        // If the byte size of memory is greater than the final address of the
        // string then the string is invalid. Note that if it's precisely equal
        // then that's ok.
        self.instruction(I64GeU);
        self.instruction(BrIf(1));

        self.instruction(End);
        self.trap(trap);
        self.instruction(End);
    }

    fn translate_list(
        &mut self,
        src_ty: TypeListIndex,
        src: &Source<'_>,
        dst_ty: &InterfaceType,
        dst: &Destination,
    ) {
        let src_mem_opts = match &src.opts().data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };
        let dst_mem_opts = match &dst.opts().data_model {
            DataModel::Gc {} => todo!("CM+GC"),
            DataModel::LinearMemory(opts) => opts,
        };

        let src_element_ty = &self.types[src_ty].element;
        let dst_element_ty = match dst_ty {
            InterfaceType::List(r) => &self.types[*r].element,
            _ => panic!("expected a list"),
        };
        let src_opts = src.opts();
        let dst_opts = dst.opts();
        let (src_size, src_align) = self.types.size_align(src_mem_opts, src_element_ty);
        let (dst_size, dst_align) = self.types.size_align(dst_mem_opts, dst_element_ty);

        // Load the pointer/length of this list into temporary locals. These
        // will be referenced a good deal so this just makes it easier to deal
        // with them consistently below rather than trying to reload from memory
        // for example.
        match src {
            Source::Stack(s) => {
                assert_eq!(s.locals.len(), 2);
                self.stack_get(&s.slice(0..1), src_mem_opts.ptr());
                self.stack_get(&s.slice(1..2), src_mem_opts.ptr());
            }
            Source::Memory(mem) => {
                self.ptr_load(mem);
                self.ptr_load(&mem.bump(src_mem_opts.ptr_size().into()));
            }
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
        let src_len = self.local_set_new_tmp(src_mem_opts.ptr());
        let src_ptr = self.local_set_new_tmp(src_mem_opts.ptr());

        // Create a `Memory` operand which will internally assert that the
        // `src_ptr` value is properly aligned.
        let src_mem = self.memory_operand(src_opts, src_ptr, src_align);

        // Calculate the source/destination byte lengths into unique locals.
        let src_byte_len = self.calculate_list_byte_len(src_mem_opts, src_len.idx, src_size);
        let dst_byte_len = if src_size == dst_size {
            // Same element size: the byte length carries over directly.
            self.convert_src_len_to_dst(src_byte_len.idx, src_mem_opts.ptr(), dst_mem_opts.ptr());
            self.local_set_new_tmp(dst_mem_opts.ptr())
        } else if src_mem_opts.ptr() == dst_mem_opts.ptr() {
            self.calculate_list_byte_len(dst_mem_opts, src_len.idx, dst_size)
        } else {
            self.convert_src_len_to_dst(src_byte_len.idx, src_mem_opts.ptr(), dst_mem_opts.ptr());
            let tmp = self.local_set_new_tmp(dst_mem_opts.ptr());
            let ret = self.calculate_list_byte_len(dst_mem_opts, tmp.idx, dst_size);
            self.free_temp_local(tmp);
            ret
        };

        // Here `realloc` is invoked (in a `malloc`-like fashion) to allocate
        // space for the list in the destination memory. This will also
        // internally insert checks that the returned pointer is aligned
        // correctly for the destination.
        let dst_mem = self.malloc(dst_opts, MallocSize::Local(dst_byte_len.idx), dst_align);

        // With all the pointers and byte lengths verity that both the source
        // and the destination buffers are in-bounds.
        self.validate_memory_inbounds(
            src_mem_opts,
            src_mem.addr.idx,
            src_byte_len.idx,
            Trap::ListOutOfBounds,
        );
        self.validate_memory_inbounds(
            dst_mem_opts,
            dst_mem.addr.idx,
            dst_byte_len.idx,
            Trap::ListOutOfBounds,
        );

        self.free_temp_local(src_byte_len);
        self.free_temp_local(dst_byte_len);

        // This is the main body of the loop to actually translate list types.
        // Note that if both element sizes are 0 then this won't actually do
        // anything so the loop is removed entirely.
        if src_size > 0 || dst_size > 0 {
            // This block encompasses the entire loop and is use to exit before even
            // entering the loop if the list size is zero.
            self.instruction(Block(BlockType::Empty));

            // Set the `remaining` local and only continue if it's > 0
            self.instruction(LocalGet(src_len.idx));
            let remaining = self.local_tee_new_tmp(src_mem_opts.ptr());
            self.ptr_eqz(src_mem_opts);
            self.instruction(BrIf(0));

            // Initialize the two destination pointers to their initial values
            self.instruction(LocalGet(src_mem.addr.idx));
            let cur_src_ptr = self.local_set_new_tmp(src_mem_opts.ptr());
            self.instruction(LocalGet(dst_mem.addr.idx));
            let cur_dst_ptr = self.local_set_new_tmp(dst_mem_opts.ptr());

            self.instruction(Loop(BlockType::Empty));

            // Translate the next element in the list
            let element_src = Source::Memory(Memory {
                opts: src_opts,
                offset: 0,
                addr: TempLocal::new(cur_src_ptr.idx, cur_src_ptr.ty),
            });
            let element_dst = Destination::Memory(Memory {
                opts: dst_opts,
                offset: 0,
                addr: TempLocal::new(cur_dst_ptr.idx,
cur_dst_ptr.ty),
            });
            self.translate(src_element_ty, &element_src, dst_element_ty, &element_dst);

            // Update the two loop pointers
            if src_size > 0 {
                self.instruction(LocalGet(cur_src_ptr.idx));
                self.ptr_uconst(src_mem_opts, src_size);
                self.ptr_add(src_mem_opts);
                self.instruction(LocalSet(cur_src_ptr.idx));
            }
            if dst_size > 0 {
                self.instruction(LocalGet(cur_dst_ptr.idx));
                self.ptr_uconst(dst_mem_opts, dst_size);
                self.ptr_add(dst_mem_opts);
                self.instruction(LocalSet(cur_dst_ptr.idx));
            }

            // Update the remaining count, falling through to break out if it's zero
            // now.
            self.instruction(LocalGet(remaining.idx));
            self.ptr_iconst(src_mem_opts, -1);
            self.ptr_add(src_mem_opts);
            self.instruction(LocalTee(remaining.idx));
            self.ptr_br_if(src_mem_opts, 0);
            self.instruction(End); // end of loop
            self.instruction(End); // end of block

            self.free_temp_local(cur_dst_ptr);
            self.free_temp_local(cur_src_ptr);
            self.free_temp_local(remaining);
        }

        // Store the ptr/length in the desired destination
        match dst {
            Destination::Stack(s, _) => {
                self.instruction(LocalGet(dst_mem.addr.idx));
                self.stack_set(&s[..1], dst_mem_opts.ptr());
                self.convert_src_len_to_dst(src_len.idx, src_mem_opts.ptr(), dst_mem_opts.ptr());
                self.stack_set(&s[1..], dst_mem_opts.ptr());
            }
            Destination::Memory(mem) => {
                self.instruction(LocalGet(mem.addr.idx));
                self.instruction(LocalGet(dst_mem.addr.idx));
                self.ptr_store(mem);
                self.instruction(LocalGet(mem.addr.idx));
                self.convert_src_len_to_dst(src_len.idx, src_mem_opts.ptr(), dst_mem_opts.ptr());
                self.ptr_store(&mem.bump(dst_mem_opts.ptr_size().into()));
            }
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }

        self.free_temp_local(src_len);
        self.free_temp_local(src_mem.addr);
        self.free_temp_local(dst_mem.addr);
    }

    // Emits code computing `len_local * elt_size` as a byte length in a new
    // temp local of `opts.ptr()` type, trapping with `ListOutOfBounds` if the
    // result would not fit in 32 bits.
    fn calculate_list_byte_len(
        &mut self,
        opts: &LinearMemoryOptions,
        len_local: u32,
        elt_size: u32,
    ) -> TempLocal {
        // Zero-size types are easy to handle here because the byte size of the
        // destination is always zero.
        if elt_size == 0 {
            self.ptr_uconst(opts, 0);
            return self.local_set_new_tmp(opts.ptr());
        }

        // For one-byte elements in the destination the check here can be a bit
        // more optimal than the general case below. In these situations if the
        // source pointer type is 32-bit then we're guaranteed to not overflow,
        // so the source length is simply casted to the destination's type.
        //
        // If the source is 64-bit then all that needs to be checked is to
        // ensure that it does not have the upper 32-bits set.
        if elt_size == 1 {
            if let ValType::I64 = opts.ptr() {
                self.instruction(LocalGet(len_local));
                self.instruction(I64Const(32));
                self.instruction(I64ShrU);
                self.instruction(I32WrapI64);
                self.instruction(If(BlockType::Empty));
                self.trap(Trap::ListOutOfBounds);
                self.instruction(End);
            }
            self.instruction(LocalGet(len_local));
            return self.local_set_new_tmp(opts.ptr());
        }

        // The main check implemented by this function is to verify that
        // `src_len_local` does not exceed the 32-bit range. Byte sizes for
        // lists must always fit in 32-bits to get transferred to 32-bit
        // memories.
        self.instruction(Block(BlockType::Empty));
        self.instruction(Block(BlockType::Empty));
        self.instruction(LocalGet(len_local));
        match opts.ptr() {
            // The source's list length is guaranteed to be less than 32-bits
            // so simply extend it up to a 64-bit type for the multiplication
            // below.
            ValType::I32 => self.instruction(I64ExtendI32U),

            // If the source is a 64-bit memory then if the item length doesn't
            // fit in 32-bits the byte length definitely won't, so generate a
            // branch to our overflow trap here if any of the upper 32-bits are set.
            ValType::I64 => {
                self.instruction(I64Const(32));
                self.instruction(I64ShrU);
                self.instruction(I32WrapI64);
                self.instruction(BrIf(0));
                self.instruction(LocalGet(len_local));
            }

            _ => unreachable!(),
        }

        // Next perform a 64-bit multiplication with the element byte size that
        // is itself guaranteed to fit in 32-bits. The result is then checked
        // to see if we overflowed the 32-bit space. The two input operands to
        // the multiplication are guaranteed to be 32-bits at most which means
        // that this multiplication shouldn't overflow.
        //
        // The result of the multiplication is saved into a local as well to
        // get the result afterwards.
        self.instruction(I64Const(elt_size.into()));
        self.instruction(I64Mul);
        let tmp = self.local_tee_new_tmp(ValType::I64);
        // Branch to success if the upper 32-bits are zero, otherwise
        // fall-through to the trap.
        self.instruction(I64Const(32));
        self.instruction(I64ShrU);
        self.instruction(I64Eqz);
        self.instruction(BrIf(1));
        self.instruction(End);
        self.trap(Trap::ListOutOfBounds);
        self.instruction(End);

        // If a fresh local was used to store the result of the multiplication
        // then convert it down to 32-bits which should be guaranteed to not
        // lose information at this point.
        if opts.ptr() == ValType::I64 {
            tmp
        } else {
            self.instruction(LocalGet(tmp.idx));
            self.instruction(I32WrapI64);
            self.free_temp_local(tmp);
            self.local_set_new_tmp(ValType::I32)
        }
    }

    // Pushes `src_len_local` on the wasm stack converted from the source
    // pointer width to the destination pointer width (zero-extend or wrap).
    fn convert_src_len_to_dst(
        &mut self,
        src_len_local: u32,
        src_ptr_ty: ValType,
        dst_ptr_ty: ValType,
    ) {
        self.instruction(LocalGet(src_len_local));
        match (src_ptr_ty, dst_ptr_ty) {
            (ValType::I32, ValType::I64) => self.instruction(I64ExtendI32U),
            (ValType::I64, ValType::I32) => self.instruction(I32WrapI64),
            (src, dst) => assert_eq!(src, dst),
        }
    }

    fn translate_record(
        &mut self,
        src_ty: TypeRecordIndex,
        src: &Source<'_>,
        dst_ty: &InterfaceType,
        dst: &Destination,
    ) {
        let src_ty = &self.types[src_ty];
        let dst_ty = match dst_ty {
            InterfaceType::Record(r) => &self.types[*r],
            _ => panic!("expected a record"),
        };

        // TODO: subtyping
        assert_eq!(src_ty.fields.len(), dst_ty.fields.len());

        // First a map is made of the source fields to where they're coming
        // from (e.g. which offset or which locals).
This map is keyed by the2794// fields' names2795let mut src_fields = HashMap::new();2796for (i, src) in src2797.record_field_srcs(self.types, src_ty.fields.iter().map(|f| f.ty))2798.enumerate()2799{2800let field = &src_ty.fields[i];2801src_fields.insert(&field.name, (src, &field.ty));2802}28032804// .. and next translation is performed in the order of the destination2805// fields in case the destination is the stack to ensure that the stack2806// has the fields all in the right order.2807//2808// Note that the lookup in `src_fields` is an infallible lookup which2809// will panic if the field isn't found.2810//2811// TODO: should that lookup be fallible with subtyping?2812for (i, dst) in dst2813.record_field_dsts(self.types, dst_ty.fields.iter().map(|f| f.ty))2814.enumerate()2815{2816let field = &dst_ty.fields[i];2817let (src, src_ty) = &src_fields[&field.name];2818self.translate(src_ty, src, &field.ty, &dst);2819}2820}28212822fn translate_flags(2823&mut self,2824src_ty: TypeFlagsIndex,2825src: &Source<'_>,2826dst_ty: &InterfaceType,2827dst: &Destination,2828) {2829let src_ty = &self.types[src_ty];2830let dst_ty = match dst_ty {2831InterfaceType::Flags(r) => &self.types[*r],2832_ => panic!("expected a record"),2833};28342835// TODO: subtyping2836//2837// Notably this implementation does not support reordering flags from2838// the source to the destination nor having more flags in the2839// destination. Currently this is a copy from source to destination2840// in-bulk. 
Otherwise reordering indices would have to have some sort of2841// fancy bit twiddling tricks or something like that.2842assert_eq!(src_ty.names, dst_ty.names);2843let cnt = src_ty.names.len();2844match FlagsSize::from_count(cnt) {2845FlagsSize::Size0 => {}2846FlagsSize::Size1 => {2847let mask = if cnt == 8 { 0xff } else { (1 << cnt) - 1 };2848self.convert_u8_mask(src, dst, mask);2849}2850FlagsSize::Size2 => {2851let mask = if cnt == 16 { 0xffff } else { (1 << cnt) - 1 };2852self.convert_u16_mask(src, dst, mask);2853}2854FlagsSize::Size4Plus(n) => {2855let srcs = src.record_field_srcs(self.types, (0..n).map(|_| InterfaceType::U32));2856let dsts = dst.record_field_dsts(self.types, (0..n).map(|_| InterfaceType::U32));2857let n = usize::from(n);2858for (i, (src, dst)) in srcs.zip(dsts).enumerate() {2859let mask = if i == n - 1 && (cnt % 32 != 0) {2860(1 << (cnt % 32)) - 12861} else {28620xffffffff2863};2864self.convert_u32_mask(&src, &dst, mask);2865}2866}2867}2868}28692870fn translate_tuple(2871&mut self,2872src_ty: TypeTupleIndex,2873src: &Source<'_>,2874dst_ty: &InterfaceType,2875dst: &Destination,2876) {2877let src_ty = &self.types[src_ty];2878let dst_ty = match dst_ty {2879InterfaceType::Tuple(t) => &self.types[*t],2880_ => panic!("expected a tuple"),2881};28822883// TODO: subtyping2884assert_eq!(src_ty.types.len(), dst_ty.types.len());28852886let srcs = src2887.record_field_srcs(self.types, src_ty.types.iter().copied())2888.zip(src_ty.types.iter());2889let dsts = dst2890.record_field_dsts(self.types, dst_ty.types.iter().copied())2891.zip(dst_ty.types.iter());2892for ((src, src_ty), (dst, dst_ty)) in srcs.zip(dsts) {2893self.translate(src_ty, &src, dst_ty, &dst);2894}2895}28962897fn translate_fixed_length_list(2898&mut self,2899src_ty: TypeFixedLengthListIndex,2900src: &Source<'_>,2901dst_ty: &InterfaceType,2902dst: &Destination,2903) {2904let src_ty = &self.types[src_ty];2905let dst_ty = match dst_ty {2906InterfaceType::FixedLengthList(t) => 
&self.types[*t],2907_ => panic!("expected a fixed size list"),2908};29092910// TODO: subtyping2911assert_eq!(src_ty.size, dst_ty.size);29122913match (&src, &dst) {2914// Generate custom code for memory to memory copy2915(Source::Memory(src_mem), Destination::Memory(dst_mem)) => {2916let src_mem_opts = match &src_mem.opts.data_model {2917DataModel::Gc {} => todo!("CM+GC"),2918DataModel::LinearMemory(opts) => opts,2919};2920let dst_mem_opts = match &dst_mem.opts.data_model {2921DataModel::Gc {} => todo!("CM+GC"),2922DataModel::LinearMemory(opts) => opts,2923};2924let src_element_bytes = self.types.size_align(src_mem_opts, &src_ty.element).0;2925let dst_element_bytes = self.types.size_align(dst_mem_opts, &dst_ty.element).0;2926assert_ne!(src_element_bytes, 0);2927assert_ne!(dst_element_bytes, 0);29282929// because data is stored in-line, we assume that source and destination memory have been validated upstream29302931self.instruction(LocalGet(src_mem.addr.idx));2932if src_mem.offset != 0 {2933self.ptr_uconst(src_mem_opts, src_mem.offset);2934self.ptr_add(src_mem_opts);2935}2936let cur_src_ptr = self.local_set_new_tmp(src_mem_opts.ptr());2937self.instruction(LocalGet(dst_mem.addr.idx));2938if dst_mem.offset != 0 {2939self.ptr_uconst(dst_mem_opts, dst_mem.offset);2940self.ptr_add(dst_mem_opts);2941}2942let cur_dst_ptr = self.local_set_new_tmp(dst_mem_opts.ptr());29432944self.instruction(I32Const(src_ty.size as i32));2945let remaining = self.local_set_new_tmp(ValType::I32);29462947self.instruction(Loop(BlockType::Empty));29482949// Translate the next element in the list2950let element_src = Source::Memory(Memory {2951opts: src_mem.opts,2952offset: 0,2953addr: TempLocal::new(cur_src_ptr.idx, cur_src_ptr.ty),2954});2955let element_dst = Destination::Memory(Memory {2956opts: dst_mem.opts,2957offset: 0,2958addr: TempLocal::new(cur_dst_ptr.idx, cur_dst_ptr.ty),2959});2960self.translate(&src_ty.element, &element_src, &dst_ty.element, &element_dst);29612962// Update the two 
loop pointers2963self.instruction(LocalGet(cur_src_ptr.idx));2964self.ptr_uconst(src_mem_opts, src_element_bytes);2965self.ptr_add(src_mem_opts);2966self.instruction(LocalSet(cur_src_ptr.idx));2967self.instruction(LocalGet(cur_dst_ptr.idx));2968self.ptr_uconst(dst_mem_opts, dst_element_bytes);2969self.ptr_add(dst_mem_opts);2970self.instruction(LocalSet(cur_dst_ptr.idx));29712972// Update the remaining count, falling through to break out if it's zero2973// now.2974self.instruction(LocalGet(remaining.idx));2975self.ptr_iconst(src_mem_opts, -1);2976self.ptr_add(src_mem_opts);2977self.instruction(LocalTee(remaining.idx));2978self.ptr_br_if(src_mem_opts, 0);2979self.instruction(End); // end of loop29802981self.free_temp_local(cur_dst_ptr);2982self.free_temp_local(cur_src_ptr);2983self.free_temp_local(remaining);2984return;2985}2986// for the non-memory-to-memory case fall back to using generic tuple translation2987(_, _) => {2988// Assumes that the number of elements are small enough for this unrolling2989assert!(2990src_ty.size as usize <= MAX_FLAT_PARAMS2991&& dst_ty.size as usize <= MAX_FLAT_PARAMS2992);2993let srcs =2994src.record_field_srcs(self.types, (0..src_ty.size).map(|_| src_ty.element));2995let dsts =2996dst.record_field_dsts(self.types, (0..dst_ty.size).map(|_| dst_ty.element));2997for (src, dst) in srcs.zip(dsts) {2998self.translate(&src_ty.element, &src, &dst_ty.element, &dst);2999}3000}3001}3002}30033004fn translate_variant(3005&mut self,3006src_ty: TypeVariantIndex,3007src: &Source<'_>,3008dst_ty: &InterfaceType,3009dst: &Destination,3010) {3011let src_ty = &self.types[src_ty];3012let dst_ty = match dst_ty {3013InterfaceType::Variant(t) => &self.types[*t],3014_ => panic!("expected a variant"),3015};30163017let src_info = variant_info(self.types, src_ty.cases.iter().map(|(_, c)| c.as_ref()));3018let dst_info = variant_info(self.types, dst_ty.cases.iter().map(|(_, c)| c.as_ref()));30193020let iter = 
src_ty3021.cases3022.iter()3023.enumerate()3024.map(|(src_i, (src_case, src_case_ty))| {3025let dst_i = dst_ty3026.cases3027.iter()3028.position(|(c, _)| c == src_case)3029.unwrap();3030let dst_case_ty = &dst_ty.cases[dst_i];3031let src_i = u32::try_from(src_i).unwrap();3032let dst_i = u32::try_from(dst_i).unwrap();3033VariantCase {3034src_i,3035src_ty: src_case_ty.as_ref(),3036dst_i,3037dst_ty: dst_case_ty.as_ref(),3038}3039});3040self.convert_variant(src, &src_info, dst, &dst_info, iter);3041}30423043fn translate_enum(3044&mut self,3045src_ty: TypeEnumIndex,3046src: &Source<'_>,3047dst_ty: &InterfaceType,3048dst: &Destination,3049) {3050let src_ty = &self.types[src_ty];3051let dst_ty = match dst_ty {3052InterfaceType::Enum(t) => &self.types[*t],3053_ => panic!("expected an option"),3054};30553056debug_assert_eq!(src_ty.info.size, dst_ty.info.size);3057debug_assert_eq!(src_ty.names.len(), dst_ty.names.len());3058debug_assert!(3059src_ty3060.names3061.iter()3062.zip(dst_ty.names.iter())3063.all(|(a, b)| a == b)3064);30653066// Get the discriminant.3067match src {3068Source::Stack(s) => self.stack_get(&s.slice(0..1), ValType::I32),3069Source::Memory(mem) => match src_ty.info.size {3070DiscriminantSize::Size1 => self.i32_load8u(mem),3071DiscriminantSize::Size2 => self.i32_load16u(mem),3072DiscriminantSize::Size4 => self.i32_load(mem),3073},3074Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),3075}3076let tmp = self.local_tee_new_tmp(ValType::I32);30773078// Assert that the discriminant is valid.3079self.instruction(I32Const(i32::try_from(src_ty.names.len()).unwrap()));3080self.instruction(I32GeU);3081self.instruction(If(BlockType::Empty));3082self.trap(Trap::InvalidDiscriminant);3083self.instruction(End);30843085// Save the discriminant to the destination.3086match dst {3087Destination::Stack(stack, _) => {3088self.local_get_tmp(&tmp);3089self.stack_set(&stack[..1], ValType::I32)3090}3091Destination::Memory(mem) => 
{3092self.push_dst_addr(dst);3093self.local_get_tmp(&tmp);3094match dst_ty.info.size {3095DiscriminantSize::Size1 => self.i32_store8(mem),3096DiscriminantSize::Size2 => self.i32_store16(mem),3097DiscriminantSize::Size4 => self.i32_store(mem),3098}3099}3100Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),3101}3102self.free_temp_local(tmp);3103}31043105fn translate_option(3106&mut self,3107src_ty: TypeOptionIndex,3108src: &Source<'_>,3109dst_ty: &InterfaceType,3110dst: &Destination,3111) {3112let src_ty = &self.types[src_ty].ty;3113let dst_ty = match dst_ty {3114InterfaceType::Option(t) => &self.types[*t].ty,3115_ => panic!("expected an option"),3116};3117let src_ty = Some(src_ty);3118let dst_ty = Some(dst_ty);31193120let src_info = variant_info(self.types, [None, src_ty]);3121let dst_info = variant_info(self.types, [None, dst_ty]);31223123self.convert_variant(3124src,3125&src_info,3126dst,3127&dst_info,3128[3129VariantCase {3130src_i: 0,3131dst_i: 0,3132src_ty: None,3133dst_ty: None,3134},3135VariantCase {3136src_i: 1,3137dst_i: 1,3138src_ty,3139dst_ty,3140},3141]3142.into_iter(),3143);3144}31453146fn translate_result(3147&mut self,3148src_ty: TypeResultIndex,3149src: &Source<'_>,3150dst_ty: &InterfaceType,3151dst: &Destination,3152) {3153let src_ty = &self.types[src_ty];3154let dst_ty = match dst_ty {3155InterfaceType::Result(t) => &self.types[*t],3156_ => panic!("expected a result"),3157};31583159let src_info = variant_info(self.types, [src_ty.ok.as_ref(), src_ty.err.as_ref()]);3160let dst_info = variant_info(self.types, [dst_ty.ok.as_ref(), dst_ty.err.as_ref()]);31613162self.convert_variant(3163src,3164&src_info,3165dst,3166&dst_info,3167[3168VariantCase {3169src_i: 0,3170dst_i: 0,3171src_ty: src_ty.ok.as_ref(),3172dst_ty: dst_ty.ok.as_ref(),3173},3174VariantCase {3175src_i: 1,3176dst_i: 1,3177src_ty: src_ty.err.as_ref(),3178dst_ty: dst_ty.err.as_ref(),3179},3180]3181.into_iter(),3182);3183}31843185fn convert_variant<'c>(3186&mut self,3187src: 
&Source<'_>,3188src_info: &VariantInfo,3189dst: &Destination,3190dst_info: &VariantInfo,3191src_cases: impl ExactSizeIterator<Item = VariantCase<'c>>,3192) {3193// The outermost block is special since it has the result type of the3194// translation here. That will depend on the `dst`.3195let outer_block_ty = match dst {3196Destination::Stack(dst_flat, _) => match dst_flat.len() {31970 => BlockType::Empty,31981 => BlockType::Result(dst_flat[0]),3199_ => {3200let ty = self.module.core_types.function(&[], &dst_flat);3201BlockType::FunctionType(ty)3202}3203},3204Destination::Memory(_) => BlockType::Empty,3205Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),3206};3207self.instruction(Block(outer_block_ty));32083209// After the outermost block generate a new block for each of the3210// remaining cases.3211let src_cases_len = src_cases.len();3212for _ in 0..src_cases_len - 1 {3213self.instruction(Block(BlockType::Empty));3214}32153216// Generate a block for an invalid variant discriminant3217self.instruction(Block(BlockType::Empty));32183219// And generate one final block that we'll be jumping out of with the3220// `br_table`3221self.instruction(Block(BlockType::Empty));32223223// Load the discriminant3224match src {3225Source::Stack(s) => self.stack_get(&s.slice(0..1), ValType::I32),3226Source::Memory(mem) => match src_info.size {3227DiscriminantSize::Size1 => self.i32_load8u(mem),3228DiscriminantSize::Size2 => self.i32_load16u(mem),3229DiscriminantSize::Size4 => self.i32_load(mem),3230},3231Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),3232}32333234// Generate the `br_table` for the discriminant. 
Each case has an3235// offset of 1 to skip the trapping block.3236let mut targets = Vec::new();3237for i in 0..src_cases_len {3238targets.push((i + 1) as u32);3239}3240self.instruction(BrTable(targets[..].into(), 0));3241self.instruction(End); // end the `br_table` block32423243self.trap(Trap::InvalidDiscriminant);3244self.instruction(End); // end the "invalid discriminant" block32453246// Translate each case individually within its own block. Note that the3247// iteration order here places the first case in the innermost block3248// and the last case in the outermost block. This matches the order3249// of the jump targets in the `br_table` instruction.3250let src_cases_len = u32::try_from(src_cases_len).unwrap();3251for case in src_cases {3252let VariantCase {3253src_i,3254src_ty,3255dst_i,3256dst_ty,3257} = case;32583259// Translate the discriminant here, noting that `dst_i` may be3260// different than `src_i`.3261self.push_dst_addr(dst);3262self.instruction(I32Const(dst_i as i32));3263match dst {3264Destination::Stack(stack, _) => self.stack_set(&stack[..1], ValType::I32),3265Destination::Memory(mem) => match dst_info.size {3266DiscriminantSize::Size1 => self.i32_store8(mem),3267DiscriminantSize::Size2 => self.i32_store16(mem),3268DiscriminantSize::Size4 => self.i32_store(mem),3269},3270Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),3271}32723273let src_payload = src.payload_src(self.types, src_info, src_ty);3274let dst_payload = dst.payload_dst(self.types, dst_info, dst_ty);32753276// Translate the payload of this case using the various types from3277// the dst/src.3278match (src_ty, dst_ty) {3279(Some(src_ty), Some(dst_ty)) => {3280self.translate(src_ty, &src_payload, dst_ty, &dst_payload);3281}3282(None, None) => {}3283_ => unimplemented!(),3284}32853286// If the results of this translation were placed on the stack then3287// the stack values may need to be padded with more zeros due to3288// this particular case being possibly smaller than 
the entire3289// variant. That's handled here by pushing remaining zeros after3290// accounting for the discriminant pushed as well as the results of3291// this individual payload.3292if let Destination::Stack(payload_results, _) = dst_payload {3293if let Destination::Stack(dst_results, _) = dst {3294let remaining = &dst_results[1..][payload_results.len()..];3295for ty in remaining {3296match ty {3297ValType::I32 => self.instruction(I32Const(0)),3298ValType::I64 => self.instruction(I64Const(0)),3299ValType::F32 => self.instruction(F32Const(0.0.into())),3300ValType::F64 => self.instruction(F64Const(0.0.into())),3301_ => unreachable!(),3302}3303}3304}3305}33063307// Branch to the outermost block. Note that this isn't needed for3308// the outermost case since it simply falls through.3309if src_i != src_cases_len - 1 {3310self.instruction(Br(src_cases_len - src_i - 1));3311}3312self.instruction(End); // end this case's block3313}3314}33153316fn translate_future(3317&mut self,3318src_ty: TypeFutureTableIndex,3319src: &Source<'_>,3320dst_ty: &InterfaceType,3321dst: &Destination,3322) {3323let dst_ty = match dst_ty {3324InterfaceType::Future(t) => *t,3325_ => panic!("expected a `Future`"),3326};3327let transfer = self.module.import_future_transfer();3328self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);3329}33303331fn translate_stream(3332&mut self,3333src_ty: TypeStreamTableIndex,3334src: &Source<'_>,3335dst_ty: &InterfaceType,3336dst: &Destination,3337) {3338let dst_ty = match dst_ty {3339InterfaceType::Stream(t) => *t,3340_ => panic!("expected a `Stream`"),3341};3342let transfer = self.module.import_stream_transfer();3343self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);3344}33453346fn translate_error_context(3347&mut self,3348src_ty: TypeComponentLocalErrorContextTableIndex,3349src: &Source<'_>,3350dst_ty: &InterfaceType,3351dst: &Destination,3352) {3353let dst_ty = match dst_ty {3354InterfaceType::ErrorContext(t) => 
*t,3355_ => panic!("expected an `ErrorContext`"),3356};3357let transfer = self.module.import_error_context_transfer();3358self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);3359}33603361fn translate_own(3362&mut self,3363src_ty: TypeResourceTableIndex,3364src: &Source<'_>,3365dst_ty: &InterfaceType,3366dst: &Destination,3367) {3368let dst_ty = match dst_ty {3369InterfaceType::Own(t) => *t,3370_ => panic!("expected an `Own`"),3371};3372let transfer = self.module.import_resource_transfer_own();3373self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);3374}33753376fn translate_borrow(3377&mut self,3378src_ty: TypeResourceTableIndex,3379src: &Source<'_>,3380dst_ty: &InterfaceType,3381dst: &Destination,3382) {3383let dst_ty = match dst_ty {3384InterfaceType::Borrow(t) => *t,3385_ => panic!("expected an `Borrow`"),3386};33873388let transfer = self.module.import_resource_transfer_borrow();3389self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer);3390}33913392/// Translates the index `src`, which resides in the table `src_ty`, into3393/// and index within `dst_ty` and is stored at `dst`.3394///3395/// Actual translation of the index happens in a wasmtime libcall, which a3396/// cranelift-generated trampoline to satisfy this import will call. 
The3397/// `transfer` function is an imported function which takes the src, src_ty,3398/// and dst_ty, and returns the dst index.3399fn translate_handle(3400&mut self,3401src_ty: u32,3402src: &Source<'_>,3403dst_ty: u32,3404dst: &Destination,3405transfer: FuncIndex,3406) {3407self.push_dst_addr(dst);3408match src {3409Source::Memory(mem) => self.i32_load(mem),3410Source::Stack(stack) => self.stack_get(stack, ValType::I32),3411Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),3412}3413self.instruction(I32Const(src_ty as i32));3414self.instruction(I32Const(dst_ty as i32));3415self.instruction(Call(transfer.as_u32()));3416match dst {3417Destination::Memory(mem) => self.i32_store(mem),3418Destination::Stack(stack, _) => self.stack_set(stack, ValType::I32),3419Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),3420}3421}34223423fn trap_if_not_flag(&mut self, flags_global: GlobalIndex, flag_to_test: i32, trap: Trap) {3424self.instruction(GlobalGet(flags_global.as_u32()));3425self.instruction(I32Const(flag_to_test));3426self.instruction(I32And);3427self.instruction(I32Eqz);3428self.instruction(If(BlockType::Empty));3429self.trap(trap);3430self.instruction(End);3431}34323433fn set_flag(&mut self, flags_global: GlobalIndex, flag_to_set: i32, value: bool) {3434self.instruction(GlobalGet(flags_global.as_u32()));3435if value {3436self.instruction(I32Const(flag_to_set));3437self.instruction(I32Or);3438} else {3439self.instruction(I32Const(!flag_to_set));3440self.instruction(I32And);3441}3442self.instruction(GlobalSet(flags_global.as_u32()));3443}34443445fn verify_aligned(&mut self, opts: &LinearMemoryOptions, addr_local: u32, align: u32) {3446// If the alignment is 1 then everything is trivially aligned and the3447// check can be omitted.3448if align == 1 {3449return;3450}3451self.instruction(LocalGet(addr_local));3452assert!(align.is_power_of_two());3453self.ptr_uconst(opts, align - 1);3454self.ptr_and(opts);3455self.ptr_if(opts, 
BlockType::Empty);3456self.trap(Trap::UnalignedPointer);3457self.instruction(End);3458}34593460fn assert_aligned(&mut self, ty: &InterfaceType, mem: &Memory) {3461let mem_opts = mem.mem_opts();3462if !self.module.tunables.debug_adapter_modules {3463return;3464}3465let align = self.types.align(mem_opts, ty);3466if align == 1 {3467return;3468}3469assert!(align.is_power_of_two());3470self.instruction(LocalGet(mem.addr.idx));3471self.ptr_uconst(mem_opts, mem.offset);3472self.ptr_add(mem_opts);3473self.ptr_uconst(mem_opts, align - 1);3474self.ptr_and(mem_opts);3475self.ptr_if(mem_opts, BlockType::Empty);3476self.trap(Trap::DebugAssertPointerAligned);3477self.instruction(End);3478}34793480fn malloc<'c>(&mut self, opts: &'c Options, size: MallocSize, align: u32) -> Memory<'c> {3481match &opts.data_model {3482DataModel::Gc {} => todo!("CM+GC"),3483DataModel::LinearMemory(mem_opts) => {3484let realloc = mem_opts.realloc.unwrap();3485self.ptr_uconst(mem_opts, 0);3486self.ptr_uconst(mem_opts, 0);3487self.ptr_uconst(mem_opts, align);3488match size {3489MallocSize::Const(size) => self.ptr_uconst(mem_opts, size),3490MallocSize::Local(idx) => self.instruction(LocalGet(idx)),3491}3492self.instruction(Call(realloc.as_u32()));3493let addr = self.local_set_new_tmp(mem_opts.ptr());3494self.memory_operand(opts, addr, align)3495}3496}3497}34983499fn memory_operand<'c>(&mut self, opts: &'c Options, addr: TempLocal, align: u32) -> Memory<'c> {3500let ret = Memory {3501addr,3502offset: 0,3503opts,3504};3505self.verify_aligned(opts.data_model.unwrap_memory(), ret.addr.idx, align);3506ret3507}35083509/// Generates a new local in this function of the `ty` specified,3510/// initializing it with the top value on the current wasm stack.3511///3512/// The returned `TempLocal` must be freed after it is finished with3513/// `free_temp_local`.3514fn local_tee_new_tmp(&mut self, ty: ValType) -> TempLocal {3515self.gen_temp_local(ty, LocalTee)3516}35173518/// Same as `local_tee_new_tmp` but 
initializes the local with `LocalSet`3519/// instead of `LocalTee`.3520fn local_set_new_tmp(&mut self, ty: ValType) -> TempLocal {3521self.gen_temp_local(ty, LocalSet)3522}35233524fn local_get_tmp(&mut self, local: &TempLocal) {3525self.instruction(LocalGet(local.idx));3526}35273528fn gen_temp_local(&mut self, ty: ValType, insn: fn(u32) -> Instruction<'static>) -> TempLocal {3529// First check to see if any locals are available in this function which3530// were previously generated but are no longer in use.3531if let Some(idx) = self.free_locals.get_mut(&ty).and_then(|v| v.pop()) {3532self.instruction(insn(idx));3533return TempLocal {3534ty,3535idx,3536needs_free: true,3537};3538}35393540// Failing that generate a fresh new local.3541let locals = &mut self.module.funcs[self.result].locals;3542match locals.last_mut() {3543Some((cnt, prev_ty)) if ty == *prev_ty => *cnt += 1,3544_ => locals.push((1, ty)),3545}3546self.nlocals += 1;3547let idx = self.nlocals - 1;3548self.instruction(insn(idx));3549TempLocal {3550ty,3551idx,3552needs_free: true,3553}3554}35553556/// Used to release a `TempLocal` from a particular lexical scope to allow3557/// its possible reuse in later scopes.3558fn free_temp_local(&mut self, mut local: TempLocal) {3559assert!(local.needs_free);3560self.free_locals3561.entry(local.ty)3562.or_insert(Vec::new())3563.push(local.idx);3564local.needs_free = false;3565}35663567fn instruction(&mut self, instr: Instruction) {3568instr.encode(&mut self.code);3569}35703571fn trap(&mut self, trap: Trap) {3572let trap_func = self.module.import_trap();3573self.instruction(I32Const(trap as i32));3574self.instruction(Call(trap_func.as_u32()));3575self.instruction(Unreachable);3576}35773578/// Flushes out the current `code` instructions into the destination3579/// function.3580///3581/// This is a noop if no instructions have been encoded yet.3582fn flush_code(&mut self) {3583if self.code.is_empty() 
{3584return;3585}3586self.module.funcs[self.result]3587.body3588.push(Body::Raw(mem::take(&mut self.code)));3589}35903591fn finish(mut self) {3592// Append the final `end` instruction which all functions require, and3593// then empty out the temporary buffer in `Compiler`.3594self.instruction(End);3595self.flush_code();35963597// Flag the function as "done" which helps with an assert later on in3598// emission that everything was eventually finished.3599self.module.funcs[self.result].filled_in = true;3600}36013602/// Fetches the value contained with the local specified by `stack` and3603/// converts it to `dst_ty`.3604///3605/// This is only intended for use in primitive operations where `stack` is3606/// guaranteed to have only one local. The type of the local on the stack is3607/// then converted to `dst_ty` appropriately. Note that the types may be3608/// different due to the "flattening" of variant types.3609fn stack_get(&mut self, stack: &Stack<'_>, dst_ty: ValType) {3610assert_eq!(stack.locals.len(), 1);3611let (idx, src_ty) = stack.locals[0];3612self.instruction(LocalGet(idx));3613match (src_ty, dst_ty) {3614(ValType::I32, ValType::I32)3615| (ValType::I64, ValType::I64)3616| (ValType::F32, ValType::F32)3617| (ValType::F64, ValType::F64) => {}36183619(ValType::I32, ValType::F32) => self.instruction(F32ReinterpretI32),3620(ValType::I64, ValType::I32) => {3621self.assert_i64_upper_bits_not_set(idx);3622self.instruction(I32WrapI64);3623}3624(ValType::I64, ValType::F64) => self.instruction(F64ReinterpretI64),3625(ValType::I64, ValType::F32) => {3626self.assert_i64_upper_bits_not_set(idx);3627self.instruction(I32WrapI64);3628self.instruction(F32ReinterpretI32);3629}36303631// should not be possible given the `join` function for variants3632(ValType::I32, ValType::I64)3633| (ValType::I32, ValType::F64)3634| (ValType::F32, ValType::I32)3635| (ValType::F32, ValType::I64)3636| (ValType::F32, ValType::F64)3637| (ValType::F64, ValType::I32)3638| (ValType::F64, 
ValType::I64)3639| (ValType::F64, ValType::F32)36403641// not used in the component model3642| (ValType::Ref(_), _)3643| (_, ValType::Ref(_))3644| (ValType::V128, _)3645| (_, ValType::V128) => {3646panic!("cannot get {dst_ty:?} from {src_ty:?} local");3647}3648}3649}36503651fn assert_i64_upper_bits_not_set(&mut self, local: u32) {3652if !self.module.tunables.debug_adapter_modules {3653return;3654}3655self.instruction(LocalGet(local));3656self.instruction(I64Const(32));3657self.instruction(I64ShrU);3658self.instruction(I32WrapI64);3659self.instruction(If(BlockType::Empty));3660self.trap(Trap::DebugAssertUpperBitsUnset);3661self.instruction(End);3662}36633664/// Converts the top value on the WebAssembly stack which has type3665/// `src_ty` to `dst_tys[0]`.3666///3667/// This is only intended for conversion of primitives where the `dst_tys`3668/// list is known to be of length 1.3669fn stack_set(&mut self, dst_tys: &[ValType], src_ty: ValType) {3670assert_eq!(dst_tys.len(), 1);3671let dst_ty = dst_tys[0];3672match (src_ty, dst_ty) {3673(ValType::I32, ValType::I32)3674| (ValType::I64, ValType::I64)3675| (ValType::F32, ValType::F32)3676| (ValType::F64, ValType::F64) => {}36773678(ValType::F32, ValType::I32) => self.instruction(I32ReinterpretF32),3679(ValType::I32, ValType::I64) => self.instruction(I64ExtendI32U),3680(ValType::F64, ValType::I64) => self.instruction(I64ReinterpretF64),3681(ValType::F32, ValType::I64) => {3682self.instruction(I32ReinterpretF32);3683self.instruction(I64ExtendI32U);3684}36853686// should not be possible given the `join` function for variants3687(ValType::I64, ValType::I32)3688| (ValType::F64, ValType::I32)3689| (ValType::I32, ValType::F32)3690| (ValType::I64, ValType::F32)3691| (ValType::F64, ValType::F32)3692| (ValType::I32, ValType::F64)3693| (ValType::I64, ValType::F64)3694| (ValType::F32, ValType::F64)36953696// not used in the component model3697| (ValType::Ref(_), _)3698| (_, ValType::Ref(_))3699| (ValType::V128, _)3700| (_, 
ValType::V128) => {3701panic!("cannot get {dst_ty:?} from {src_ty:?} local");3702}3703}3704}37053706fn i32_load8u(&mut self, mem: &Memory) {3707self.instruction(LocalGet(mem.addr.idx));3708self.instruction(I32Load8U(mem.memarg(0)));3709}37103711fn i32_load8s(&mut self, mem: &Memory) {3712self.instruction(LocalGet(mem.addr.idx));3713self.instruction(I32Load8S(mem.memarg(0)));3714}37153716fn i32_load16u(&mut self, mem: &Memory) {3717self.instruction(LocalGet(mem.addr.idx));3718self.instruction(I32Load16U(mem.memarg(1)));3719}37203721fn i32_load16s(&mut self, mem: &Memory) {3722self.instruction(LocalGet(mem.addr.idx));3723self.instruction(I32Load16S(mem.memarg(1)));3724}37253726fn i32_load(&mut self, mem: &Memory) {3727self.instruction(LocalGet(mem.addr.idx));3728self.instruction(I32Load(mem.memarg(2)));3729}37303731fn i64_load(&mut self, mem: &Memory) {3732self.instruction(LocalGet(mem.addr.idx));3733self.instruction(I64Load(mem.memarg(3)));3734}37353736fn ptr_load(&mut self, mem: &Memory) {3737if mem.mem_opts().memory64 {3738self.i64_load(mem);3739} else {3740self.i32_load(mem);3741}3742}37433744fn ptr_add(&mut self, opts: &LinearMemoryOptions) {3745if opts.memory64 {3746self.instruction(I64Add);3747} else {3748self.instruction(I32Add);3749}3750}37513752fn ptr_sub(&mut self, opts: &LinearMemoryOptions) {3753if opts.memory64 {3754self.instruction(I64Sub);3755} else {3756self.instruction(I32Sub);3757}3758}37593760fn ptr_mul(&mut self, opts: &LinearMemoryOptions) {3761if opts.memory64 {3762self.instruction(I64Mul);3763} else {3764self.instruction(I32Mul);3765}3766}37673768fn ptr_ge_u(&mut self, opts: &LinearMemoryOptions) {3769if opts.memory64 {3770self.instruction(I64GeU);3771} else {3772self.instruction(I32GeU);3773}3774}37753776fn ptr_lt_u(&mut self, opts: &LinearMemoryOptions) {3777if opts.memory64 {3778self.instruction(I64LtU);3779} else {3780self.instruction(I32LtU);3781}3782}37833784fn ptr_shl(&mut self, opts: &LinearMemoryOptions) {3785if opts.memory64 
{
            self.instruction(I64Shl);
        } else {
            self.instruction(I32Shl);
        }
    }

    fn ptr_eqz(&mut self, opts: &LinearMemoryOptions) {
        if opts.memory64 {
            self.instruction(I64Eqz);
        } else {
            self.instruction(I32Eqz);
        }
    }

    /// Pushes the unsigned constant `val` as a pointer-width integer
    /// (`u32 -> i64` conversion zero-extends for 64-bit memories).
    fn ptr_uconst(&mut self, opts: &LinearMemoryOptions, val: u32) {
        if opts.memory64 {
            self.instruction(I64Const(val.into()));
        } else {
            self.instruction(I32Const(val as i32));
        }
    }

    /// Pushes the signed constant `val` as a pointer-width integer
    /// (`i32 -> i64` conversion sign-extends for 64-bit memories).
    fn ptr_iconst(&mut self, opts: &LinearMemoryOptions, val: i32) {
        if opts.memory64 {
            self.instruction(I64Const(val.into()));
        } else {
            self.instruction(I32Const(val));
        }
    }

    fn ptr_eq(&mut self, opts: &LinearMemoryOptions) {
        if opts.memory64 {
            self.instruction(I64Eq);
        } else {
            self.instruction(I32Eq);
        }
    }

    fn ptr_ne(&mut self, opts: &LinearMemoryOptions) {
        if opts.memory64 {
            self.instruction(I64Ne);
        } else {
            self.instruction(I32Ne);
        }
    }

    fn ptr_and(&mut self, opts: &LinearMemoryOptions) {
        if opts.memory64 {
            self.instruction(I64And);
        } else {
            self.instruction(I32And);
        }
    }

    fn ptr_or(&mut self, opts: &LinearMemoryOptions) {
        if opts.memory64 {
            self.instruction(I64Or);
        } else {
            self.instruction(I32Or);
        }
    }

    fn ptr_xor(&mut self, opts: &LinearMemoryOptions) {
        if opts.memory64 {
            self.instruction(I64Xor);
        } else {
            self.instruction(I32Xor);
        }
    }

    /// Emits an `if` conditioned on the pointer-width integer currently on
    /// the stack; a 64-bit value is first reduced to an `i32` condition by
    /// comparing it against zero, since `if` consumes an `i32`.
    fn ptr_if(&mut self, opts: &LinearMemoryOptions, ty: BlockType) {
        if opts.memory64 {
            self.instruction(I64Const(0));
            self.instruction(I64Ne);
        }
        self.instruction(If(ty));
    }

    /// Same as `ptr_if` except a `br_if` to `depth` is emitted instead of
    /// an `if` block.
    fn ptr_br_if(&mut self, opts: &LinearMemoryOptions, depth: u32) {
        if opts.memory64 {
            self.instruction(I64Const(0));
            self.instruction(I64Ne);
        }
        self.instruction(BrIf(depth));
    }

    fn f32_load(&mut self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(F32Load(mem.memarg(2)));
    }

    fn f64_load(&mut
self, mem: &Memory) {
        self.instruction(LocalGet(mem.addr.idx));
        self.instruction(F64Load(mem.memarg(3)));
    }

    /// Pushes the destination address onto the stack when `dst` is
    /// memory-based; stack-based destinations need no address.
    fn push_dst_addr(&mut self, dst: &Destination) {
        if let Destination::Memory(mem) = dst {
            self.instruction(LocalGet(mem.addr.idx));
        }
    }

    // Unlike the `*_load` helpers above, the store helpers do not push the
    // address themselves: the address (see `push_dst_addr`) and the value
    // to store are expected to already be on the stack.

    fn i32_store8(&mut self, mem: &Memory) {
        self.instruction(I32Store8(mem.memarg(0)));
    }

    fn i32_store16(&mut self, mem: &Memory) {
        self.instruction(I32Store16(mem.memarg(1)));
    }

    fn i32_store(&mut self, mem: &Memory) {
        self.instruction(I32Store(mem.memarg(2)));
    }

    fn i64_store(&mut self, mem: &Memory) {
        self.instruction(I64Store(mem.memarg(3)));
    }

    /// Stores a pointer-sized integer into `mem`, choosing a 64- or 32-bit
    /// store based on the memory's `memory64` flag.
    fn ptr_store(&mut self, mem: &Memory) {
        if mem.mem_opts().memory64 {
            self.i64_store(mem);
        } else {
            self.i32_store(mem);
        }
    }

    fn f32_store(&mut self, mem: &Memory) {
        self.instruction(F32Store(mem.memarg(2)));
    }

    fn f64_store(&mut self, mem: &Memory) {
        self.instruction(F64Store(mem.memarg(3)));
    }
}

impl<'a> Source<'a> {
    /// Given this `Source` returns an iterator over the `Source` for each of
    /// the component `fields` specified.
    ///
    /// This will automatically slice stack-based locals to the appropriate
    /// width for each component type and additionally calculate the appropriate
    /// offset for each memory-based type.
    fn record_field_srcs<'b>(
        &'b self,
        types: &'b ComponentTypesBuilder,
        fields: impl IntoIterator<Item = InterfaceType> + 'b,
    ) -> impl Iterator<Item = Source<'a>> + 'b
    where
        'a: 'b,
    {
        let mut offset = 0;
        fields.into_iter().map(move |ty| match self {
            Source::Memory(mem) => {
                let mem = next_field_offset(&mut offset, types, &ty, mem);
                Source::Memory(mem)
            }
            Source::Stack(stack) => {
                // Each field occupies as many flat locals as its flat
                // representation requires; hand out the next `cnt` locals.
                let cnt = types.flat_types(&ty).unwrap().len() as u32;
                offset += cnt;
                Source::Stack(stack.slice((offset - cnt) as usize..offset as usize))
            }
            Source::Struct(_) =>
todo!(),
            Source::Array(_) => todo!(),
        })
    }

    /// Returns the payload `Source` for the given `case` of a variant whose
    /// layout is described by `info`.
    fn payload_src(
        &self,
        types: &ComponentTypesBuilder,
        info: &VariantInfo,
        case: Option<&InterfaceType>,
    ) -> Source<'a> {
        match self {
            Source::Stack(s) => {
                // Skip the first local (the discriminant) and then take
                // only as many locals as the case's flat representation
                // requires (zero for a payload-less case).
                let flat_len = match case {
                    Some(case) => types.flat_types(case).unwrap().len(),
                    None => 0,
                };
                Source::Stack(s.slice(1..s.locals.len()).slice(0..flat_len))
            }
            Source::Memory(mem) => {
                // Advance past the discriminant to the payload, whose
                // offset depends on the memory's pointer width.
                let mem = if mem.mem_opts().memory64 {
                    mem.bump(info.payload_offset64)
                } else {
                    mem.bump(info.payload_offset32)
                };
                Source::Memory(mem)
            }
            Source::Struct(_) | Source::Array(_) => todo!("CM+GC"),
        }
    }

    /// Returns the `Options` this source belongs to.
    fn opts(&self) -> &'a Options {
        match self {
            Source::Stack(s) => s.opts,
            Source::Memory(mem) => mem.opts,
            Source::Struct(s) => s.opts,
            Source::Array(a) => a.opts,
        }
    }
}

impl<'a> Destination<'a> {
    /// Same as `Source::record_field_srcs` but for destinations.
    fn record_field_dsts<'b, I>(
        &'b self,
        types: &'b ComponentTypesBuilder,
        fields: I,
    ) -> impl Iterator<Item = Destination<'b>> + use<'b, I>
    where
        'a: 'b,
        I: IntoIterator<Item = InterfaceType> + 'b,
    {
        let mut offset = 0;
        fields.into_iter().map(move |ty| match self {
            Destination::Memory(mem) => {
                let mem = next_field_offset(&mut offset, types, &ty, mem);
                Destination::Memory(mem)
            }
            Destination::Stack(s, opts) => {
                // Mirror of the `Source::Stack` case: slice off the next
                // `cnt` flat locals for this field.
                let cnt = types.flat_types(&ty).unwrap().len() as u32;
                offset += cnt;
                Destination::Stack(&s[(offset - cnt) as usize..offset as usize], opts)
            }
            Destination::Struct(_) => todo!(),
            Destination::Array(_) => todo!(),
        })
    }

    /// Returns the payload `Destination` for the given `case` of a variant
    /// whose layout is described by `info`.
    fn payload_dst(
        &self,
        types: &ComponentTypesBuilder,
        info: &VariantInfo,
        case: Option<&InterfaceType>,
    ) -> Destination<'_> {
        match self
{
            Destination::Stack(s, opts) => {
                // Skip the first local (the discriminant) and then take
                // only as many locals as the case's flat representation
                // requires (zero for a payload-less case).
                let flat_len = match case {
                    Some(case) => types.flat_types(case).unwrap().len(),
                    None => 0,
                };
                Destination::Stack(&s[1..][..flat_len], opts)
            }
            Destination::Memory(mem) => {
                // Advance past the discriminant to the payload, whose
                // offset depends on the memory's pointer width.
                let mem = if mem.mem_opts().memory64 {
                    mem.bump(info.payload_offset64)
                } else {
                    mem.bump(info.payload_offset32)
                };
                Destination::Memory(mem)
            }
            Destination::Struct(_) | Destination::Array(_) => todo!("CM+GC"),
        }
    }

    /// Returns the `Options` this destination belongs to.
    fn opts(&self) -> &'a Options {
        match self {
            Destination::Stack(_, opts) => opts,
            Destination::Memory(mem) => mem.opts,
            Destination::Struct(s) => s.opts,
            Destination::Array(a) => a.opts,
        }
    }
}

/// Advances `offset` past the field `field` according to its canonical ABI
/// layout for the memory's pointer width, returning a copy of `mem` pointing
/// at the start of that field.
fn next_field_offset<'a>(
    offset: &mut u32,
    types: &ComponentTypesBuilder,
    field: &InterfaceType,
    mem: &Memory<'a>,
) -> Memory<'a> {
    let abi = types.canonical_abi(field);
    let offset = if mem.mem_opts().memory64 {
        abi.next_field64(offset)
    } else {
        abi.next_field32(offset)
    };
    mem.bump(offset)
}

impl<'a> Memory<'a> {
    /// Builds a `MemArg` for this location with the given `align` (a log2
    /// alignment value, as callers such as `i32_load`/`i64_load` pass 2/3).
    fn memarg(&self, align: u32) -> MemArg {
        MemArg {
            offset: u64::from(self.offset),
            align,
            memory_index: self.mem_opts().memory.unwrap().as_u32(),
        }
    }

    /// Returns a copy of this memory location with its static offset
    /// advanced by `offset`.
    fn bump(&self, offset: u32) -> Memory<'a> {
        Memory {
            opts: self.opts,
            // The copy shares the same address local; constructed via `new`
            // it has `needs_free: false` so dropping it cannot panic.
            addr: TempLocal::new(self.addr.idx, self.addr.ty),
            offset: self.offset + offset,
        }
    }
}

impl<'a> Stack<'a> {
    /// Returns a sub-slice of the flat locals this stack value occupies.
    fn slice(&self, range: Range<usize>) -> Stack<'a> {
        Stack {
            locals: &self.locals[range],
            opts: self.opts,
        }
    }
}

/// Correlates a variant case in the source type (`src_i`, `src_ty`) with its
/// counterpart in the destination type (`dst_i`, `dst_ty`); a `None` type
/// means the case carries no payload.
struct VariantCase<'a> {
    src_i: u32,
    src_ty: Option<&'a InterfaceType>,
    dst_i: u32,
    dst_ty: Option<&'a InterfaceType>,
}

/// Computes the `VariantInfo` layout for a variant whose case payload types
/// are `cases` (with `None` for payload-less cases).
fn variant_info<'a, I>(types: &ComponentTypesBuilder, cases: I) -> VariantInfo
where
    I: IntoIterator<Item = Option<&'a InterfaceType>>,
    I::IntoIter: ExactSizeIterator,
{
    VariantInfo::new(
        cases
            .into_iter()
            .map(|ty| ty.map(|ty|
types.canonical_abi(ty))),
    )
    // `VariantInfo::new` returns a tuple; only the info itself (`.0`) is
    // needed here, the second element is discarded.
    .0
}

/// An allocation size that is either a compile-time constant or held in a
/// wasm local.
enum MallocSize {
    Const(u32),
    Local(u32),
}

/// A string in guest linear memory: locals holding its pointer and length
/// plus the `Options` of the side it lives in.
struct WasmString<'a> {
    ptr: TempLocal,
    len: TempLocal,
    opts: &'a Options,
}

/// Handle to a scratch wasm local.
///
/// A local with `needs_free` set must be explicitly released before the
/// handle is dropped; `Drop` panics otherwise so leaks are caught.
struct TempLocal {
    // index of the wasm local
    idx: u32,
    // core wasm type of the local
    ty: ValType,
    // whether this handle still owns the local and must be freed
    needs_free: bool,
}

impl TempLocal {
    /// Creates a non-owning handle (`needs_free: false`) that does not
    /// require explicit freeing.
    fn new(idx: u32, ty: ValType) -> TempLocal {
        TempLocal {
            idx,
            ty,
            needs_free: false,
        }
    }
}

impl std::ops::Drop for TempLocal {
    fn drop(&mut self) {
        // Dropping a still-owned temporary local is a bug in this compiler;
        // panic so the leak is caught rather than silently ignored.
        if self.needs_free {
            panic!("temporary local not free'd");
        }
    }
}

/// Flat core-wasm representations map 1:1 onto core `ValType`s.
impl From<FlatType> for ValType {
    fn from(ty: FlatType) -> ValType {
        match ty {
            FlatType::I32 => ValType::I32,
            FlatType::I64 => ValType::I64,
            FlatType::F32 => ValType::F32,
            FlatType::F64 => ValType::F64,
        }
    }
}