Path: blob/main/crates/cranelift/src/func_environ.rs
3071 views
mod gc;
pub(crate) mod stack_switching;

use crate::compiler::Compiler;
use crate::translate::{
    FuncTranslationStacks, GlobalVariable, Heap, HeapData, StructFieldsVec, TableData, TableSize,
    TargetEnvironment,
};
use crate::{BuiltinFunctionSignatures, TRAP_INTERNAL_ASSERT};
use cranelift_codegen::cursor::FuncCursor;
use cranelift_codegen::ir::condcodes::{FloatCC, IntCC};
use cranelift_codegen::ir::immediates::{Imm64, Offset32, V128Imm};
use cranelift_codegen::ir::pcc::Fact;
use cranelift_codegen::ir::{self, BlockArg, ExceptionTableData, ExceptionTableItem, types};
use cranelift_codegen::ir::{ArgumentPurpose, ConstantData, Function, InstBuilder, MemFlags};
use cranelift_codegen::ir::{Block, types::*};
use cranelift_codegen::isa::{CallConv, TargetFrontendConfig, TargetIsa};
use cranelift_entity::packed_option::{PackedOption, ReservedValue};
use cranelift_entity::{EntityRef, PrimaryMap, SecondaryMap};
use cranelift_frontend::Variable;
use cranelift_frontend::{FuncInstBuilder, FunctionBuilder};
use smallvec::{SmallVec, smallvec};
use std::mem;
use wasmparser::{FuncValidator, Operator, WasmFeatures, WasmModuleResources};
use wasmtime_core::math::f64_cvt_to_int_bounds;
use wasmtime_environ::{
    BuiltinFunctionIndex, DataIndex, DefinedFuncIndex, ElemIndex, EngineOrModuleTypeIndex,
    FrameStateSlotBuilder, FrameValType, FuncIndex, FuncKey, GlobalConstValue, GlobalIndex,
    IndexType, Memory, MemoryIndex, Module, ModuleInternedTypeIndex, ModuleTranslation,
    ModuleTypesBuilder, PtrSize, Table, TableIndex, TagIndex, TripleExt, Tunables, TypeConvert,
    TypeIndex, VMOffsets, WasmCompositeInnerType, WasmFuncType, WasmHeapTopType, WasmHeapType,
    WasmRefType, WasmResult, WasmValType,
};
use wasmtime_environ::{FUNCREF_INIT_BIT, FUNCREF_MASK};

/// Which kind of integer extension to perform when widening a value.
#[derive(Debug)]
pub(crate) enum Extension {
    Sign,
    Zero,
}

/// A struct with an `Option<ir::FuncRef>` member for every builtin
/// function, to de-duplicate constructing/getting its function.
pub(crate) struct BuiltinFunctions {
    types: BuiltinFunctionSignatures,

    // One cache slot per builtin, indexed by `BuiltinFunctionIndex`.
    builtins: [Option<ir::FuncRef>; BuiltinFunctionIndex::len() as usize],
    // Separate cache for the patchable breakpoint trampoline, which uses a
    // different calling convention than the other builtins (see below).
    breakpoint_trampoline: Option<ir::FuncRef>,
}

impl BuiltinFunctions {
    fn new(compiler: &Compiler) -> Self {
        Self {
            types: BuiltinFunctionSignatures::new(compiler),
            builtins: [None; BuiltinFunctionIndex::len() as usize],
            breakpoint_trampoline: None,
        }
    }

    /// Get (creating and caching on first use) the `ir::FuncRef` for the
    /// given builtin, importing its signature and user-external name into
    /// `func`.
    fn load_builtin(&mut self, func: &mut Function, builtin: BuiltinFunctionIndex) -> ir::FuncRef {
        let cache = &mut self.builtins[builtin.index() as usize];
        if let Some(f) = cache {
            return *f;
        }
        let signature = func.import_signature(self.types.wasm_signature(builtin));
        let key = FuncKey::WasmToBuiltinTrampoline(builtin);
        let (namespace, index) = key.into_raw_parts();
        let name = ir::ExternalName::User(
            func.declare_imported_user_function(ir::UserExternalName { namespace, index }),
        );
        let f = func.import_function(ir::ExtFuncData {
            name,
            signature,
            colocated: true,
            patchable: false,
        });
        *cache = Some(f);
        f
    }

    /// Get (creating and caching on first use) the `ir::FuncRef` for the
    /// patchable breakpoint trampoline.
    ///
    /// Unlike the regular builtins this is imported with
    /// `CallConv::PreserveAll`, a single pointer-sized parameter, and
    /// `patchable: true` so the call site can be patched at runtime.
    pub(crate) fn patchable_breakpoint(&mut self, func: &mut Function) -> ir::FuncRef {
        *self.breakpoint_trampoline.get_or_insert_with(|| {
            let mut signature = ir::Signature::new(CallConv::PreserveAll);
            signature
                .params
                .push(ir::AbiParam::new(self.types.pointer_type));
            let signature = func.import_signature(signature);
            let key = FuncKey::PatchableToBuiltinTrampoline(BuiltinFunctionIndex::breakpoint());
            let (namespace, index) = key.into_raw_parts();
            let name = ir::ExternalName::User(
                func.declare_imported_user_function(ir::UserExternalName { namespace, index }),
            );
            func.import_function(ir::ExtFuncData {
                name,
                signature,
                colocated: true,
                patchable: true,
            })
        })
    }
}

// Generate helper methods on `BuiltinFunctions` above for each named builtin
// as well.
macro_rules! 
declare_function_signatures {
    ($(
        $( #[$attr:meta] )*
        $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
    )*) => {
        $(impl BuiltinFunctions {
            $( #[$attr] )*
            #[allow(dead_code, reason = "debug breakpoint libcall not used in host ABI, only patchable ABI")]
            pub(crate) fn $name(&mut self, func: &mut Function) -> ir::FuncRef {
                self.load_builtin(func, BuiltinFunctionIndex::$name())
            }
        })*
    };
}
wasmtime_environ::foreach_builtin_function!(declare_function_signatures);

/// The `FuncEnvironment` implementation for use by the `ModuleEnvironment`.
pub struct FuncEnvironment<'module_environment> {
    compiler: &'module_environment Compiler,
    isa: &'module_environment (dyn TargetIsa + 'module_environment),
    key: FuncKey,
    pub(crate) module: &'module_environment Module,
    types: &'module_environment ModuleTypesBuilder,
    wasm_func_ty: &'module_environment WasmFuncType,
    sig_ref_to_ty: SecondaryMap<ir::SigRef, Option<&'module_environment WasmFuncType>>,
    needs_gc_heap: bool,
    entities: WasmEntities,

    /// Translation state at the given point.
    pub(crate) stacks: FuncTranslationStacks,

    #[cfg(feature = "gc")]
    ty_to_gc_layout: std::collections::HashMap<
        wasmtime_environ::ModuleInternedTypeIndex,
        wasmtime_environ::GcLayout,
    >,

    #[cfg(feature = "gc")]
    gc_heap: Option<Heap>,

    /// The Cranelift global holding the GC heap's base address.
    #[cfg(feature = "gc")]
    gc_heap_base: Option<ir::GlobalValue>,

    /// The Cranelift global holding the GC heap's bound.
    #[cfg(feature = "gc")]
    gc_heap_bound: Option<ir::GlobalValue>,

    translation: &'module_environment ModuleTranslation<'module_environment>,

    /// Heaps implementing WebAssembly linear memories.
    heaps: PrimaryMap<Heap, HeapData>,

    /// The Cranelift global holding the vmctx address.
    vmctx: Option<ir::GlobalValue>,

    /// The Cranelift global for our vmctx's `*mut VMStoreContext`.
    vm_store_context: Option<ir::GlobalValue>,

    /// The PCC memory type describing the vmctx layout, if we're
    /// using PCC.
    pcc_vmctx_memtype: Option<ir::MemoryType>,

    /// Caches of signatures for builtin functions.
    builtin_functions: BuiltinFunctions,

    /// Offsets to struct fields accessed by JIT code.
    pub(crate) offsets: VMOffsets<u8>,

    tunables: &'module_environment Tunables,

    /// A function-local variable which stores the cached value of the amount of
    /// fuel remaining to execute. If used this is modified frequently so it's
    /// stored locally as a variable instead of always referenced from the field
    /// in `*const VMStoreContext`
    fuel_var: cranelift_frontend::Variable,

    /// A cached epoch deadline value, when performing epoch-based
    /// interruption. Loaded from `VMStoreContext` and reloaded after
    /// any yield.
    epoch_deadline_var: cranelift_frontend::Variable,

    /// A cached pointer to the per-Engine epoch counter, when
    /// performing epoch-based interruption. Initialized in the
    /// function prologue. We prefer to use a variable here rather
    /// than reload on each check because it's better to let the
    /// regalloc keep it in a register if able; if not, it can always
    /// spill, and this isn't any worse than reloading each time.
    epoch_ptr_var: cranelift_frontend::Variable,

    /// Fuel consumed by operators translated so far that has not yet been
    /// flushed into `fuel_var` (see `fuel_increment_var`).
    fuel_consumed: i64,

    /// A `GlobalValue` in CLIF which represents the stack limit.
    ///
    /// Typically this resides in the `stack_limit` value of `ir::Function` but
    /// that requires signal handlers on the host and when that's disabled this
    /// is here with an explicit check instead. Note that the explicit check is
    /// always present even if this is a "leaf" function, as we have to call
    /// into the host to trap when signal handlers are disabled.
    pub(crate) stack_limit_at_function_entry: Option<ir::GlobalValue>,

    /// Used by the stack switching feature. If set, we have allocated a
    /// slot on this function's stack to be used for the
    /// current stack's `handler_list` field.
    stack_switching_handler_list_buffer: Option<ir::StackSlot>,

    /// Used by the stack switching feature. If set, we have allocated a
    /// slot on this function's stack to be used for the
    /// current continuation's `values` field.
    stack_switching_values_buffer: Option<ir::StackSlot>,

    /// The stack-slot used for exposing Wasm state via debug
    /// instrumentation, if any, and the builder containing its metadata.
    pub(crate) state_slot: Option<(ir::StackSlot, FrameStateSlotBuilder)>,
}

impl<'module_environment> FuncEnvironment<'module_environment> {
    /// Create a `FuncEnvironment` for translating the function identified by
    /// `key` within `translation`, using `compiler`'s ISA and tunables.
    pub fn new(
        compiler: &'module_environment Compiler,
        translation: &'module_environment ModuleTranslation<'module_environment>,
        types: &'module_environment ModuleTypesBuilder,
        wasm_func_ty: &'module_environment WasmFuncType,
        key: FuncKey,
    ) -> Self {
        let tunables = compiler.tunables();
        let builtin_functions = BuiltinFunctions::new(compiler);

        // This isn't used during translation, so squash the warning about this
        // being unused from the compiler.
        let _ = BuiltinFunctions::raise;

        Self {
            key,
            isa: compiler.isa(),
            module: &translation.module,
            compiler,
            types,
            wasm_func_ty,
            sig_ref_to_ty: SecondaryMap::default(),
            needs_gc_heap: false,
            entities: WasmEntities::default(),
            stacks: FuncTranslationStacks::new(),

            #[cfg(feature = "gc")]
            ty_to_gc_layout: std::collections::HashMap::new(),
            #[cfg(feature = "gc")]
            gc_heap: None,
            #[cfg(feature = "gc")]
            gc_heap_base: None,
            #[cfg(feature = "gc")]
            gc_heap_bound: None,

            heaps: PrimaryMap::default(),
            vmctx: None,
            vm_store_context: None,
            pcc_vmctx_memtype: None,
            builtin_functions,
            offsets: VMOffsets::new(compiler.isa().pointer_bytes(), &translation.module),
            tunables,
            fuel_var: Variable::reserved_value(),
            epoch_deadline_var: 
Variable::reserved_value(),
            epoch_ptr_var: Variable::reserved_value(),

            // Start with at least one fuel being consumed because even empty
            // functions should consume at least some fuel.
            fuel_consumed: 1,

            translation,

            stack_limit_at_function_entry: None,

            stack_switching_handler_list_buffer: None,
            stack_switching_values_buffer: None,

            state_slot: None,
        }
    }

    /// The target's native pointer type.
    pub(crate) fn pointer_type(&self) -> ir::Type {
        self.isa.pointer_type()
    }

    /// Get (creating and caching on first use) the `ir::GlobalValue`
    /// representing this function's `VMContext` pointer. When PCC is enabled
    /// this also creates a placeholder memory type for the vmctx and attaches
    /// a `Fact::Mem` to the global.
    pub(crate) fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue {
        self.vmctx.unwrap_or_else(|| {
            let vmctx = func.create_global_value(ir::GlobalValueData::VMContext);
            if self.isa.flags().enable_pcc() {
                // Create a placeholder memtype for the vmctx; we'll
                // add fields to it as we lazily create HeapData
                // structs and global values.
                let vmctx_memtype = func.create_memory_type(ir::MemoryTypeData::Struct {
                    size: 0,
                    fields: vec![],
                });

                self.pcc_vmctx_memtype = Some(vmctx_memtype);
                func.global_value_facts[vmctx] = Some(Fact::Mem {
                    ty: vmctx_memtype,
                    min_offset: 0,
                    max_offset: 0,
                    nullable: false,
                });
            }

            self.vmctx = Some(vmctx);
            vmctx
        })
    }

    /// Materialize the vmctx pointer as an `ir::Value` at the cursor's
    /// current position.
    pub(crate) fn vmctx_val(&mut self, pos: &mut FuncCursor<'_>) -> ir::Value {
        let pointer_type = self.pointer_type();
        let vmctx = self.vmctx(&mut pos.func);
        pos.ins().global_value(pointer_type, vmctx)
    }

    /// Get the `table_copy` builtin along with the raw destination/source
    /// table indices to pass to it.
    fn get_table_copy_func(
        &mut self,
        func: &mut Function,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
    ) -> (ir::FuncRef, usize, usize) {
        let sig = self.builtin_functions.table_copy(func);
        (
            sig,
            dst_table_index.as_u32() as usize,
            src_table_index.as_u32() as usize,
        )
    }

    /// Select the 32- or 64-bit `memory.atomic.wait` builtin based on the
    /// operand type. Panics on any type other than `I32`/`I64`.
    #[cfg(feature = "threads")]
    fn get_memory_atomic_wait(&mut self, func: &mut Function, ty: ir::Type) -> ir::FuncRef {
        match ty {
            I32 => self.builtin_functions.memory_atomic_wait32(func),
            I64 => self.builtin_functions.memory_atomic_wait64(func),
            x => panic!("get_memory_atomic_wait unsupported type: {x:?}"),
        }
    }

    /// Return the `(base, offset)` address of the `VMGlobalDefinition` for
    /// `index`: directly inside the vmctx for defined globals, or through a
    /// loaded `from` pointer for imported globals.
    fn get_global_location(
        &mut self,
        func: &mut ir::Function,
        index: GlobalIndex,
    ) -> (ir::GlobalValue, i32) {
        let pointer_type = self.pointer_type();
        let vmctx = self.vmctx(func);
        if let Some(def_index) = self.module.defined_global_index(index) {
            let offset = i32::try_from(self.offsets.vmctx_vmglobal_definition(def_index)).unwrap();
            (vmctx, offset)
        } else {
            let from_offset = self.offsets.vmctx_vmglobal_import_from(index);
            let global = func.create_global_value(ir::GlobalValueData::Load {
                base: vmctx,
                offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                global_type: pointer_type,
                flags: MemFlags::trusted().with_readonly().with_can_move(),
            });
            (global, 0)
        }
    }

    /// Get or create the `ir::Global` for the `*mut VMStoreContext` in our
    /// `VMContext`.
    fn get_vmstore_context_ptr_global(&mut self, func: &mut ir::Function) -> ir::GlobalValue {
        if let Some(ptr) = self.vm_store_context {
            return ptr;
        }

        let offset = self.offsets.ptr.vmctx_store_context();
        let base = self.vmctx(func);
        let ptr = func.create_global_value(ir::GlobalValueData::Load {
            base,
            offset: Offset32::new(offset.into()),
            global_type: self.pointer_type(),
            flags: ir::MemFlags::trusted().with_readonly().with_can_move(),
        });
        self.vm_store_context = Some(ptr);
        ptr
    }

    /// Get the `*mut VMStoreContext` value for our `VMContext`.
    fn get_vmstore_context_ptr(&mut self, builder: &mut FunctionBuilder) -> ir::Value {
        let global = self.get_vmstore_context_ptr_global(&mut builder.func);
        builder.ins().global_value(self.pointer_type(), global)
    }

    fn fuel_function_entry(&mut self, builder: &mut FunctionBuilder<'_>) {
        // On function entry we load the amount of fuel into a function-local
        // `self.fuel_var` to make fuel modifications fast locally. This cache
        // is then periodically flushed to the Store-defined location in
        // `VMStoreContext` later.
        debug_assert!(self.fuel_var.is_reserved_value());
        self.fuel_var = builder.declare_var(ir::types::I64);
        self.fuel_load_into_var(builder);
        self.fuel_check(builder);
    }

    fn fuel_function_exit(&mut self, builder: &mut FunctionBuilder<'_>) {
        // On exiting the function we need to be sure to save the fuel we have
        // cached locally in `self.fuel_var` back into the Store-defined
        // location.
        self.fuel_save_from_var(builder);
    }

    /// Account fuel for `op` before it is translated: buffer its cost into
    /// `self.fuel_consumed` and, at control-flow-relevant operators, flush
    /// and/or save the fuel variable.
    fn fuel_before_op(
        &mut self,
        op: &Operator<'_>,
        builder: &mut FunctionBuilder<'_>,
        reachable: bool,
    ) {
        if !reachable {
            // In unreachable code we shouldn't have any leftover fuel we
            // haven't accounted for since the reason for us to become
            // unreachable should have already added it to `self.fuel_var`.
            debug_assert_eq!(self.fuel_consumed, 0);
            return;
        }

        self.fuel_consumed += match op {
            // Nop and drop generate no code, so don't consume fuel for them.
            Operator::Nop | Operator::Drop => 0,

            // Control flow may create branches, but is generally cheap and
            // free, so don't consume fuel. Note the lack of `if` since some
            // cost is incurred with the conditional check.
            Operator::Block { .. }
            | Operator::Loop { .. }
            | Operator::Unreachable
            | Operator::Return
            | Operator::Else
            | Operator::End => 0,

            // everything else, just call it one operation.
            _ => 1,
        };

        match op {
            // Exiting a function (via a return or unreachable) or otherwise
            // entering a different function (via a call) means that we need to
            // update the fuel consumption in `VMStoreContext` because we're
            // about to move control out of this function itself and the fuel
            // may need to be read.
            //
            // Before this we need to update the fuel counter from our own cost
            // leading up to this function call, and then we can store
            // `self.fuel_var` into `VMStoreContext`.
            Operator::Unreachable
            | Operator::Return
            | Operator::CallIndirect { .. }
            | Operator::Call { .. }
            | Operator::ReturnCall { .. }
            | Operator::ReturnCallRef { .. }
            | Operator::ReturnCallIndirect { .. }
            | Operator::Throw { .. }
            | Operator::ThrowRef => {
                self.fuel_increment_var(builder);
                self.fuel_save_from_var(builder);
            }

            // To ensure all code preceding a loop is only counted once we
            // update the fuel variable on entry.
            Operator::Loop { .. }

            // Entering into an `if` block means that the edge we take isn't
            // known until runtime, so we need to update our fuel consumption
            // before we take the branch.
            | Operator::If { .. }

            // Control-flow instructions mean that we're moving to the end/exit
            // of a block somewhere else. That means we need to update the fuel
            // counter since we're effectively terminating our basic block.
            | Operator::Br { .. }
            | Operator::BrIf { .. }
            | Operator::BrTable { .. }
            | Operator::BrOnNull { .. }
            | Operator::BrOnNonNull { .. }
            | Operator::BrOnCast { .. }
            | Operator::BrOnCastFail { .. 
}

            // Exiting a scope means that we need to update the fuel
            // consumption because there are multiple ways to exit a scope and
            // this is the only time we have to account for instructions
            // executed so far.
            | Operator::End

            // This is similar to `end`, except that it's only the terminator
            // for an `if` block. The same reasoning applies though in that we
            // are terminating a basic block and need to update the fuel
            // variable.
            | Operator::Else => self.fuel_increment_var(builder),

            // This is a normal instruction where the fuel is buffered to later
            // get added to `self.fuel_var`.
            //
            // Note that we generally ignore instructions which may trap and
            // therefore result in exiting a block early. Current usage of fuel
            // means that it's not too important to account for a precise amount
            // of fuel consumed but rather "close to the actual amount" is good
            // enough. For 100% precise counting, however, we'd probably need to
            // not only increment but also save the fuel amount more often
            // around trapping instructions. (see the `unreachable` instruction
            // case above)
            //
            // Note that `Block` is specifically omitted from incrementing the
            // fuel variable. Control flow entering a `block` is unconditional
            // which means it's effectively executing straight-line code. We'll
            // update the counter when exiting a block, but we shouldn't need to
            // do so upon entering a block.
            _ => {}
        }
    }

    fn fuel_after_op(&mut self, op: &Operator<'_>, builder: &mut FunctionBuilder<'_>) {
        // After a function call we need to reload our fuel value since the
        // function may have changed it.
        match op {
            Operator::Call { .. } | Operator::CallIndirect { .. } => {
                self.fuel_load_into_var(builder);
            }
            _ => {}
        }
    }

    /// Adds `self.fuel_consumed` to the `fuel_var`, zero-ing out the amount of
    /// fuel consumed at that point.
    fn fuel_increment_var(&mut self, builder: &mut FunctionBuilder<'_>) {
        let consumption = mem::replace(&mut self.fuel_consumed, 0);
        if consumption == 0 {
            return;
        }

        let fuel = builder.use_var(self.fuel_var);
        let fuel = builder.ins().iadd_imm(fuel, consumption);
        builder.def_var(self.fuel_var, fuel);
    }

    /// Loads the fuel consumption value from `VMStoreContext` into `self.fuel_var`
    fn fuel_load_into_var(&mut self, builder: &mut FunctionBuilder<'_>) {
        let (addr, offset) = self.fuel_addr_offset(builder);
        let fuel = builder
            .ins()
            .load(ir::types::I64, ir::MemFlags::trusted(), addr, offset);
        builder.def_var(self.fuel_var, fuel);
    }

    /// Stores the fuel consumption value from `self.fuel_var` into
    /// `VMStoreContext`.
    fn fuel_save_from_var(&mut self, builder: &mut FunctionBuilder<'_>) {
        let (addr, offset) = self.fuel_addr_offset(builder);
        let fuel_consumed = builder.use_var(self.fuel_var);
        builder
            .ins()
            .store(ir::MemFlags::trusted(), fuel_consumed, addr, offset);
    }

    /// Returns the `(address, offset)` of the fuel consumption within
    /// `VMStoreContext`, used to perform loads/stores later.
    fn fuel_addr_offset(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
    ) -> (ir::Value, ir::immediates::Offset32) {
        let vmstore_ctx = self.get_vmstore_context_ptr(builder);
        (
            vmstore_ctx,
            i32::from(self.offsets.ptr.vmstore_context_fuel_consumed()).into(),
        )
    }

    /// Checks the amount of fuel remaining, and if we've run out of fuel we
    /// call the out-of-fuel function.
    fn fuel_check(&mut self, builder: &mut FunctionBuilder) {
        self.fuel_increment_var(builder);
        let out_of_gas_block = builder.create_block();
        let continuation_block = builder.create_block();

        // Note that our fuel is encoded as adding positive values to a
        // negative number. Whenever the negative number goes positive that
        // means we ran out of fuel.
        //
        // Compare to see if our fuel is positive, and if so we ran out of gas.
        // Otherwise we can continue on like usual.
        let zero = builder.ins().iconst(ir::types::I64, 0);
        let fuel = builder.use_var(self.fuel_var);
        let cmp = builder
            .ins()
            .icmp(IntCC::SignedGreaterThanOrEqual, fuel, zero);
        builder
            .ins()
            .brif(cmp, out_of_gas_block, &[], continuation_block, &[]);
        builder.seal_block(out_of_gas_block);

        // If we ran out of gas then we call our out-of-gas intrinsic and it
        // figures out what to do. Note that this may raise a trap, or do
        // something like yield to an async runtime. In either case we don't
        // assume what happens and handle the case the intrinsic returns.
        //
        // Note that we save/reload fuel around this since the out-of-gas
        // intrinsic may alter how much fuel is in the system.
        builder.switch_to_block(out_of_gas_block);
        self.fuel_save_from_var(builder);
        let out_of_gas = self.builtin_functions.out_of_gas(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        builder.ins().call(out_of_gas, &[vmctx]);
        self.fuel_load_into_var(builder);
        builder.ins().jump(continuation_block, &[]);
        builder.seal_block(continuation_block);

        builder.switch_to_block(continuation_block);
    }

    fn epoch_function_entry(&mut self, builder: &mut FunctionBuilder<'_>) {
        debug_assert!(self.epoch_deadline_var.is_reserved_value());
        self.epoch_deadline_var = builder.declare_var(ir::types::I64);
        // Let epoch_check_full load the current deadline and call def_var

        debug_assert!(self.epoch_ptr_var.is_reserved_value());
        self.epoch_ptr_var = builder.declare_var(self.pointer_type());
        let epoch_ptr = self.epoch_ptr(builder);
        builder.def_var(self.epoch_ptr_var, epoch_ptr);

        // We must check for an epoch change when entering a
        // function. Why? Why aren't checks at loops sufficient to
        // bound runtime to O(|static program size|)?
        //
        // The reason is that one can construct a "zip-bomb-like"
        // program with exponential-in-program-size runtime, with no
        // backedges (loops), by building a tree of function calls: f0
        // calls f1 ten times, f1 calls f2 ten times, etc. E.g., nine
        // levels of this yields a billion function calls with no
        // backedges. So we can't do checks only at backedges.
        //
        // In this "call-tree" scenario, and in fact in any program
        // that uses calls as a sort of control flow to try to evade
        // backedge checks, a check at every function entry is
        // sufficient. Then, combined with checks at every backedge
        // (loop) the longest runtime between checks is bounded by the
        // straightline length of any function body.
        let continuation_block = builder.create_block();
        let cur_epoch_value = self.epoch_load_current(builder);
        self.epoch_check_full(builder, cur_epoch_value, continuation_block);
    }

    /// wmemcheck: after a `malloc`-like function returns, report the
    /// requested size (third entry-block param) and returned pointer to the
    /// `check_malloc` builtin. Bails out silently if either is unavailable.
    #[cfg(feature = "wmemcheck")]
    fn hook_malloc_exit(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) {
        let check_malloc = self.builtin_functions.check_malloc(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        let func_args = builder
            .func
            .dfg
            .block_params(builder.func.layout.entry_block().unwrap());
        let len = if func_args.len() < 3 {
            return;
        } else {
            // If a function named `malloc` has at least one argument, we assume the
            // first argument is the requested allocation size.
            func_args[2]
        };
        let retval = if retvals.len() < 1 {
            return;
        } else {
            retvals[0]
        };
        builder.ins().call(check_malloc, &[vmctx, retval, len]);
    }

    /// wmemcheck: after a `free`-like function, report the freed pointer
    /// (third entry-block param) to the `check_free` builtin.
    #[cfg(feature = "wmemcheck")]
    fn hook_free_exit(&mut self, builder: &mut FunctionBuilder) {
        let check_free = self.builtin_functions.check_free(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        let func_args = 
builder
            .func
            .dfg
            .block_params(builder.func.layout.entry_block().unwrap());
        let ptr = if func_args.len() < 3 {
            return;
        } else {
            // If a function named `free` has at least one argument, we assume the
            // first argument is a pointer to memory.
            func_args[2]
        };
        builder.ins().call(check_free, &[vmctx, ptr]);
    }

    /// Load the `*const u64` per-Engine epoch-counter pointer out of the
    /// vmctx.
    fn epoch_ptr(&mut self, builder: &mut FunctionBuilder<'_>) -> ir::Value {
        let vmctx = self.vmctx(builder.func);
        let pointer_type = self.pointer_type();
        let base = builder.ins().global_value(pointer_type, vmctx);
        let offset = i32::from(self.offsets.ptr.vmctx_epoch_ptr());
        let epoch_ptr = builder
            .ins()
            .load(pointer_type, ir::MemFlags::trusted(), base, offset);
        epoch_ptr
    }

    /// Load the current epoch value through the cached epoch pointer.
    fn epoch_load_current(&mut self, builder: &mut FunctionBuilder<'_>) -> ir::Value {
        let addr = builder.use_var(self.epoch_ptr_var);
        builder.ins().load(
            ir::types::I64,
            ir::MemFlags::trusted(),
            addr,
            ir::immediates::Offset32::new(0),
        )
    }

    fn epoch_check(&mut self, builder: &mut FunctionBuilder<'_>) {
        let continuation_block = builder.create_block();

        // Load new epoch and check against the cached deadline.
        let cur_epoch_value = self.epoch_load_current(builder);
        self.epoch_check_cached(builder, cur_epoch_value, continuation_block);

        // At this point we've noticed that the epoch has exceeded our
        // cached deadline. However the real deadline may have been
        // updated (within another yield) during some function that we
        // called in the meantime, so reload the cache and check again.
        self.epoch_check_full(builder, cur_epoch_value, continuation_block);
    }

    /// Compare `cur_epoch_value` against the cached deadline variable and
    /// branch to a new (cold) block when it has been reached, leaving the
    /// builder positioned in that cold block; otherwise control continues at
    /// `continuation_block`.
    fn epoch_check_cached(
        &mut self,
        builder: &mut FunctionBuilder,
        cur_epoch_value: ir::Value,
        continuation_block: ir::Block,
    ) {
        let new_epoch_block = builder.create_block();
        builder.set_cold_block(new_epoch_block);

        let epoch_deadline = builder.use_var(self.epoch_deadline_var);
        let cmp = builder.ins().icmp(
            IntCC::UnsignedGreaterThanOrEqual,
            cur_epoch_value,
            epoch_deadline,
        );
        builder
            .ins()
            .brif(cmp, new_epoch_block, &[], continuation_block, &[]);
        builder.seal_block(new_epoch_block);

        builder.switch_to_block(new_epoch_block);
    }

    fn epoch_check_full(
        &mut self,
        builder: &mut FunctionBuilder,
        cur_epoch_value: ir::Value,
        continuation_block: ir::Block,
    ) {
        // We keep the deadline cached in a register to speed the checks
        // in the common case (between epoch ticks) but we want to do a
        // precise check here by reloading the cache first.
        let vmstore_ctx = self.get_vmstore_context_ptr(builder);
        let deadline = builder.ins().load(
            ir::types::I64,
            ir::MemFlags::trusted(),
            vmstore_ctx,
            ir::immediates::Offset32::new(self.offsets.ptr.vmstore_context_epoch_deadline() as i32),
        );
        builder.def_var(self.epoch_deadline_var, deadline);
        self.epoch_check_cached(builder, cur_epoch_value, continuation_block);

        let new_epoch = self.builtin_functions.new_epoch(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        // new_epoch() returns the new deadline, so we don't have to
        // reload it.
        let call = builder.ins().call(new_epoch, &[vmctx]);
        let new_deadline = *builder.func.dfg.inst_results(call).first().unwrap();
        builder.def_var(self.epoch_deadline_var, new_deadline);
        builder.ins().jump(continuation_block, &[]);
        builder.seal_block(continuation_block);

        builder.switch_to_block(continuation_block);
    }

    /// Get the Memory for the given index.
    fn memory(&self, index: MemoryIndex) -> Memory {
        self.module.memories[index]
    }

    /// Get the Table for the given index.
    fn table(&self, index: TableIndex) -> Table {
        self.module.tables[index]
    }

    /// Cast the value to I64 and sign extend if necessary.
    ///
    /// Returns the value casted to I64.
    fn cast_index_to_i64(
        &self,
        pos: &mut FuncCursor<'_>,
        val: ir::Value,
        index_type: IndexType,
    ) -> ir::Value {
        match index_type {
            IndexType::I32 => pos.ins().uextend(I64, val),
            IndexType::I64 => val,
        }
    }

    /// Convert the target pointer-sized integer `val` into the memory/table's index type.
    ///
    /// For memory, `val` is holding a memory length (or the `-1` `memory.grow`-failed sentinel).
    /// For table, `val` is holding a table length.
    ///
    /// This might involve extending or truncating it depending on the memory/table's
    /// index type and the target's pointer type.
    fn convert_pointer_to_index_type(
        &self,
        mut pos: FuncCursor<'_>,
        val: ir::Value,
        index_type: IndexType,
        // When it is a memory and the memory is using single-byte pages,
        // we need to handle the truncation differently. See comments below.
        //
        // When it is a table, this should be set to false.
        single_byte_pages: bool,
    ) -> ir::Value {
        let desired_type = index_type_to_ir_type(index_type);
        let pointer_type = self.pointer_type();
        assert_eq!(pos.func.dfg.value_type(val), pointer_type);

        // The current length is of type `pointer_type` but we need to fit it
        // into `desired_type`. We are guaranteed that the result will always
        // fit, so we just need to do the right ireduce/sextend here.
        if pointer_type == desired_type {
            val
        } else if pointer_type.bits() > desired_type.bits() {
            pos.ins().ireduce(desired_type, val)
        } else {
            // We have a 64-bit memory/table on a 32-bit host -- this combo doesn't
            // really make a whole lot of sense to do from a user perspective
            // but that is neither here nor there. We want to logically do an
            // unsigned extend *except* when we are given the `-1` sentinel,
            // which we must preserve as `-1` in the wider type.
            match single_byte_pages {
                false => {
                    // In the case that we have default page sizes, we can
                    // always sign extend, since valid memory lengths (in pages)
                    // never have their sign bit set, and so if the sign bit is
                    // set then this must be the `-1` sentinel, which we want to
                    // preserve through the extension.
                    //
                    // When it comes to table, `single_byte_pages` should have always been set to false.
                    // Then we simply do a signed extension.
                    pos.ins().sextend(desired_type, val)
                }
                true => {
                    // For single-byte pages, we have to explicitly check for
                    // `-1` and choose whether to do an unsigned extension or
                    // return a larger `-1` because there are valid memory
                    // lengths (in pages) that have the sign bit set.
                    let extended = pos.ins().uextend(desired_type, val);
                    let neg_one = pos.ins().iconst(desired_type, -1);
                    let is_failure = pos.ins().icmp_imm(IntCC::Equal, val, -1);
                    pos.ins().select(is_failure, neg_one, extended)
                }
            }
        }
    }

    /// Load a `funcref` out of `table_index` at `index`, lazily initializing
    /// the entry via the `table_get_lazy_init_func_ref` builtin when it is
    /// still null (only when `table_lazy_init` is enabled).
    fn get_or_init_func_ref_table_elem(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
        cold_blocks: bool,
    ) -> ir::Value {
        let pointer_type = self.pointer_type();
        let table_data = self.get_or_create_table(builder.func, table_index);

        // To support lazy initialization of table
        // contents, we check for a null entry here, and
        // if null, we take a slow-path that
invokes a873// libcall.874let (table_entry_addr, flags) = table_data.prepare_table_addr(self, builder, index);875let value = builder.ins().load(pointer_type, flags, table_entry_addr, 0);876877if !self.tunables.table_lazy_init {878return value;879}880881// Mask off the "initialized bit". See documentation on882// FUNCREF_INIT_BIT in crates/environ/src/ref_bits.rs for more883// details. Note that `FUNCREF_MASK` has type `usize` which may not be884// appropriate for the target architecture. Right now its value is885// always -2 so assert that part doesn't change and then thread through886// -2 as the immediate.887assert_eq!(FUNCREF_MASK as isize, -2);888let value_masked = builder.ins().band_imm(value, Imm64::from(-2));889890let null_block = builder.create_block();891let continuation_block = builder.create_block();892if cold_blocks {893builder.set_cold_block(null_block);894builder.set_cold_block(continuation_block);895}896let result_param = builder.append_block_param(continuation_block, pointer_type);897builder.set_cold_block(null_block);898899builder.ins().brif(900value,901continuation_block,902&[value_masked.into()],903null_block,904&[],905);906builder.seal_block(null_block);907908builder.switch_to_block(null_block);909let index_type = self.table(table_index).idx_type;910let table_index = builder.ins().iconst(I32, table_index.index() as i64);911let lazy_init = self912.builtin_functions913.table_get_lazy_init_func_ref(builder.func);914let vmctx = self.vmctx_val(&mut builder.cursor());915let index = self.cast_index_to_i64(&mut builder.cursor(), index, index_type);916let call_inst = builder.ins().call(lazy_init, &[vmctx, table_index, index]);917let returned_entry = builder.func.dfg.inst_results(call_inst)[0];918builder919.ins()920.jump(continuation_block, &[returned_entry.into()]);921builder.seal_block(continuation_block);922923builder.switch_to_block(continuation_block);924result_param925}926927#[cfg(feature = "wmemcheck")]928fn check_malloc_start(&mut self, builder: 
&mut FunctionBuilder) {
        // wmemcheck instrumentation: notify the runtime that a malloc-family
        // call is beginning.
        let malloc_start = self.builtin_functions.malloc_start(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        builder.ins().call(malloc_start, &[vmctx]);
    }

    /// wmemcheck instrumentation: notify the runtime that a free-family call
    /// is beginning.
    #[cfg(feature = "wmemcheck")]
    fn check_free_start(&mut self, builder: &mut FunctionBuilder) {
        let free_start = self.builtin_functions.free_start(builder.func);
        let vmctx = self.vmctx_val(&mut builder.cursor());
        builder.ins().call(free_start, &[vmctx]);
    }

    /// Look up the current function's name in the module's name section, if
    /// one was recorded. Panics if the CLIF function name is not a
    /// `UserFuncName::User` (which translation always produces).
    #[cfg(feature = "wmemcheck")]
    fn current_func_name(&self, builder: &mut FunctionBuilder) -> Option<&str> {
        let func_index = match &builder.func.name {
            ir::UserFuncName::User(user) => FuncIndex::from_u32(user.index),
            _ => {
                panic!("function name not a UserFuncName::User as expected")
            }
        };
        self.translation
            .debuginfo
            .name_section
            .func_names
            .get(&func_index)
            .copied()
    }

    /// Proof-carrying code: create a memtype describing an empty
    /// runtime struct (to be updated later).
    fn create_empty_struct_memtype(&self, func: &mut ir::Function) -> ir::MemoryType {
        func.create_memory_type(ir::MemoryTypeData::Struct {
            size: 0,
            fields: vec![],
        })
    }

    /// Proof-carrying code: add a new field to a memtype used to
    /// describe a runtime struct. A memory region of type `memtype`
    /// will have a pointer at `offset` pointing to another memory
    /// region of type `pointee`. `readonly` indicates whether the
    /// PCC-checked code is expected to update this field or not.
    fn add_field_to_memtype(
        &self,
        func: &mut ir::Function,
        memtype: ir::MemoryType,
        offset: u32,
        pointee: ir::MemoryType,
        readonly: bool,
    ) {
        let ptr_size = self.pointer_type().bytes();
        match &mut func.memory_types[memtype] {
            ir::MemoryTypeData::Struct { size, fields } => {
                // Grow the struct if this field extends past its current end.
                *size = std::cmp::max(*size, offset.checked_add(ptr_size).unwrap().into());
                fields.push(ir::MemoryTypeField {
                    ty: self.pointer_type(),
                    offset: offset.into(),
                    readonly,
                    fact: Some(ir::Fact::Mem {
                        ty: pointee,
                        min_offset: 0,
                        max_offset: 0,
                        nullable: false,
                    }),
                });

                // Sort fields by offset -- we need to do this now
                // because we may create an arbitrary number of
                // memtypes for imported memories and we don't
                // otherwise track them.
                fields.sort_by_key(|f| f.offset);
            }
            _ => panic!("Cannot add field to non-struct memtype"),
        }
    }

    /// Create an `ir::Global` that does `load(ptr + offset)` and, when PCC and
    /// memory types are enabled, adds a field to the pointer's memory type for
    /// this value we are loading.
    pub(crate) fn global_load_with_memory_type(
        &mut self,
        func: &mut ir::Function,
        ptr: ir::GlobalValue,
        offset: u32,
        flags: ir::MemFlags,
        ptr_mem_ty: Option<ir::MemoryType>,
    ) -> (ir::GlobalValue, Option<ir::MemoryType>) {
        let pointee = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(i32::try_from(offset).unwrap()),
            global_type: self.pointer_type(),
            flags,
        });

        // When the pointer has a memory type, describe the loaded value with a
        // fresh (initially empty) struct memtype and record the fact on the
        // global value so PCC can check uses of it.
        let pointee_mem_ty = ptr_mem_ty.map(|ptr_mem_ty| {
            let pointee_mem_ty = self.create_empty_struct_memtype(func);
            self.add_field_to_memtype(func, ptr_mem_ty, offset, pointee_mem_ty, flags.readonly());
            func.global_value_facts[pointee] = Some(Fact::Mem {
                ty: pointee_mem_ty,
                min_offset: 0,
                max_offset: 0,
                nullable: false,
            });
            pointee_mem_ty
        });

        (pointee, pointee_mem_ty)
    }

    /// Like `global_load_with_memory_type` but specialized for loads out of the
    /// `vmctx`.
    pub(crate) fn global_load_from_vmctx_with_memory_type(
        &mut self,
        func: &mut ir::Function,
        offset: u32,
        flags: ir::MemFlags,
    ) -> (ir::GlobalValue, Option<ir::MemoryType>) {
        let vmctx = self.vmctx(func);
        self.global_load_with_memory_type(func, vmctx, offset, flags, self.pcc_vmctx_memtype)
    }

    /// Helper to emit a conditional trap based on `trap_cond`.
    ///
    /// This should only be used if `self.clif_instruction_traps_enabled()` is
    /// false, otherwise native CLIF instructions should be used instead.
    pub fn conditionally_trap(
        &mut self,
        builder: &mut FunctionBuilder,
        trap_cond: ir::Value,
        trap: ir::TrapCode,
    ) {
        assert!(!self.clif_instruction_traps_enabled());

        let trap_block = builder.create_block();
        builder.set_cold_block(trap_block);
        let continuation_block = builder.create_block();

        builder
            .ins()
            .brif(trap_cond, trap_block, &[], continuation_block, &[]);

        builder.seal_block(trap_block);
        builder.seal_block(continuation_block);

        builder.switch_to_block(trap_block);
        self.trap(builder, trap);
        builder.switch_to_block(continuation_block);
    }

    /// Helper used when `!self.clif_instruction_traps_enabled()` is enabled to
    /// test whether the divisor is zero.
    fn guard_zero_divisor(&mut self, builder: &mut FunctionBuilder, rhs: ir::Value) {
        if self.clif_instruction_traps_enabled() {
            return;
        }
        self.trapz(builder, rhs, ir::TrapCode::INTEGER_DIVISION_BY_ZERO);
    }

    /// Helper used when `!self.clif_instruction_traps_enabled()` is enabled to
    /// test whether a signed division operation will raise a trap.
    fn guard_signed_divide(
        &mut self,
        builder: &mut FunctionBuilder,
        lhs: ir::Value,
        rhs: ir::Value,
    ) {
        if self.clif_instruction_traps_enabled()
{
            return;
        }
        // Division by zero check.
        self.trapz(builder, rhs, ir::TrapCode::INTEGER_DIVISION_BY_ZERO);

        // Overflow check: `INT_MIN / -1` does not fit in the result type.
        let ty = builder.func.dfg.value_type(rhs);
        let minus_one = builder.ins().iconst(ty, -1);
        let rhs_is_minus_one = builder.ins().icmp(IntCC::Equal, rhs, minus_one);
        let int_min = builder.ins().iconst(
            ty,
            match ty {
                I32 => i64::from(i32::MIN),
                I64 => i64::MIN,
                _ => unreachable!(),
            },
        );
        let lhs_is_int_min = builder.ins().icmp(IntCC::Equal, lhs, int_min);
        let is_integer_overflow = builder.ins().band(rhs_is_minus_one, lhs_is_int_min);
        self.conditionally_trap(builder, is_integer_overflow, ir::TrapCode::INTEGER_OVERFLOW);
    }

    /// Helper used when `!self.clif_instruction_traps_enabled()` is enabled to
    /// guard the traps from float-to-int conversions.
    ///
    /// Emits explicit NaN and range checks (on the `f64`-promoted, truncated
    /// value) before the actual conversion instruction is emitted elsewhere.
    fn guard_fcvt_to_int(
        &mut self,
        builder: &mut FunctionBuilder,
        ty: ir::Type,
        val: ir::Value,
        signed: bool,
    ) {
        assert!(!self.clif_instruction_traps_enabled());
        // Normalize to f64 so a single set of bounds works for f32 and f64.
        let val_ty = builder.func.dfg.value_type(val);
        let val = if val_ty == F64 {
            val
        } else {
            builder.ins().fpromote(F64, val)
        };
        // NaN compares unequal to itself.
        let isnan = builder.ins().fcmp(FloatCC::NotEqual, val, val);
        self.trapnz(builder, isnan, ir::TrapCode::BAD_CONVERSION_TO_INTEGER);
        let val = self.trunc_f64(builder, val);
        let (lower_bound, upper_bound) = f64_cvt_to_int_bounds(signed, ty.bits());
        let lower_bound = builder.ins().f64const(lower_bound);
        let too_small = builder
            .ins()
            .fcmp(FloatCC::LessThanOrEqual, val, lower_bound);
        self.trapnz(builder, too_small, ir::TrapCode::INTEGER_OVERFLOW);
        let upper_bound = builder.ins().f64const(upper_bound);
        let too_large = builder
            .ins()
            .fcmp(FloatCC::GreaterThanOrEqual, val, upper_bound);
        self.trapnz(builder, too_large, ir::TrapCode::INTEGER_OVERFLOW);
    }

    /// Get the `ir::Type` for a `VMSharedTypeIndex`.
    pub(crate) fn vmshared_type_index_ty(&self) -> Type {
        Type::int_with_byte_size(self.offsets.size_of_vmshared_type_index().into()).unwrap()
    }

    /// Given a `ModuleInternedTypeIndex`, emit code to get the corresponding
    /// `VMSharedTypeIndex` at runtime.
    pub(crate) fn module_interned_to_shared_ty(
        &mut self,
        pos: &mut FuncCursor,
        interned_ty: ModuleInternedTypeIndex,
    ) -> ir::Value {
        let vmctx = self.vmctx_val(pos);
        let pointer_type = self.pointer_type();
        let mem_flags = ir::MemFlags::trusted().with_readonly().with_can_move();

        // Load the base pointer of the array of `VMSharedTypeIndex`es.
        let shared_indices = pos.ins().load(
            pointer_type,
            mem_flags,
            vmctx,
            i32::from(self.offsets.ptr.vmctx_type_ids_array()),
        );

        // Calculate the offset in that array for this type's entry.
        let ty = self.vmshared_type_index_ty();
        let offset = i32::try_from(interned_ty.as_u32().checked_mul(ty.bytes()).unwrap()).unwrap();

        // Load the `VMSharedTypeIndex` that this `ModuleInternedTypeIndex` is
        // associated with at runtime from the array.
        pos.ins().load(ty, mem_flags, shared_indices, offset)
    }

    /// Load the associated `VMSharedTypeIndex` from inside a `*const VMFuncRef`.
    ///
    /// Does not check for null; just assumes that the `funcref` is a valid
    /// pointer.
    pub(crate) fn load_funcref_type_index(
        &mut self,
        pos: &mut FuncCursor,
        mem_flags: ir::MemFlags,
        funcref: ir::Value,
    ) -> ir::Value {
        let ty = self.vmshared_type_index_ty();
        pos.ins().load(
            ty,
            mem_flags,
            funcref,
            i32::from(self.offsets.ptr.vm_func_ref_type_index()),
        )
    }

    /// Does this function need a GC heap?
    pub fn needs_gc_heap(&self) -> bool {
        self.needs_gc_heap
    }

    /// Get the number of Wasm parameters for the given function.
    pub(crate) fn num_params_for_func(&self, function_index: FuncIndex) -> usize {
        let ty =
self.module.functions[function_index]
            .signature
            .unwrap_module_type_index();
        self.types[ty].unwrap_func().params().len()
    }

    /// Get the number of Wasm parameters for the given function type.
    ///
    /// Panics on non-function types.
    pub(crate) fn num_params_for_function_type(&self, type_index: TypeIndex) -> usize {
        let ty = self.module.types[type_index].unwrap_module_type_index();
        self.types[ty].unwrap_func().params().len()
    }

    /// Initialize the state slot with an empty layout.
    ///
    /// Only active when guest-debug instrumentation (`tunables.debug_guest`)
    /// is enabled; otherwise `self.state_slot` stays `None` and the other
    /// `state_slot_*` helpers are no-ops.
    pub(crate) fn create_state_slot(&mut self, builder: &mut FunctionBuilder) {
        if self.tunables.debug_guest {
            let frame_builder = FrameStateSlotBuilder::new(self.key, self.pointer_type().bytes());

            // Initially zero-size and with no descriptor; we will fill in
            // this info once we're done with the function body.
            let slot = builder
                .func
                .create_sized_stack_slot(ir::StackSlotData::new_with_key(
                    ir::StackSlotKind::ExplicitSlot,
                    0,
                    0,
                    ir::StackSlotKey::new(self.key.into_raw_u64()),
                ));

            self.state_slot = Some((slot, frame_builder));
        }
    }

    /// Update the state slot layout with a new layout given a local.
    pub(crate) fn add_state_slot_local(
        &mut self,
        builder: &mut FunctionBuilder,
        ty: WasmValType,
        init: Option<ir::Value>,
    ) {
        if let Some((slot, b)) = &mut self.state_slot {
            let offset = b.add_local(FrameValType::from(ty));
            if let Some(init) = init {
                builder.ins().stack_store(init, *slot, offset.offset());
            }
        }
    }

    /// Flush "dirty" operand-stack values into the state slot so the
    /// debugger's view of the Wasm operand stack is up to date.
    fn update_state_slot_stack(
        &mut self,
        validator: &FuncValidator<impl WasmModuleResources>,
        builder: &mut FunctionBuilder,
    ) -> WasmResult<()> {
        // Take ownership of the state-slot builder temporarily rather
        // than mutably borrowing so we can invoke a method below.
        if let Some((slot, mut b)) = self.state_slot.take() {
            // If the stack-shape stack is shorter than the value
            // stack, that means that values were popped and then new
            // values were pushed; hence, these operand-stack values
            // are "dirty" and need to be flushed to the stackslot.
            //
            // N.B.: note that we don't re-sync GC-rooted values, and
            // we don't root the instrumentation slots
            // explicitly. This is safe as long as we don't have a
            // moving GC, because the value that we're observing in
            // the main program dataflow is already rooted in the main
            // program (we are only storing an extra copy of it). But
            // if/when we do build a moving GC, we will need to handle
            // this, probably by invalidating the "freshness" of all
            // ref-typed values after a safepoint and re-writing them
            // to the instrumentation slot; or alternately, extending
            // the debug instrumentation mechanism to be able to
            // directly refer to the user stack-slot.
            for i in self.stacks.stack_shape.len()..self.stacks.stack.len() {
                let parent_shape = i
                    .checked_sub(1)
                    .map(|parent_idx| self.stacks.stack_shape[parent_idx]);
                if let Some(this_ty) = validator
                    .get_operand_type(self.stacks.stack.len() - i - 1)
                    .expect("Index should not be out of range")
                {
                    let wasm_ty = self.convert_valtype(this_ty)?;
                    let (this_shape, offset) =
                        b.push_stack(parent_shape, FrameValType::from(wasm_ty));
                    self.stacks.stack_shape.push(this_shape);

                    let value = self.stacks.stack[i];
                    builder.ins().stack_store(value, slot, offset.offset());
                } else {
                    // Unreachable code with unknown type -- no
                    // flushes for this or later-pushed values.
                    break;
                }
            }

            self.state_slot = Some((slot, b));
        }

        Ok(())
    }

    /// Build the debug tags for an instruction at `srcloc`: the state slot,
    /// the source-location "pc", and the current stack-shape id (or
    /// `u32::MAX` when the operand stack is empty). Returns an empty vector
    /// when debug instrumentation is disabled.
    pub(crate) fn debug_tags(&self, srcloc: ir::SourceLoc) -> Vec<ir::DebugTag> {
        if let Some((slot, _b)) = &self.state_slot {
            self.stacks.assert_debug_stack_is_synced();
            let stack_shape = self
                .stacks
                .stack_shape
                .last()
                .map(|s| s.raw())
                .unwrap_or(u32::MAX);
            let pc = srcloc.bits();
            vec![
                ir::DebugTag::StackSlot(*slot),
                ir::DebugTag::User(pc),
                ir::DebugTag::User(stack_shape),
            ]
        } else {
            vec![]
        }
    }

    /// Finalize the state slot's size now that the whole frame layout is
    /// known.
    fn finish_debug_metadata(&self, builder: &mut FunctionBuilder) {
        if let Some((slot, b)) = &self.state_slot {
            builder.func.sized_stack_slots[*slot].size = b.size();
        }
    }

    /// Store a new value for a local in the state slot, if present.
    pub(crate) fn state_slot_local_set(
        &self,
        builder: &mut FunctionBuilder,
        local: u32,
        value: ir::Value,
    ) {
        if let Some((slot, b)) = &self.state_slot {
            let offset = b.local_offset(local);
            builder.ins().stack_store(value, *slot, offset.offset());
        }
    }

    /// Record the current vmctx pointer into the state slot, if present.
    fn update_state_slot_vmctx(&mut self, builder: &mut FunctionBuilder) {
        if let &Some((slot, _)) = &self.state_slot {
            let vmctx = self.vmctx_val(&mut builder.cursor());
            // N.B.: we always store vmctx at offset 0 in the
            // slot. This is relied upon in
            // crates/wasmtime/src/runtime/debug.rs in
            // `raw_instance()`.
// See also the slot layout computation in crates/environ/src/
            builder.ins().stack_store(vmctx, slot, 0);
        }
    }
}

/// Per-function caches mapping Wasm-level entities to the Cranelift-level
/// objects created for them in the function currently being built.
#[derive(Default)]
pub(crate) struct WasmEntities {
    /// Map from a Wasm global index from this module to its implementation in
    /// the Cranelift function we are building.
    pub(crate) globals: SecondaryMap<GlobalIndex, Option<GlobalVariable>>,

    /// Map from a Wasm memory index to its `Heap` implementation in the
    /// Cranelift function we are building.
    pub(crate) memories: SecondaryMap<MemoryIndex, PackedOption<Heap>>,

    /// Map from an (interned) Wasm type index from this module to its
    /// `ir::SigRef` in the Cranelift function we are building.
    pub(crate) sig_refs: SecondaryMap<ModuleInternedTypeIndex, PackedOption<ir::SigRef>>,

    /// Map from a defined Wasm function index to its associated function
    /// reference in the Cranelift function we are building.
    pub(crate) defined_func_refs: SecondaryMap<DefinedFuncIndex, PackedOption<ir::FuncRef>>,

    /// Map from an imported Wasm function index for which we statically know
    /// which function will always be used to satisfy that import to its
    /// associated function reference in the Cranelift function we are building.
    pub(crate) imported_func_refs: SecondaryMap<FuncIndex, PackedOption<ir::FuncRef>>,

    /// Map from a Wasm table index to its associated implementation in the
    /// Cranelift function we are building.
    pub(crate) tables: SecondaryMap<TableIndex, Option<TableData>>,
}

/// Generates memoized `get_or_create_*` accessors over the `WasmEntities`
/// maps: each method returns the cached entry for `key` when present and
/// otherwise invokes the corresponding `make_*` constructor and caches its
/// result.
macro_rules! define_get_or_create_methods {
    ( $( $name:ident ( $map:ident ) : $create:ident : $key:ty => $val:ty ; )* ) => {
        $(
            pub(crate) fn $name(&mut self, func: &mut ir::Function, key: $key) -> $val {
                match self.entities.$map[key].clone().into() {
                    Some(val) => val,
                    None => {
                        let val = self.$create(func, key);
                        self.entities.$map[key] = Some(val.clone()).into();
                        val
                    }
                }
            }
        )*
    };
}

impl FuncEnvironment<'_> {
    define_get_or_create_methods! {
        get_or_create_global(globals) : make_global : GlobalIndex => GlobalVariable;
        get_or_create_heap(memories) : make_heap : MemoryIndex => Heap;
        get_or_create_interned_sig_ref(sig_refs) : make_sig_ref : ModuleInternedTypeIndex => ir::SigRef;
        get_or_create_defined_func_ref(defined_func_refs) : make_defined_func_ref : DefinedFuncIndex => ir::FuncRef;
        get_or_create_imported_func_ref(imported_func_refs) : make_imported_func_ref : FuncIndex => ir::FuncRef;
        get_or_create_table(tables) : make_table : TableIndex => TableData;
    }

    /// Describe how the given Wasm global is accessed from generated code:
    /// custom barriers for GC refs, a compile-time constant for const-eval'd
    /// immutable globals, or a plain memory location otherwise.
    fn make_global(&mut self, func: &mut ir::Function, index: GlobalIndex) -> GlobalVariable {
        let ty = self.module.globals[index].wasm_ty;

        if ty.is_vmgcref_type() {
            // Although reference-typed globals live at the same memory location as
            // any other type of global at the same index would, getting or
            // setting them requires ref counting barriers. Therefore, we need
            // to use `GlobalVariable::Custom`, as that is the only kind of
            // `GlobalVariable` for which translation supports custom
            // access translation.
            return GlobalVariable::Custom;
        }

        // Immutable, locally-defined globals whose initializer const-evaluates
        // can be folded to a constant at compile time.
        if !self.module.globals[index].mutability {
            if let Some(index) = self.module.defined_global_index(index) {
                let init = &self.module.global_initializers[index];
                if let Some(value) = init.const_eval() {
                    return GlobalVariable::Constant { value };
                }
            }
        }

        let (gv, offset) = self.get_global_location(func, index);
        GlobalVariable::Memory {
            gv,
            offset: offset.into(),
            ty: super::value_type(self.isa, ty),
        }
    }

    /// Get or create the `ir::SigRef` for a module-level `TypeIndex` by first
    /// resolving it to its interned type index.
    pub(crate) fn get_or_create_sig_ref(
        &mut self,
        func: &mut ir::Function,
        ty: TypeIndex,
    ) -> ir::SigRef {
        let ty = self.module.types[ty].unwrap_module_type_index();
        self.get_or_create_interned_sig_ref(func, ty)
    }

    /// Import the native calling-convention signature for the given interned
    /// Wasm function type and remember the Wasm-level type it came from.
    fn make_sig_ref(
        &mut self,
        func: &mut ir::Function,
        index: ModuleInternedTypeIndex,
    ) -> ir::SigRef {
        let wasm_func_ty = self.types[index].unwrap_func();
        let sig = crate::wasm_call_signature(self.isa, wasm_func_ty, &self.tunables);
        let sig_ref = func.import_signature(sig);
        self.sig_ref_to_ty[sig_ref] = Some(wasm_func_ty);
        sig_ref
    }

    /// Create an `ir::FuncRef` for a function defined in this module, named
    /// via its `FuncKey` so cross-function references can be resolved at
    /// link time.
    fn make_defined_func_ref(
        &mut self,
        func: &mut ir::Function,
        def_func_index: DefinedFuncIndex,
    ) -> ir::FuncRef {
        let func_index = self.module.func_index(def_func_index);

        let ty = self.module.functions[func_index]
            .signature
            .unwrap_module_type_index();
        let signature = self.get_or_create_interned_sig_ref(func, ty);

        let key = FuncKey::DefinedWasmFunction(self.translation.module_index(), def_func_index);
        let (namespace, index) = key.into_raw_parts();
        let name = ir::ExternalName::User(
            func.declare_imported_user_function(ir::UserExternalName { namespace, index }),
        );

        func.import_function(ir::ExtFuncData {
            name,
            signature,
            colocated:
true,
            patchable: false,
        })
    }

    /// Create an `ir::FuncRef` for an imported function whose implementation
    /// is statically known (a defined Wasm function or, with the
    /// component-model feature, an unsafe intrinsic). Panics for imports that
    /// are not statically known.
    fn make_imported_func_ref(
        &mut self,
        func: &mut ir::Function,
        func_index: FuncIndex,
    ) -> ir::FuncRef {
        assert!(self.module.is_imported_function(func_index));
        assert!(self.translation.known_imported_functions[func_index].is_some());

        let ty = self.module.functions[func_index]
            .signature
            .unwrap_module_type_index();
        let signature = self.get_or_create_interned_sig_ref(func, ty);

        let key = match self.translation.known_imported_functions[func_index] {
            Some(key @ FuncKey::DefinedWasmFunction(..)) => key,

            #[cfg(feature = "component-model")]
            Some(key @ FuncKey::UnsafeIntrinsic(..)) => key,

            Some(key) => {
                panic!("unexpected kind of known-import function: {key:?}")
            }

            None => panic!(
                "cannot make an `ir::FuncRef` for a function import that is not statically known"
            ),
        };

        let (namespace, index) = key.into_raw_parts();
        let name = ir::ExternalName::User(
            func.declare_imported_user_function(ir::UserExternalName { namespace, index }),
        );

        func.import_function(ir::ExtFuncData {
            name,
            signature,
            colocated: true,
            patchable: false,
        })
    }

    /// Construct the `Heap` describing how the given linear memory is
    /// accessed: locate its base/length (directly in the vmctx for owned
    /// memories, through a `VMMemoryDefinition` pointer for shared or
    /// imported ones) and set up PCC facts when memory types are enabled.
    fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> Heap {
        let pointer_type = self.pointer_type();
        let memory = self.module.memories[index];
        let is_shared = memory.shared;

        let (base_ptr, base_offset, current_length_offset, ptr_memtype) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.defined_memory_index(index) {
                if is_shared {
                    // As with imported memory, the `VMMemoryDefinition` for a
                    // shared memory is stored elsewhere. We store a `*mut
                    // VMMemoryDefinition` to it and dereference that when
                    // atomically growing it.
                    let from_offset = self.offsets.vmctx_vmmemory_pointer(def_index);
                    let (memory, def_mt) = self.global_load_from_vmctx_with_memory_type(
                        func,
                        from_offset,
                        ir::MemFlags::trusted().with_readonly().with_can_move(),
                    );
                    let base_offset = i32::from(self.offsets.ptr.vmmemory_definition_base());
                    let current_length_offset =
                        i32::from(self.offsets.ptr.vmmemory_definition_current_length());
                    (memory, base_offset, current_length_offset, def_mt)
                } else {
                    // Owned, non-shared memory: the definition lives inline in
                    // the vmctx.
                    let owned_index = self.module.owned_memory_index(def_index);
                    let owned_base_offset =
                        self.offsets.vmctx_vmmemory_definition_base(owned_index);
                    let owned_length_offset = self
                        .offsets
                        .vmctx_vmmemory_definition_current_length(owned_index);
                    let current_base_offset = i32::try_from(owned_base_offset).unwrap();
                    let current_length_offset = i32::try_from(owned_length_offset).unwrap();
                    (
                        vmctx,
                        current_base_offset,
                        current_length_offset,
                        self.pcc_vmctx_memtype,
                    )
                }
            } else {
                // Imported memory: dereference the stored `*mut
                // VMMemoryDefinition`.
                let from_offset = self.offsets.vmctx_vmmemory_import_from(index);
                let (memory, def_mt) = self.global_load_from_vmctx_with_memory_type(
                    func,
                    from_offset,
                    ir::MemFlags::trusted().with_readonly().with_can_move(),
                );
                let base_offset = i32::from(self.offsets.ptr.vmmemory_definition_base());
                let current_length_offset =
                    i32::from(self.offsets.ptr.vmmemory_definition_current_length());
                (memory, base_offset, current_length_offset, def_mt)
            }
        };

        // Global value loading the memory's current byte length.
        let bound = func.create_global_value(ir::GlobalValueData::Load {
            base: base_ptr,
            offset: Offset32::new(current_length_offset),
            global_type: pointer_type,
            flags: MemFlags::trusted(),
        });

        let (base_fact, pcc_memory_type) = self.make_pcc_base_fact_and_type_for_memory(
            func,
            memory,
            base_offset,
            current_length_offset,
            ptr_memtype,
            bound,
        );

        let base = self.make_heap_base(func, memory, base_ptr, base_offset, base_fact);

        self.heaps.push(HeapData {
            base,
            bound,
            pcc_memory_type,
            memory,
        })
    }

    /// Create the global value that loads a memory's base pointer, marking
    /// the load read-only when the memory can never move, and attaching the
    /// optional PCC fact.
    pub(crate) fn make_heap_base(
        &self,
        func: &mut Function,
        memory: Memory,
        ptr: ir::GlobalValue,
        offset: i32,
        fact: Option<Fact>,
    ) -> ir::GlobalValue {
        let pointer_type = self.pointer_type();

        let mut flags = ir::MemFlags::trusted().with_checked().with_can_move();
        if !memory.memory_may_move(self.tunables) {
            flags.set_readonly();
        }

        let heap_base = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(offset),
            global_type: pointer_type,
            flags,
        });
        func.global_value_facts[heap_base] = fact;
        heap_base
    }

    /// Build the PCC fact for a memory's base pointer and the memory type
    /// describing the memory region, when a vmctx memtype is available.
    /// Returns `(None, None)` when PCC/memory types are disabled.
    pub(crate) fn make_pcc_base_fact_and_type_for_memory(
        &mut self,
        func: &mut Function,
        memory: Memory,
        base_offset: i32,
        current_length_offset: i32,
        ptr_memtype: Option<ir::MemoryType>,
        heap_bound: ir::GlobalValue,
    ) -> (Option<Fact>, Option<ir::MemoryType>) {
        // If we have a declared maximum, we can make this a "static" heap, which is
        // allocated up front and never moved.
        let host_page_size_log2 = self.target_config().page_size_align_log2;
        let (base_fact, memory_type) = if !memory
            .can_elide_bounds_check(self.tunables, host_page_size_log2)
        {
            if let Some(ptr_memtype) = ptr_memtype {
                // Create a memtype representing the untyped memory region.
                let data_mt = func.create_memory_type(ir::MemoryTypeData::DynamicMemory {
                    gv: heap_bound,
                    size: self.tunables.memory_guard_size,
                });
                // This fact applies to any pointer to the start of the memory.
                let base_fact = ir::Fact::dynamic_base_ptr(data_mt);
                // This fact applies to the length.
                let length_fact = ir::Fact::global_value(
                    u16::try_from(self.isa.pointer_type().bits()).unwrap(),
                    heap_bound,
                );
                // Create a field in the vmctx for the base pointer.
                match &mut
func.memory_types[ptr_memtype] {
                    ir::MemoryTypeData::Struct { size, fields } => {
                        let base_offset = u64::try_from(base_offset).unwrap();
                        fields.push(ir::MemoryTypeField {
                            offset: base_offset,
                            ty: self.isa.pointer_type(),
                            // Read-only field from the PoV of PCC checks:
                            // don't allow stores to this field. (Even if
                            // it is a dynamic memory whose base can
                            // change, that update happens inside the
                            // runtime, not in generated code.)
                            readonly: true,
                            fact: Some(base_fact.clone()),
                        });
                        let current_length_offset = u64::try_from(current_length_offset).unwrap();
                        fields.push(ir::MemoryTypeField {
                            offset: current_length_offset,
                            ty: self.isa.pointer_type(),
                            // As above, read-only; only the runtime modifies it.
                            readonly: true,
                            fact: Some(length_fact),
                        });

                        // Grow the struct to cover whichever of the two
                        // fields ends last.
                        let pointer_size = u64::from(self.isa.pointer_type().bytes());
                        let fields_end = std::cmp::max(
                            base_offset + pointer_size,
                            current_length_offset + pointer_size,
                        );
                        *size = std::cmp::max(*size, fields_end);
                    }
                    _ => {
                        panic!("Bad memtype");
                    }
                }
                // Apply a fact to the base pointer.
                (Some(base_fact), Some(data_mt))
            } else {
                (None, None)
            }
        } else {
            if let Some(ptr_memtype) = ptr_memtype {
                // Create a memtype representing the untyped memory region.
                let data_mt = func.create_memory_type(ir::MemoryTypeData::Memory {
                    size: self
                        .tunables
                        .memory_reservation
                        .checked_add(self.tunables.memory_guard_size)
                        .expect("Memory plan has overflowing size plus guard"),
                });
                // This fact applies to any pointer to the start of the memory.
                let base_fact = Fact::Mem {
                    ty: data_mt,
                    min_offset: 0,
                    max_offset: 0,
                    nullable: false,
                };
                // Create a field in the vmctx for the base pointer.
                match &mut func.memory_types[ptr_memtype] {
                    ir::MemoryTypeData::Struct { size, fields } => {
                        let offset = u64::try_from(base_offset).unwrap();
                        fields.push(ir::MemoryTypeField {
                            offset,
                            ty: self.isa.pointer_type(),
                            // Read-only field from the PoV of PCC checks:
                            // don't allow stores to this field. (Even if
                            // it is a dynamic memory whose base can
                            // change, that update happens inside the
                            // runtime, not in generated code.)
                            readonly: true,
                            fact: Some(base_fact.clone()),
                        });
                        *size = std::cmp::max(
                            *size,
                            offset + u64::from(self.isa.pointer_type().bytes()),
                        );
                    }
                    _ => {
                        panic!("Bad memtype");
                    }
                }
                // Apply a fact to the base pointer.
                (Some(base_fact), Some(data_mt))
            } else {
                (None, None)
            }
        };
        (base_fact, memory_type)
    }

    /// Construct the `TableData` describing how the given table is accessed:
    /// its base-pointer global value, its size (static for fixed-size tables,
    /// a loaded global value otherwise), and its element size.
    fn make_table(&mut self, func: &mut ir::Function, index: TableIndex) -> TableData {
        let pointer_type = self.pointer_type();

        let (ptr, base_offset, current_elements_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.defined_table_index(index) {
                // Locally-defined table: the `VMTableDefinition` lives inline
                // in the vmctx.
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmtable_definition_base(def_index)).unwrap();
                let current_elements_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmtable_definition_current_elements(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_elements_offset)
            } else {
                // Imported table: load the `*mut VMTableDefinition` first.
                let from_offset = self.offsets.vmctx_vmtable_from(index);
                let table = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    flags: MemFlags::trusted().with_readonly().with_can_move(),
                });
                let base_offset = i32::from(self.offsets.vmtable_definition_base());
                let current_elements_offset =
                    i32::from(self.offsets.vmtable_definition_current_elements());
                (table, base_offset, current_elements_offset)
            }
        };

        let table = &self.module.tables[index];
        let element_size = if table.ref_type.is_vmgcref_type() {
            // For GC-managed references, tables store `Option<VMGcRef>`s.
            ir::types::I32.bytes()
        } else {
            self.reference_type(table.ref_type.heap_type).0.bytes()
        };

        let base_gv = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            flags: if Some(table.limits.min) == table.limits.max {
                // A fixed-size table can't be resized so its base address won't
                // change.
                MemFlags::trusted().with_readonly().with_can_move()
            } else {
                MemFlags::trusted()
            },
        });

        let bound = if Some(table.limits.min) == table.limits.max {
            TableSize::Static {
                bound: table.limits.min,
            }
        } else {
            TableSize::Dynamic {
                bound_gv: func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_elements_offset),
                    global_type: ir::Type::int(
                        u16::from(self.offsets.size_of_vmtable_definition_current_elements()) * 8,
                    )
                    .unwrap(),
                    flags: MemFlags::trusted(),
                }),
            }
        };

        TableData {
            base_gv,
            bound,
            element_size,
        }
    }

    /// Get the type index associated with an exception object.
    #[cfg(feature = "gc")]
    pub(crate) fn exception_type_from_tag(&self, tag: TagIndex) -> EngineOrModuleTypeIndex {
        self.module.tags[tag].exception
    }

    /// Get the parameter arity of the associated function type for the given tag.
    pub(crate) fn tag_param_arity(&self, tag: TagIndex) -> usize {
        let func_ty = self.module.tags[tag].signature.unwrap_module_type_index();
        let func_ty = self
            .types
            .unwrap_func(func_ty)
            .expect("already validated to refer to a function type");
        func_ty.params().len()
    }

    /// Get the runtime instance ID and defined-tag ID in that
    /// instance for a particular static tag ID.
    #[cfg(feature = "gc")]
    pub(crate) fn get_instance_and_tag(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        tag_index: TagIndex,
    ) -> (ir::Value, ir::Value) {
        if let Some(defined_tag_index) = self.module.defined_tag_index(tag_index) {
            // Our own tag -- we
only need to get our instance ID.1856let builtin = self.builtin_functions.get_instance_id(builder.func);1857let vmctx = self.vmctx_val(&mut builder.cursor());1858let call = builder.ins().call(builtin, &[vmctx]);1859let instance_id = builder.func.dfg.inst_results(call)[0];1860let tag_id = builder1861.ins()1862.iconst(I32, i64::from(defined_tag_index.as_u32()));1863(instance_id, tag_id)1864} else {1865// An imported tag -- we need to load the VMTagImport struct.1866let vmctx_tag_vmctx_offset = self.offsets.vmctx_vmtag_import_vmctx(tag_index);1867let vmctx_tag_index_offset = self.offsets.vmctx_vmtag_import_index(tag_index);1868let vmctx = self.vmctx_val(&mut builder.cursor());1869let pointer_type = self.pointer_type();1870let from_vmctx = builder.ins().load(1871pointer_type,1872MemFlags::trusted().with_readonly(),1873vmctx,1874i32::try_from(vmctx_tag_vmctx_offset).unwrap(),1875);1876let index = builder.ins().load(1877I32,1878MemFlags::trusted().with_readonly(),1879vmctx,1880i32::try_from(vmctx_tag_index_offset).unwrap(),1881);1882let builtin = self.builtin_functions.get_instance_id(builder.func);1883let call = builder.ins().call(builtin, &[from_vmctx]);1884let from_instance_id = builder.func.dfg.inst_results(call)[0];1885(from_instance_id, index)1886}1887}1888}18891890struct Call<'a, 'func, 'module_env> {1891builder: &'a mut FunctionBuilder<'func>,1892env: &'a mut FuncEnvironment<'module_env>,1893srcloc: ir::SourceLoc,1894tail: bool,1895}18961897enum CheckIndirectCallTypeSignature {1898Runtime,1899StaticMatch {1900/// Whether or not the funcref may be null or if it's statically known1901/// to not be null.1902may_be_null: bool,1903},1904StaticTrap,1905}19061907type CallRets = SmallVec<[ir::Value; 4]>;19081909impl<'a, 'func, 'module_env> Call<'a, 'func, 'module_env> {1910/// Create a new `Call` site that will do regular, non-tail calls.1911pub fn new(1912builder: &'a mut FunctionBuilder<'func>,1913env: &'a mut FuncEnvironment<'module_env>,1914srcloc: 
ir::SourceLoc,
    ) -> Self {
        Call {
            builder,
            env,
            srcloc,
            tail: false,
        }
    }

    /// Create a new `Call` site that will perform tail calls.
    pub fn new_tail(
        builder: &'a mut FunctionBuilder<'func>,
        env: &'a mut FuncEnvironment<'module_env>,
        srcloc: ir::SourceLoc,
    ) -> Self {
        Call {
            builder,
            env,
            srcloc,
            tail: true,
        }
    }

    /// Do a Wasm-level direct call to the given callee function.
    ///
    /// `wasm_call_args` are the Wasm-level arguments only; the callee and
    /// caller vmctx arguments are prepended here.
    pub fn direct_call(
        mut self,
        callee_index: FuncIndex,
        sig_ref: ir::SigRef,
        wasm_call_args: &[ir::Value],
    ) -> WasmResult<CallRets> {
        let mut real_call_args = Vec::with_capacity(wasm_call_args.len() + 2);
        let caller_vmctx = self
            .builder
            .func
            .special_param(ArgumentPurpose::VMContext)
            .unwrap();

        // Handle direct calls to locally-defined functions.
        if let Some(def_func_index) = self.env.module.defined_func_index(callee_index) {
            // First append the callee vmctx address, which is the same as the caller vmctx in
            // this case.
            real_call_args.push(caller_vmctx);

            // Then append the caller vmctx address.
            real_call_args.push(caller_vmctx);

            // Then append the regular call arguments.
            real_call_args.extend_from_slice(wasm_call_args);

            // Finally, make the direct call!
            let callee = self
                .env
                .get_or_create_defined_func_ref(self.builder.func, def_func_index);
            return Ok(self.direct_call_inst(callee, &real_call_args));
        }

        // Handle direct calls to imported functions. We use an indirect call
        // so that we don't have to patch the code at runtime.
        let pointer_type = self.env.pointer_type();
        let vmctx = self.env.vmctx(self.builder.func);
        let base = self.builder.ins().global_value(pointer_type, vmctx);

        let mem_flags = ir::MemFlags::trusted().with_readonly().with_can_move();

        // Load the callee address.
        let body_offset = i32::try_from(
            self.env
                .offsets
                .vmctx_vmfunction_import_wasm_call(callee_index),
        )
        .unwrap();

        // First append the callee vmctx address.
        let vmctx_offset =
            i32::try_from(self.env.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
        let callee_vmctx = self
            .builder
            .ins()
            .load(pointer_type, mem_flags, base, vmctx_offset);
        real_call_args.push(callee_vmctx);
        real_call_args.push(caller_vmctx);

        // Then append the Wasm call arguments.
        real_call_args.extend_from_slice(wasm_call_args);

        // If we statically know the imported function (e.g. this is a
        // component-to-component call where we statically know both components)
        // then we can avoid doing an indirect call.
        match self.env.translation.known_imported_functions[callee_index].as_ref() {
            // The import is always a compile-time builtin intrinsic. Make a
            // direct call to that function (presumably it will eventually be
            // inlined).
            #[cfg(feature = "component-model")]
            Some(FuncKey::UnsafeIntrinsic(..)) => {
                let callee = self
                    .env
                    .get_or_create_imported_func_ref(self.builder.func, callee_index);
                Ok(self.direct_call_inst(callee, &real_call_args))
            }

            // The import is always satisfied with the given defined Wasm
            // function, so do a direct call to that function! (Although we take
            // care to still pass its `funcref`'s `vmctx` as the callee `vmctx`
            // in `real_call_args` and not the caller's.)
            Some(FuncKey::DefinedWasmFunction(..)) => {
                let callee = self
                    .env
                    .get_or_create_imported_func_ref(self.builder.func, callee_index);
                Ok(self.direct_call_inst(callee, &real_call_args))
            }

            Some(key) => panic!("unexpected kind of known-import function: {key:?}"),

            // Unknown import function or this module is instantiated many times
            // and with different functions. Either way, we have to do the
            // indirect call.
            None => {
                let func_addr = self
                    .builder
                    .ins()
                    .load(pointer_type, mem_flags, base, body_offset);
                Ok(self.indirect_call_inst(sig_ref, func_addr, &real_call_args))
            }
        }
    }

    /// Do a Wasm-level indirect call through the given funcref table.
    ///
    /// Returns `Ok(None)` when the call is statically known to trap, in
    /// which case following code is unreachable.
    pub fn indirect_call(
        mut self,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<Option<CallRets>> {
        let (code_ptr, callee_vmctx) = match self.check_and_load_code_and_callee_vmctx(
            features,
            table_index,
            ty_index,
            callee,
            false,
        )?
{
            Some(pair) => pair,
            None => return Ok(None),
        };

        self.unchecked_call_impl(sig_ref, code_ptr, callee_vmctx, call_args)
            .map(Some)
    }

    /// Load the code pointer and callee vmctx for an indirect call through
    /// `table_index`, emitting whatever type/null checks are required.
    ///
    /// Returns `Ok(None)` when the call statically traps (unreachable code
    /// follows).
    fn check_and_load_code_and_callee_vmctx(
        &mut self,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        callee: ir::Value,
        cold_blocks: bool,
    ) -> WasmResult<Option<(ir::Value, ir::Value)>> {
        // Get the funcref pointer from the table.
        let funcref_ptr = self.env.get_or_init_func_ref_table_elem(
            self.builder,
            table_index,
            callee,
            cold_blocks,
        );

        // If necessary, check the signature.
        let check =
            self.check_indirect_call_type_signature(features, table_index, ty_index, funcref_ptr);

        let trap_code = match check {
            // `funcref_ptr` is checked at runtime that its type matches,
            // meaning that if code gets this far it's guaranteed to not be
            // null. That means nothing in `unchecked_call` can fail.
            CheckIndirectCallTypeSignature::Runtime => None,

            // No type check was performed on `funcref_ptr` because it's
            // statically known to have the right type. Note that whether or
            // not the function is null is not necessarily tested so far since
            // no type information was inspected.
            //
            // If the table may hold null functions, then further loads in
            // `unchecked_call` may fail. If the table only holds non-null
            // functions, though, then there's no possibility of a trap.
            CheckIndirectCallTypeSignature::StaticMatch { may_be_null } => {
                if may_be_null {
                    Some(crate::TRAP_INDIRECT_CALL_TO_NULL)
                } else {
                    None
                }
            }

            // Code has already trapped, so return nothing indicating that this
            // is now unreachable code.
            CheckIndirectCallTypeSignature::StaticTrap => return Ok(None),
        };

        Ok(Some(self.load_code_and_vmctx(funcref_ptr, trap_code)))
    }

    /// Emit the `call_indirect` signature check for `funcref_ptr` against
    /// the caller-expected type `ty_index`, resolving it statically when the
    /// table's element type makes that possible.
    fn check_indirect_call_type_signature(
        &mut self,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        funcref_ptr: ir::Value,
    ) -> CheckIndirectCallTypeSignature {
        let table = &self.env.module.tables[table_index];
        let sig_id_size = self.env.offsets.size_of_vmshared_type_index();
        let sig_id_type = Type::int(u16::from(sig_id_size) * 8).unwrap();

        // Test if a type check is necessary for this table. If this table is a
        // table of typed functions and that type matches `ty_index`, then
        // there's no need to perform a typecheck.
        match table.ref_type.heap_type {
            // Functions do not have a statically known type in the table, a
            // typecheck is required. Fall through to below to perform the
            // actual typecheck.
            WasmHeapType::Func => {}

            // Functions that have a statically known type are either going to
            // always succeed or always fail. Figure out by inspecting the types
            // further.
            WasmHeapType::ConcreteFunc(EngineOrModuleTypeIndex::Module(table_ty)) => {
                // If `ty_index` matches `table_ty`, then this call is
                // statically known to have the right type, so no checks are
                // necessary.
                let specified_ty = self.env.module.types[ty_index].unwrap_module_type_index();
                if specified_ty == table_ty {
                    return CheckIndirectCallTypeSignature::StaticMatch {
                        may_be_null: table.ref_type.nullable,
                    };
                }

                if features.gc() {
                    // If we are in the Wasm GC world, then we need to perform
                    // an actual subtype check at runtime. Fall through to below
                    // to do that.
                } else {
                    // Otherwise if the types don't match then either (a) this
                    // is a null pointer or (b) it's a pointer with the wrong
                    // type. Figure out which and trap here.
                    //
                    // If it's possible to have a null here then try to load the
                    // type information. If that fails due to the function being
                    // a null pointer, then this was a call to null. Otherwise
                    // if it succeeds then we know it won't match, so trap
                    // anyway.
                    if table.ref_type.nullable {
                        if self.env.clif_memory_traps_enabled() {
                            self.builder.ins().load(
                                sig_id_type,
                                ir::MemFlags::trusted()
                                    .with_readonly()
                                    .with_trap_code(Some(crate::TRAP_INDIRECT_CALL_TO_NULL)),
                                funcref_ptr,
                                i32::from(self.env.offsets.ptr.vm_func_ref_type_index()),
                            );
                        } else {
                            self.env.trapz(
                                self.builder,
                                funcref_ptr,
                                crate::TRAP_INDIRECT_CALL_TO_NULL,
                            );
                        }
                    }
                    self.env.trap(self.builder, crate::TRAP_BAD_SIGNATURE);
                    return CheckIndirectCallTypeSignature::StaticTrap;
                }
            }

            // Tables of `nofunc` can only be inhabited by null, so go ahead and
            // trap with that.
            WasmHeapType::NoFunc => {
                assert!(table.ref_type.nullable);
                self.env
                    .trap(self.builder, crate::TRAP_INDIRECT_CALL_TO_NULL);
                return CheckIndirectCallTypeSignature::StaticTrap;
            }

            // Engine-indexed types don't show up until runtime and it's a Wasm
            // validation error to perform a call through a non-function table,
            // so these cases are dynamically not reachable.
            WasmHeapType::ConcreteFunc(EngineOrModuleTypeIndex::Engine(_))
            | WasmHeapType::ConcreteFunc(EngineOrModuleTypeIndex::RecGroup(_))
            | WasmHeapType::Extern
            | WasmHeapType::NoExtern
            | WasmHeapType::Any
            | WasmHeapType::Eq
            | WasmHeapType::I31
            | WasmHeapType::Array
            | WasmHeapType::ConcreteArray(_)
            | WasmHeapType::Struct
            | WasmHeapType::ConcreteStruct(_)
            | WasmHeapType::Exn
            | WasmHeapType::ConcreteExn(_)
            | WasmHeapType::NoExn
            | WasmHeapType::Cont
            | WasmHeapType::ConcreteCont(_)
            | WasmHeapType::NoCont
            | WasmHeapType::None => {
                unreachable!()
            }
        }

        // Load the caller's `VMSharedTypeIndex`.
        let interned_ty = self.env.module.types[ty_index].unwrap_module_type_index();
        let caller_sig_id = self
            .env
            .module_interned_to_shared_ty(&mut self.builder.cursor(), interned_ty);

        // Load the callee's `VMSharedTypeIndex`.
        //
        // Note that the callee may be null in which case this load may
        // trap. If so use the `TRAP_INDIRECT_CALL_TO_NULL` trap code.
        let mut mem_flags = ir::MemFlags::trusted().with_readonly();
        if self.env.clif_memory_traps_enabled() {
            mem_flags = mem_flags.with_trap_code(Some(crate::TRAP_INDIRECT_CALL_TO_NULL));
        } else {
            self.env
                .trapz(self.builder, funcref_ptr, crate::TRAP_INDIRECT_CALL_TO_NULL);
        }
        let callee_sig_id =
            self.env
                .load_funcref_type_index(&mut self.builder.cursor(), mem_flags, funcref_ptr);

        // Check that they match: in the case of Wasm GC, this means doing a
        // full subtype check. Otherwise, we do a simple equality check.
        let matches = if features.gc() {
            #[cfg(feature = "gc")]
            {
                self.env
                    .is_subtype(self.builder, callee_sig_id, caller_sig_id)
            }
            #[cfg(not(feature = "gc"))]
            {
                unreachable!()
            }
        } else {
            self.builder
                .ins()
                .icmp(IntCC::Equal, callee_sig_id, caller_sig_id)
        };
        self.env
            .trapz(self.builder, matches, crate::TRAP_BAD_SIGNATURE);
        CheckIndirectCallTypeSignature::Runtime
    }

    /// Call a typed function reference.
    pub fn call_ref(
        self,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        args: &[ir::Value],
    ) -> WasmResult<CallRets> {
        // FIXME: the wasm type system tracks enough information to know whether
        // `callee` is a null reference or not. In some situations it can be
        // statically known here that `callee` cannot be null in which case this
        // can be `None` instead.
// This requires feeding type information from
        // wasmparser's validator into this function, however, which is not
        // easily done at this time.
        let callee_load_trap_code = Some(crate::TRAP_NULL_REFERENCE);

        self.unchecked_call(sig_ref, callee, callee_load_trap_code, args)
    }

    /// This calls a function by reference without checking the signature.
    ///
    /// It gets the function address, sets relevant flags, and passes the
    /// special callee/caller vmctxs. It is used by both call_indirect (which
    /// checks the signature) and call_ref (which doesn't).
    fn unchecked_call(
        mut self,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        callee_load_trap_code: Option<ir::TrapCode>,
        call_args: &[ir::Value],
    ) -> WasmResult<CallRets> {
        let (func_addr, callee_vmctx) = self.load_code_and_vmctx(callee, callee_load_trap_code);
        self.unchecked_call_impl(sig_ref, func_addr, callee_vmctx, call_args)
    }

    /// Load the Wasm-call code pointer and the callee vmctx out of the
    /// funcref pointed to by `callee`, optionally annotating the first load
    /// with `callee_load_trap_code` for null funcrefs.
    fn load_code_and_vmctx(
        &mut self,
        callee: ir::Value,
        callee_load_trap_code: Option<ir::TrapCode>,
    ) -> (ir::Value, ir::Value) {
        let pointer_type = self.env.pointer_type();

        // Dereference callee pointer to get the function address.
        //
        // Note that this may trap if `callee` hasn't previously been verified
        // to be non-null. This means that this load is annotated with an
        // optional trap code provided by the caller of `unchecked_call` which
        // will handle the case where this is either already known to be
        // non-null or may trap.
        let mem_flags = ir::MemFlags::trusted().with_readonly();
        let mut callee_flags = mem_flags;
        if self.env.clif_memory_traps_enabled() {
            callee_flags = callee_flags.with_trap_code(callee_load_trap_code);
        } else {
            // Without CLIF memory traps an explicit null check is emitted
            // instead of relying on the load itself to trap.
            if let Some(trap) = callee_load_trap_code {
                self.env.trapz(self.builder, callee, trap);
            }
        }
        let func_addr = self.builder.ins().load(
            pointer_type,
            callee_flags,
            callee,
            i32::from(self.env.offsets.ptr.vm_func_ref_wasm_call()),
        );
        let callee_vmctx = self.builder.ins().load(
            pointer_type,
            mem_flags,
            callee,
            i32::from(self.env.offsets.ptr.vm_func_ref_vmctx()),
        );

        (func_addr, callee_vmctx)
    }

    /// The caller's vmctx: this function's `VMContext` special parameter.
    fn caller_vmctx(&self) -> ir::Value {
        self.builder
            .func
            .special_param(ArgumentPurpose::VMContext)
            .unwrap()
    }

    /// This calls a function by reference without checking the
    /// signature, given the raw code pointer to the
    /// Wasm-calling-convention entry point and the callee vmctx.
    fn unchecked_call_impl(
        mut self,
        sig_ref: ir::SigRef,
        func_addr: ir::Value,
        callee_vmctx: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<CallRets> {
        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);
        let caller_vmctx = self.caller_vmctx();

        // First append the callee and caller vmctx addresses.
        real_call_args.push(callee_vmctx);
        real_call_args.push(caller_vmctx);

        // Then append the regular call arguments.
        real_call_args.extend_from_slice(call_args);

        Ok(self.indirect_call_inst(sig_ref, func_addr, &real_call_args))
    }

    /// Build exception-table metadata for a non-tail call when exception
    /// handlers are active.
    ///
    /// Returns the exception table, the freshly-created continuation block,
    /// and that block's parameters (the call's normal results). Returns
    /// `None` for tail calls or when no handlers are in scope.
    fn exception_table(
        &mut self,
        sig: ir::SigRef,
    ) -> Option<(ir::ExceptionTable, Block, CallRets)> {
        if !self.tail && !self.env.stacks.handlers.is_empty() {
            let continuation_block = self.builder.create_block();
            let mut args = vec![];
            let mut results = smallvec![];
            // One continuation-block parameter per signature return,
            // forwarded from the `try_call`'s return values.
            for i in 0..self.builder.func.dfg.signatures[sig].returns.len() {
                let ty = self.builder.func.dfg.signatures[sig].returns[i].value_type;
                results.push(
                    self.builder
                        .func
                        .dfg
                        .append_block_param(continuation_block, ty),
                );
                args.push(BlockArg::TryCallRet(u32::try_from(i).unwrap()));
            }

            let continuation = self
                .builder
                .func
                .dfg
                .block_call(continuation_block, args.iter());
            // The caller vmctx is recorded as the exception context; each
            // active handler becomes a tagged (or default) table entry.
            let mut handlers = vec![ExceptionTableItem::Context(self.caller_vmctx())];
            for (tag, block) in self.env.stacks.handlers.handlers() {
                let block_call = self
                    .builder
                    .func
                    .dfg
                    .block_call(block, &[BlockArg::TryCallExn(0)]);
                handlers.push(match tag {
                    Some(tag) => ExceptionTableItem::Tag(tag, block_call),
                    None => ExceptionTableItem::Default(block_call),
                });
            }
            let etd = ExceptionTableData::new(sig, continuation, handlers);
            let et = self.builder.func.dfg.exception_tables.push(etd);
            Some((et, continuation_block, results))
        } else {
            None
        }
    }

    /// Collect a call instruction's results into a `CallRets`.
    fn results_from_call_inst(&self, inst: ir::Inst) -> CallRets {
        self.builder
            .func
            .dfg
            .inst_results(inst)
            .iter()
            .copied()
            .collect()
    }

    /// Declare any GC-managed call results as needing stack-map entries.
    fn handle_call_result_stackmap(&mut self, results: &[ir::Value], sig_ref: ir::SigRef) {
        for (i, &val) in results.iter().enumerate() {
            if self.env.sig_ref_result_needs_stack_map(sig_ref, i) {
                self.builder.declare_value_needs_stack_map(val);
            }
        }
    }

    /// Emit the actual direct call instruction: `return_call` for tail
    /// calls, `try_call` when exception handlers are active, plain `call`
    /// otherwise. Returns the call's results (empty for tail calls).
    fn direct_call_inst(&mut self, callee: ir::FuncRef, args: &[ir::Value]) -> CallRets {
        let sig_ref = self.builder.func.dfg.ext_funcs[callee].signature;
        if self.tail {
            self.builder.ins().return_call(callee, args);
            smallvec![]
        } else if let Some((exception_table, continuation_block, results)) =
            self.exception_table(sig_ref)
        {
            let inst = self.builder.ins().try_call(callee, args, exception_table);
            self.handle_call_result_stackmap(&results, sig_ref);
            self.builder.switch_to_block(continuation_block);
            self.builder.seal_block(continuation_block);
            self.attach_tags(inst);
            results
        } else {
            let inst = self.builder.ins().call(callee, args);
            let results = self.results_from_call_inst(inst);
            self.handle_call_result_stackmap(&results, sig_ref);
            self.attach_tags(inst);
            results
        }
    }

    /// Emit the actual indirect call instruction, mirroring
    /// `direct_call_inst` for the `*_indirect` instruction family.
    fn indirect_call_inst(
        &mut self,
        sig_ref: ir::SigRef,
        func_addr: ir::Value,
        args: &[ir::Value],
    ) -> CallRets {
        if self.tail {
            self.builder
                .ins()
                .return_call_indirect(sig_ref, func_addr, args);
            smallvec![]
        } else if let Some((exception_table, continuation_block, results)) =
            self.exception_table(sig_ref)
        {
            let inst = self
                .builder
                .ins()
                .try_call_indirect(func_addr, args, exception_table);
            self.handle_call_result_stackmap(&results, sig_ref);
            self.builder.switch_to_block(continuation_block);
            self.builder.seal_block(continuation_block);
            self.attach_tags(inst);
            results
        } else {
            let inst = self.builder.ins().call_indirect(sig_ref, func_addr, args);
            let results = self.results_from_call_inst(inst);
            self.handle_call_result_stackmap(&results, sig_ref);
            self.attach_tags(inst);
            results
        }
    }

    /// Attach the debug tags for this call's source location, if any.
    fn attach_tags(&mut self, inst: ir::Inst) {
        let tags = self.env.debug_tags(self.srcloc);
        if !tags.is_empty() {
            self.builder.func.debug_tags.set(inst, tags);
        }
    }
}

impl TypeConvert for FuncEnvironment<'_> {
    fn lookup_heap_type(&self, ty: wasmparser::UnpackedIndex) -> WasmHeapType {
        wasmtime_environ::WasmparserTypeConverter::new(self.types, |idx| {
            self.module.types[idx].unwrap_module_type_index()
        })
        .lookup_heap_type(ty)
    }

    fn lookup_type_index(&self, index: wasmparser::UnpackedIndex) -> EngineOrModuleTypeIndex {
        wasmtime_environ::WasmparserTypeConverter::new(self.types, |idx|
{
            self.module.types[idx].unwrap_module_type_index()
        })
        .lookup_type_index(index)
    }
}

impl<'module_environment> TargetEnvironment for FuncEnvironment<'module_environment> {
    fn target_config(&self) -> TargetFrontendConfig {
        self.isa.frontend_config()
    }

    fn reference_type(&self, wasm_ty: WasmHeapType) -> (ir::Type, bool) {
        let ty = crate::reference_type(wasm_ty, self.pointer_type());
        // Second tuple element: whether values of this type must be tracked
        // in stack maps (GC-managed references only).
        let needs_stack_map = match wasm_ty.top() {
            WasmHeapTopType::Extern | WasmHeapTopType::Any | WasmHeapTopType::Exn => true,
            WasmHeapTopType::Func => false,
            // TODO(#10248) Once continuations can be stored on the GC heap, we
            // will need stack maps for continuation objects.
            WasmHeapTopType::Cont => false,
        };
        (ty, needs_stack_map)
    }

    fn heap_access_spectre_mitigation(&self) -> bool {
        self.isa.flags().enable_heap_access_spectre_mitigation()
    }

    fn proof_carrying_code(&self) -> bool {
        self.isa.flags().enable_pcc()
    }

    fn tunables(&self) -> &Tunables {
        self.compiler.tunables()
    }
}

impl FuncEnvironment<'_> {
    /// The heaps created so far for this function.
    pub fn heaps(&self) -> &PrimaryMap<Heap, HeapData> {
        &self.heaps
    }

    /// Whether the CLIF-level parameter at `index` corresponds to a Wasm
    /// parameter (as opposed to one of the two implicit vmctx parameters).
    pub fn is_wasm_parameter(&self, index: usize) -> bool {
        // The first two parameters are the vmctx and caller vmctx. The rest are
        // the wasm parameters.
        index >= 2
    }

    /// Map a CLIF-level parameter index to its Wasm parameter type, or
    /// `None` for the two implicit vmctx parameters.
    pub fn clif_param_as_wasm_param(&self, index: usize) -> Option<WasmValType> {
        if index >= 2 {
            Some(self.wasm_func_ty.params()[index - 2])
        } else {
            None
        }
    }

    /// Whether the CLIF-level parameter at `index` holds a GC reference
    /// (other than `i31`) and therefore needs a stack-map entry.
    pub fn param_needs_stack_map(&self, _signature: &ir::Signature, index: usize) -> bool {
        // Skip the caller and callee vmctx.
        if index < 2 {
            return false;
        }

        self.wasm_func_ty.params()[index - 2].is_vmgcref_type_and_not_i31()
    }

    /// Whether result `index` of the Wasm function type associated with
    /// `sig_ref` is a GC reference (other than `i31`) needing a stack map.
    pub fn sig_ref_result_needs_stack_map(&self, sig_ref: ir::SigRef, index: usize) -> bool {
        let wasm_func_ty = self.sig_ref_to_ty[sig_ref].as_ref().unwrap();
        wasm_func_ty.returns()[index].is_vmgcref_type_and_not_i31()
    }

    /// Translate `table.grow`: dispatch to the appropriate builtin by the
    /// table's element kind and return the previous size as an IR value.
    pub fn translate_table_grow(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        table_index: TableIndex,
        delta: ir::Value,
        init_value: ir::Value,
    ) -> WasmResult<ir::Value> {
        let mut pos = builder.cursor();
        let table = self.table(table_index);
        let ty = table.ref_type.heap_type;
        let (table_vmctx, defined_table_index) =
            self.table_vmctx_and_defined_index(&mut pos, table_index);
        let index_type = table.idx_type;
        let delta = self.cast_index_to_i64(&mut pos, delta, index_type);

        let mut args: SmallVec<[_; 6]> = smallvec![table_vmctx, defined_table_index, delta];
        let grow = match ty.top() {
            WasmHeapTopType::Extern | WasmHeapTopType::Any | WasmHeapTopType::Exn => {
                args.push(init_value);
                gc::builtins::table_grow_gc_ref(self, pos.func)?
            }
            WasmHeapTopType::Func => {
                args.push(init_value);
                self.builtin_functions.table_grow_func_ref(pos.func)
            }
            WasmHeapTopType::Cont => {
                // Continuation values are fat pointers; pass their two
                // halves as separate builtin arguments.
                let (revision, contref) =
                    stack_switching::fatpointer::deconstruct(self, &mut pos, init_value);
                args.extend_from_slice(&[contref, revision]);
                stack_switching::builtins::table_grow_cont_obj(self, pos.func)?
            }
        };

        let call_inst = pos.ins().call(grow, &args);
        let result = builder.func.dfg.first_result(call_inst);

        Ok(self.convert_pointer_to_index_type(builder.cursor(), result, index_type, false))
    }

    /// Translate `table.get` for any element kind.
    pub fn translate_table_get(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
    ) -> WasmResult<ir::Value> {
        let table = self.module.tables[table_index];
        let table_data = self.get_or_create_table(builder.func, table_index);
        let heap_ty = table.ref_type.heap_type;
        match heap_ty.top() {
            // GC-managed types.
            WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => {
                let (src, flags) = table_data.prepare_table_addr(self, builder, index);
                gc::gc_compiler(self)?.translate_read_gc_reference(
                    self,
                    builder,
                    table.ref_type,
                    src,
                    flags,
                )
            }

            // Function types.
            WasmHeapTopType::Func => {
                Ok(self.get_or_init_func_ref_table_elem(builder, table_index, index, false))
            }

            // Continuation types.
            WasmHeapTopType::Cont => {
                let (elem_addr, flags) = table_data.prepare_table_addr(self, builder, index);
                Ok(builder.ins().load(
                    stack_switching::fatpointer::fatpointer_type(self),
                    flags,
                    elem_addr,
                    0,
                ))
            }
        }
    }

    /// Translate `table.set` for any element kind.
    pub fn translate_table_set(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        value: ir::Value,
        index: ir::Value,
    ) -> WasmResult<()> {
        let table = self.module.tables[table_index];
        let table_data = self.get_or_create_table(builder.func, table_index);
        let heap_ty = table.ref_type.heap_type;
        match heap_ty.top() {
            // GC-managed types.
            WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => {
                let (dst, flags) = table_data.prepare_table_addr(self, builder, index);
                gc::gc_compiler(self)?.translate_write_gc_reference(
                    self,
                    builder,
                    table.ref_type,
                    dst,
                    value,
                    flags,
                )
            }

            // Function types.
            WasmHeapTopType::Func => {
                let (elem_addr, flags) = table_data.prepare_table_addr(self, builder, index);
                // Set the "initialized bit". See doc-comment on
                // `FUNCREF_INIT_BIT` in
                // crates/environ/src/ref_bits.rs for details.
                let value_with_init_bit = if self.tunables.table_lazy_init {
                    builder
                        .ins()
                        .bor_imm(value, Imm64::from(FUNCREF_INIT_BIT as i64))
                } else {
                    value
                };
                builder
                    .ins()
                    .store(flags, value_with_init_bit, elem_addr, 0);
                Ok(())
            }

            // Continuation types.
            WasmHeapTopType::Cont => {
                let (elem_addr, flags) = table_data.prepare_table_addr(self, builder, index);
                builder.ins().store(flags, value, elem_addr, 0);
                Ok(())
            }
        }
    }

    /// Translate `table.fill`: dispatch to the appropriate builtin by the
    /// table's element kind.
    pub fn translate_table_fill(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        table_index: TableIndex,
        dst: ir::Value,
        val: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let table = self.table(table_index);
        let ty = table.ref_type.heap_type;
        let dst = self.cast_index_to_i64(&mut pos, dst, table.idx_type);
        let len = self.cast_index_to_i64(&mut pos, len, table.idx_type);
        let (table_vmctx, table_index) = self.table_vmctx_and_defined_index(&mut pos, table_index);

        let mut args: SmallVec<[_; 6]> = smallvec![table_vmctx, table_index, dst];
        let libcall = match ty.top() {
            WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => {
                args.push(val);
                gc::builtins::table_fill_gc_ref(self, &mut pos.func)?
            }
            WasmHeapTopType::Func => {
                args.push(val);
                self.builtin_functions.table_fill_func_ref(&mut pos.func)
            }
            WasmHeapTopType::Cont => {
                // Continuation values are fat pointers; pass their two
                // halves as separate builtin arguments.
                let (revision, contref) =
                    stack_switching::fatpointer::deconstruct(self, &mut pos, val);
                args.extend_from_slice(&[contref, revision]);
                stack_switching::builtins::table_fill_cont_obj(self, &mut pos.func)?
            }
        };

        args.push(len);
        builder.ins().call(libcall, &args);

        Ok(())
    }

    /// Translate `ref.i31`: shift the `i32` left by one and tag the low bit
    /// with `I31_REF_DISCRIMINANT`.
    pub fn translate_ref_i31(
        &mut self,
        mut pos: FuncCursor,
        val: ir::Value,
    ) -> WasmResult<ir::Value> {
        debug_assert_eq!(pos.func.dfg.value_type(val), ir::types::I32);
        let shifted = pos.ins().ishl_imm(val, 1);
        let tagged = pos
            .ins()
            .bor_imm(shifted, i64::from(crate::I31_REF_DISCRIMINANT));
        let (ref_ty, _needs_stack_map) = self.reference_type(WasmHeapType::I31);
        debug_assert_eq!(ref_ty, ir::types::I32);
        Ok(tagged)
    }

    /// Translate `i31.get_s`: null-check and sign-extending untag.
    pub fn translate_i31_get_s(
        &mut self,
        builder: &mut FunctionBuilder,
        i31ref: ir::Value,
    ) -> WasmResult<ir::Value> {
        // TODO: If we knew we have a `(ref i31)` here, instead of maybe a `(ref
        // null i31)`, we could omit the `trapz`. But plumbing that type info
        // from `wasmparser` and through to here is a bit funky.
        self.trapz(builder, i31ref, crate::TRAP_NULL_REFERENCE);
        Ok(builder.ins().sshr_imm(i31ref, 1))
    }

    /// Translate `i31.get_u`: null-check and zero-extending untag.
    pub fn translate_i31_get_u(
        &mut self,
        builder: &mut FunctionBuilder,
        i31ref: ir::Value,
    ) -> WasmResult<ir::Value> {
        // TODO: If we knew we have a `(ref i31)` here, instead of maybe a `(ref
        // null i31)`, we could omit the `trapz`.
But plumbing that type info2775// from `wasmparser` and through to here is a bit funky.2776self.trapz(builder, i31ref, crate::TRAP_NULL_REFERENCE);2777Ok(builder.ins().ushr_imm(i31ref, 1))2778}27792780pub fn struct_fields_len(&mut self, struct_type_index: TypeIndex) -> WasmResult<usize> {2781let ty = self.module.types[struct_type_index].unwrap_module_type_index();2782match &self.types[ty].composite_type.inner {2783WasmCompositeInnerType::Struct(s) => Ok(s.fields.len()),2784_ => unreachable!(),2785}2786}27872788pub fn translate_struct_new(2789&mut self,2790builder: &mut FunctionBuilder,2791struct_type_index: TypeIndex,2792fields: StructFieldsVec,2793) -> WasmResult<ir::Value> {2794gc::translate_struct_new(self, builder, struct_type_index, &fields)2795}27962797pub fn translate_struct_new_default(2798&mut self,2799builder: &mut FunctionBuilder,2800struct_type_index: TypeIndex,2801) -> WasmResult<ir::Value> {2802gc::translate_struct_new_default(self, builder, struct_type_index)2803}28042805pub fn translate_struct_get(2806&mut self,2807builder: &mut FunctionBuilder,2808struct_type_index: TypeIndex,2809field_index: u32,2810struct_ref: ir::Value,2811extension: Option<Extension>,2812) -> WasmResult<ir::Value> {2813gc::translate_struct_get(2814self,2815builder,2816struct_type_index,2817field_index,2818struct_ref,2819extension,2820)2821}28222823pub fn translate_struct_set(2824&mut self,2825builder: &mut FunctionBuilder,2826struct_type_index: TypeIndex,2827field_index: u32,2828struct_ref: ir::Value,2829value: ir::Value,2830) -> WasmResult<()> {2831gc::translate_struct_set(2832self,2833builder,2834struct_type_index,2835field_index,2836struct_ref,2837value,2838)2839}28402841pub fn translate_exn_unbox(2842&mut self,2843builder: &mut FunctionBuilder<'_>,2844tag_index: TagIndex,2845exn_ref: ir::Value,2846) -> WasmResult<SmallVec<[ir::Value; 4]>> {2847gc::translate_exn_unbox(self, builder, tag_index, exn_ref)2848}28492850pub fn translate_exn_throw(2851&mut self,2852builder: &mut 
FunctionBuilder<'_>,2853tag_index: TagIndex,2854args: &[ir::Value],2855) -> WasmResult<()> {2856gc::translate_exn_throw(self, builder, tag_index, args)2857}28582859pub fn translate_exn_throw_ref(2860&mut self,2861builder: &mut FunctionBuilder<'_>,2862exnref: ir::Value,2863) -> WasmResult<()> {2864gc::translate_exn_throw_ref(self, builder, exnref)2865}28662867pub fn translate_array_new(2868&mut self,2869builder: &mut FunctionBuilder,2870array_type_index: TypeIndex,2871elem: ir::Value,2872len: ir::Value,2873) -> WasmResult<ir::Value> {2874gc::translate_array_new(self, builder, array_type_index, elem, len)2875}28762877pub fn translate_array_new_default(2878&mut self,2879builder: &mut FunctionBuilder,2880array_type_index: TypeIndex,2881len: ir::Value,2882) -> WasmResult<ir::Value> {2883gc::translate_array_new_default(self, builder, array_type_index, len)2884}28852886pub fn translate_array_new_fixed(2887&mut self,2888builder: &mut FunctionBuilder,2889array_type_index: TypeIndex,2890elems: &[ir::Value],2891) -> WasmResult<ir::Value> {2892gc::translate_array_new_fixed(self, builder, array_type_index, elems)2893}28942895pub fn translate_array_new_data(2896&mut self,2897builder: &mut FunctionBuilder,2898array_type_index: TypeIndex,2899data_index: DataIndex,2900data_offset: ir::Value,2901len: ir::Value,2902) -> WasmResult<ir::Value> {2903let libcall = gc::builtins::array_new_data(self, builder.func)?;2904let vmctx = self.vmctx_val(&mut builder.cursor());2905let interned_type_index = self.module.types[array_type_index].unwrap_module_type_index();2906let interned_type_index = builder2907.ins()2908.iconst(I32, i64::from(interned_type_index.as_u32()));2909let data_index = builder.ins().iconst(I32, i64::from(data_index.as_u32()));2910let call_inst = builder.ins().call(2911libcall,2912&[vmctx, interned_type_index, data_index, data_offset, len],2913);2914Ok(builder.func.dfg.first_result(call_inst))2915}29162917pub fn translate_array_new_elem(2918&mut self,2919builder: &mut 
FunctionBuilder,2920array_type_index: TypeIndex,2921elem_index: ElemIndex,2922elem_offset: ir::Value,2923len: ir::Value,2924) -> WasmResult<ir::Value> {2925let libcall = gc::builtins::array_new_elem(self, builder.func)?;2926let vmctx = self.vmctx_val(&mut builder.cursor());2927let interned_type_index = self.module.types[array_type_index].unwrap_module_type_index();2928let interned_type_index = builder2929.ins()2930.iconst(I32, i64::from(interned_type_index.as_u32()));2931let elem_index = builder.ins().iconst(I32, i64::from(elem_index.as_u32()));2932let call_inst = builder.ins().call(2933libcall,2934&[vmctx, interned_type_index, elem_index, elem_offset, len],2935);2936Ok(builder.func.dfg.first_result(call_inst))2937}29382939pub fn translate_array_copy(2940&mut self,2941builder: &mut FunctionBuilder,2942_dst_array_type_index: TypeIndex,2943dst_array: ir::Value,2944dst_index: ir::Value,2945_src_array_type_index: TypeIndex,2946src_array: ir::Value,2947src_index: ir::Value,2948len: ir::Value,2949) -> WasmResult<()> {2950let libcall = gc::builtins::array_copy(self, builder.func)?;2951let vmctx = self.vmctx_val(&mut builder.cursor());2952builder.ins().call(2953libcall,2954&[vmctx, dst_array, dst_index, src_array, src_index, len],2955);2956Ok(())2957}29582959pub fn translate_array_fill(2960&mut self,2961builder: &mut FunctionBuilder,2962array_type_index: TypeIndex,2963array: ir::Value,2964index: ir::Value,2965value: ir::Value,2966len: ir::Value,2967) -> WasmResult<()> {2968gc::translate_array_fill(self, builder, array_type_index, array, index, value, len)2969}29702971pub fn translate_array_init_data(2972&mut self,2973builder: &mut FunctionBuilder,2974array_type_index: TypeIndex,2975array: ir::Value,2976dst_index: ir::Value,2977data_index: DataIndex,2978data_offset: ir::Value,2979len: ir::Value,2980) -> WasmResult<()> {2981let libcall = gc::builtins::array_init_data(self, builder.func)?;2982let vmctx = self.vmctx_val(&mut builder.cursor());2983let interned_type_index = 
self.module.types[array_type_index].unwrap_module_type_index();
        let interned_type_index = builder
            .ins()
            .iconst(I32, i64::from(interned_type_index.as_u32()));
        let data_index = builder.ins().iconst(I32, i64::from(data_index.as_u32()));
        builder.ins().call(
            libcall,
            &[
                vmctx,
                interned_type_index,
                array,
                dst_index,
                data_index,
                data_offset,
                len,
            ],
        );
        Ok(())
    }

    /// Translate an `array.init_elem` operator by emitting a call to the
    /// `array_init_elem` builtin, passing the interned type index and the
    /// element-segment index as `i32` constants.
    pub fn translate_array_init_elem(
        &mut self,
        builder: &mut FunctionBuilder,
        array_type_index: TypeIndex,
        array: ir::Value,
        dst_index: ir::Value,
        elem_index: ElemIndex,
        elem_offset: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let libcall = gc::builtins::array_init_elem(self, builder.func)?;
        let vmctx = self.vmctx_val(&mut builder.cursor());
        let interned_type_index = self.module.types[array_type_index].unwrap_module_type_index();
        let interned_type_index = builder
            .ins()
            .iconst(I32, i64::from(interned_type_index.as_u32()));
        let elem_index = builder.ins().iconst(I32, i64::from(elem_index.as_u32()));
        builder.ins().call(
            libcall,
            &[
                vmctx,
                interned_type_index,
                array,
                dst_index,
                elem_index,
                elem_offset,
                len,
            ],
        );
        Ok(())
    }

    /// Translate an `array.len` operator. Delegates to the `gc` module.
    pub fn translate_array_len(
        &mut self,
        builder: &mut FunctionBuilder,
        array: ir::Value,
    ) -> WasmResult<ir::Value> {
        gc::translate_array_len(self, builder, array)
    }

    /// Translate an `array.get`/`array.get_s`/`array.get_u` operator;
    /// `extension` selects sign- or zero-extension for packed element types.
    /// Delegates to the `gc` module.
    pub fn translate_array_get(
        &mut self,
        builder: &mut FunctionBuilder,
        array_type_index: TypeIndex,
        array: ir::Value,
        index: ir::Value,
        extension: Option<Extension>,
    ) -> WasmResult<ir::Value> {
        gc::translate_array_get(self, builder, array_type_index, array, index, extension)
    }

    /// Translate an `array.set` operator. Delegates to the `gc` module.
    pub fn translate_array_set(
        &mut self,
        builder: &mut FunctionBuilder,
        array_type_index: TypeIndex,
        array: ir::Value,
        index: ir::Value,
        value: ir::Value,
    ) -> WasmResult<()> {
        gc::translate_array_set(self, builder, array_type_index, array, index, value)
    }

    /// Translate a `ref.test` operator: test whether `gc_ref` (statically of
    /// type `gc_ref_ty`) is an instance of `test_ty`. Delegates to the `gc`
    /// module.
    pub fn translate_ref_test(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        test_ty: WasmRefType,
        gc_ref: ir::Value,
        gc_ref_ty: WasmRefType,
    ) -> WasmResult<ir::Value> {
        gc::translate_ref_test(self, builder, test_ty, gc_ref, gc_ref_ty)
    }

    /// Translate a `ref.null` operator: produce the null representation for
    /// the given heap type (pointer-sized zero for functions, `i32` zero for
    /// GC references, and a zero/zero fat pointer for continuations).
    pub fn translate_ref_null(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor,
        ht: WasmHeapType,
    ) -> WasmResult<ir::Value> {
        Ok(match ht.top() {
            WasmHeapTopType::Func => pos.ins().iconst(self.pointer_type(), 0),
            // NB: null GC references don't need to be in stack maps.
            WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => {
                pos.ins().iconst(types::I32, 0)
            }
            WasmHeapTopType::Cont => {
                let zero = pos.ins().iconst(self.pointer_type(), 0);
                stack_switching::fatpointer::construct(self, &mut pos, zero, zero)
            }
        })
    }

    /// Translate a `ref.is_null` operator: returns an `i32` that is 1 when
    /// `value` is null and 0 otherwise. Continuation references compare
    /// against the contref half of the fat pointer.
    pub fn translate_ref_is_null(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor,
        value: ir::Value,
        ty: WasmRefType,
    ) -> WasmResult<ir::Value> {
        // If we know the type is not nullable, then we don't actually need to
        // check for null.
        if !ty.nullable {
            return Ok(pos.ins().iconst(ir::types::I32, 0));
        }

        let byte_is_null = match ty.heap_type.top() {
            WasmHeapTopType::Cont => {
                let (_revision, contref) =
                    stack_switching::fatpointer::deconstruct(self, &mut pos, value);
                pos.ins()
                    .icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, contref, 0)
            }
            _ => pos
                .ins()
                .icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, value, 0),
        };

        Ok(pos.ins().uextend(ir::types::I32, byte_is_null))
    }

    /// Translate a `ref.func` operator by calling the `ref_func` builtin with
    /// the function's index.
    pub fn translate_ref_func(
        &mut self,
        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
        func_index: FuncIndex,
    ) -> WasmResult<ir::Value> {
        let func_index = pos.ins().iconst(I32, func_index.as_u32() as i64);
        let ref_func = self.builtin_functions.ref_func(&mut pos.func);
        let
vmctx = self.vmctx_val(&mut pos);

        let call_inst = pos.ins().call(ref_func, &[vmctx, func_index]);
        Ok(pos.func.dfg.first_result(call_inst))
    }

    /// Translate a `global.get` operator, dispatching on how the global is
    /// represented: an embedded constant, a plain in-memory slot, or a
    /// GC-managed reference requiring barrier code.
    pub(crate) fn translate_global_get(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        global_index: GlobalIndex,
    ) -> WasmResult<ir::Value> {
        match self.get_or_create_global(builder.func, global_index) {
            // Constant globals are materialized directly as constants.
            GlobalVariable::Constant { value } => match value {
                GlobalConstValue::I32(x) => Ok(builder.ins().iconst(ir::types::I32, i64::from(x))),
                GlobalConstValue::I64(x) => Ok(builder.ins().iconst(ir::types::I64, x)),
                GlobalConstValue::F32(x) => {
                    Ok(builder.ins().f32const(ir::immediates::Ieee32::with_bits(x)))
                }
                GlobalConstValue::F64(x) => {
                    Ok(builder.ins().f64const(ir::immediates::Ieee64::with_bits(x)))
                }
                GlobalConstValue::V128(x) => {
                    let data = x.to_le_bytes().to_vec().into();
                    let handle = builder.func.dfg.constants.insert(data);
                    Ok(builder.ins().vconst(ir::types::I8X16, handle))
                }
            },
            // Ordinary globals are a plain load from their storage slot.
            GlobalVariable::Memory { gv, offset, ty } => {
                let addr = builder.ins().global_value(self.pointer_type(), gv);
                let mut flags = ir::MemFlags::trusted();
                // Store vector globals in little-endian format to avoid
                // byte swaps on big-endian platforms since at-rest vectors
                // should already be in little-endian format anyway.
                if ty.is_vector() {
                    flags.set_endianness(ir::Endianness::Little);
                }
                // Put globals in the "table" abstract heap category as well.
                flags.set_alias_region(Some(ir::AliasRegion::Table));
                Ok(builder.ins().load(ty, flags, addr, offset))
            }
            // GC reference globals go through the GC compiler's read barrier.
            GlobalVariable::Custom => {
                let global_ty = self.module.globals[global_index];
                let wasm_ty = global_ty.wasm_ty;
                debug_assert!(
                    wasm_ty.is_vmgcref_type(),
                    "We only use GlobalVariable::Custom for VMGcRef types"
                );
                let WasmValType::Ref(ref_ty) = wasm_ty else {
                    unreachable!()
                };

                let (gv, offset) = self.get_global_location(builder.func, global_index);
                let gv = builder.ins().global_value(self.pointer_type(), gv);
                let src = builder.ins().iadd_imm(gv, i64::from(offset));

                gc::gc_compiler(self)?.translate_read_gc_reference(
                    self,
                    builder,
                    ref_ty,
                    src,
                    // Immutable globals can never change, so mark the load as
                    // readonly/can-move for better optimization.
                    if global_ty.mutability {
                        ir::MemFlags::trusted()
                    } else {
                        ir::MemFlags::trusted().with_readonly().with_can_move()
                    },
                )
            }
        }
    }

    /// Translate a `global.set` operator; mirrors `translate_global_get` for
    /// the store direction (GC reference globals go through the write
    /// barrier).
    pub(crate) fn translate_global_set(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        global_index: GlobalIndex,
        val: ir::Value,
    ) -> WasmResult<()> {
        match self.get_or_create_global(builder.func, global_index) {
            GlobalVariable::Constant { .. } => {
                unreachable!("validation checks that Wasm cannot `global.set` constant globals")
            }
            GlobalVariable::Memory { gv, offset, ty } => {
                let addr = builder.ins().global_value(self.pointer_type(), gv);
                let mut flags = ir::MemFlags::trusted();
                // Like `global.get`, store globals in little-endian format.
                if ty.is_vector() {
                    flags.set_endianness(ir::Endianness::Little);
                }
                // Put globals in the "table" abstract heap category as well.
                flags.set_alias_region(Some(ir::AliasRegion::Table));
                debug_assert_eq!(ty, builder.func.dfg.value_type(val));
                builder.ins().store(flags, val, addr, offset);
                // Notify wmemcheck-style instrumentation of the update.
                self.update_global(builder, global_index, val);
            }
            GlobalVariable::Custom => {
                let ty = self.module.globals[global_index].wasm_ty;
                debug_assert!(
                    ty.is_vmgcref_type(),
                    "We only use GlobalVariable::Custom for VMGcRef types"
                );
                let WasmValType::Ref(ty) = ty else {
                    unreachable!()
                };

                let (gv, offset) = self.get_global_location(builder.func, global_index);
                let gv = builder.ins().global_value(self.pointer_type(), gv);
                let src = builder.ins().iadd_imm(gv, i64::from(offset));

                gc::gc_compiler(self)?.translate_write_gc_reference(
                    self,
                    builder,
                    ty,
                    src,
                    val,
                    ir::MemFlags::trusted(),
                )?
            }
        }
        Ok(())
    }

    /// Translate a `call_indirect` operator through the given table entry.
    pub fn translate_call_indirect<'a>(
        &mut
self,
        builder: &'a mut FunctionBuilder,
        srcloc: ir::SourceLoc,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<Option<CallRets>> {
        Call::new(builder, self, srcloc).indirect_call(
            features,
            table_index,
            ty_index,
            sig_ref,
            callee,
            call_args,
        )
    }

    /// Translate a direct `call` to the function at `callee_index`.
    pub fn translate_call<'a>(
        &mut self,
        builder: &'a mut FunctionBuilder,
        srcloc: ir::SourceLoc,
        callee_index: FuncIndex,
        sig_ref: ir::SigRef,
        call_args: &[ir::Value],
    ) -> WasmResult<CallRets> {
        Call::new(builder, self, srcloc).direct_call(callee_index, sig_ref, call_args)
    }

    /// Translate a `call_ref` through a first-class function reference.
    pub fn translate_call_ref<'a>(
        &mut self,
        builder: &'a mut FunctionBuilder,
        srcloc: ir::SourceLoc,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<CallRets> {
        Call::new(builder, self, srcloc).call_ref(sig_ref, callee, call_args)
    }

    /// Translate a `return_call` (tail call) to `callee_index`.
    pub fn translate_return_call(
        &mut self,
        builder: &mut FunctionBuilder,
        srcloc: ir::SourceLoc,
        callee_index: FuncIndex,
        sig_ref: ir::SigRef,
        call_args: &[ir::Value],
    ) -> WasmResult<()> {
        Call::new_tail(builder, self, srcloc).direct_call(callee_index, sig_ref, call_args)?;
        Ok(())
    }

    /// Translate a `return_call_indirect` (tail call) through a table entry.
    pub fn translate_return_call_indirect(
        &mut self,
        builder: &mut FunctionBuilder,
        srcloc: ir::SourceLoc,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<()> {
        Call::new_tail(builder, self, srcloc).indirect_call(
            features,
            table_index,
            ty_index,
            sig_ref,
            callee,
            call_args,
        )?;
        Ok(())
    }

    /// Translate a `return_call_ref` (tail call) through a function
    /// reference.
    pub fn translate_return_call_ref(
        &mut self,
        builder: &mut FunctionBuilder,
        srcloc: ir::SourceLoc,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<()> {
        Call::new_tail(builder, self, srcloc).call_ref(sig_ref, callee, call_args)?;
        Ok(())
    }

    /// Returns two `ir::Value`s, the first of which is the vmctx for the memory
    /// `index` and the second of which is the `DefinedMemoryIndex` for `index`.
    ///
    /// Handles internally whether `index` is an imported memory or not.
    fn memory_vmctx_and_defined_index(
        &mut self,
        pos: &mut FuncCursor,
        index: MemoryIndex,
    ) -> (ir::Value, ir::Value) {
        let cur_vmctx = self.vmctx_val(pos);
        match self.module.defined_memory_index(index) {
            // This is a defined memory, so the vmctx is our own and the defined
            // index is `index` here.
            Some(index) => (cur_vmctx, pos.ins().iconst(I32, i64::from(index.as_u32()))),

            // This is an imported memory, so load the vmctx/defined index from
            // the import definition itself.
            None => {
                let vmimport = self.offsets.vmctx_vmmemory_import(index);

                let vmctx = pos.ins().load(
                    self.isa.pointer_type(),
                    ir::MemFlags::trusted(),
                    cur_vmctx,
                    i32::try_from(vmimport + u32::from(self.offsets.vmmemory_import_vmctx()))
                        .unwrap(),
                );
                let index = pos.ins().load(
                    ir::types::I32,
                    ir::MemFlags::trusted(),
                    cur_vmctx,
                    i32::try_from(vmimport + u32::from(self.offsets.vmmemory_import_index()))
                        .unwrap(),
                );
                (vmctx, index)
            }
        }
    }

    /// Returns two `ir::Value`s, the first of which is the vmctx for the table
    /// `index` and the second of which is the `DefinedTableIndex` for `index`.
    ///
    /// Handles internally whether `index` is an imported table or not.
    fn table_vmctx_and_defined_index(
        &mut self,
        pos: &mut FuncCursor,
        index: TableIndex,
    ) -> (ir::Value, ir::Value) {
        // NB: the body of this method is similar to
        // `memory_vmctx_and_defined_index` above.
        let cur_vmctx = self.vmctx_val(pos);
        match self.module.defined_table_index(index) {
            Some(index) => (cur_vmctx, pos.ins().iconst(I32, i64::from(index.as_u32()))),
            None
=> {
                let vmimport = self.offsets.vmctx_vmtable_import(index);

                let vmctx = pos.ins().load(
                    self.isa.pointer_type(),
                    ir::MemFlags::trusted(),
                    cur_vmctx,
                    i32::try_from(vmimport + u32::from(self.offsets.vmtable_import_vmctx()))
                        .unwrap(),
                );
                let index = pos.ins().load(
                    ir::types::I32,
                    ir::MemFlags::trusted(),
                    cur_vmctx,
                    i32::try_from(vmimport + u32::from(self.offsets.vmtable_import_index()))
                        .unwrap(),
                );
                (vmctx, index)
            }
        }
    }

    /// Translate a `memory.grow` operator by calling the `memory_grow`
    /// builtin; the result is converted back to the memory's index type.
    pub fn translate_memory_grow(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        index: MemoryIndex,
        val: ir::Value,
    ) -> WasmResult<ir::Value> {
        let mut pos = builder.cursor();
        let memory_grow = self.builtin_functions.memory_grow(&mut pos.func);

        let (memory_vmctx, defined_memory_index) =
            self.memory_vmctx_and_defined_index(&mut pos, index);

        let index_type = self.memory(index).idx_type;
        let val = self.cast_index_to_i64(&mut pos, val, index_type);
        let call_inst = pos
            .ins()
            .call(memory_grow, &[memory_vmctx, val, defined_memory_index]);
        let result = *pos.func.dfg.inst_results(call_inst).first().unwrap();
        let single_byte_pages = match self.memory(index).page_size_log2 {
            16 => false,
            0 => true,
            _ => unreachable!("only page sizes 2**0 and 2**16 are currently valid"),
        };
        Ok(self.convert_pointer_to_index_type(
            builder.cursor(),
            result,
            index_type,
            single_byte_pages,
        ))
    }

    /// Translate a `memory.size` operator: load the memory's current byte
    /// length (atomically for shared memories), shift down by the page size,
    /// and convert to the memory's index type.
    pub fn translate_memory_size(
        &mut self,
        mut pos: FuncCursor<'_>,
        index: MemoryIndex,
    ) -> WasmResult<ir::Value> {
        let pointer_type = self.pointer_type();
        let vmctx = self.vmctx(&mut pos.func);
        let is_shared = self.module.memories[index].shared;
        let base = pos.ins().global_value(pointer_type, vmctx);
        let current_length_in_bytes = match self.module.defined_memory_index(index) {
            Some(def_index) => {
                if is_shared {
                    let offset =
                        i32::try_from(self.offsets.vmctx_vmmemory_pointer(def_index)).unwrap();
                    let vmmemory_ptr =
                        pos.ins()
                            .load(pointer_type, ir::MemFlags::trusted(), base, offset);
                    let vmmemory_definition_offset =
                        i64::from(self.offsets.ptr.vmmemory_definition_current_length());
                    let vmmemory_definition_ptr =
                        pos.ins().iadd_imm(vmmemory_ptr, vmmemory_definition_offset);
                    // This atomic access of the
                    // `VMMemoryDefinition::current_length` is direct; no bounds
                    // check is needed. This is possible because shared memory
                    // has a static size (the maximum is always known). Shared
                    // memory is thus built with a static memory plan and no
                    // bounds-checked version of this is implemented.
                    pos.ins().atomic_load(
                        pointer_type,
                        ir::MemFlags::trusted(),
                        vmmemory_definition_ptr,
                    )
                } else {
                    let owned_index = self.module.owned_memory_index(def_index);
                    let offset = i32::try_from(
                        self.offsets
                            .vmctx_vmmemory_definition_current_length(owned_index),
                    )
                    .unwrap();
                    pos.ins()
                        .load(pointer_type, ir::MemFlags::trusted(), base, offset)
                }
            }
            None => {
                // Imported memory: chase the import pointer first.
                let offset = i32::try_from(self.offsets.vmctx_vmmemory_import_from(index)).unwrap();
                let vmmemory_ptr =
                    pos.ins()
                        .load(pointer_type, ir::MemFlags::trusted(), base, offset);
                if is_shared {
                    let vmmemory_definition_offset =
                        i64::from(self.offsets.ptr.vmmemory_definition_current_length());
                    let vmmemory_definition_ptr =
                        pos.ins().iadd_imm(vmmemory_ptr, vmmemory_definition_offset);
                    pos.ins().atomic_load(
                        pointer_type,
                        ir::MemFlags::trusted(),
                        vmmemory_definition_ptr,
                    )
                } else {
                    pos.ins().load(
                        pointer_type,
                        ir::MemFlags::trusted(),
                        vmmemory_ptr,
                        i32::from(self.offsets.ptr.vmmemory_definition_current_length()),
                    )
                }
            }
        };

        let page_size_log2 = i64::from(self.module.memories[index].page_size_log2);
        let current_length_in_pages = pos.ins().ushr_imm(current_length_in_bytes, page_size_log2);
        let single_byte_pages = match page_size_log2 {
            16 => false,
            0 => true,
            _ => unreachable!("only page sizes 2**0 and 2**16 are currently valid"),
        };
        Ok(self.convert_pointer_to_index_type(
            pos,
            current_length_in_pages,
            self.memory(index).idx_type,
            single_byte_pages,
        ))
    }

    /// Translate a `memory.copy` operator by calling the `memory_copy`
    /// builtin, widening indices/length to `i64` as required.
    pub fn translate_memory_copy(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        src_index: MemoryIndex,
        dst_index: MemoryIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let vmctx = self.vmctx_val(&mut pos);

        let memory_copy = self.builtin_functions.memory_copy(&mut pos.func);
        let dst = self.cast_index_to_i64(&mut pos, dst, self.memory(dst_index).idx_type);
        let src = self.cast_index_to_i64(&mut pos, src, self.memory(src_index).idx_type);
        // The length is 32-bit if either memory is 32-bit, but if they're both
        // 64-bit then it's 64-bit. Our intrinsic takes a 64-bit length for
        // compatibility across all memories, so make sure that it's cast
        // correctly here (this is a bit special so no generic helper unlike for
        // `dst`/`src` above)
        let len = if index_type_to_ir_type(self.memory(dst_index).idx_type) == I64
            && index_type_to_ir_type(self.memory(src_index).idx_type) == I64
        {
            len
        } else {
            pos.ins().uextend(I64, len)
        };
        let src_index = pos.ins().iconst(I32, i64::from(src_index.as_u32()));
        let dst_index = pos.ins().iconst(I32, i64::from(dst_index.as_u32()));
        pos.ins()
            .call(memory_copy, &[vmctx, dst_index, dst, src_index, src, len]);

        Ok(())
    }

    /// Translate a `memory.fill` operator by calling the `memory_fill`
    /// builtin.
    pub fn translate_memory_fill(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        memory_index: MemoryIndex,
        dst: ir::Value,
        val: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let memory_fill = self.builtin_functions.memory_fill(&mut pos.func);
        let dst = self.cast_index_to_i64(&mut pos, dst,
self.memory(memory_index).idx_type);
        let len = self.cast_index_to_i64(&mut pos, len, self.memory(memory_index).idx_type);
        let (memory_vmctx, defined_memory_index) =
            self.memory_vmctx_and_defined_index(&mut pos, memory_index);

        pos.ins().call(
            memory_fill,
            &[memory_vmctx, defined_memory_index, dst, val, len],
        );

        Ok(())
    }

    /// Translate a `memory.init` operator by calling the `memory_init`
    /// builtin with the memory and data-segment indices as `i32` constants.
    pub fn translate_memory_init(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        memory_index: MemoryIndex,
        seg_index: u32,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let memory_init = self.builtin_functions.memory_init(&mut pos.func);

        let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);

        let vmctx = self.vmctx_val(&mut pos);

        let dst = self.cast_index_to_i64(&mut pos, dst, self.memory(memory_index).idx_type);

        pos.ins().call(
            memory_init,
            &[vmctx, memory_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }

    /// Translate a `data.drop` operator by calling the `data_drop` builtin.
    pub fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
        let data_drop = self.builtin_functions.data_drop(&mut pos.func);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
        let vmctx = self.vmctx_val(&mut pos);
        pos.ins().call(data_drop, &[vmctx, seg_index_arg]);
        Ok(())
    }

    /// Translate a `table.size` operator by reading the table's current
    /// bound.
    pub fn translate_table_size(
        &mut self,
        pos: FuncCursor,
        table_index: TableIndex,
    ) -> WasmResult<ir::Value> {
        let table_data = self.get_or_create_table(pos.func, table_index);
        let index_type = index_type_to_ir_type(self.table(table_index).idx_type);
        Ok(table_data.bound.bound(&*self.isa, pos, index_type))
    }

    /// Translate a `table.copy` operator by calling the table-copy builtin
    /// selected by `get_table_copy_func`, widening indices/length to `i64`
    /// as required (same length rule as `translate_memory_copy`).
    pub fn translate_table_copy(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (table_copy, dst_table_index_arg, src_table_index_arg) =
            self.get_table_copy_func(&mut builder.func, dst_table_index, src_table_index);

        let mut pos = builder.cursor();
        let dst = self.cast_index_to_i64(&mut pos, dst, self.table(dst_table_index).idx_type);
        let src = self.cast_index_to_i64(&mut pos, src, self.table(src_table_index).idx_type);
        let len = if index_type_to_ir_type(self.table(dst_table_index).idx_type) == I64
            && index_type_to_ir_type(self.table(src_table_index).idx_type) == I64
        {
            len
        } else {
            pos.ins().uextend(I64, len)
        };
        let dst_table_index_arg = pos.ins().iconst(I32, dst_table_index_arg as i64);
        let src_table_index_arg = pos.ins().iconst(I32, src_table_index_arg as i64);
        let vmctx = self.vmctx_val(&mut pos);
        pos.ins().call(
            table_copy,
            &[
                vmctx,
                dst_table_index_arg,
                src_table_index_arg,
                dst,
                src,
                len,
            ],
        );

        Ok(())
    }

    /// Translate a `table.init` operator by calling the `table_init` builtin
    /// with the table and element-segment indices as `i32` constants.
    pub fn translate_table_init(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        seg_index: u32,
        table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let table_init = self.builtin_functions.table_init(&mut pos.func);
        let table_index_arg = pos.ins().iconst(I32, i64::from(table_index.as_u32()));
        let seg_index_arg = pos.ins().iconst(I32, i64::from(seg_index));
        let vmctx = self.vmctx_val(&mut pos);
        let index_type = self.table(table_index).idx_type;
        let dst = self.cast_index_to_i64(&mut pos, dst, index_type);
        let src = pos.ins().uextend(I64, src);
        let len = pos.ins().uextend(I64, len);

        pos.ins().call(
            table_init,
            &[vmctx, table_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }

    /// Translate an `elem.drop` operator by calling the `elem_drop` builtin.
    pub fn translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()> {
        let elem_drop = self.builtin_functions.elem_drop(&mut pos.func);
        let elem_index_arg =
pos.ins().iconst(I32, elem_index as i64);
        let vmctx = self.vmctx_val(&mut pos);
        pos.ins().call(elem_drop, &[vmctx, elem_index_arg]);
        Ok(())
    }

    /// Translate a `memory.atomic.wait32`/`wait64` operator by calling the
    /// appropriate wait builtin (selected from the type of `expected`).
    /// Returns an `Unsupported` error when the `threads` feature is compiled
    /// out.
    pub fn translate_atomic_wait(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        memory_index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        expected: ir::Value,
        timeout: ir::Value,
    ) -> WasmResult<ir::Value> {
        #[cfg(feature = "threads")]
        {
            let mut pos = builder.cursor();
            let addr = self.cast_index_to_i64(&mut pos, addr, self.memory(memory_index).idx_type);
            let implied_ty = pos.func.dfg.value_type(expected);
            let wait_func = self.get_memory_atomic_wait(&mut pos.func, implied_ty);

            let (memory_vmctx, defined_memory_index) =
                self.memory_vmctx_and_defined_index(&mut pos, memory_index);

            let call_inst = pos.ins().call(
                wait_func,
                &[memory_vmctx, defined_memory_index, addr, expected, timeout],
            );
            let ret = pos.func.dfg.inst_results(call_inst)[0];
            // The builtin's result is narrowed to the wasm-level `i32`.
            Ok(builder.ins().ireduce(ir::types::I32, ret))
        }
        #[cfg(not(feature = "threads"))]
        {
            let _ = (builder, memory_index, addr, expected, timeout);
            Err(wasmtime_environ::WasmError::Unsupported(
                "threads support disabled at compile time".to_string(),
            ))
        }
    }

    /// Translate a `memory.atomic.notify` operator by calling the
    /// `memory_atomic_notify` builtin. Returns an `Unsupported` error when
    /// the `threads` feature is compiled out.
    pub fn translate_atomic_notify(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        memory_index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        count: ir::Value,
    ) -> WasmResult<ir::Value> {
        #[cfg(feature = "threads")]
        {
            let mut pos = builder.cursor();
            let addr = self.cast_index_to_i64(&mut pos, addr, self.memory(memory_index).idx_type);
            let atomic_notify = self.builtin_functions.memory_atomic_notify(&mut pos.func);

            let (memory_vmctx, defined_memory_index) =
                self.memory_vmctx_and_defined_index(&mut pos, memory_index);
            let call_inst = pos.ins().call(
                atomic_notify,
                &[memory_vmctx, defined_memory_index, addr, count],
            );
            let ret = pos.func.dfg.inst_results(call_inst)[0];
            Ok(builder.ins().ireduce(ir::types::I32, ret))
        }
        #[cfg(not(feature = "threads"))]
        {
            let _ = (builder, memory_index, addr, count);
            Err(wasmtime_environ::WasmError::Unsupported(
                "threads support disabled at compile time".to_string(),
            ))
        }
    }

    /// Emit per-loop-header checks (fuel and/or epoch interruption) when
    /// those tunables are enabled.
    pub fn translate_loop_header(&mut self, builder: &mut FunctionBuilder) -> WasmResult<()> {
        // Additionally if enabled check how much fuel we have remaining to see
        // if we've run out by this point.
        if self.tunables.consume_fuel {
            self.fuel_check(builder);
        }

        // If we are performing epoch-based interruption, check to see
        // if the epoch counter has changed.
        if self.tunables.epoch_interruption {
            self.epoch_check(builder);
        }

        Ok(())
    }

    /// Hook invoked before each wasm operator is translated: accounts fuel
    /// and, when a debug state slot is active, emits a patchable-breakpoint
    /// builtin call tagged with debug metadata.
    pub fn before_translate_operator(
        &mut self,
        op: &Operator,
        _operand_types: Option<&[WasmValType]>,
        builder: &mut FunctionBuilder,
    ) -> WasmResult<()> {
        if self.tunables.consume_fuel {
            self.fuel_before_op(op, builder, self.is_reachable());
        }
        if self.is_reachable() && self.state_slot.is_some() {
            let builtin = self.builtin_functions.patchable_breakpoint(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let inst = builder.ins().call(builtin, &[vmctx]);
            let tags = self.debug_tags(builder.srcloc());
            builder.func.debug_tags.set(inst, tags);
        }

        Ok(())
    }

    /// Hook invoked after each wasm operator is translated: finishes fuel
    /// accounting and updates the debug state-slot stack.
    pub fn after_translate_operator(
        &mut self,
        op: &Operator,
        validator: &FuncValidator<impl WasmModuleResources>,
        builder: &mut FunctionBuilder,
    ) -> WasmResult<()> {
        if self.tunables.consume_fuel && self.is_reachable() {
            self.fuel_after_op(op, builder);
        }
        if self.is_reachable() {
            self.update_state_slot_stack(validator, builder)?;
        }
        Ok(())
    }

    /// Hook invoked just before emitting a memory access that is statically
    /// known to trap: flushes the fuel counter so the consumed fuel is
    /// observable at the trap.
    pub fn before_unconditionally_trapping_memory_access(&mut self, builder: &mut FunctionBuilder) {
        if self.tunables.consume_fuel
{
            self.fuel_increment_var(builder);
            self.fuel_save_from_var(builder);
        }
    }

    /// Hook invoked at the start of function translation: emits the optional
    /// explicit stack-limit check, initializes fuel/epoch bookkeeping, sets
    /// up wmemcheck hooks for `malloc`/`free`, and records the debug state
    /// slot's vmctx.
    pub fn before_translate_function(&mut self, builder: &mut FunctionBuilder) -> WasmResult<()> {
        // If an explicit stack limit is requested, emit one here at the start
        // of the function.
        if let Some(gv) = self.stack_limit_at_function_entry {
            let limit = builder.ins().global_value(self.pointer_type(), gv);
            let sp = builder.ins().get_stack_pointer(self.pointer_type());
            let overflow = builder.ins().icmp(IntCC::UnsignedLessThan, sp, limit);
            self.conditionally_trap(builder, overflow, ir::TrapCode::STACK_OVERFLOW);
        }

        // Additionally we initialize `fuel_var` if it will get used.
        if self.tunables.consume_fuel {
            self.fuel_function_entry(builder);
        }

        // Initialize `epoch_var` with the current epoch.
        if self.tunables.epoch_interruption {
            self.epoch_function_entry(builder);
        }

        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            let func_name = self.current_func_name(builder);
            if func_name == Some("malloc") {
                self.check_malloc_start(builder);
            } else if func_name == Some("free") {
                self.check_free_start(builder);
            }
        }

        self.update_state_slot_vmctx(builder);

        Ok(())
    }

    /// Hook invoked when function translation finishes: flushes remaining
    /// fuel and finalizes debug metadata.
    pub fn after_translate_function(&mut self, builder: &mut FunctionBuilder) -> WasmResult<()> {
        if self.tunables.consume_fuel && self.is_reachable() {
            self.fuel_function_exit(builder);
        }
        self.finish_debug_metadata(builder);
        Ok(())
    }

    /// Whether relaxed-SIMD operators must use deterministic semantics.
    pub fn relaxed_simd_deterministic(&self) -> bool {
        self.tunables.relaxed_simd_deterministic
    }

    /// Whether the target ISA has a native fused-multiply-add instruction.
    pub fn has_native_fma(&self) -> bool {
        self.isa.has_native_fma()
    }

    /// Whether the compilation target is x86-64.
    pub fn is_x86(&self) -> bool {
        self.isa.triple().architecture == target_lexicon::Architecture::X86_64
    }

    /// Translate a `cont.bind` operator. Delegates to the stack-switching
    /// module.
    pub fn translate_cont_bind(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        contobj: ir::Value,
        args: &[ir::Value],
    ) -> ir::Value {
        stack_switching::instructions::translate_cont_bind(self, builder, contobj, args)
    }

    /// Translate a `cont.new` operator. Delegates to the stack-switching
    /// module.
    pub fn translate_cont_new(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        func: ir::Value,
        arg_types: &[WasmValType],
        return_types: &[WasmValType],
    ) -> WasmResult<ir::Value> {
        stack_switching::instructions::translate_cont_new(
            self,
            builder,
            func,
            arg_types,
            return_types,
        )
    }

    /// Translate a `resume` operator with its resume table. Delegates to the
    /// stack-switching module.
    pub fn translate_resume(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        type_index: u32,
        contobj: ir::Value,
        resume_args: &[ir::Value],
        resumetable: &[(u32, Option<ir::Block>)],
    ) -> WasmResult<Vec<ir::Value>> {
        stack_switching::instructions::translate_resume(
            self,
            builder,
            type_index,
            contobj,
            resume_args,
            resumetable,
        )
    }

    /// Translate a `suspend` operator. Delegates to the stack-switching
    /// module.
    pub fn translate_suspend(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        tag_index: u32,
        suspend_args: &[ir::Value],
        tag_return_types: &[ir::Type],
    ) -> Vec<ir::Value> {
        stack_switching::instructions::translate_suspend(
            self,
            builder,
            tag_index,
            suspend_args,
            tag_return_types,
        )
    }

    /// Translates switch instructions.
    pub fn translate_switch(
        &mut self,
        builder: &mut FunctionBuilder,
        tag_index: u32,
        contobj: ir::Value,
        switch_args: &[ir::Value],
        return_types: &[ir::Type],
    ) -> WasmResult<Vec<ir::Value>> {
        stack_switching::instructions::translate_switch(
            self,
            builder,
            tag_index,
            contobj,
            switch_args,
            return_types,
        )
    }

    /// The parameter types of the continuation type at `index`.
    pub fn continuation_arguments(&self, index: TypeIndex) -> &[WasmValType] {
        let idx = self.module.types[index].unwrap_module_type_index();
        self.types[self.types[idx].unwrap_cont().unwrap_module_type_index()]
            .unwrap_func()
            .params()
    }

    /// The return types of the continuation type at `index`.
    pub fn continuation_returns(&self, index: TypeIndex) -> &[WasmValType] {
        let idx = self.module.types[index].unwrap_module_type_index();
        self.types[self.types[idx].unwrap_cont().unwrap_module_type_index()]
            .unwrap_func()
            .returns()
    }

    /// The parameter types of the tag at `tag_index`.
    pub fn tag_params(&self, tag_index: TagIndex) -> &[WasmValType] {
        let idx = self.module.tags[tag_index].signature;
        self.types[idx.unwrap_module_type_index()]
            .unwrap_func()
            .params()
    }

    /// The return types of the tag at `tag_index`.
    pub fn tag_returns(&self, tag_index: TagIndex) -> &[WasmValType] {
        let idx = self.module.tags[tag_index].signature;
        self.types[idx.unwrap_module_type_index()]
            .unwrap_func()
            .returns()
    }

    /// Whether the ISA can lower relaxed laneselect via `blendv`.
    pub fn use_blendv_for_relaxed_laneselect(&self, ty: Type) -> bool {
        self.isa.has_blendv_lowering(ty)
    }

    /// Whether the ISA can lower relaxed q15mul via x86 `pmulhrsw`.
    pub fn use_x86_pmulhrsw_for_relaxed_q15mul(&self) -> bool {
        self.isa.has_x86_pmulhrsw_lowering()
    }

    /// Whether the ISA can lower dot-product via x86 `pmaddubsw`.
    pub fn use_x86_pmaddubsw_for_dot(&self) -> bool {
        self.isa.has_x86_pmaddubsw_lowering()
    }

    /// Hook invoked before a `return`: triggers wmemcheck exit hooks for
    /// `malloc`/`free` when that instrumentation is enabled.
    pub fn handle_before_return(&mut self, retvals: &[ir::Value], builder: &mut FunctionBuilder) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            let func_name = self.current_func_name(builder);
            if func_name == Some("malloc") {
                self.hook_malloc_exit(builder, retvals);
            } else if func_name == Some("free") {
                self.hook_free_exit(builder);
            }
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (retvals, builder);
    }

    /// Hook invoked before a wasm load: calls the wmemcheck `check_load`
    /// builtin when that instrumentation is enabled; otherwise a no-op.
    pub fn before_load(
        &mut self,
        builder: &mut FunctionBuilder,
        val_size: u8,
        addr: ir::Value,
        offset: u64,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            let check_load = self.builtin_functions.check_load(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let num_bytes = builder.ins().iconst(I32, val_size as i64);
            let offset_val = builder.ins().iconst(I64, offset as i64);
            builder
                .ins()
                .call(check_load, &[vmctx, num_bytes, addr, offset_val]);
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, val_size, addr,
offset);
    }

    /// Hook invoked before a wasm store: calls the wmemcheck `check_store`
    /// builtin when that instrumentation is enabled; otherwise a no-op.
    pub fn before_store(
        &mut self,
        builder: &mut FunctionBuilder,
        val_size: u8,
        addr: ir::Value,
        offset: u64,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            let check_store = self.builtin_functions.check_store(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let num_bytes = builder.ins().iconst(I32, val_size as i64);
            let offset_val = builder.ins().iconst(I64, offset as i64);
            builder
                .ins()
                .call(check_store, &[vmctx, num_bytes, addr, offset_val]);
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, val_size, addr, offset);
    }

    /// Hook invoked on `global.set`: notifies wmemcheck instrumentation of
    /// updates to global 0 (treated as the auxiliary stack pointer);
    /// otherwise a no-op.
    pub fn update_global(
        &mut self,
        builder: &mut FunctionBuilder,
        global_index: GlobalIndex,
        value: ir::Value,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            if global_index.index() == 0 {
                // We are making the assumption that global 0 is the auxiliary stack pointer.
                let update_stack_pointer =
                    self.builtin_functions.update_stack_pointer(builder.func);
                let vmctx = self.vmctx_val(&mut builder.cursor());
                builder.ins().call(update_stack_pointer, &[vmctx, value]);
            }
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, global_index, value);
    }

    /// Hook invoked before `memory.grow`: notifies wmemcheck instrumentation
    /// of the new size of memory 0; otherwise a no-op.
    pub fn before_memory_grow(
        &mut self,
        builder: &mut FunctionBuilder,
        num_pages: ir::Value,
        mem_index: MemoryIndex,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck && mem_index.as_u32() == 0 {
            let update_mem_size = self.builtin_functions.update_mem_size(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            builder.ins().call(update_mem_size, &[vmctx, num_pages]);
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, num_pages, mem_index);
    }

    /// If the ISA has rounding instructions, let Cranelift use them. But if
    /// not, lower to a libcall here, rather than having Cranelift do it. We
    /// can pass our libcall the vmctx pointer, which we use for stack
    /// overflow checking.
    ///
    /// This helper is generic for all rounding instructions below, both for
    /// scalar and simd types. The `clif_round` argument is the CLIF-level
    /// rounding instruction to use if the ISA has the instruction, and the
    /// `round_builtin` helper is used to determine which element-level
    /// rounding operation builtin is used. Note that this handles the case
    /// when `value` is a vector by doing an element-wise libcall invocation.
    fn isa_round(
        &mut self,
        builder: &mut FunctionBuilder,
        value: ir::Value,
        clif_round: fn(FuncInstBuilder<'_, '_>, ir::Value) -> ir::Value,
        round_builtin: fn(&mut BuiltinFunctions, &mut Function) -> ir::FuncRef,
    ) -> ir::Value {
        if self.isa.has_round() {
            return clif_round(builder.ins(), value);
        }

        let vmctx = self.vmctx_val(&mut builder.cursor());
        let round = round_builtin(&mut self.builtin_functions, builder.func);
        // Invoke the scalar rounding builtin on a single value.
        let round_one = |builder: &mut FunctionBuilder, value: ir::Value| {
            let call = builder.ins().call(round, &[vmctx, value]);
            *builder.func.dfg.inst_results(call).first().unwrap()
        };

        let ty = builder.func.dfg.value_type(value);
        if !ty.is_vector() {
            return round_one(builder, value);
        }

        // Vector case: round each lane individually and reassemble.
        assert_eq!(ty.bits(), 128);
        let zero = builder.func.dfg.constants.insert(V128Imm([0; 16]).into());
        let mut result = builder.ins().vconst(ty, zero);
        for i in 0..u8::try_from(ty.lane_count()).unwrap() {
            let element = builder.ins().extractlane(value, i);
            let element_rounded = round_one(builder, element);
            result = builder.ins().insertlane(result, element_rounded, i);
        }
        result
    }

    /// Round an `f32` up (toward positive infinity).
    pub fn ceil_f32(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.ceil(val),
            BuiltinFunctions::ceil_f32,
        )
    }

    /// Round an `f64` up (toward positive infinity).
    pub fn ceil_f64(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.ceil(val),
            BuiltinFunctions::ceil_f64,
        )
    }

    /// Round each lane of an `f32x4` up; lanes fall back to the scalar
    /// `ceil_f32` builtin when the ISA lacks rounding instructions.
    pub fn ceil_f32x4(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.ceil(val),
            BuiltinFunctions::ceil_f32,
        )
    }

    /// Round each lane of an `f64x2` up; lanes fall back to the scalar
    /// `ceil_f64` builtin when the ISA lacks rounding instructions.
    pub fn ceil_f64x2(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.ceil(val),
            BuiltinFunctions::ceil_f64,
        )
    }

    /// Round an `f32` down (toward negative infinity).
    pub fn floor_f32(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.floor(val),
            BuiltinFunctions::floor_f32,
        )
    }

    /// Round an `f64` down (toward negative infinity).
    pub fn floor_f64(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.floor(val),
            BuiltinFunctions::floor_f64,
        )
    }

    /// Round each lane of an `f32x4` down; see `ceil_f32x4` for the lane
    /// fallback behavior.
    pub fn floor_f32x4(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.floor(val),
            BuiltinFunctions::floor_f32,
        )
    }

    /// Round each lane of an `f64x2` down; see `ceil_f64x2` for the lane
    /// fallback behavior.
    pub fn floor_f64x2(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.floor(val),
            BuiltinFunctions::floor_f64,
        )
    }

    /// Round an `f32` toward zero.
    pub fn trunc_f32(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.trunc(val),
            BuiltinFunctions::trunc_f32,
        )
    }

    /// Round an `f64` toward zero.
    pub fn trunc_f64(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.trunc(val),
            BuiltinFunctions::trunc_f64,
        )
    }

    /// Round each lane of an `f32x4` toward zero.
    pub fn trunc_f32x4(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value
{4230self.isa_round(4231builder,4232value,4233|ins, val| ins.trunc(val),4234BuiltinFunctions::trunc_f32,4235)4236}42374238pub fn trunc_f64x2(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {4239self.isa_round(4240builder,4241value,4242|ins, val| ins.trunc(val),4243BuiltinFunctions::trunc_f64,4244)4245}42464247pub fn nearest_f32(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {4248self.isa_round(4249builder,4250value,4251|ins, val| ins.nearest(val),4252BuiltinFunctions::nearest_f32,4253)4254}42554256pub fn nearest_f64(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {4257self.isa_round(4258builder,4259value,4260|ins, val| ins.nearest(val),4261BuiltinFunctions::nearest_f64,4262)4263}42644265pub fn nearest_f32x4(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {4266self.isa_round(4267builder,4268value,4269|ins, val| ins.nearest(val),4270BuiltinFunctions::nearest_f32,4271)4272}42734274pub fn nearest_f64x2(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {4275self.isa_round(4276builder,4277value,4278|ins, val| ins.nearest(val),4279BuiltinFunctions::nearest_f64,4280)4281}42824283pub fn swizzle(4284&mut self,4285builder: &mut FunctionBuilder,4286a: ir::Value,4287b: ir::Value,4288) -> ir::Value {4289// On x86, swizzle would typically be compiled to `pshufb`, except4290// that that's not available on CPUs that lack SSSE3. 
In that case,4291// fall back to a builtin function.4292if !self.is_x86() || self.isa.has_x86_pshufb_lowering() {4293builder.ins().swizzle(a, b)4294} else {4295let swizzle = self.builtin_functions.i8x16_swizzle(builder.func);4296let vmctx = self.vmctx_val(&mut builder.cursor());4297let call = builder.ins().call(swizzle, &[vmctx, a, b]);4298*builder.func.dfg.inst_results(call).first().unwrap()4299}4300}43014302pub fn relaxed_swizzle(4303&mut self,4304builder: &mut FunctionBuilder,4305a: ir::Value,4306b: ir::Value,4307) -> ir::Value {4308// As above, fall back to a builtin if we lack SSSE3.4309if !self.is_x86() || self.isa.has_x86_pshufb_lowering() {4310if !self.is_x86() || self.relaxed_simd_deterministic() {4311builder.ins().swizzle(a, b)4312} else {4313builder.ins().x86_pshufb(a, b)4314}4315} else {4316let swizzle = self.builtin_functions.i8x16_swizzle(builder.func);4317let vmctx = self.vmctx_val(&mut builder.cursor());4318let call = builder.ins().call(swizzle, &[vmctx, a, b]);4319*builder.func.dfg.inst_results(call).first().unwrap()4320}4321}43224323pub fn i8x16_shuffle(4324&mut self,4325builder: &mut FunctionBuilder,4326a: ir::Value,4327b: ir::Value,4328lanes: &[u8; 16],4329) -> ir::Value {4330// As with swizzle, i8x16.shuffle would also commonly be implemented4331// with pshufb, so if we lack SSSE3, fall back to a builtin.4332if !self.is_x86() || self.isa.has_x86_pshufb_lowering() {4333let lanes = ConstantData::from(&lanes[..]);4334let mask = builder.func.dfg.immediates.push(lanes);4335builder.ins().shuffle(a, b, mask)4336} else {4337let lanes = builder4338.func4339.dfg4340.constants4341.insert(ConstantData::from(&lanes[..]));4342let lanes = builder.ins().vconst(I8X16, lanes);4343let i8x16_shuffle = self.builtin_functions.i8x16_shuffle(builder.func);4344let vmctx = self.vmctx_val(&mut builder.cursor());4345let call = builder.ins().call(i8x16_shuffle, &[vmctx, a, b, lanes]);4346*builder.func.dfg.inst_results(call).first().unwrap()4347}4348}43494350pub fn 
fma_f32x4(4351&mut self,4352builder: &mut FunctionBuilder,4353a: ir::Value,4354b: ir::Value,4355c: ir::Value,4356) -> ir::Value {4357if self.has_native_fma() {4358builder.ins().fma(a, b, c)4359} else if self.relaxed_simd_deterministic() {4360// Deterministic semantics are "fused multiply and add".4361let fma = self.builtin_functions.fma_f32x4(builder.func);4362let vmctx = self.vmctx_val(&mut builder.cursor());4363let call = builder.ins().call(fma, &[vmctx, a, b, c]);4364*builder.func.dfg.inst_results(call).first().unwrap()4365} else {4366let mul = builder.ins().fmul(a, b);4367builder.ins().fadd(mul, c)4368}4369}43704371pub fn fma_f64x2(4372&mut self,4373builder: &mut FunctionBuilder,4374a: ir::Value,4375b: ir::Value,4376c: ir::Value,4377) -> ir::Value {4378if self.has_native_fma() {4379builder.ins().fma(a, b, c)4380} else if self.relaxed_simd_deterministic() {4381// Deterministic semantics are "fused multiply and add".4382let fma = self.builtin_functions.fma_f64x2(builder.func);4383let vmctx = self.vmctx_val(&mut builder.cursor());4384let call = builder.ins().call(fma, &[vmctx, a, b, c]);4385*builder.func.dfg.inst_results(call).first().unwrap()4386} else {4387let mul = builder.ins().fmul(a, b);4388builder.ins().fadd(mul, c)4389}4390}43914392pub fn isa(&self) -> &dyn TargetIsa {4393&*self.isa4394}43954396pub fn trap(&mut self, builder: &mut FunctionBuilder, trap: ir::TrapCode) {4397match (4398self.clif_instruction_traps_enabled(),4399crate::clif_trap_to_env_trap(trap),4400) {4401// If libcall traps are disabled or there's no wasmtime-defined trap4402// code for this, then emit a native trap instruction.4403(true, _) | (_, None) => {4404builder.ins().trap(trap);4405}4406// ... otherwise with libcall traps explicitly enabled and a4407// wasmtime-based trap code invoke the libcall to raise a trap and4408// pass in our trap code. 
Leave a debug `unreachable` in place4409// afterwards as a defense-in-depth measure.4410(false, Some(trap)) => {4411let libcall = self.builtin_functions.trap(&mut builder.func);4412let vmctx = self.vmctx_val(&mut builder.cursor());4413let trap_code = builder.ins().iconst(I8, i64::from(trap as u8));4414builder.ins().call(libcall, &[vmctx, trap_code]);4415let raise = self.builtin_functions.raise(&mut builder.func);4416builder.ins().call(raise, &[vmctx]);4417builder.ins().trap(TRAP_INTERNAL_ASSERT);4418}4419}4420}44214422pub fn trapz(&mut self, builder: &mut FunctionBuilder, value: ir::Value, trap: ir::TrapCode) {4423if self.clif_instruction_traps_enabled() {4424builder.ins().trapz(value, trap);4425} else {4426let ty = builder.func.dfg.value_type(value);4427let zero = builder.ins().iconst(ty, 0);4428let cmp = builder.ins().icmp(IntCC::Equal, value, zero);4429self.conditionally_trap(builder, cmp, trap);4430}4431}44324433pub fn trapnz(&mut self, builder: &mut FunctionBuilder, value: ir::Value, trap: ir::TrapCode) {4434if self.clif_instruction_traps_enabled() {4435builder.ins().trapnz(value, trap);4436} else {4437let ty = builder.func.dfg.value_type(value);4438let zero = builder.ins().iconst(ty, 0);4439let cmp = builder.ins().icmp(IntCC::NotEqual, value, zero);4440self.conditionally_trap(builder, cmp, trap);4441}4442}44434444pub fn uadd_overflow_trap(4445&mut self,4446builder: &mut FunctionBuilder,4447lhs: ir::Value,4448rhs: ir::Value,4449trap: ir::TrapCode,4450) -> ir::Value {4451if self.clif_instruction_traps_enabled() {4452builder.ins().uadd_overflow_trap(lhs, rhs, trap)4453} else {4454let (ret, overflow) = builder.ins().uadd_overflow(lhs, rhs);4455self.conditionally_trap(builder, overflow, trap);4456ret4457}4458}44594460pub fn translate_sdiv(4461&mut self,4462builder: &mut FunctionBuilder,4463lhs: ir::Value,4464rhs: ir::Value,4465) -> ir::Value {4466self.guard_signed_divide(builder, lhs, rhs);4467builder.ins().sdiv(lhs, rhs)4468}44694470pub fn translate_udiv(4471&mut 
self,4472builder: &mut FunctionBuilder,4473lhs: ir::Value,4474rhs: ir::Value,4475) -> ir::Value {4476self.guard_zero_divisor(builder, rhs);4477builder.ins().udiv(lhs, rhs)4478}44794480pub fn translate_srem(4481&mut self,4482builder: &mut FunctionBuilder,4483lhs: ir::Value,4484rhs: ir::Value,4485) -> ir::Value {4486self.guard_zero_divisor(builder, rhs);4487builder.ins().srem(lhs, rhs)4488}44894490pub fn translate_urem(4491&mut self,4492builder: &mut FunctionBuilder,4493lhs: ir::Value,4494rhs: ir::Value,4495) -> ir::Value {4496self.guard_zero_divisor(builder, rhs);4497builder.ins().urem(lhs, rhs)4498}44994500pub fn translate_fcvt_to_sint(4501&mut self,4502builder: &mut FunctionBuilder,4503ty: ir::Type,4504val: ir::Value,4505) -> ir::Value {4506// NB: for now avoid translating this entire instruction to CLIF and4507// just do it in a libcall.4508if !self.clif_instruction_traps_enabled() {4509self.guard_fcvt_to_int(builder, ty, val, true);4510}4511builder.ins().fcvt_to_sint(ty, val)4512}45134514pub fn translate_fcvt_to_uint(4515&mut self,4516builder: &mut FunctionBuilder,4517ty: ir::Type,4518val: ir::Value,4519) -> ir::Value {4520if !self.clif_instruction_traps_enabled() {4521self.guard_fcvt_to_int(builder, ty, val, false);4522}4523builder.ins().fcvt_to_uint(ty, val)4524}45254526/// Returns whether it's acceptable to rely on traps in CLIF memory-related4527/// instructions (e.g. 
loads and stores).4528///4529/// This is enabled if `signals_based_traps` is `true` since signal handlers4530/// are available, but this is additionally forcibly disabled if Pulley is4531/// being targeted since the Pulley runtime doesn't catch segfaults for4532/// itself.4533pub fn clif_memory_traps_enabled(&self) -> bool {4534self.tunables.signals_based_traps && !self.is_pulley()4535}45364537/// Returns whether it's acceptable to have CLIF instructions natively trap,4538/// such as division-by-zero.4539///4540/// This is enabled if `signals_based_traps` is `true` or on4541/// Pulley unconditionally since Pulley doesn't use hardware-based4542/// traps in its runtime. However, if guest debugging is enabled,4543/// then we cannot rely on Pulley traps and still need a libcall4544/// to gain proper ownership of the store in the runtime's4545/// debugger hooks.4546pub fn clif_instruction_traps_enabled(&self) -> bool {4547self.tunables.signals_based_traps || (self.is_pulley() && !self.tunables.debug_guest)4548}45494550/// Returns whether loads from the null address are allowed as signals of4551/// whether to trap or not.4552pub fn load_from_zero_allowed(&self) -> bool {4553// Pulley allows loads-from-zero and otherwise this is only allowed with4554// traps + spectre mitigations.4555self.is_pulley()4556|| (self.clif_memory_traps_enabled() && self.heap_access_spectre_mitigation())4557}45584559/// Returns whether translation is happening for Pulley bytecode.4560pub fn is_pulley(&self) -> bool {4561self.isa.triple().is_pulley()4562}45634564/// Returns whether the current location is reachable.4565pub fn is_reachable(&self) -> bool {4566self.stacks.reachable()4567}4568}45694570// Helper function to convert an `IndexType` to an `ir::Type`.4571//4572// Implementing From/Into trait for `IndexType` or `ir::Type` would4573// introduce an extra dependency between `wasmtime_types` and `cranelift_codegen`.4574fn index_type_to_ir_type(index_type: IndexType) -> ir::Type {4575match 
index_type {4576IndexType::I32 => I32,4577IndexType::I64 => I64,4578}4579}458045814582