Path: blob/main/crates/cranelift/src/func_environ.rs
1692 views
mod gc;1pub(crate) mod stack_switching;23use crate::compiler::Compiler;4use crate::translate::{5FuncTranslationStacks, GlobalVariable, Heap, HeapData, StructFieldsVec, TableData, TableSize,6TargetEnvironment,7};8use crate::{BuiltinFunctionSignatures, TRAP_INTERNAL_ASSERT};9use cranelift_codegen::cursor::FuncCursor;10use cranelift_codegen::ir::condcodes::{FloatCC, IntCC};11use cranelift_codegen::ir::immediates::{Imm64, Offset32, V128Imm};12use cranelift_codegen::ir::pcc::Fact;13use cranelift_codegen::ir::{self, BlockArg, ExceptionTableData, ExceptionTableItem, types};14use cranelift_codegen::ir::{ArgumentPurpose, ConstantData, Function, InstBuilder, MemFlags};15use cranelift_codegen::ir::{Block, ExceptionTag, types::*};16use cranelift_codegen::isa::{TargetFrontendConfig, TargetIsa};17use cranelift_entity::packed_option::{PackedOption, ReservedValue};18use cranelift_entity::{EntityRef, PrimaryMap, SecondaryMap};19use cranelift_frontend::Variable;20use cranelift_frontend::{FuncInstBuilder, FunctionBuilder};21use smallvec::{SmallVec, smallvec};22use std::mem;23use wasmparser::{Operator, WasmFeatures};24use wasmtime_environ::{25BuiltinFunctionIndex, DataIndex, DefinedFuncIndex, ElemIndex, EngineOrModuleTypeIndex,26FuncIndex, FuncKey, GlobalIndex, IndexType, Memory, MemoryIndex, Module,27ModuleInternedTypeIndex, ModuleTranslation, ModuleTypesBuilder, PtrSize, Table, TableIndex,28TagIndex, TripleExt, Tunables, TypeConvert, TypeIndex, VMOffsets, WasmCompositeInnerType,29WasmFuncType, WasmHeapTopType, WasmHeapType, WasmRefType, WasmResult, WasmValType,30};31use wasmtime_environ::{FUNCREF_INIT_BIT, FUNCREF_MASK};32use wasmtime_math::f64_cvt_to_int_bounds;3334#[derive(Debug)]35pub(crate) enum Extension {36Sign,37Zero,38}3940/// A struct with an `Option<ir::FuncRef>` member for every builtin41/// function, to de-duplicate constructing/getting its function.42pub(crate) struct BuiltinFunctions {43types: BuiltinFunctionSignatures,4445builtins: [Option<ir::FuncRef>; 
BuiltinFunctionIndex::len() as usize],46}4748impl BuiltinFunctions {49fn new(compiler: &Compiler) -> Self {50Self {51types: BuiltinFunctionSignatures::new(compiler),52builtins: [None; BuiltinFunctionIndex::len() as usize],53}54}5556fn load_builtin(&mut self, func: &mut Function, builtin: BuiltinFunctionIndex) -> ir::FuncRef {57let cache = &mut self.builtins[builtin.index() as usize];58if let Some(f) = cache {59return *f;60}61let signature = func.import_signature(self.types.wasm_signature(builtin));62let key = FuncKey::WasmToBuiltinTrampoline(builtin);63let (namespace, index) = key.into_raw_parts();64let name = ir::ExternalName::User(65func.declare_imported_user_function(ir::UserExternalName { namespace, index }),66);67let f = func.import_function(ir::ExtFuncData {68name,69signature,70colocated: true,71});72*cache = Some(f);73f74}75}7677// Generate helper methods on `BuiltinFunctions` above for each named builtin78// as well.79macro_rules! declare_function_signatures {80($(81$( #[$attr:meta] )*82$name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;83)*) => {84$(impl BuiltinFunctions {85$( #[$attr] )*86pub(crate) fn $name(&mut self, func: &mut Function) -> ir::FuncRef {87self.load_builtin(func, BuiltinFunctionIndex::$name())88}89})*90};91}92wasmtime_environ::foreach_builtin_function!(declare_function_signatures);9394/// The `FuncEnvironment` implementation for use by the `ModuleEnvironment`.95pub struct FuncEnvironment<'module_environment> {96compiler: &'module_environment Compiler,97isa: &'module_environment (dyn TargetIsa + 'module_environment),98pub(crate) module: &'module_environment Module,99types: &'module_environment ModuleTypesBuilder,100wasm_func_ty: &'module_environment WasmFuncType,101sig_ref_to_ty: SecondaryMap<ir::SigRef, Option<&'module_environment WasmFuncType>>,102needs_gc_heap: bool,103entities: WasmEntities,104105#[cfg(feature = "gc")]106ty_to_gc_layout: 
std::collections::HashMap<107wasmtime_environ::ModuleInternedTypeIndex,108wasmtime_environ::GcLayout,109>,110111#[cfg(feature = "gc")]112gc_heap: Option<Heap>,113114/// The Cranelift global holding the GC heap's base address.115#[cfg(feature = "gc")]116gc_heap_base: Option<ir::GlobalValue>,117118/// The Cranelift global holding the GC heap's base address.119#[cfg(feature = "gc")]120gc_heap_bound: Option<ir::GlobalValue>,121122translation: &'module_environment ModuleTranslation<'module_environment>,123124/// Heaps implementing WebAssembly linear memories.125heaps: PrimaryMap<Heap, HeapData>,126127/// The Cranelift global holding the vmctx address.128vmctx: Option<ir::GlobalValue>,129130/// The Cranelift global for our vmctx's `*mut VMStoreContext`.131vm_store_context: Option<ir::GlobalValue>,132133/// The PCC memory type describing the vmctx layout, if we're134/// using PCC.135pcc_vmctx_memtype: Option<ir::MemoryType>,136137/// Caches of signatures for builtin functions.138builtin_functions: BuiltinFunctions,139140/// Offsets to struct fields accessed by JIT code.141pub(crate) offsets: VMOffsets<u8>,142143tunables: &'module_environment Tunables,144145/// A function-local variable which stores the cached value of the amount of146/// fuel remaining to execute. If used this is modified frequently so it's147/// stored locally as a variable instead of always referenced from the field148/// in `*const VMStoreContext`149fuel_var: cranelift_frontend::Variable,150151/// A cached epoch deadline value, when performing epoch-based152/// interruption. Loaded from `VMStoreContext` and reloaded after153/// any yield.154epoch_deadline_var: cranelift_frontend::Variable,155156/// A cached pointer to the per-Engine epoch counter, when157/// performing epoch-based interruption. Initialized in the158/// function prologue. 
We prefer to use a variable here rather159/// than reload on each check because it's better to let the160/// regalloc keep it in a register if able; if not, it can always161/// spill, and this isn't any worse than reloading each time.162epoch_ptr_var: cranelift_frontend::Variable,163164fuel_consumed: i64,165166/// A `GlobalValue` in CLIF which represents the stack limit.167///168/// Typically this resides in the `stack_limit` value of `ir::Function` but169/// that requires signal handlers on the host and when that's disabled this170/// is here with an explicit check instead. Note that the explicit check is171/// always present even if this is a "leaf" function, as we have to call172/// into the host to trap when signal handlers are disabled.173pub(crate) stack_limit_at_function_entry: Option<ir::GlobalValue>,174175/// Used by the stack switching feature. If set, we have a allocated a176/// slot on this function's stack to be used for the177/// current stack's `handler_list` field.178stack_switching_handler_list_buffer: Option<ir::StackSlot>,179180/// Used by the stack switching feature. 
If set, we have a allocated a181/// slot on this function's stack to be used for the182/// current continuation's `values` field.183stack_switching_values_buffer: Option<ir::StackSlot>,184}185186impl<'module_environment> FuncEnvironment<'module_environment> {187pub fn new(188compiler: &'module_environment Compiler,189translation: &'module_environment ModuleTranslation<'module_environment>,190types: &'module_environment ModuleTypesBuilder,191wasm_func_ty: &'module_environment WasmFuncType,192) -> Self {193let tunables = compiler.tunables();194let builtin_functions = BuiltinFunctions::new(compiler);195196// This isn't used during translation, so squash the warning about this197// being unused from the compiler.198let _ = BuiltinFunctions::raise;199200Self {201isa: compiler.isa(),202module: &translation.module,203compiler,204types,205wasm_func_ty,206sig_ref_to_ty: SecondaryMap::default(),207needs_gc_heap: false,208entities: WasmEntities::default(),209210#[cfg(feature = "gc")]211ty_to_gc_layout: std::collections::HashMap::new(),212#[cfg(feature = "gc")]213gc_heap: None,214#[cfg(feature = "gc")]215gc_heap_base: None,216#[cfg(feature = "gc")]217gc_heap_bound: None,218219heaps: PrimaryMap::default(),220vmctx: None,221vm_store_context: None,222pcc_vmctx_memtype: None,223builtin_functions,224offsets: VMOffsets::new(compiler.isa().pointer_bytes(), &translation.module),225tunables,226fuel_var: Variable::reserved_value(),227epoch_deadline_var: Variable::reserved_value(),228epoch_ptr_var: Variable::reserved_value(),229230// Start with at least one fuel being consumed because even empty231// functions should consume at least some fuel.232fuel_consumed: 1,233234translation,235236stack_limit_at_function_entry: None,237238stack_switching_handler_list_buffer: None,239stack_switching_values_buffer: None,240}241}242243pub(crate) fn pointer_type(&self) -> ir::Type {244self.isa.pointer_type()245}246247pub(crate) fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue 
{248self.vmctx.unwrap_or_else(|| {249let vmctx = func.create_global_value(ir::GlobalValueData::VMContext);250if self.isa.flags().enable_pcc() {251// Create a placeholder memtype for the vmctx; we'll252// add fields to it as we lazily create HeapData253// structs and global values.254let vmctx_memtype = func.create_memory_type(ir::MemoryTypeData::Struct {255size: 0,256fields: vec![],257});258259self.pcc_vmctx_memtype = Some(vmctx_memtype);260func.global_value_facts[vmctx] = Some(Fact::Mem {261ty: vmctx_memtype,262min_offset: 0,263max_offset: 0,264nullable: false,265});266}267268self.vmctx = Some(vmctx);269vmctx270})271}272273pub(crate) fn vmctx_val(&mut self, pos: &mut FuncCursor<'_>) -> ir::Value {274let pointer_type = self.pointer_type();275let vmctx = self.vmctx(&mut pos.func);276pos.ins().global_value(pointer_type, vmctx)277}278279fn get_table_copy_func(280&mut self,281func: &mut Function,282dst_table_index: TableIndex,283src_table_index: TableIndex,284) -> (ir::FuncRef, usize, usize) {285let sig = self.builtin_functions.table_copy(func);286(287sig,288dst_table_index.as_u32() as usize,289src_table_index.as_u32() as usize,290)291}292293#[cfg(feature = "threads")]294fn get_memory_atomic_wait(&mut self, func: &mut Function, ty: ir::Type) -> ir::FuncRef {295match ty {296I32 => self.builtin_functions.memory_atomic_wait32(func),297I64 => self.builtin_functions.memory_atomic_wait64(func),298x => panic!("get_memory_atomic_wait unsupported type: {x:?}"),299}300}301302fn get_global_location(303&mut self,304func: &mut ir::Function,305index: GlobalIndex,306) -> (ir::GlobalValue, i32) {307let pointer_type = self.pointer_type();308let vmctx = self.vmctx(func);309if let Some(def_index) = self.module.defined_global_index(index) {310let offset = i32::try_from(self.offsets.vmctx_vmglobal_definition(def_index)).unwrap();311(vmctx, offset)312} else {313let from_offset = self.offsets.vmctx_vmglobal_import_from(index);314let global = func.create_global_value(ir::GlobalValueData::Load 
{315base: vmctx,316offset: Offset32::new(i32::try_from(from_offset).unwrap()),317global_type: pointer_type,318flags: MemFlags::trusted().with_readonly().with_can_move(),319});320(global, 0)321}322}323324/// Get or create the `ir::Global` for the `*mut VMStoreContext` in our325/// `VMContext`.326fn get_vmstore_context_ptr_global(&mut self, func: &mut ir::Function) -> ir::GlobalValue {327if let Some(ptr) = self.vm_store_context {328return ptr;329}330331let offset = self.offsets.ptr.vmctx_store_context();332let base = self.vmctx(func);333let ptr = func.create_global_value(ir::GlobalValueData::Load {334base,335offset: Offset32::new(offset.into()),336global_type: self.pointer_type(),337flags: ir::MemFlags::trusted().with_readonly().with_can_move(),338});339self.vm_store_context = Some(ptr);340ptr341}342343/// Get the `*mut VMStoreContext` value for our `VMContext`.344fn get_vmstore_context_ptr(&mut self, builder: &mut FunctionBuilder) -> ir::Value {345let global = self.get_vmstore_context_ptr_global(&mut builder.func);346builder.ins().global_value(self.pointer_type(), global)347}348349fn fuel_function_entry(&mut self, builder: &mut FunctionBuilder<'_>) {350// On function entry we load the amount of fuel into a function-local351// `self.fuel_var` to make fuel modifications fast locally. 
This cache352// is then periodically flushed to the Store-defined location in353// `VMStoreContext` later.354debug_assert!(self.fuel_var.is_reserved_value());355self.fuel_var = builder.declare_var(ir::types::I64);356self.fuel_load_into_var(builder);357self.fuel_check(builder);358}359360fn fuel_function_exit(&mut self, builder: &mut FunctionBuilder<'_>) {361// On exiting the function we need to be sure to save the fuel we have362// cached locally in `self.fuel_var` back into the Store-defined363// location.364self.fuel_save_from_var(builder);365}366367fn fuel_before_op(368&mut self,369op: &Operator<'_>,370builder: &mut FunctionBuilder<'_>,371reachable: bool,372) {373if !reachable {374// In unreachable code we shouldn't have any leftover fuel we375// haven't accounted for since the reason for us to become376// unreachable should have already added it to `self.fuel_var`.377debug_assert_eq!(self.fuel_consumed, 0);378return;379}380381self.fuel_consumed += match op {382// Nop and drop generate no code, so don't consume fuel for them.383Operator::Nop | Operator::Drop => 0,384385// Control flow may create branches, but is generally cheap and386// free, so don't consume fuel. Note the lack of `if` since some387// cost is incurred with the conditional check.388Operator::Block { .. }389| Operator::Loop { .. 
}390| Operator::Unreachable391| Operator::Return392| Operator::Else393| Operator::End => 0,394395// everything else, just call it one operation.396_ => 1,397};398399match op {400// Exiting a function (via a return or unreachable) or otherwise401// entering a different function (via a call) means that we need to402// update the fuel consumption in `VMStoreContext` because we're403// about to move control out of this function itself and the fuel404// may need to be read.405//406// Before this we need to update the fuel counter from our own cost407// leading up to this function call, and then we can store408// `self.fuel_var` into `VMStoreContext`.409Operator::Unreachable410| Operator::Return411| Operator::CallIndirect { .. }412| Operator::Call { .. }413| Operator::ReturnCall { .. }414| Operator::ReturnCallRef { .. }415| Operator::ReturnCallIndirect { .. }416| Operator::Throw { .. } | Operator::ThrowRef => {417self.fuel_increment_var(builder);418self.fuel_save_from_var(builder);419}420421// To ensure all code preceding a loop is only counted once we422// update the fuel variable on entry.423Operator::Loop { .. }424425// Entering into an `if` block means that the edge we take isn't426// known until runtime, so we need to update our fuel consumption427// before we take the branch.428| Operator::If { .. }429430// Control-flow instructions mean that we're moving to the end/exit431// of a block somewhere else. That means we need to update the fuel432// counter since we're effectively terminating our basic block.433| Operator::Br { .. }434| Operator::BrIf { .. }435| Operator::BrTable { .. }436| Operator::BrOnNull { .. }437| Operator::BrOnNonNull { .. }438| Operator::BrOnCast { .. }439| Operator::BrOnCastFail { .. 
}440441// Exiting a scope means that we need to update the fuel442// consumption because there are multiple ways to exit a scope and443// this is the only time we have to account for instructions444// executed so far.445| Operator::End446447// This is similar to `end`, except that it's only the terminator448// for an `if` block. The same reasoning applies though in that we449// are terminating a basic block and need to update the fuel450// variable.451| Operator::Else => self.fuel_increment_var(builder),452453// This is a normal instruction where the fuel is buffered to later454// get added to `self.fuel_var`.455//456// Note that we generally ignore instructions which may trap and457// therefore result in exiting a block early. Current usage of fuel458// means that it's not too important to account for a precise amount459// of fuel consumed but rather "close to the actual amount" is good460// enough. For 100% precise counting, however, we'd probably need to461// not only increment but also save the fuel amount more often462// around trapping instructions. (see the `unreachable` instruction463// case above)464//465// Note that `Block` is specifically omitted from incrementing the466// fuel variable. Control flow entering a `block` is unconditional467// which means it's effectively executing straight-line code. We'll468// update the counter when exiting a block, but we shouldn't need to469// do so upon entering a block.470_ => {}471}472}473474fn fuel_after_op(&mut self, op: &Operator<'_>, builder: &mut FunctionBuilder<'_>) {475// After a function call we need to reload our fuel value since the476// function may have changed it.477match op {478Operator::Call { .. } | Operator::CallIndirect { .. 
} => {479self.fuel_load_into_var(builder);480}481_ => {}482}483}484485/// Adds `self.fuel_consumed` to the `fuel_var`, zero-ing out the amount of486/// fuel consumed at that point.487fn fuel_increment_var(&mut self, builder: &mut FunctionBuilder<'_>) {488let consumption = mem::replace(&mut self.fuel_consumed, 0);489if consumption == 0 {490return;491}492493let fuel = builder.use_var(self.fuel_var);494let fuel = builder.ins().iadd_imm(fuel, consumption);495builder.def_var(self.fuel_var, fuel);496}497498/// Loads the fuel consumption value from `VMStoreContext` into `self.fuel_var`499fn fuel_load_into_var(&mut self, builder: &mut FunctionBuilder<'_>) {500let (addr, offset) = self.fuel_addr_offset(builder);501let fuel = builder502.ins()503.load(ir::types::I64, ir::MemFlags::trusted(), addr, offset);504builder.def_var(self.fuel_var, fuel);505}506507/// Stores the fuel consumption value from `self.fuel_var` into508/// `VMStoreContext`.509fn fuel_save_from_var(&mut self, builder: &mut FunctionBuilder<'_>) {510let (addr, offset) = self.fuel_addr_offset(builder);511let fuel_consumed = builder.use_var(self.fuel_var);512builder513.ins()514.store(ir::MemFlags::trusted(), fuel_consumed, addr, offset);515}516517/// Returns the `(address, offset)` of the fuel consumption within518/// `VMStoreContext`, used to perform loads/stores later.519fn fuel_addr_offset(520&mut self,521builder: &mut FunctionBuilder<'_>,522) -> (ir::Value, ir::immediates::Offset32) {523let vmstore_ctx = self.get_vmstore_context_ptr(builder);524(525vmstore_ctx,526i32::from(self.offsets.ptr.vmstore_context_fuel_consumed()).into(),527)528}529530/// Checks the amount of remaining, and if we've run out of fuel we call531/// the out-of-fuel function.532fn fuel_check(&mut self, builder: &mut FunctionBuilder) {533self.fuel_increment_var(builder);534let out_of_gas_block = builder.create_block();535let continuation_block = builder.create_block();536537// Note that our fuel is encoded as adding positive values to a538// 
negative number. Whenever the negative number goes positive that539// means we ran out of fuel.540//541// Compare to see if our fuel is positive, and if so we ran out of gas.542// Otherwise we can continue on like usual.543let zero = builder.ins().iconst(ir::types::I64, 0);544let fuel = builder.use_var(self.fuel_var);545let cmp = builder546.ins()547.icmp(IntCC::SignedGreaterThanOrEqual, fuel, zero);548builder549.ins()550.brif(cmp, out_of_gas_block, &[], continuation_block, &[]);551builder.seal_block(out_of_gas_block);552553// If we ran out of gas then we call our out-of-gas intrinsic and it554// figures out what to do. Note that this may raise a trap, or do555// something like yield to an async runtime. In either case we don't556// assume what happens and handle the case the intrinsic returns.557//558// Note that we save/reload fuel around this since the out-of-gas559// intrinsic may alter how much fuel is in the system.560builder.switch_to_block(out_of_gas_block);561self.fuel_save_from_var(builder);562let out_of_gas = self.builtin_functions.out_of_gas(builder.func);563let vmctx = self.vmctx_val(&mut builder.cursor());564builder.ins().call(out_of_gas, &[vmctx]);565self.fuel_load_into_var(builder);566builder.ins().jump(continuation_block, &[]);567builder.seal_block(continuation_block);568569builder.switch_to_block(continuation_block);570}571572fn epoch_function_entry(&mut self, builder: &mut FunctionBuilder<'_>) {573debug_assert!(self.epoch_deadline_var.is_reserved_value());574self.epoch_deadline_var = builder.declare_var(ir::types::I64);575// Let epoch_check_full load the current deadline and call def_var576577debug_assert!(self.epoch_ptr_var.is_reserved_value());578self.epoch_ptr_var = builder.declare_var(self.pointer_type());579let epoch_ptr = self.epoch_ptr(builder);580builder.def_var(self.epoch_ptr_var, epoch_ptr);581582// We must check for an epoch change when entering a583// function. Why? 
Why aren't checks at loops sufficient to584// bound runtime to O(|static program size|)?585//586// The reason is that one can construct a "zip-bomb-like"587// program with exponential-in-program-size runtime, with no588// backedges (loops), by building a tree of function calls: f0589// calls f1 ten times, f1 calls f2 ten times, etc. E.g., nine590// levels of this yields a billion function calls with no591// backedges. So we can't do checks only at backedges.592//593// In this "call-tree" scenario, and in fact in any program594// that uses calls as a sort of control flow to try to evade595// backedge checks, a check at every function entry is596// sufficient. Then, combined with checks at every backedge597// (loop) the longest runtime between checks is bounded by the598// straightline length of any function body.599let continuation_block = builder.create_block();600let cur_epoch_value = self.epoch_load_current(builder);601self.epoch_check_full(builder, cur_epoch_value, continuation_block);602}603604#[cfg(feature = "wmemcheck")]605fn hook_malloc_exit(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) {606let check_malloc = self.builtin_functions.check_malloc(builder.func);607let vmctx = self.vmctx_val(&mut builder.cursor());608let func_args = builder609.func610.dfg611.block_params(builder.func.layout.entry_block().unwrap());612let len = if func_args.len() < 3 {613return;614} else {615// If a function named `malloc` has at least one argument, we assume the616// first argument is the requested allocation size.617func_args[2]618};619let retval = if retvals.len() < 1 {620return;621} else {622retvals[0]623};624builder.ins().call(check_malloc, &[vmctx, retval, len]);625}626627#[cfg(feature = "wmemcheck")]628fn hook_free_exit(&mut self, builder: &mut FunctionBuilder) {629let check_free = self.builtin_functions.check_free(builder.func);630let vmctx = self.vmctx_val(&mut builder.cursor());631let func_args = 
builder632.func633.dfg634.block_params(builder.func.layout.entry_block().unwrap());635let ptr = if func_args.len() < 3 {636return;637} else {638// If a function named `free` has at least one argument, we assume the639// first argument is a pointer to memory.640func_args[2]641};642builder.ins().call(check_free, &[vmctx, ptr]);643}644645fn epoch_ptr(&mut self, builder: &mut FunctionBuilder<'_>) -> ir::Value {646let vmctx = self.vmctx(builder.func);647let pointer_type = self.pointer_type();648let base = builder.ins().global_value(pointer_type, vmctx);649let offset = i32::from(self.offsets.ptr.vmctx_epoch_ptr());650let epoch_ptr = builder651.ins()652.load(pointer_type, ir::MemFlags::trusted(), base, offset);653epoch_ptr654}655656fn epoch_load_current(&mut self, builder: &mut FunctionBuilder<'_>) -> ir::Value {657let addr = builder.use_var(self.epoch_ptr_var);658builder.ins().load(659ir::types::I64,660ir::MemFlags::trusted(),661addr,662ir::immediates::Offset32::new(0),663)664}665666fn epoch_check(&mut self, builder: &mut FunctionBuilder<'_>) {667let continuation_block = builder.create_block();668669// Load new epoch and check against the cached deadline.670let cur_epoch_value = self.epoch_load_current(builder);671self.epoch_check_cached(builder, cur_epoch_value, continuation_block);672673// At this point we've noticed that the epoch has exceeded our674// cached deadline. 
However the real deadline may have been675// updated (within another yield) during some function that we676// called in the meantime, so reload the cache and check again.677self.epoch_check_full(builder, cur_epoch_value, continuation_block);678}679680fn epoch_check_cached(681&mut self,682builder: &mut FunctionBuilder,683cur_epoch_value: ir::Value,684continuation_block: ir::Block,685) {686let new_epoch_block = builder.create_block();687builder.set_cold_block(new_epoch_block);688689let epoch_deadline = builder.use_var(self.epoch_deadline_var);690let cmp = builder.ins().icmp(691IntCC::UnsignedGreaterThanOrEqual,692cur_epoch_value,693epoch_deadline,694);695builder696.ins()697.brif(cmp, new_epoch_block, &[], continuation_block, &[]);698builder.seal_block(new_epoch_block);699700builder.switch_to_block(new_epoch_block);701}702703fn epoch_check_full(704&mut self,705builder: &mut FunctionBuilder,706cur_epoch_value: ir::Value,707continuation_block: ir::Block,708) {709// We keep the deadline cached in a register to speed the checks710// in the common case (between epoch ticks) but we want to do a711// precise check here by reloading the cache first.712let vmstore_ctx = self.get_vmstore_context_ptr(builder);713let deadline = builder.ins().load(714ir::types::I64,715ir::MemFlags::trusted(),716vmstore_ctx,717ir::immediates::Offset32::new(self.offsets.ptr.vmstore_context_epoch_deadline() as i32),718);719builder.def_var(self.epoch_deadline_var, deadline);720self.epoch_check_cached(builder, cur_epoch_value, continuation_block);721722let new_epoch = self.builtin_functions.new_epoch(builder.func);723let vmctx = self.vmctx_val(&mut builder.cursor());724// new_epoch() returns the new deadline, so we don't have to725// reload it.726let call = builder.ins().call(new_epoch, &[vmctx]);727let new_deadline = *builder.func.dfg.inst_results(call).first().unwrap();728builder.def_var(self.epoch_deadline_var, new_deadline);729builder.ins().jump(continuation_block, 
&[]);730builder.seal_block(continuation_block);731732builder.switch_to_block(continuation_block);733}734735/// Get the Memory for the given index.736fn memory(&self, index: MemoryIndex) -> Memory {737self.module.memories[index]738}739740/// Get the Table for the given index.741fn table(&self, index: TableIndex) -> Table {742self.module.tables[index]743}744745/// Cast the value to I64 and sign extend if necessary.746///747/// Returns the value casted to I64.748fn cast_index_to_i64(749&self,750pos: &mut FuncCursor<'_>,751val: ir::Value,752index_type: IndexType,753) -> ir::Value {754match index_type {755IndexType::I32 => pos.ins().uextend(I64, val),756IndexType::I64 => val,757}758}759760/// Convert the target pointer-sized integer `val` into the memory/table's index type.761///762/// For memory, `val` is holding a memory length (or the `-1` `memory.grow`-failed sentinel).763/// For table, `val` is holding a table length.764///765/// This might involve extending or truncating it depending on the memory/table's766/// index type and the target's pointer type.767fn convert_pointer_to_index_type(768&self,769mut pos: FuncCursor<'_>,770val: ir::Value,771index_type: IndexType,772// When it is a memory and the memory is using single-byte pages,773// we need to handle the truncation differently. See comments below.774//775// When it is a table, this should be set to false.776single_byte_pages: bool,777) -> ir::Value {778let desired_type = index_type_to_ir_type(index_type);779let pointer_type = self.pointer_type();780assert_eq!(pos.func.dfg.value_type(val), pointer_type);781782// The current length is of type `pointer_type` but we need to fit it783// into `desired_type`. 
We are guaranteed that the result will always784// fit, so we just need to do the right ireduce/sextend here.785if pointer_type == desired_type {786val787} else if pointer_type.bits() > desired_type.bits() {788pos.ins().ireduce(desired_type, val)789} else {790// We have a 64-bit memory/table on a 32-bit host -- this combo doesn't791// really make a whole lot of sense to do from a user perspective792// but that is neither here nor there. We want to logically do an793// unsigned extend *except* when we are given the `-1` sentinel,794// which we must preserve as `-1` in the wider type.795match single_byte_pages {796false => {797// In the case that we have default page sizes, we can798// always sign extend, since valid memory lengths (in pages)799// never have their sign bit set, and so if the sign bit is800// set then this must be the `-1` sentinel, which we want to801// preserve through the extension.802//803// When it comes to table, `single_byte_pages` should have always been set to false.804// Then we simply do a signed extension.805pos.ins().sextend(desired_type, val)806}807true => {808// For single-byte pages, we have to explicitly check for809// `-1` and choose whether to do an unsigned extension or810// return a larger `-1` because there are valid memory811// lengths (in pages) that have the sign bit set.812let extended = pos.ins().uextend(desired_type, val);813let neg_one = pos.ins().iconst(desired_type, -1);814let is_failure = pos.ins().icmp_imm(IntCC::Equal, val, -1);815pos.ins().select(is_failure, neg_one, extended)816}817}818}819}820821fn get_or_init_func_ref_table_elem(822&mut self,823builder: &mut FunctionBuilder,824table_index: TableIndex,825index: ir::Value,826cold_blocks: bool,827) -> ir::Value {828let pointer_type = self.pointer_type();829let table_data = self.get_or_create_table(builder.func, table_index);830831// To support lazy initialization of table832// contents, we check for a null entry here, and833// if null, we take a slow-path that 
invokes a834// libcall.835let (table_entry_addr, flags) = table_data.prepare_table_addr(self, builder, index);836let value = builder.ins().load(pointer_type, flags, table_entry_addr, 0);837838if !self.tunables.table_lazy_init {839return value;840}841842// Mask off the "initialized bit". See documentation on843// FUNCREF_INIT_BIT in crates/environ/src/ref_bits.rs for more844// details. Note that `FUNCREF_MASK` has type `usize` which may not be845// appropriate for the target architecture. Right now its value is846// always -2 so assert that part doesn't change and then thread through847// -2 as the immediate.848assert_eq!(FUNCREF_MASK as isize, -2);849let value_masked = builder.ins().band_imm(value, Imm64::from(-2));850851let null_block = builder.create_block();852let continuation_block = builder.create_block();853if cold_blocks {854builder.set_cold_block(null_block);855builder.set_cold_block(continuation_block);856}857let result_param = builder.append_block_param(continuation_block, pointer_type);858builder.set_cold_block(null_block);859860builder.ins().brif(861value,862continuation_block,863&[value_masked.into()],864null_block,865&[],866);867builder.seal_block(null_block);868869builder.switch_to_block(null_block);870let index_type = self.table(table_index).idx_type;871let table_index = builder.ins().iconst(I32, table_index.index() as i64);872let lazy_init = self873.builtin_functions874.table_get_lazy_init_func_ref(builder.func);875let vmctx = self.vmctx_val(&mut builder.cursor());876let index = self.cast_index_to_i64(&mut builder.cursor(), index, index_type);877let call_inst = builder.ins().call(lazy_init, &[vmctx, table_index, index]);878let returned_entry = builder.func.dfg.inst_results(call_inst)[0];879builder880.ins()881.jump(continuation_block, &[returned_entry.into()]);882builder.seal_block(continuation_block);883884builder.switch_to_block(continuation_block);885result_param886}887888#[cfg(feature = "wmemcheck")]889fn check_malloc_start(&mut self, builder: 
&mut FunctionBuilder) {890let malloc_start = self.builtin_functions.malloc_start(builder.func);891let vmctx = self.vmctx_val(&mut builder.cursor());892builder.ins().call(malloc_start, &[vmctx]);893}894895#[cfg(feature = "wmemcheck")]896fn check_free_start(&mut self, builder: &mut FunctionBuilder) {897let free_start = self.builtin_functions.free_start(builder.func);898let vmctx = self.vmctx_val(&mut builder.cursor());899builder.ins().call(free_start, &[vmctx]);900}901902#[cfg(feature = "wmemcheck")]903fn current_func_name(&self, builder: &mut FunctionBuilder) -> Option<&str> {904let func_index = match &builder.func.name {905ir::UserFuncName::User(user) => FuncIndex::from_u32(user.index),906_ => {907panic!("function name not a UserFuncName::User as expected")908}909};910self.translation911.debuginfo912.name_section913.func_names914.get(&func_index)915.copied()916}917918/// Proof-carrying code: create a memtype describing an empty919/// runtime struct (to be updated later).920fn create_empty_struct_memtype(&self, func: &mut ir::Function) -> ir::MemoryType {921func.create_memory_type(ir::MemoryTypeData::Struct {922size: 0,923fields: vec![],924})925}926927/// Proof-carrying code: add a new field to a memtype used to928/// describe a runtime struct. A memory region of type `memtype`929/// will have a pointer at `offset` pointing to another memory930/// region of type `pointee`. 
/// `readonly` indicates whether the
    /// PCC-checked code is expected to update this field or not.
    fn add_field_to_memtype(
        &self,
        func: &mut ir::Function,
        memtype: ir::MemoryType,
        offset: u32,
        pointee: ir::MemoryType,
        readonly: bool,
    ) {
        let ptr_size = self.pointer_type().bytes();
        match &mut func.memory_types[memtype] {
            ir::MemoryTypeData::Struct { size, fields } => {
                // Grow the struct, if needed, so that it covers this whole
                // pointer-sized field.
                *size = std::cmp::max(*size, offset.checked_add(ptr_size).unwrap().into());
                fields.push(ir::MemoryTypeField {
                    ty: self.pointer_type(),
                    offset: offset.into(),
                    readonly,
                    // The field is a non-null pointer to exactly the start
                    // (offset 0) of a region described by `pointee`.
                    fact: Some(ir::Fact::Mem {
                        ty: pointee,
                        min_offset: 0,
                        max_offset: 0,
                        nullable: false,
                    }),
                });

                // Sort fields by offset -- we need to do this now
                // because we may create an arbitrary number of
                // memtypes for imported memories and we don't
                // otherwise track them.
                fields.sort_by_key(|f| f.offset);
            }
            _ => panic!("Cannot add field to non-struct memtype"),
        }
    }

    /// Create an `ir::Global` that does `load(ptr + offset)` and, when PCC and
    /// memory types are enabled, adds a field to the pointer's memory type for
    /// this value we are loading.
    ///
    /// Returns the new global value along with the memtype describing what it
    /// points to (the latter only when `ptr_mem_ty` was provided).
    pub(crate) fn global_load_with_memory_type(
        &mut self,
        func: &mut ir::Function,
        ptr: ir::GlobalValue,
        offset: u32,
        flags: ir::MemFlags,
        ptr_mem_ty: Option<ir::MemoryType>,
    ) -> (ir::GlobalValue, Option<ir::MemoryType>) {
        let pointee = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(i32::try_from(offset).unwrap()),
            global_type: self.pointer_type(),
            flags,
        });

        let pointee_mem_ty = ptr_mem_ty.map(|ptr_mem_ty| {
            // Describe the pointee with a fresh (empty, for now) memtype and
            // record in `ptr_mem_ty` that this offset holds a pointer to it.
            let pointee_mem_ty = self.create_empty_struct_memtype(func);
            self.add_field_to_memtype(func, ptr_mem_ty, offset, pointee_mem_ty, flags.readonly());
            func.global_value_facts[pointee] = Some(Fact::Mem {
                ty: pointee_mem_ty,
                min_offset: 0,
                max_offset: 0,
                nullable: false,
            });
            pointee_mem_ty
        });

        (pointee, pointee_mem_ty)
    }

    /// Like
/// `global_load_with_memory_type` but specialized for loads out of the
    /// `vmctx`.
    pub(crate) fn global_load_from_vmctx_with_memory_type(
        &mut self,
        func: &mut ir::Function,
        offset: u32,
        flags: ir::MemFlags,
    ) -> (ir::GlobalValue, Option<ir::MemoryType>) {
        let vmctx = self.vmctx(func);
        self.global_load_with_memory_type(func, vmctx, offset, flags, self.pcc_vmctx_memtype)
    }

    /// Helper to emit a conditional trap based on `trap_cond`.
    ///
    /// This should only be used if `self.clif_instruction_traps_enabled()` is
    /// false, otherwise native CLIF instructions should be used instead.
    pub fn conditionally_trap(
        &mut self,
        builder: &mut FunctionBuilder,
        trap_cond: ir::Value,
        trap: ir::TrapCode,
    ) {
        assert!(!self.clif_instruction_traps_enabled());

        let trap_block = builder.create_block();
        builder.set_cold_block(trap_block);
        let continuation_block = builder.create_block();

        builder
            .ins()
            .brif(trap_cond, trap_block, &[], continuation_block, &[]);

        // Both new blocks have exactly one predecessor (the `brif` above), so
        // they can be sealed immediately.
        builder.seal_block(trap_block);
        builder.seal_block(continuation_block);

        builder.switch_to_block(trap_block);
        self.trap(builder, trap);
        builder.switch_to_block(continuation_block);
    }

    /// Helper used when `!self.clif_instruction_traps_enabled()` is enabled to
    /// test whether the divisor is zero.
    fn guard_zero_divisor(&mut self, builder: &mut FunctionBuilder, rhs: ir::Value) {
        if self.clif_instruction_traps_enabled() {
            return;
        }
        self.trapz(builder, rhs, ir::TrapCode::INTEGER_DIVISION_BY_ZERO);
    }

    /// Helper used when `!self.clif_instruction_traps_enabled()` is enabled to
    /// test whether a signed division operation will raise a trap.
    fn guard_signed_divide(
        &mut self,
        builder: &mut FunctionBuilder,
        lhs: ir::Value,
        rhs: ir::Value,
    ) {
        if self.clif_instruction_traps_enabled() {
            return;
        }
        // A zero divisor traps unconditionally...
        self.trapz(builder, rhs,
ir::TrapCode::INTEGER_DIVISION_BY_ZERO);

        // ...and `INT_MIN / -1` traps with an integer-overflow error since
        // the mathematical result is not representable.
        let ty = builder.func.dfg.value_type(rhs);
        let minus_one = builder.ins().iconst(ty, -1);
        let rhs_is_minus_one = builder.ins().icmp(IntCC::Equal, rhs, minus_one);
        let int_min = builder.ins().iconst(
            ty,
            match ty {
                I32 => i64::from(i32::MIN),
                I64 => i64::MIN,
                _ => unreachable!(),
            },
        );
        let lhs_is_int_min = builder.ins().icmp(IntCC::Equal, lhs, int_min);
        let is_integer_overflow = builder.ins().band(rhs_is_minus_one, lhs_is_int_min);
        self.conditionally_trap(builder, is_integer_overflow, ir::TrapCode::INTEGER_OVERFLOW);
    }

    /// Helper used when `!self.clif_instruction_traps_enabled()` is enabled to
    /// guard the traps from float-to-int conversions.
    fn guard_fcvt_to_int(
        &mut self,
        builder: &mut FunctionBuilder,
        ty: ir::Type,
        val: ir::Value,
        signed: bool,
    ) {
        assert!(!self.clif_instruction_traps_enabled());
        // Work uniformly in f64, promoting an f32 input first.
        let val_ty = builder.func.dfg.value_type(val);
        let val = if val_ty == F64 {
            val
        } else {
            builder.ins().fpromote(F64, val)
        };
        // NaN can never be converted (`x != x` is true only for NaN).
        let isnan = builder.ins().fcmp(FloatCC::NotEqual, val, val);
        self.trapnz(builder, isnan, ir::TrapCode::BAD_CONVERSION_TO_INTEGER);
        // Truncate away the fraction, then compare against the exclusive
        // bounds of the destination integer type.
        let val = self.trunc_f64(builder, val);
        let (lower_bound, upper_bound) = f64_cvt_to_int_bounds(signed, ty.bits());
        let lower_bound = builder.ins().f64const(lower_bound);
        let too_small = builder
            .ins()
            .fcmp(FloatCC::LessThanOrEqual, val, lower_bound);
        self.trapnz(builder, too_small, ir::TrapCode::INTEGER_OVERFLOW);
        let upper_bound = builder.ins().f64const(upper_bound);
        let too_large = builder
            .ins()
            .fcmp(FloatCC::GreaterThanOrEqual, val, upper_bound);
        self.trapnz(builder, too_large, ir::TrapCode::INTEGER_OVERFLOW);
    }

    /// Get the `ir::Type` for a `VMSharedTypeIndex`.
    pub(crate) fn vmshared_type_index_ty(&self) -> Type {
        Type::int_with_byte_size(self.offsets.size_of_vmshared_type_index().into()).unwrap()
    }

    ///
Given a `ModuleInternedTypeIndex`, emit code to get the corresponding1115/// `VMSharedTypeIndex` at runtime.1116pub(crate) fn module_interned_to_shared_ty(1117&mut self,1118pos: &mut FuncCursor,1119interned_ty: ModuleInternedTypeIndex,1120) -> ir::Value {1121let vmctx = self.vmctx_val(pos);1122let pointer_type = self.pointer_type();1123let mem_flags = ir::MemFlags::trusted().with_readonly().with_can_move();11241125// Load the base pointer of the array of `VMSharedTypeIndex`es.1126let shared_indices = pos.ins().load(1127pointer_type,1128mem_flags,1129vmctx,1130i32::from(self.offsets.ptr.vmctx_type_ids_array()),1131);11321133// Calculate the offset in that array for this type's entry.1134let ty = self.vmshared_type_index_ty();1135let offset = i32::try_from(interned_ty.as_u32().checked_mul(ty.bytes()).unwrap()).unwrap();11361137// Load the`VMSharedTypeIndex` that this `ModuleInternedTypeIndex` is1138// associated with at runtime from the array.1139pos.ins().load(ty, mem_flags, shared_indices, offset)1140}11411142/// Load the associated `VMSharedTypeIndex` from inside a `*const VMFuncRef`.1143///1144/// Does not check for null; just assumes that the `funcref` is a valid1145/// pointer.1146pub(crate) fn load_funcref_type_index(1147&mut self,1148pos: &mut FuncCursor,1149mem_flags: ir::MemFlags,1150funcref: ir::Value,1151) -> ir::Value {1152let ty = self.vmshared_type_index_ty();1153pos.ins().load(1154ty,1155mem_flags,1156funcref,1157i32::from(self.offsets.ptr.vm_func_ref_type_index()),1158)1159}11601161/// Does this function need a GC heap?1162pub fn needs_gc_heap(&self) -> bool {1163self.needs_gc_heap1164}11651166/// Get the number of Wasm parameters for the given function.1167pub(crate) fn num_params_for_func(&self, function_index: FuncIndex) -> usize {1168let ty = self.module.functions[function_index]1169.signature1170.unwrap_module_type_index();1171self.types[ty].unwrap_func().params().len()1172}11731174/// Get the number of Wasm parameters for the given function 
type.1175///1176/// Panics on non-function types.1177pub(crate) fn num_params_for_function_type(&self, type_index: TypeIndex) -> usize {1178let ty = self.module.types[type_index].unwrap_module_type_index();1179self.types[ty].unwrap_func().params().len()1180}1181}11821183#[derive(Default)]1184pub(crate) struct WasmEntities {1185/// Map from a Wasm global index from this module to its implementation in1186/// the Cranelift function we are building.1187pub(crate) globals: SecondaryMap<GlobalIndex, Option<GlobalVariable>>,11881189/// Map from a Wasm memory index to its `Heap` implementation in the1190/// Cranelift function we are building.1191pub(crate) memories: SecondaryMap<MemoryIndex, PackedOption<Heap>>,11921193/// Map from an (interned) Wasm type index from this module to its1194/// `ir::SigRef` in the Cranelift function we are building.1195pub(crate) sig_refs: SecondaryMap<ModuleInternedTypeIndex, PackedOption<ir::SigRef>>,11961197/// Map from a defined Wasm function index to its associated function1198/// reference in the Cranelift function we are building.1199pub(crate) defined_func_refs: SecondaryMap<DefinedFuncIndex, PackedOption<ir::FuncRef>>,12001201/// Map from an imported Wasm function index for which we statically know1202/// which function will always be used to satisfy that import to its1203/// associated function reference in the Cranelift function we are building.1204pub(crate) imported_func_refs: SecondaryMap<FuncIndex, PackedOption<ir::FuncRef>>,12051206/// Map from a Wasm table index to its associated implementation in the1207/// Cranelift function we are building.1208pub(crate) tables: SecondaryMap<TableIndex, Option<TableData>>,1209}12101211macro_rules! 
define_get_or_create_methods {
    ( $( $name:ident ( $map:ident ) : $create:ident : $key:ty => $val:ty ; )* ) => {
        $(
            // Get the cached entity for `key`, creating and caching it via
            // `self.$create` the first time it is requested.
            pub(crate) fn $name(&mut self, func: &mut ir::Function, key: $key) -> $val {
                match self.entities.$map[key].clone().into() {
                    Some(val) => val,
                    None => {
                        let val = self.$create(func, key);
                        self.entities.$map[key] = Some(val.clone()).into();
                        val
                    }
                }
            }
        )*
    };
}

impl FuncEnvironment<'_> {
    define_get_or_create_methods! {
        get_or_create_global(globals) : make_global : GlobalIndex => GlobalVariable;
        get_or_create_heap(memories) : make_heap : MemoryIndex => Heap;
        get_or_create_interned_sig_ref(sig_refs) : make_sig_ref : ModuleInternedTypeIndex => ir::SigRef;
        get_or_create_defined_func_ref(defined_func_refs) : make_defined_func_ref : DefinedFuncIndex => ir::FuncRef;
        get_or_create_imported_func_ref(imported_func_refs) : make_imported_func_ref : FuncIndex => ir::FuncRef;
        get_or_create_table(tables) : make_table : TableIndex => TableData;
    }

    /// Construct the `GlobalVariable` implementation for the given Wasm
    /// global.
    fn make_global(&mut self, func: &mut ir::Function, index: GlobalIndex) -> GlobalVariable {
        let ty = self.module.globals[index].wasm_ty;

        if ty.is_vmgcref_type() {
            // Although reference-typed globals live at the same memory location as
            // any other type of global at the same index would, getting or
            // setting them requires ref counting barriers.
// Therefore, we need to use `GlobalVariable::Custom`, as that is
            // the only kind of `GlobalVariable` for which translation supports
            // custom access translation.
            return GlobalVariable::Custom;
        }

        let (gv, offset) = self.get_global_location(func, index);
        GlobalVariable::Memory {
            gv,
            offset: offset.into(),
            ty: super::value_type(self.isa, ty),
        }
    }

    /// Get or create the `ir::SigRef` for the given Wasm type index.
    pub(crate) fn get_or_create_sig_ref(
        &mut self,
        func: &mut ir::Function,
        ty: TypeIndex,
    ) -> ir::SigRef {
        let ty = self.module.types[ty].unwrap_module_type_index();
        self.get_or_create_interned_sig_ref(func, ty)
    }

    /// Import the native calling-convention signature for the given interned
    /// Wasm function type, remembering the Wasm-level type it came from.
    fn make_sig_ref(
        &mut self,
        func: &mut ir::Function,
        index: ModuleInternedTypeIndex,
    ) -> ir::SigRef {
        let wasm_func_ty = self.types[index].unwrap_func();
        let sig = crate::wasm_call_signature(self.isa, wasm_func_ty, &self.tunables);
        let sig_ref = func.import_signature(sig);
        self.sig_ref_to_ty[sig_ref] = Some(wasm_func_ty);
        sig_ref
    }

    /// Create the `ir::FuncRef` for a function defined in this module.
    fn make_defined_func_ref(
        &mut self,
        func: &mut ir::Function,
        def_func_index: DefinedFuncIndex,
    ) -> ir::FuncRef {
        let func_index = self.module.func_index(def_func_index);

        let ty = self.module.functions[func_index]
            .signature
            .unwrap_module_type_index();
        let signature = self.get_or_create_interned_sig_ref(func, ty);

        // Name the callee with its stable `FuncKey` so linking can resolve it.
        let key = FuncKey::DefinedWasmFunction(self.translation.module_index, def_func_index);
        let (namespace, index) = key.into_raw_parts();
        let name = ir::ExternalName::User(
            func.declare_imported_user_function(ir::UserExternalName { namespace, index }),
        );

        func.import_function(ir::ExtFuncData {
            name,
            signature,
            // Defined in the same module, so the callee is near the caller.
            colocated: true,
        })
    }

    /// Create the `ir::FuncRef` for an imported function whose definition we
    /// statically know.
    fn make_imported_func_ref(
        &mut self,
        func: &mut ir::Function,
        func_index: FuncIndex,
    ) -> ir::FuncRef {
        assert!(self.module.is_imported_function(func_index));
        assert!(self.translation.known_imported_functions[func_index].is_some());

        let
ty = self.module.functions[func_index]
            .signature
            .unwrap_module_type_index();
        let signature = self.get_or_create_interned_sig_ref(func, ty);

        // Name the statically-known callee with its stable `FuncKey`.
        let (module, def_func_index) =
            self.translation.known_imported_functions[func_index].unwrap();
        let key = FuncKey::DefinedWasmFunction(module, def_func_index);
        let (namespace, index) = key.into_raw_parts();
        let name = ir::ExternalName::User(
            func.declare_imported_user_function(ir::UserExternalName { namespace, index }),
        );

        func.import_function(ir::ExtFuncData {
            name,
            signature,
            colocated: true,
        })
    }

    /// Construct the `Heap` implementation for the given Wasm memory.
    fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> Heap {
        let pointer_type = self.pointer_type();
        let memory = self.module.memories[index];
        let is_shared = memory.shared;

        // Find the `VMMemoryDefinition` for this memory: either inline in our
        // own vmctx (owned, non-shared memories) or behind a pointer (shared
        // or imported memories).
        let (base_ptr, base_offset, current_length_offset, ptr_memtype) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.defined_memory_index(index) {
                if is_shared {
                    // As with imported memory, the `VMMemoryDefinition` for a
                    // shared memory is stored elsewhere. We store a `*mut
                    // VMMemoryDefinition` to it and dereference that when
                    // atomically growing it.
                    let from_offset = self.offsets.vmctx_vmmemory_pointer(def_index);
                    let (memory, def_mt) = self.global_load_from_vmctx_with_memory_type(
                        func,
                        from_offset,
                        ir::MemFlags::trusted().with_readonly().with_can_move(),
                    );
                    let base_offset = i32::from(self.offsets.ptr.vmmemory_definition_base());
                    let current_length_offset =
                        i32::from(self.offsets.ptr.vmmemory_definition_current_length());
                    (memory, base_offset, current_length_offset, def_mt)
                } else {
                    // Owned, non-shared memory: the definition lives directly
                    // in our vmctx.
                    let owned_index = self.module.owned_memory_index(def_index);
                    let owned_base_offset =
                        self.offsets.vmctx_vmmemory_definition_base(owned_index);
                    let owned_length_offset = self
                        .offsets
                        .vmctx_vmmemory_definition_current_length(owned_index);
                    let current_base_offset = i32::try_from(owned_base_offset).unwrap();
                    let current_length_offset = i32::try_from(owned_length_offset).unwrap();
                    (
                        vmctx,
                        current_base_offset,
                        current_length_offset,
                        self.pcc_vmctx_memtype,
                    )
                }
            } else {
                // Imported memory: dereference the `*mut VMMemoryDefinition`
                // stored in our vmctx.
                let from_offset = self.offsets.vmctx_vmmemory_import_from(index);
                let (memory, def_mt) = self.global_load_from_vmctx_with_memory_type(
                    func,
                    from_offset,
                    ir::MemFlags::trusted().with_readonly().with_can_move(),
                );
                let base_offset = i32::from(self.offsets.ptr.vmmemory_definition_base());
                let current_length_offset =
                    i32::from(self.offsets.ptr.vmmemory_definition_current_length());
                (memory, base_offset, current_length_offset, def_mt)
            }
        };

        // Global value producing the memory's current byte length.
        let bound = func.create_global_value(ir::GlobalValueData::Load {
            base: base_ptr,
            offset: Offset32::new(current_length_offset),
            global_type: pointer_type,
            flags: MemFlags::trusted(),
        });

        let (base_fact, pcc_memory_type) = self.make_pcc_base_fact_and_type_for_memory(
            func,
            memory,
            base_offset,
            current_length_offset,
            ptr_memtype,
            bound,
        );

        let base = self.make_heap_base(func, memory, base_ptr, base_offset, base_fact);

        self.heaps.push(HeapData {
            base,
            bound,
            pcc_memory_type,
            memory,
        })
    }

    /// Create the global value that loads this memory's base address out of
    /// `ptr + offset`, attaching the given PCC fact, if any.
    pub(crate) fn make_heap_base(
        &self,
        func: &mut Function,
        memory: Memory,
        ptr: ir::GlobalValue,
        offset: i32,
        fact: Option<Fact>,
    ) -> ir::GlobalValue {
        let pointer_type = self.pointer_type();

        let mut flags = ir::MemFlags::trusted().with_checked().with_can_move();
        if !memory.memory_may_move(self.tunables) {
            // The base can never change at runtime, so loads of it may be
            // freely deduplicated/hoisted.
            flags.set_readonly();
        }

        let heap_base = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(offset),
            global_type: pointer_type,
            flags,
        });
        func.global_value_facts[heap_base] = fact;
        heap_base
    }

    /// When PCC is enabled, compute the fact for a memory's base pointer and
    /// the memtype describing the memory's data region.
    pub(crate) fn make_pcc_base_fact_and_type_for_memory(
        &mut self,
        func: &mut Function,
        memory: Memory,
        base_offset: i32,
        current_length_offset: i32,
        ptr_memtype: Option<ir::MemoryType>,
        heap_bound: ir::GlobalValue,
    ) -> (Option<Fact>, Option<ir::MemoryType>) {
        // If we have a declared maximum, we can make this a "static" heap, which is
        // allocated up front and never moved.
        let host_page_size_log2 = self.target_config().page_size_align_log2;
        let (base_fact, memory_type) = if !memory
            .can_elide_bounds_check(self.tunables, host_page_size_log2)
        {
            if let Some(ptr_memtype) = ptr_memtype {
                // Create a memtype representing the untyped memory region.
                let data_mt = func.create_memory_type(ir::MemoryTypeData::DynamicMemory {
                    gv: heap_bound,
                    size: self.tunables.memory_guard_size,
                });
                // This fact applies to any pointer to the start of the memory.
                let base_fact = ir::Fact::dynamic_base_ptr(data_mt);
                // This fact applies to the length.
                let length_fact = ir::Fact::global_value(
                    u16::try_from(self.isa.pointer_type().bits()).unwrap(),
                    heap_bound,
                );
                // Create a field in the vmctx for the base pointer.
                match &mut
func.memory_types[ptr_memtype] {
                    ir::MemoryTypeData::Struct { size, fields } => {
                        let base_offset = u64::try_from(base_offset).unwrap();
                        fields.push(ir::MemoryTypeField {
                            offset: base_offset,
                            ty: self.isa.pointer_type(),
                            // Read-only field from the PoV of PCC checks:
                            // don't allow stores to this field. (Even if
                            // it is a dynamic memory whose base can
                            // change, that update happens inside the
                            // runtime, not in generated code.)
                            readonly: true,
                            fact: Some(base_fact.clone()),
                        });
                        let current_length_offset = u64::try_from(current_length_offset).unwrap();
                        fields.push(ir::MemoryTypeField {
                            offset: current_length_offset,
                            ty: self.isa.pointer_type(),
                            // As above, read-only; only the runtime modifies it.
                            readonly: true,
                            fact: Some(length_fact),
                        });

                        // Grow the vmctx struct so that it covers both of the
                        // fields just added.
                        let pointer_size = u64::from(self.isa.pointer_type().bytes());
                        let fields_end = std::cmp::max(
                            base_offset + pointer_size,
                            current_length_offset + pointer_size,
                        );
                        *size = std::cmp::max(*size, fields_end);
                    }
                    _ => {
                        panic!("Bad memtype");
                    }
                }
                // Apply a fact to the base pointer.
                (Some(base_fact), Some(data_mt))
            } else {
                (None, None)
            }
        } else {
            if let Some(ptr_memtype) = ptr_memtype {
                // Create a memtype representing the untyped memory region.
                let data_mt = func.create_memory_type(ir::MemoryTypeData::Memory {
                    size: self
                        .tunables
                        .memory_reservation
                        .checked_add(self.tunables.memory_guard_size)
                        .expect("Memory plan has overflowing size plus guard"),
                });
                // This fact applies to any pointer to the start of the memory.
                let base_fact = Fact::Mem {
                    ty: data_mt,
                    min_offset: 0,
                    max_offset: 0,
                    nullable: false,
                };
                // Create a field in the vmctx for the base pointer.
                match &mut func.memory_types[ptr_memtype] {
                    ir::MemoryTypeData::Struct { size, fields } => {
                        let offset = u64::try_from(base_offset).unwrap();
                        fields.push(ir::MemoryTypeField {
                            offset,
                            ty: self.isa.pointer_type(),
                            // Read-only field from the PoV of PCC checks:
                            // don't allow stores to this field. (Even if
                            // it is a dynamic memory whose base can
                            // change, that update happens inside the
                            // runtime, not in generated code.)
                            readonly: true,
                            fact: Some(base_fact.clone()),
                        });
                        *size = std::cmp::max(
                            *size,
                            offset + u64::from(self.isa.pointer_type().bytes()),
                        );
                    }
                    _ => {
                        panic!("Bad memtype");
                    }
                }
                // Apply a fact to the base pointer.
                (Some(base_fact), Some(data_mt))
            } else {
                (None, None)
            }
        };
        (base_fact, memory_type)
    }

    /// Construct the `TableData` implementation for the given Wasm table.
    fn make_table(&mut self, func: &mut ir::Function, index: TableIndex) -> TableData {
        let pointer_type = self.pointer_type();

        // Find the `VMTableDefinition`: inline in our vmctx for tables defined
        // in this module, or behind a pointer for imported tables.
        let (ptr, base_offset, current_elements_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.defined_table_index(index) {
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmtable_definition_base(def_index)).unwrap();
                let current_elements_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmtable_definition_current_elements(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_elements_offset)
            } else {
                let from_offset = self.offsets.vmctx_vmtable_from(index);
                let table = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    flags: MemFlags::trusted().with_readonly().with_can_move(),
                });
                let base_offset = i32::from(self.offsets.vmtable_definition_base());
                let current_elements_offset =
                    i32::from(self.offsets.vmtable_definition_current_elements());
                (table, base_offset, current_elements_offset)
            }
        };

        let table = &self.module.tables[index];
        let element_size = if table.ref_type.is_vmgcref_type() {
            // For GC-managed references, tables store `Option<VMGcRef>`s.
            ir::types::I32.bytes()
        } else
{
            self.reference_type(table.ref_type.heap_type).0.bytes()
        };

        // The table's base-address global value: loads of it are only marked
        // readonly when the table can never be resized.
        let base_gv = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            flags: if Some(table.limits.min) == table.limits.max {
                // A fixed-size table can't be resized so its base address won't
                // change.
                MemFlags::trusted().with_readonly().with_can_move()
            } else {
                MemFlags::trusted()
            },
        });

        // Fixed-size tables get a static bound; otherwise the current element
        // count is reloaded from the `VMTableDefinition` when needed.
        let bound = if Some(table.limits.min) == table.limits.max {
            TableSize::Static {
                bound: table.limits.min,
            }
        } else {
            TableSize::Dynamic {
                bound_gv: func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_elements_offset),
                    global_type: ir::Type::int(
                        u16::from(self.offsets.size_of_vmtable_definition_current_elements()) * 8,
                    )
                    .unwrap(),
                    flags: MemFlags::trusted(),
                }),
            }
        };

        TableData {
            base_gv,
            bound,
            element_size,
        }
    }

    /// Get the type index associated with an exception object.
    #[cfg(feature = "gc")]
    pub(crate) fn exception_type_from_tag(&self, tag: TagIndex) -> EngineOrModuleTypeIndex {
        self.module.tags[tag].exception
    }

    /// Get the parameter arity of the associated function type for the given tag.
    pub(crate) fn tag_param_arity(&self, tag: TagIndex) -> usize {
        let func_ty = self.module.tags[tag].signature.unwrap_module_type_index();
        let func_ty = self
            .types
            .unwrap_func(func_ty)
            .expect("already validated to refer to a function type");
        func_ty.params().len()
    }

    /// Get the runtime instance ID and defined-tag ID in that
    /// instance for a particular static tag ID.
    #[cfg(feature = "gc")]
    pub(crate) fn get_instance_and_tag(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        tag_index: TagIndex,
    ) -> (ir::Value, ir::Value) {
        if let Some(defined_tag_index) = self.module.defined_tag_index(tag_index) {
            // Our own tag -- we
// only need to get our instance ID.
            let builtin = self.builtin_functions.get_instance_id(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let call = builder.ins().call(builtin, &[vmctx]);
            let instance_id = builder.func.dfg.inst_results(call)[0];
            // The defined-tag index is a compile-time constant.
            let tag_id = builder
                .ins()
                .iconst(I32, i64::from(defined_tag_index.as_u32()));
            (instance_id, tag_id)
        } else {
            // An imported tag -- we need to load the VMTagImport struct.
            let vmctx_tag_vmctx_offset = self.offsets.vmctx_vmtag_import_vmctx(tag_index);
            let vmctx_tag_index_offset = self.offsets.vmctx_vmtag_import_index(tag_index);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let pointer_type = self.pointer_type();
            let from_vmctx = builder.ins().load(
                pointer_type,
                MemFlags::trusted().with_readonly(),
                vmctx,
                i32::try_from(vmctx_tag_vmctx_offset).unwrap(),
            );
            let index = builder.ins().load(
                I32,
                MemFlags::trusted().with_readonly(),
                vmctx,
                i32::try_from(vmctx_tag_index_offset).unwrap(),
            );
            // Resolve the defining instance's runtime ID via its vmctx.
            let builtin = self.builtin_functions.get_instance_id(builder.func);
            let call = builder.ins().call(builtin, &[from_vmctx]);
            let from_instance_id = builder.func.dfg.inst_results(call)[0];
            (from_instance_id, index)
        }
    }
}

/// State for translating a single Wasm-level call (direct or indirect) in the
/// function currently being built.
struct Call<'a, 'func, 'module_env> {
    builder: &'a mut FunctionBuilder<'func>,
    env: &'a mut FuncEnvironment<'module_env>,
    /// Exception handlers (keyed by optional tag) in scope for this call.
    handlers: Vec<(Option<ExceptionTag>, Block)>,
    /// Whether this call should be compiled as a tail call.
    tail: bool,
}

/// Result of deciding whether an indirect call needs a runtime signature
/// check.
enum CheckIndirectCallTypeSignature {
    /// A runtime signature check is required.
    Runtime,
    /// The signature statically matches; no runtime check is needed.
    StaticMatch {
        /// Whether or not the funcref may be null or if it's statically known
        /// to not be null.
        may_be_null: bool,
    },
    /// The signature can never match; trapping code has been emitted.
    StaticTrap,
}

type CallRets = SmallVec<[ir::Value; 4]>;

impl<'a, 'func, 'module_env> Call<'a, 'func, 'module_env> {
    /// Create a new `Call` site that will do regular, non-tail calls.
    pub fn new(
        builder: &'a mut FunctionBuilder<'func>,
        env: &'a mut
FuncEnvironment<'module_env>,
        handlers: impl IntoIterator<Item = (Option<ExceptionTag>, Block)>,
    ) -> Self {
        let handlers = handlers.into_iter().collect();
        Call {
            builder,
            env,
            handlers,
            tail: false,
        }
    }

    /// Create a new `Call` site that will perform tail calls.
    pub fn new_tail(
        builder: &'a mut FunctionBuilder<'func>,
        env: &'a mut FuncEnvironment<'module_env>,
    ) -> Self {
        Call {
            builder,
            env,
            // Tail calls transfer control out of the caller, so no exception
            // handlers apply at this site.
            handlers: vec![],
            tail: true,
        }
    }

    /// Do a Wasm-level direct call to the given callee function.
    pub fn direct_call(
        mut self,
        callee_index: FuncIndex,
        sig_ref: ir::SigRef,
        call_args: &[ir::Value],
    ) -> WasmResult<CallRets> {
        // Two extra leading arguments: callee vmctx and caller vmctx.
        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);
        let caller_vmctx = self
            .builder
            .func
            .special_param(ArgumentPurpose::VMContext)
            .unwrap();

        // Handle direct calls to locally-defined functions.
        if let Some(def_func_index) = self.env.module.defined_func_index(callee_index) {
            // First append the callee vmctx address, which is the same as the caller vmctx in
            // this case.
            real_call_args.push(caller_vmctx);

            // Then append the caller vmctx address.
            real_call_args.push(caller_vmctx);

            // Then append the regular call arguments.
            real_call_args.extend_from_slice(call_args);

            // Finally, make the direct call!
            let callee = self
                .env
                .get_or_create_defined_func_ref(self.builder.func, def_func_index);
            return Ok(self.direct_call_inst(callee, &real_call_args));
        }

        // Handle direct calls to imported functions. We use an indirect call
        // so that we don't have to patch the code at runtime.
        let pointer_type = self.env.pointer_type();
        let vmctx = self.env.vmctx(self.builder.func);
        let base = self.builder.ins().global_value(pointer_type, vmctx);

        let mem_flags = ir::MemFlags::trusted().with_readonly().with_can_move();

        // Offset of the callee's `wasm_call` code pointer in our vmctx.
        let body_offset = i32::try_from(
            self.env
                .offsets
                .vmctx_vmfunction_import_wasm_call(callee_index),
        )
        .unwrap();

        // First append the callee vmctx address.
        let vmctx_offset =
            i32::try_from(self.env.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
        let callee_vmctx = self
            .builder
            .ins()
            .load(pointer_type, mem_flags, base, vmctx_offset);
        real_call_args.push(callee_vmctx);
        real_call_args.push(caller_vmctx);

        // Then append the regular call arguments.
        real_call_args.extend_from_slice(call_args);

        // If we statically know the imported function (e.g. this is a
        // component-to-component call where we statically know both components)
        // then we can actually still make a direct call (although we do have to
        // pass the callee's vmctx that we just loaded, not our own).
// Otherwise, we really do an indirect call.
        if self.env.translation.known_imported_functions[callee_index].is_some() {
            let callee = self
                .env
                .get_or_create_imported_func_ref(self.builder.func, callee_index);
            Ok(self.direct_call_inst(callee, &real_call_args))
        } else {
            let func_addr = self
                .builder
                .ins()
                .load(pointer_type, mem_flags, base, body_offset);
            Ok(self.indirect_call_inst(sig_ref, func_addr, &real_call_args))
        }
    }

    /// Do a Wasm-level indirect call through the given funcref table.
    ///
    /// Returns `Ok(None)` when the call site is statically unreachable (a
    /// trap has already been emitted).
    pub fn indirect_call(
        mut self,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        sig_ref: ir::SigRef,
        callee: ir::Value,
        call_args: &[ir::Value],
    ) -> WasmResult<Option<CallRets>> {
        let (code_ptr, callee_vmctx) = match self.check_and_load_code_and_callee_vmctx(
            features,
            table_index,
            ty_index,
            callee,
            false,
        )? {
            Some(pair) => pair,
            None => return Ok(None),
        };

        self.unchecked_call_impl(sig_ref, code_ptr, callee_vmctx, call_args)
            .map(Some)
    }

    /// Check the callee's type (when necessary) and load its code pointer and
    /// vmctx out of the funcref fetched from the table.
    ///
    /// Returns `None` if a trap was statically emitted, in which case the
    /// following code is unreachable.
    fn check_and_load_code_and_callee_vmctx(
        &mut self,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        callee: ir::Value,
        cold_blocks: bool,
    ) -> WasmResult<Option<(ir::Value, ir::Value)>> {
        // Get the funcref pointer from the table.
        let funcref_ptr = self.env.get_or_init_func_ref_table_elem(
            self.builder,
            table_index,
            callee,
            cold_blocks,
        );

        // If necessary, check the signature.
        let check =
            self.check_indirect_call_type_signature(features, table_index, ty_index, funcref_ptr);

        let trap_code = match check {
            // `funcref_ptr` is checked at runtime that its type matches,
            // meaning that if code gets this far it's guaranteed to not be
            // null. That means nothing in `unchecked_call` can fail.
            CheckIndirectCallTypeSignature::Runtime => None,

            // No type check was performed on `funcref_ptr` because it's
            // statically known to have the right type. Note that whether or
            // not the function is null is not necessarily tested so far since
            // no type information was inspected.
            //
            // If the table may hold null functions, then further loads in
            // `unchecked_call` may fail. If the table only holds non-null
            // functions, though, then there's no possibility of a trap.
            CheckIndirectCallTypeSignature::StaticMatch { may_be_null } => {
                if may_be_null {
                    Some(crate::TRAP_INDIRECT_CALL_TO_NULL)
                } else {
                    None
                }
            }

            // Code has already trapped, so return nothing indicating that this
            // is now unreachable code.
            CheckIndirectCallTypeSignature::StaticTrap => return Ok(None),
        };

        Ok(Some(self.load_code_and_vmctx(funcref_ptr, trap_code)))
    }

    /// Decide whether an indirect call through table `table_index` claiming
    /// type `ty_index` needs a runtime signature check, emitting any
    /// statically-known traps along the way.
    fn check_indirect_call_type_signature(
        &mut self,
        features: &WasmFeatures,
        table_index: TableIndex,
        ty_index: TypeIndex,
        funcref_ptr: ir::Value,
    ) -> CheckIndirectCallTypeSignature {
        let table = &self.env.module.tables[table_index];
        let sig_id_size = self.env.offsets.size_of_vmshared_type_index();
        let sig_id_type = Type::int(u16::from(sig_id_size) * 8).unwrap();

        // Test if a type check is necessary for this table. If this table is a
        // table of typed functions and that type matches `ty_index`, then
        // there's no need to perform a typecheck.
        match table.ref_type.heap_type {
            // Functions do not have a statically known type in the table, a
            // typecheck is required. Fall through to below to perform the
            // actual typecheck.
            WasmHeapType::Func => {}

            // Functions that have a statically known type are either going to
            // always succeed or always fail.
Figure out by inspecting the types1916// further.1917WasmHeapType::ConcreteFunc(EngineOrModuleTypeIndex::Module(table_ty)) => {1918// If `ty_index` matches `table_ty`, then this call is1919// statically known to have the right type, so no checks are1920// necessary.1921let specified_ty = self.env.module.types[ty_index].unwrap_module_type_index();1922if specified_ty == table_ty {1923return CheckIndirectCallTypeSignature::StaticMatch {1924may_be_null: table.ref_type.nullable,1925};1926}19271928if features.gc() {1929// If we are in the Wasm GC world, then we need to perform1930// an actual subtype check at runtime. Fall through to below1931// to do that.1932} else {1933// Otherwise if the types don't match then either (a) this1934// is a null pointer or (b) it's a pointer with the wrong1935// type. Figure out which and trap here.1936//1937// If it's possible to have a null here then try to load the1938// type information. If that fails due to the function being1939// a null pointer, then this was a call to null. 
Otherwise1940// if it succeeds then we know it won't match, so trap1941// anyway.1942if table.ref_type.nullable {1943if self.env.clif_memory_traps_enabled() {1944self.builder.ins().load(1945sig_id_type,1946ir::MemFlags::trusted()1947.with_readonly()1948.with_trap_code(Some(crate::TRAP_INDIRECT_CALL_TO_NULL)),1949funcref_ptr,1950i32::from(self.env.offsets.ptr.vm_func_ref_type_index()),1951);1952} else {1953self.env.trapz(1954self.builder,1955funcref_ptr,1956crate::TRAP_INDIRECT_CALL_TO_NULL,1957);1958}1959}1960self.env.trap(self.builder, crate::TRAP_BAD_SIGNATURE);1961return CheckIndirectCallTypeSignature::StaticTrap;1962}1963}19641965// Tables of `nofunc` can only be inhabited by null, so go ahead and1966// trap with that.1967WasmHeapType::NoFunc => {1968assert!(table.ref_type.nullable);1969self.env1970.trap(self.builder, crate::TRAP_INDIRECT_CALL_TO_NULL);1971return CheckIndirectCallTypeSignature::StaticTrap;1972}19731974// Engine-indexed types don't show up until runtime and it's a Wasm1975// validation error to perform a call through a non-function table,1976// so these cases are dynamically not reachable.1977WasmHeapType::ConcreteFunc(EngineOrModuleTypeIndex::Engine(_))1978| WasmHeapType::ConcreteFunc(EngineOrModuleTypeIndex::RecGroup(_))1979| WasmHeapType::Extern1980| WasmHeapType::NoExtern1981| WasmHeapType::Any1982| WasmHeapType::Eq1983| WasmHeapType::I311984| WasmHeapType::Array1985| WasmHeapType::ConcreteArray(_)1986| WasmHeapType::Struct1987| WasmHeapType::ConcreteStruct(_)1988| WasmHeapType::Exn1989| WasmHeapType::ConcreteExn(_)1990| WasmHeapType::NoExn1991| WasmHeapType::Cont1992| WasmHeapType::ConcreteCont(_)1993| WasmHeapType::NoCont1994| WasmHeapType::None => {1995unreachable!()1996}1997}19981999// Load the caller's `VMSharedTypeIndex.2000let interned_ty = self.env.module.types[ty_index].unwrap_module_type_index();2001let caller_sig_id = self2002.env2003.module_interned_to_shared_ty(&mut self.builder.cursor(), interned_ty);20042005// Load the 
callee's `VMSharedTypeIndex`.2006//2007// Note that the callee may be null in which case this load may2008// trap. If so use the `TRAP_INDIRECT_CALL_TO_NULL` trap code.2009let mut mem_flags = ir::MemFlags::trusted().with_readonly();2010if self.env.clif_memory_traps_enabled() {2011mem_flags = mem_flags.with_trap_code(Some(crate::TRAP_INDIRECT_CALL_TO_NULL));2012} else {2013self.env2014.trapz(self.builder, funcref_ptr, crate::TRAP_INDIRECT_CALL_TO_NULL);2015}2016let callee_sig_id =2017self.env2018.load_funcref_type_index(&mut self.builder.cursor(), mem_flags, funcref_ptr);20192020// Check that they match: in the case of Wasm GC, this means doing a2021// full subtype check. Otherwise, we do a simple equality check.2022let matches = if features.gc() {2023#[cfg(feature = "gc")]2024{2025self.env2026.is_subtype(self.builder, callee_sig_id, caller_sig_id)2027}2028#[cfg(not(feature = "gc"))]2029{2030unreachable!()2031}2032} else {2033self.builder2034.ins()2035.icmp(IntCC::Equal, callee_sig_id, caller_sig_id)2036};2037self.env2038.trapz(self.builder, matches, crate::TRAP_BAD_SIGNATURE);2039CheckIndirectCallTypeSignature::Runtime2040}20412042/// Call a typed function reference.2043pub fn call_ref(2044self,2045sig_ref: ir::SigRef,2046callee: ir::Value,2047args: &[ir::Value],2048) -> WasmResult<CallRets> {2049// FIXME: the wasm type system tracks enough information to know whether2050// `callee` is a null reference or not. In some situations it can be2051// statically known here that `callee` cannot be null in which case this2052// can be `None` instead. 
This requires feeding type information from2053// wasmparser's validator into this function, however, which is not2054// easily done at this time.2055let callee_load_trap_code = Some(crate::TRAP_NULL_REFERENCE);20562057self.unchecked_call(sig_ref, callee, callee_load_trap_code, args)2058}20592060/// This calls a function by reference without checking the signature.2061///2062/// It gets the function address, sets relevant flags, and passes the2063/// special callee/caller vmctxs. It is used by both call_indirect (which2064/// checks the signature) and call_ref (which doesn't).2065fn unchecked_call(2066mut self,2067sig_ref: ir::SigRef,2068callee: ir::Value,2069callee_load_trap_code: Option<ir::TrapCode>,2070call_args: &[ir::Value],2071) -> WasmResult<CallRets> {2072let (func_addr, callee_vmctx) = self.load_code_and_vmctx(callee, callee_load_trap_code);2073self.unchecked_call_impl(sig_ref, func_addr, callee_vmctx, call_args)2074}20752076fn load_code_and_vmctx(2077&mut self,2078callee: ir::Value,2079callee_load_trap_code: Option<ir::TrapCode>,2080) -> (ir::Value, ir::Value) {2081let pointer_type = self.env.pointer_type();20822083// Dereference callee pointer to get the function address.2084//2085// Note that this may trap if `callee` hasn't previously been verified2086// to be non-null. 
This means that this load is annotated with an2087// optional trap code provided by the caller of `unchecked_call` which2088// will handle the case where this is either already known to be2089// non-null or may trap.2090let mem_flags = ir::MemFlags::trusted().with_readonly();2091let mut callee_flags = mem_flags;2092if self.env.clif_memory_traps_enabled() {2093callee_flags = callee_flags.with_trap_code(callee_load_trap_code);2094} else {2095if let Some(trap) = callee_load_trap_code {2096self.env.trapz(self.builder, callee, trap);2097}2098}2099let func_addr = self.builder.ins().load(2100pointer_type,2101callee_flags,2102callee,2103i32::from(self.env.offsets.ptr.vm_func_ref_wasm_call()),2104);2105let callee_vmctx = self.builder.ins().load(2106pointer_type,2107mem_flags,2108callee,2109i32::from(self.env.offsets.ptr.vm_func_ref_vmctx()),2110);21112112(func_addr, callee_vmctx)2113}21142115fn caller_vmctx(&self) -> ir::Value {2116self.builder2117.func2118.special_param(ArgumentPurpose::VMContext)2119.unwrap()2120}21212122/// This calls a function by reference without checking the2123/// signature, given the raw code pointer to the2124/// Wasm-calling-convention entry point and the callee vmctx.2125fn unchecked_call_impl(2126mut self,2127sig_ref: ir::SigRef,2128func_addr: ir::Value,2129callee_vmctx: ir::Value,2130call_args: &[ir::Value],2131) -> WasmResult<CallRets> {2132let mut real_call_args = Vec::with_capacity(call_args.len() + 2);2133let caller_vmctx = self.caller_vmctx();21342135// First append the callee and caller vmctx addresses.2136real_call_args.push(callee_vmctx);2137real_call_args.push(caller_vmctx);21382139// Then append the regular call arguments.2140real_call_args.extend_from_slice(call_args);21412142Ok(self.indirect_call_inst(sig_ref, func_addr, &real_call_args))2143}21442145fn exception_table(2146&mut self,2147sig: ir::SigRef,2148) -> Option<(ir::ExceptionTable, Block, CallRets)> {2149if self.handlers.len() > 0 {2150let continuation_block = 
self.builder.create_block();2151let mut args = vec![];2152let mut results = smallvec![];2153for i in 0..self.builder.func.dfg.signatures[sig].returns.len() {2154let ty = self.builder.func.dfg.signatures[sig].returns[i].value_type;2155results.push(2156self.builder2157.func2158.dfg2159.append_block_param(continuation_block, ty),2160);2161args.push(BlockArg::TryCallRet(u32::try_from(i).unwrap()));2162}21632164let continuation = self2165.builder2166.func2167.dfg2168.block_call(continuation_block, args.iter());2169let mut handlers = vec![ExceptionTableItem::Context(self.caller_vmctx())];2170for (tag, block) in &self.handlers {2171let block_call = self2172.builder2173.func2174.dfg2175.block_call(*block, &[BlockArg::TryCallExn(0)]);2176handlers.push(match tag {2177Some(tag) => ExceptionTableItem::Tag(*tag, block_call),2178None => ExceptionTableItem::Default(block_call),2179});2180}2181let etd = ExceptionTableData::new(sig, continuation, handlers);2182let et = self.builder.func.dfg.exception_tables.push(etd);2183Some((et, continuation_block, results))2184} else {2185None2186}2187}21882189fn results_from_call_inst(&self, inst: ir::Inst) -> CallRets {2190self.builder2191.func2192.dfg2193.inst_results(inst)2194.iter()2195.copied()2196.collect()2197}21982199fn handle_call_result_stackmap(&mut self, results: &[ir::Value], sig_ref: ir::SigRef) {2200for (i, &val) in results.iter().enumerate() {2201if self.env.sig_ref_result_needs_stack_map(sig_ref, i) {2202self.builder.declare_value_needs_stack_map(val);2203}2204}2205}22062207fn direct_call_inst(&mut self, callee: ir::FuncRef, args: &[ir::Value]) -> CallRets {2208let sig_ref = self.builder.func.dfg.ext_funcs[callee].signature;2209if self.tail {2210self.builder.ins().return_call(callee, args);2211smallvec![]2212} else if let Some((exception_table, continuation_block, results)) =2213self.exception_table(sig_ref)2214{2215self.builder.ins().try_call(callee, args, exception_table);2216self.handle_call_result_stackmap(&results, 
/// Emit an indirect call to `func_addr` with signature `sig_ref`, choosing
/// the appropriate instruction for the current call context.
///
/// - Tail calls use `return_call_indirect` and produce no results in this
///   frame.
/// - Calls with active exception handlers use `try_call_indirect` and
///   continue at a fresh continuation block.
/// - Plain calls use `call_indirect`.
fn indirect_call_inst(
    &mut self,
    sig_ref: ir::SigRef,
    func_addr: ir::Value,
    args: &[ir::Value],
) -> CallRets {
    if self.tail {
        self.builder
            .ins()
            .return_call_indirect(sig_ref, func_addr, args);
        // A tail call never returns to this frame, so there are no results.
        smallvec![]
    } else if let Some((exception_table, continuation_block, results)) =
        self.exception_table(sig_ref)
    {
        self.builder
            .ins()
            .try_call_indirect(func_addr, args, exception_table);
        self.handle_call_result_stackmap(&results, sig_ref);
        // Normal (non-exceptional) control flow resumes in the
        // continuation block, whose params carry the call's results.
        self.builder.switch_to_block(continuation_block);
        self.builder.seal_block(continuation_block);
        results
    } else {
        let inst = self.builder.ins().call_indirect(sig_ref, func_addr, args);
        let results = self.results_from_call_inst(inst);
        self.handle_call_result_stackmap(&results, sig_ref);
        results
    }
}
self.pointer_type());2281let needs_stack_map = match wasm_ty.top() {2282WasmHeapTopType::Extern | WasmHeapTopType::Any | WasmHeapTopType::Exn => true,2283WasmHeapTopType::Func => false,2284// TODO(#10248) Once continuations can be stored on the GC heap, we2285// will need stack maps for continuation objects.2286WasmHeapTopType::Cont => false,2287};2288(ty, needs_stack_map)2289}22902291fn heap_access_spectre_mitigation(&self) -> bool {2292self.isa.flags().enable_heap_access_spectre_mitigation()2293}22942295fn proof_carrying_code(&self) -> bool {2296self.isa.flags().enable_pcc()2297}22982299fn tunables(&self) -> &Tunables {2300self.compiler.tunables()2301}2302}23032304impl FuncEnvironment<'_> {2305pub fn heaps(&self) -> &PrimaryMap<Heap, HeapData> {2306&self.heaps2307}23082309pub fn is_wasm_parameter(&self, _signature: &ir::Signature, index: usize) -> bool {2310// The first two parameters are the vmctx and caller vmctx. The rest are2311// the wasm parameters.2312index >= 22313}23142315pub fn param_needs_stack_map(&self, _signature: &ir::Signature, index: usize) -> bool {2316// Skip the caller and callee vmctx.2317if index < 2 {2318return false;2319}23202321self.wasm_func_ty.params()[index - 2].is_vmgcref_type_and_not_i31()2322}23232324pub fn sig_ref_result_needs_stack_map(&self, sig_ref: ir::SigRef, index: usize) -> bool {2325let wasm_func_ty = self.sig_ref_to_ty[sig_ref].as_ref().unwrap();2326wasm_func_ty.returns()[index].is_vmgcref_type_and_not_i31()2327}23282329pub fn translate_table_grow(2330&mut self,2331builder: &mut FunctionBuilder<'_>,2332table_index: TableIndex,2333delta: ir::Value,2334init_value: ir::Value,2335) -> WasmResult<ir::Value> {2336let mut pos = builder.cursor();2337let table = self.table(table_index);2338let ty = table.ref_type.heap_type;2339let (table_vmctx, defined_table_index) =2340self.table_vmctx_and_defined_index(&mut pos, table_index);2341let index_type = table.idx_type;2342let delta = self.cast_index_to_i64(&mut pos, delta, 
/// Translate a `table.get`: read the element at `index` out of
/// `table_index`, dispatching on the table's element type.
pub fn translate_table_get(
    &mut self,
    builder: &mut FunctionBuilder,
    table_index: TableIndex,
    index: ir::Value,
) -> WasmResult<ir::Value> {
    let table = self.module.tables[table_index];
    let table_data = self.get_or_create_table(builder.func, table_index);
    let heap_ty = table.ref_type.heap_type;
    match heap_ty.top() {
        // GC-managed types: go through the GC compiler so barriers/refcounts
        // are handled correctly.
        WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => {
            let (src, flags) = table_data.prepare_table_addr(self, builder, index);
            gc::gc_compiler(self)?.translate_read_gc_reference(
                self,
                builder,
                table.ref_type,
                src,
                flags,
            )
        }

        // Function types: may require lazy initialization of the element,
        // handled by the helper (`false` = non-cold init blocks).
        WasmHeapTopType::Func => {
            Ok(self.get_or_init_func_ref_table_elem(builder, table_index, index, false))
        }

        // Continuation types: stored as fat pointers, loaded directly.
        WasmHeapTopType::Cont => {
            let (elem_addr, flags) = table_data.prepare_table_addr(self, builder, index);
            Ok(builder.ins().load(
                stack_switching::fatpointer::fatpointer_type(self),
                flags,
                elem_addr,
                0,
            ))
        }
    }
}
self,2410builder: &mut FunctionBuilder,2411table_index: TableIndex,2412value: ir::Value,2413index: ir::Value,2414) -> WasmResult<()> {2415let table = self.module.tables[table_index];2416let table_data = self.get_or_create_table(builder.func, table_index);2417let heap_ty = table.ref_type.heap_type;2418match heap_ty.top() {2419// GC-managed types.2420WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => {2421let (dst, flags) = table_data.prepare_table_addr(self, builder, index);2422gc::gc_compiler(self)?.translate_write_gc_reference(2423self,2424builder,2425table.ref_type,2426dst,2427value,2428flags,2429)2430}24312432// Function types.2433WasmHeapTopType::Func => {2434let (elem_addr, flags) = table_data.prepare_table_addr(self, builder, index);2435// Set the "initialized bit". See doc-comment on2436// `FUNCREF_INIT_BIT` in2437// crates/environ/src/ref_bits.rs for details.2438let value_with_init_bit = if self.tunables.table_lazy_init {2439builder2440.ins()2441.bor_imm(value, Imm64::from(FUNCREF_INIT_BIT as i64))2442} else {2443value2444};2445builder2446.ins()2447.store(flags, value_with_init_bit, elem_addr, 0);2448Ok(())2449}24502451// Continuation types.2452WasmHeapTopType::Cont => {2453let (elem_addr, flags) = table_data.prepare_table_addr(self, builder, index);2454builder.ins().store(flags, value, elem_addr, 0);2455Ok(())2456}2457}2458}24592460pub fn translate_table_fill(2461&mut self,2462builder: &mut FunctionBuilder<'_>,2463table_index: TableIndex,2464dst: ir::Value,2465val: ir::Value,2466len: ir::Value,2467) -> WasmResult<()> {2468let mut pos = builder.cursor();2469let table = self.table(table_index);2470let ty = table.ref_type.heap_type;2471let dst = self.cast_index_to_i64(&mut pos, dst, table.idx_type);2472let len = self.cast_index_to_i64(&mut pos, len, table.idx_type);2473let (table_vmctx, table_index) = self.table_vmctx_and_defined_index(&mut pos, table_index);24742475let mut args: SmallVec<[_; 6]> = smallvec![table_vmctx, table_index, 
dst];2476let libcall = match ty.top() {2477WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => {2478args.push(val);2479gc::builtins::table_fill_gc_ref(self, &mut pos.func)?2480}2481WasmHeapTopType::Func => {2482args.push(val);2483self.builtin_functions.table_fill_func_ref(&mut pos.func)2484}2485WasmHeapTopType::Cont => {2486let (revision, contref) =2487stack_switching::fatpointer::deconstruct(self, &mut pos, val);2488args.extend_from_slice(&[contref, revision]);2489stack_switching::builtins::table_fill_cont_obj(self, &mut pos.func)?2490}2491};24922493args.push(len);2494builder.ins().call(libcall, &args);24952496Ok(())2497}24982499pub fn translate_ref_i31(2500&mut self,2501mut pos: FuncCursor,2502val: ir::Value,2503) -> WasmResult<ir::Value> {2504debug_assert_eq!(pos.func.dfg.value_type(val), ir::types::I32);2505let shifted = pos.ins().ishl_imm(val, 1);2506let tagged = pos2507.ins()2508.bor_imm(shifted, i64::from(crate::I31_REF_DISCRIMINANT));2509let (ref_ty, _needs_stack_map) = self.reference_type(WasmHeapType::I31);2510debug_assert_eq!(ref_ty, ir::types::I32);2511Ok(tagged)2512}25132514pub fn translate_i31_get_s(2515&mut self,2516builder: &mut FunctionBuilder,2517i31ref: ir::Value,2518) -> WasmResult<ir::Value> {2519// TODO: If we knew we have a `(ref i31)` here, instead of maybe a `(ref2520// null i31)`, we could omit the `trapz`. But plumbing that type info2521// from `wasmparser` and through to here is a bit funky.2522self.trapz(builder, i31ref, crate::TRAP_NULL_REFERENCE);2523Ok(builder.ins().sshr_imm(i31ref, 1))2524}25252526pub fn translate_i31_get_u(2527&mut self,2528builder: &mut FunctionBuilder,2529i31ref: ir::Value,2530) -> WasmResult<ir::Value> {2531// TODO: If we knew we have a `(ref i31)` here, instead of maybe a `(ref2532// null i31)`, we could omit the `trapz`. 
But plumbing that type info2533// from `wasmparser` and through to here is a bit funky.2534self.trapz(builder, i31ref, crate::TRAP_NULL_REFERENCE);2535Ok(builder.ins().ushr_imm(i31ref, 1))2536}25372538pub fn struct_fields_len(&mut self, struct_type_index: TypeIndex) -> WasmResult<usize> {2539let ty = self.module.types[struct_type_index].unwrap_module_type_index();2540match &self.types[ty].composite_type.inner {2541WasmCompositeInnerType::Struct(s) => Ok(s.fields.len()),2542_ => unreachable!(),2543}2544}25452546pub fn translate_struct_new(2547&mut self,2548builder: &mut FunctionBuilder,2549struct_type_index: TypeIndex,2550fields: StructFieldsVec,2551) -> WasmResult<ir::Value> {2552gc::translate_struct_new(self, builder, struct_type_index, &fields)2553}25542555pub fn translate_struct_new_default(2556&mut self,2557builder: &mut FunctionBuilder,2558struct_type_index: TypeIndex,2559) -> WasmResult<ir::Value> {2560gc::translate_struct_new_default(self, builder, struct_type_index)2561}25622563pub fn translate_struct_get(2564&mut self,2565builder: &mut FunctionBuilder,2566struct_type_index: TypeIndex,2567field_index: u32,2568struct_ref: ir::Value,2569extension: Option<Extension>,2570) -> WasmResult<ir::Value> {2571gc::translate_struct_get(2572self,2573builder,2574struct_type_index,2575field_index,2576struct_ref,2577extension,2578)2579}25802581pub fn translate_struct_set(2582&mut self,2583builder: &mut FunctionBuilder,2584struct_type_index: TypeIndex,2585field_index: u32,2586struct_ref: ir::Value,2587value: ir::Value,2588) -> WasmResult<()> {2589gc::translate_struct_set(2590self,2591builder,2592struct_type_index,2593field_index,2594struct_ref,2595value,2596)2597}25982599pub fn translate_exn_unbox(2600&mut self,2601builder: &mut FunctionBuilder<'_>,2602tag_index: TagIndex,2603exn_ref: ir::Value,2604) -> WasmResult<SmallVec<[ir::Value; 4]>> {2605gc::translate_exn_unbox(self, builder, tag_index, exn_ref)2606}26072608pub fn translate_exn_throw(2609&mut self,2610builder: &mut 
/// Translate an `array.new` instruction by delegating to the GC backend,
/// producing a new array of type `array_type_index` with `len` elements
/// initialized from `elem`.
pub fn translate_array_new(
    &mut self,
    builder: &mut FunctionBuilder,
    array_type_index: TypeIndex,
    elem: ir::Value,
    len: ir::Value,
) -> WasmResult<ir::Value> {
    gc::translate_array_new(self, builder, array_type_index, elem, len)
}
/// Translate an `array.copy` instruction via the `array_copy` builtin.
///
/// The source/destination type indices are unused here (the libcall works on
/// the array objects themselves) but are kept in the signature to mirror the
/// instruction's immediates.
pub fn translate_array_copy(
    &mut self,
    builder: &mut FunctionBuilder,
    _dst_array_type_index: TypeIndex,
    dst_array: ir::Value,
    dst_index: ir::Value,
    _src_array_type_index: TypeIndex,
    src_array: ir::Value,
    src_index: ir::Value,
    len: ir::Value,
) -> WasmResult<()> {
    let libcall = gc::builtins::array_copy(self, builder.func)?;
    let vmctx = self.vmctx_val(&mut builder.cursor());
    builder.ins().call(
        libcall,
        &[vmctx, dst_array, dst_index, src_array, src_index, len],
    );
    Ok(())
}
/// Translate an `array.init_elem` instruction via the `array_init_elem`
/// builtin: copy `len` entries of element segment `elem_index` (starting at
/// `elem_offset`) into `array` at `dst_index`.
pub fn translate_array_init_elem(
    &mut self,
    builder: &mut FunctionBuilder,
    array_type_index: TypeIndex,
    array: ir::Value,
    dst_index: ir::Value,
    elem_index: ElemIndex,
    elem_offset: ir::Value,
    len: ir::Value,
) -> WasmResult<()> {
    let libcall = gc::builtins::array_init_elem(self, builder.func)?;
    let vmctx = self.vmctx_val(&mut builder.cursor());
    // The libcall takes the module-interned type index and the element
    // segment index as i32 immediates.
    let interned_type_index = self.module.types[array_type_index].unwrap_module_type_index();
    let interned_type_index = builder
        .ins()
        .iconst(I32, i64::from(interned_type_index.as_u32()));
    let elem_index = builder.ins().iconst(I32, i64::from(elem_index.as_u32()));
    builder.ins().call(
        libcall,
        &[
            vmctx,
            interned_type_index,
            array,
            dst_index,
            elem_index,
            elem_offset,
            len,
        ],
    );
    Ok(())
}
FunctionBuilder,2817array_type_index: TypeIndex,2818array: ir::Value,2819index: ir::Value,2820value: ir::Value,2821) -> WasmResult<()> {2822gc::translate_array_set(self, builder, array_type_index, array, index, value)2823}28242825pub fn translate_ref_test(2826&mut self,2827builder: &mut FunctionBuilder<'_>,2828test_ty: WasmRefType,2829gc_ref: ir::Value,2830gc_ref_ty: WasmRefType,2831) -> WasmResult<ir::Value> {2832gc::translate_ref_test(self, builder, test_ty, gc_ref, gc_ref_ty)2833}28342835pub fn translate_ref_null(2836&mut self,2837mut pos: cranelift_codegen::cursor::FuncCursor,2838ht: WasmHeapType,2839) -> WasmResult<ir::Value> {2840Ok(match ht.top() {2841WasmHeapTopType::Func => pos.ins().iconst(self.pointer_type(), 0),2842// NB: null GC references don't need to be in stack maps.2843WasmHeapTopType::Any | WasmHeapTopType::Extern | WasmHeapTopType::Exn => {2844pos.ins().iconst(types::I32, 0)2845}2846WasmHeapTopType::Cont => {2847let zero = pos.ins().iconst(self.pointer_type(), 0);2848stack_switching::fatpointer::construct(self, &mut pos, zero, zero)2849}2850})2851}28522853pub fn translate_ref_is_null(2854&mut self,2855mut pos: cranelift_codegen::cursor::FuncCursor,2856value: ir::Value,2857ty: WasmRefType,2858) -> WasmResult<ir::Value> {2859// If we know the type is not nullable, then we don't actually need to2860// check for null.2861if !ty.nullable {2862return Ok(pos.ins().iconst(ir::types::I32, 0));2863}28642865let byte_is_null = match ty.heap_type.top() {2866WasmHeapTopType::Cont => {2867let (_revision, contref) =2868stack_switching::fatpointer::deconstruct(self, &mut pos, value);2869pos.ins()2870.icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, contref, 0)2871}2872_ => pos2873.ins()2874.icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, value, 0),2875};28762877Ok(pos.ins().uextend(ir::types::I32, byte_is_null))2878}28792880pub fn translate_ref_func(2881&mut self,2882mut pos: cranelift_codegen::cursor::FuncCursor<'_>,2883func_index: 
/// Translate a `global.get`, loading either from plain memory or through the
/// GC compiler depending on how the global is represented.
pub(crate) fn translate_global_get(
    &mut self,
    builder: &mut FunctionBuilder<'_>,
    global_index: GlobalIndex,
) -> WasmResult<ir::Value> {
    match self.get_or_create_global(builder.func, global_index) {
        // Plain in-memory global: a direct load from its address.
        GlobalVariable::Memory { gv, offset, ty } => {
            let addr = builder.ins().global_value(self.pointer_type(), gv);
            let mut flags = ir::MemFlags::trusted();
            // Store vector globals in little-endian format to avoid
            // byte swaps on big-endian platforms since at-rest vectors
            // should already be in little-endian format anyway.
            if ty.is_vector() {
                flags.set_endianness(ir::Endianness::Little);
            }
            // Put globals in the "table" abstract heap category as well.
            flags.set_alias_region(Some(ir::AliasRegion::Table));
            Ok(builder.ins().load(ty, flags, addr, offset))
        }
        // GC-reference global: read through the GC compiler so any read
        // barrier is applied.
        GlobalVariable::Custom => {
            let global_ty = self.module.globals[global_index];
            let wasm_ty = global_ty.wasm_ty;
            debug_assert!(
                wasm_ty.is_vmgcref_type(),
                "We only use GlobalVariable::Custom for VMGcRef types"
            );
            let WasmValType::Ref(ref_ty) = wasm_ty else {
                unreachable!()
            };

            let (gv, offset) = self.get_global_location(builder.func, global_index);
            let gv = builder.ins().global_value(self.pointer_type(), gv);
            let src = builder.ins().iadd_imm(gv, i64::from(offset));

            gc::gc_compiler(self)?.translate_read_gc_reference(
                self,
                builder,
                ref_ty,
                src,
                // Immutable globals can never change, so mark the read
                // read-only and movable for better optimization.
                if global_ty.mutability {
                    ir::MemFlags::trusted()
                } else {
                    ir::MemFlags::trusted().with_readonly().with_can_move()
                },
            )
        }
    }
}
/// Translate a `call_indirect` through `table_index`, expecting the callee
/// to have type `ty_index`.
///
/// `handlers` supplies the active exception handlers (if any) for this call
/// site. Returns `Ok(None)` when the call was determined to statically trap
/// (unreachable afterwards); otherwise the call's results.
pub fn translate_call_indirect<'a>(
    &mut self,
    builder: &'a mut FunctionBuilder,
    features: &WasmFeatures,
    table_index: TableIndex,
    ty_index: TypeIndex,
    sig_ref: ir::SigRef,
    callee: ir::Value,
    call_args: &[ir::Value],
    handlers: impl IntoIterator<Item = (Option<ExceptionTag>, Block)>,
) -> WasmResult<Option<CallRets>> {
    Call::new(builder, self, handlers).indirect_call(
        features,
        table_index,
        ty_index,
        sig_ref,
        callee,
        call_args,
    )
}
ir::SigRef,3015call_args: &[ir::Value],3016handlers: impl IntoIterator<Item = (Option<ExceptionTag>, Block)>,3017) -> WasmResult<CallRets> {3018Call::new(builder, self, handlers).direct_call(callee_index, sig_ref, call_args)3019}30203021pub fn translate_call_ref<'a>(3022&mut self,3023builder: &'a mut FunctionBuilder,3024sig_ref: ir::SigRef,3025callee: ir::Value,3026call_args: &[ir::Value],3027handlers: impl IntoIterator<Item = (Option<ExceptionTag>, Block)>,3028) -> WasmResult<CallRets> {3029Call::new(builder, self, handlers).call_ref(sig_ref, callee, call_args)3030}30313032pub fn translate_return_call(3033&mut self,3034builder: &mut FunctionBuilder,3035callee_index: FuncIndex,3036sig_ref: ir::SigRef,3037call_args: &[ir::Value],3038) -> WasmResult<()> {3039Call::new_tail(builder, self).direct_call(callee_index, sig_ref, call_args)?;3040Ok(())3041}30423043pub fn translate_return_call_indirect(3044&mut self,3045builder: &mut FunctionBuilder,3046features: &WasmFeatures,3047table_index: TableIndex,3048ty_index: TypeIndex,3049sig_ref: ir::SigRef,3050callee: ir::Value,3051call_args: &[ir::Value],3052) -> WasmResult<()> {3053Call::new_tail(builder, self).indirect_call(3054features,3055table_index,3056ty_index,3057sig_ref,3058callee,3059call_args,3060)?;3061Ok(())3062}30633064pub fn translate_return_call_ref(3065&mut self,3066builder: &mut FunctionBuilder,3067sig_ref: ir::SigRef,3068callee: ir::Value,3069call_args: &[ir::Value],3070) -> WasmResult<()> {3071Call::new_tail(builder, self).call_ref(sig_ref, callee, call_args)?;3072Ok(())3073}30743075/// Returns two `ir::Value`s, the first of which is the vmctx for the memory3076/// `index` and the second of which is the `DefinedMemoryIndex` for `index`.3077///3078/// Handles internally whether `index` is an imported memory or not.3079fn memory_vmctx_and_defined_index(3080&mut self,3081pos: &mut FuncCursor,3082index: MemoryIndex,3083) -> (ir::Value, ir::Value) {3084let cur_vmctx = self.vmctx_val(pos);3085match 
self.module.defined_memory_index(index) {3086// This is a defined memory, so the vmctx is our own and the defined3087// index is `index` here.3088Some(index) => (cur_vmctx, pos.ins().iconst(I32, i64::from(index.as_u32()))),30893090// This is an imported memory, so load the vmctx/defined index from3091// the import definition itself.3092None => {3093let vmimport = self.offsets.vmctx_vmmemory_import(index);30943095let vmctx = pos.ins().load(3096self.isa.pointer_type(),3097ir::MemFlags::trusted(),3098cur_vmctx,3099i32::try_from(vmimport + u32::from(self.offsets.vmmemory_import_vmctx()))3100.unwrap(),3101);3102let index = pos.ins().load(3103ir::types::I32,3104ir::MemFlags::trusted(),3105cur_vmctx,3106i32::try_from(vmimport + u32::from(self.offsets.vmmemory_import_index()))3107.unwrap(),3108);3109(vmctx, index)3110}3111}3112}31133114/// Returns two `ir::Value`s, the first of which is the vmctx for the table3115/// `index` and the second of which is the `DefinedTableIndex` for `index`.3116///3117/// Handles internally whether `index` is an imported table or not.3118fn table_vmctx_and_defined_index(3119&mut self,3120pos: &mut FuncCursor,3121index: TableIndex,3122) -> (ir::Value, ir::Value) {3123// NB: the body of this method is similar to3124// `memory_vmctx_and_defined_index` above.3125let cur_vmctx = self.vmctx_val(pos);3126match self.module.defined_table_index(index) {3127Some(index) => (cur_vmctx, pos.ins().iconst(I32, i64::from(index.as_u32()))),3128None => {3129let vmimport = self.offsets.vmctx_vmtable_import(index);31303131let vmctx = pos.ins().load(3132self.isa.pointer_type(),3133ir::MemFlags::trusted(),3134cur_vmctx,3135i32::try_from(vmimport + u32::from(self.offsets.vmtable_import_vmctx()))3136.unwrap(),3137);3138let index = pos.ins().load(3139ir::types::I32,3140ir::MemFlags::trusted(),3141cur_vmctx,3142i32::try_from(vmimport + u32::from(self.offsets.vmtable_import_index()))3143.unwrap(),3144);3145(vmctx, index)3146}3147}3148}31493150pub fn 
translate_memory_grow(3151&mut self,3152builder: &mut FunctionBuilder<'_>,3153index: MemoryIndex,3154val: ir::Value,3155) -> WasmResult<ir::Value> {3156let mut pos = builder.cursor();3157let memory_grow = self.builtin_functions.memory_grow(&mut pos.func);31583159let (memory_vmctx, defined_memory_index) =3160self.memory_vmctx_and_defined_index(&mut pos, index);31613162let index_type = self.memory(index).idx_type;3163let val = self.cast_index_to_i64(&mut pos, val, index_type);3164let call_inst = pos3165.ins()3166.call(memory_grow, &[memory_vmctx, val, defined_memory_index]);3167let result = *pos.func.dfg.inst_results(call_inst).first().unwrap();3168let single_byte_pages = match self.memory(index).page_size_log2 {316916 => false,31700 => true,3171_ => unreachable!("only page sizes 2**0 and 2**16 are currently valid"),3172};3173Ok(self.convert_pointer_to_index_type(3174builder.cursor(),3175result,3176index_type,3177single_byte_pages,3178))3179}31803181pub fn translate_memory_size(3182&mut self,3183mut pos: FuncCursor<'_>,3184index: MemoryIndex,3185) -> WasmResult<ir::Value> {3186let pointer_type = self.pointer_type();3187let vmctx = self.vmctx(&mut pos.func);3188let is_shared = self.module.memories[index].shared;3189let base = pos.ins().global_value(pointer_type, vmctx);3190let current_length_in_bytes = match self.module.defined_memory_index(index) {3191Some(def_index) => {3192if is_shared {3193let offset =3194i32::try_from(self.offsets.vmctx_vmmemory_pointer(def_index)).unwrap();3195let vmmemory_ptr =3196pos.ins()3197.load(pointer_type, ir::MemFlags::trusted(), base, offset);3198let vmmemory_definition_offset =3199i64::from(self.offsets.ptr.vmmemory_definition_current_length());3200let vmmemory_definition_ptr =3201pos.ins().iadd_imm(vmmemory_ptr, vmmemory_definition_offset);3202// This atomic access of the3203// `VMMemoryDefinition::current_length` is direct; no bounds3204// check is needed. 
This is possible because shared memory3205// has a static size (the maximum is always known). Shared3206// memory is thus built with a static memory plan and no3207// bounds-checked version of this is implemented.3208pos.ins().atomic_load(3209pointer_type,3210ir::MemFlags::trusted(),3211vmmemory_definition_ptr,3212)3213} else {3214let owned_index = self.module.owned_memory_index(def_index);3215let offset = i32::try_from(3216self.offsets3217.vmctx_vmmemory_definition_current_length(owned_index),3218)3219.unwrap();3220pos.ins()3221.load(pointer_type, ir::MemFlags::trusted(), base, offset)3222}3223}3224None => {3225let offset = i32::try_from(self.offsets.vmctx_vmmemory_import_from(index)).unwrap();3226let vmmemory_ptr =3227pos.ins()3228.load(pointer_type, ir::MemFlags::trusted(), base, offset);3229if is_shared {3230let vmmemory_definition_offset =3231i64::from(self.offsets.ptr.vmmemory_definition_current_length());3232let vmmemory_definition_ptr =3233pos.ins().iadd_imm(vmmemory_ptr, vmmemory_definition_offset);3234pos.ins().atomic_load(3235pointer_type,3236ir::MemFlags::trusted(),3237vmmemory_definition_ptr,3238)3239} else {3240pos.ins().load(3241pointer_type,3242ir::MemFlags::trusted(),3243vmmemory_ptr,3244i32::from(self.offsets.ptr.vmmemory_definition_current_length()),3245)3246}3247}3248};32493250let page_size_log2 = i64::from(self.module.memories[index].page_size_log2);3251let current_length_in_pages = pos.ins().ushr_imm(current_length_in_bytes, page_size_log2);3252let single_byte_pages = match page_size_log2 {325316 => false,32540 => true,3255_ => unreachable!("only page sizes 2**0 and 2**16 are currently valid"),3256};3257Ok(self.convert_pointer_to_index_type(3258pos,3259current_length_in_pages,3260self.memory(index).idx_type,3261single_byte_pages,3262))3263}32643265pub fn translate_memory_copy(3266&mut self,3267builder: &mut FunctionBuilder<'_>,3268src_index: MemoryIndex,3269dst_index: MemoryIndex,3270dst: ir::Value,3271src: ir::Value,3272len: ir::Value,3273) 
-> WasmResult<()> {3274let mut pos = builder.cursor();3275let vmctx = self.vmctx_val(&mut pos);32763277let memory_copy = self.builtin_functions.memory_copy(&mut pos.func);3278let dst = self.cast_index_to_i64(&mut pos, dst, self.memory(dst_index).idx_type);3279let src = self.cast_index_to_i64(&mut pos, src, self.memory(src_index).idx_type);3280// The length is 32-bit if either memory is 32-bit, but if they're both3281// 64-bit then it's 64-bit. Our intrinsic takes a 64-bit length for3282// compatibility across all memories, so make sure that it's cast3283// correctly here (this is a bit special so no generic helper unlike for3284// `dst`/`src` above)3285let len = if index_type_to_ir_type(self.memory(dst_index).idx_type) == I643286&& index_type_to_ir_type(self.memory(src_index).idx_type) == I643287{3288len3289} else {3290pos.ins().uextend(I64, len)3291};3292let src_index = pos.ins().iconst(I32, i64::from(src_index.as_u32()));3293let dst_index = pos.ins().iconst(I32, i64::from(dst_index.as_u32()));3294pos.ins()3295.call(memory_copy, &[vmctx, dst_index, dst, src_index, src, len]);32963297Ok(())3298}32993300pub fn translate_memory_fill(3301&mut self,3302builder: &mut FunctionBuilder<'_>,3303memory_index: MemoryIndex,3304dst: ir::Value,3305val: ir::Value,3306len: ir::Value,3307) -> WasmResult<()> {3308let mut pos = builder.cursor();3309let memory_fill = self.builtin_functions.memory_fill(&mut pos.func);3310let dst = self.cast_index_to_i64(&mut pos, dst, self.memory(memory_index).idx_type);3311let len = self.cast_index_to_i64(&mut pos, len, self.memory(memory_index).idx_type);3312let (memory_vmctx, defined_memory_index) =3313self.memory_vmctx_and_defined_index(&mut pos, memory_index);33143315pos.ins().call(3316memory_fill,3317&[memory_vmctx, defined_memory_index, dst, val, len],3318);33193320Ok(())3321}33223323pub fn translate_memory_init(3324&mut self,3325builder: &mut FunctionBuilder<'_>,3326memory_index: MemoryIndex,3327seg_index: u32,3328dst: ir::Value,3329src: 
ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let memory_init = self.builtin_functions.memory_init(&mut pos.func);

        let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
        let vmctx = self.vmctx_val(&mut pos);

        // Only `dst` is memory-indexed; `src`/`len` address the passive data
        // segment and are passed through as-is.
        let dst = self.cast_index_to_i64(&mut pos, dst, self.memory(memory_index).idx_type);

        pos.ins().call(
            memory_init,
            &[vmctx, memory_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }

    /// Drop the passive data segment `seg_index` via the `data_drop` builtin.
    pub fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
        let data_drop = self.builtin_functions.data_drop(&mut pos.func);
        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
        let vmctx = self.vmctx_val(&mut pos);
        pos.ins().call(data_drop, &[vmctx, seg_index_arg]);
        Ok(())
    }

    /// Load the current element count of `table_index`.
    pub fn translate_table_size(
        &mut self,
        pos: FuncCursor,
        table_index: TableIndex,
    ) -> WasmResult<ir::Value> {
        let table_data = self.get_or_create_table(pos.func, table_index);
        let index_type = index_type_to_ir_type(self.table(table_index).idx_type);
        Ok(table_data.bound.bound(&*self.isa, pos, index_type))
    }

    pub fn translate_table_copy(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let (table_copy, dst_table_index_arg, src_table_index_arg) =
            self.get_table_copy_func(&mut builder.func, dst_table_index, src_table_index);

        let mut pos = builder.cursor();
        let dst = self.cast_index_to_i64(&mut pos, dst, self.table(dst_table_index).idx_type);
        let src = self.cast_index_to_i64(&mut pos, src, self.table(src_table_index).idx_type);
        // As with `memory.copy`: the length stays 64-bit only when both
        // tables are 64-bit indexed, otherwise zero-extend it.
        let both_64 = index_type_to_ir_type(self.table(dst_table_index).idx_type) == I64
            && index_type_to_ir_type(self.table(src_table_index).idx_type) == I64;
        let len = if both_64 {
            len
        } else {
            pos.ins().uextend(I64, len)
        };
        let dst_table_index_arg = pos.ins().iconst(I32, dst_table_index_arg as i64);
        let src_table_index_arg = pos.ins().iconst(I32, src_table_index_arg as i64);
        let vmctx = self.vmctx_val(&mut pos);
        pos.ins().call(
            table_copy,
            &[
                vmctx,
                dst_table_index_arg,
                src_table_index_arg,
                dst,
                src,
                len,
            ],
        );

        Ok(())
    }

    pub fn translate_table_init(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        seg_index: u32,
        table_index: TableIndex,
        dst: ir::Value,
        src: ir::Value,
        len: ir::Value,
    ) -> WasmResult<()> {
        let mut pos = builder.cursor();
        let table_init = self.builtin_functions.table_init(&mut pos.func);
        let table_index_arg = pos.ins().iconst(I32, i64::from(table_index.as_u32()));
        let seg_index_arg = pos.ins().iconst(I32, i64::from(seg_index));
        let vmctx = self.vmctx_val(&mut pos);
        let index_type = self.table(table_index).idx_type;
        // `dst` indexes the table; `src`/`len` index the element segment and
        // are always 32-bit, so plain zero-extension suffices.
        let dst = self.cast_index_to_i64(&mut pos, dst, index_type);
        let src = pos.ins().uextend(I64, src);
        let len = pos.ins().uextend(I64, len);

        pos.ins().call(
            table_init,
            &[vmctx, table_index_arg, seg_index_arg, dst, src, len],
        );

        Ok(())
    }

    /// Drop the passive element segment `elem_index` via the builtin.
    pub fn translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()> {
        let elem_drop = self.builtin_functions.elem_drop(&mut pos.func);
        let elem_index_arg = pos.ins().iconst(I32, elem_index as i64);
        let vmctx = self.vmctx_val(&mut pos);
        pos.ins().call(elem_drop, &[vmctx, elem_index_arg]);
        Ok(())
    }

    pub fn translate_atomic_wait(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        memory_index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        expected: ir::Value,
        timeout: ir::Value,
    ) -> WasmResult<ir::Value> {
        #[cfg(feature = "threads")]
        {
            let mut pos = builder.cursor();
            let addr =
self.cast_index_to_i64(&mut pos, addr, self.memory(memory_index).idx_type);
            // The width of `expected` (I32/I64) selects which wait builtin
            // to invoke.
            let implied_ty = pos.func.dfg.value_type(expected);
            let wait_func = self.get_memory_atomic_wait(&mut pos.func, implied_ty);

            let (memory_vmctx, defined_memory_index) =
                self.memory_vmctx_and_defined_index(&mut pos, memory_index);

            let call_inst = pos.ins().call(
                wait_func,
                &[memory_vmctx, defined_memory_index, addr, expected, timeout],
            );
            let ret = pos.func.dfg.inst_results(call_inst)[0];
            Ok(builder.ins().ireduce(ir::types::I32, ret))
        }
        #[cfg(not(feature = "threads"))]
        {
            let _ = (builder, memory_index, addr, expected, timeout);
            Err(wasmtime_environ::WasmError::Unsupported(
                "threads support disabled at compile time".to_string(),
            ))
        }
    }

    pub fn translate_atomic_notify(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        memory_index: MemoryIndex,
        _heap: Heap,
        addr: ir::Value,
        count: ir::Value,
    ) -> WasmResult<ir::Value> {
        #[cfg(feature = "threads")]
        {
            let mut pos = builder.cursor();
            let addr = self.cast_index_to_i64(&mut pos, addr, self.memory(memory_index).idx_type);
            let atomic_notify = self.builtin_functions.memory_atomic_notify(&mut pos.func);

            let (memory_vmctx, defined_memory_index) =
                self.memory_vmctx_and_defined_index(&mut pos, memory_index);
            let call_inst = pos.ins().call(
                atomic_notify,
                &[memory_vmctx, defined_memory_index, addr, count],
            );
            let ret = pos.func.dfg.inst_results(call_inst)[0];
            Ok(builder.ins().ireduce(ir::types::I32, ret))
        }
        #[cfg(not(feature = "threads"))]
        {
            let _ = (builder, memory_index, addr, count);
            Err(wasmtime_environ::WasmError::Unsupported(
                "threads support disabled at compile time".to_string(),
            ))
        }
    }

    pub fn translate_loop_header(&mut self, builder: &mut FunctionBuilder) -> WasmResult<()> {
        // Loop back-edges are where fuel exhaustion is observable, so check
        // remaining fuel here when fuel metering is on.
        if self.tunables.consume_fuel {
            self.fuel_check(builder);
        }

        // Likewise, epoch-based interruption checks the counter at loop
        // headers so long-running loops can be interrupted.
        if self.tunables.epoch_interruption {
            self.epoch_check(builder);
        }

        Ok(())
    }

    pub fn before_translate_operator(
        &mut self,
        op: &Operator,
        _operand_types: Option<&[WasmValType]>,
        builder: &mut FunctionBuilder,
        state: &FuncTranslationStacks,
    ) -> WasmResult<()> {
        if self.tunables.consume_fuel {
            self.fuel_before_op(op, builder, state.reachable());
        }
        Ok(())
    }

    pub fn after_translate_operator(
        &mut self,
        op: &Operator,
        _operand_types: Option<&[WasmValType]>,
        builder: &mut FunctionBuilder,
        state: &FuncTranslationStacks,
    ) -> WasmResult<()> {
        if self.tunables.consume_fuel && state.reachable() {
            self.fuel_after_op(op, builder);
        }
        Ok(())
    }

    pub fn before_unconditionally_trapping_memory_access(&mut self, builder: &mut FunctionBuilder) {
        // Flush the fuel count back to the vmctx before an access that is
        // statically known to trap, so the consumed fuel is observable.
        if self.tunables.consume_fuel {
            self.fuel_increment_var(builder);
            self.fuel_save_from_var(builder);
        }
    }

    pub fn before_translate_function(
        &mut self,
        builder: &mut FunctionBuilder,
        _state: &FuncTranslationStacks,
    ) -> WasmResult<()> {
        // If an explicit stack limit is requested, emit one here at the start
        // of the function.
        if let Some(gv) = self.stack_limit_at_function_entry {
            let limit = builder.ins().global_value(self.pointer_type(), gv);
            let sp = builder.ins().get_stack_pointer(self.pointer_type());
            let overflow = builder.ins().icmp(IntCC::UnsignedLessThan, sp, limit);
            self.conditionally_trap(builder, overflow, ir::TrapCode::STACK_OVERFLOW);
        }

        // Additionally we initialize `fuel_var` if it will get used.
        if self.tunables.consume_fuel {
            self.fuel_function_entry(builder);
        }

        // Initialize `epoch_var` with the current epoch.
        if self.tunables.epoch_interruption
{
            self.epoch_function_entry(builder);
        }

        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            // Instrument the guest allocator's entry points for wmemcheck.
            let func_name = self.current_func_name(builder);
            if func_name == Some("malloc") {
                self.check_malloc_start(builder);
            } else if func_name == Some("free") {
                self.check_free_start(builder);
            }
        }

        Ok(())
    }

    pub fn after_translate_function(
        &mut self,
        builder: &mut FunctionBuilder,
        state: &FuncTranslationStacks,
    ) -> WasmResult<()> {
        if self.tunables.consume_fuel && state.reachable() {
            self.fuel_function_exit(builder);
        }
        Ok(())
    }

    /// Whether relaxed-simd operations must use their deterministic lowering.
    pub fn relaxed_simd_deterministic(&self) -> bool {
        self.tunables.relaxed_simd_deterministic
    }

    /// Whether the target ISA has a native fused multiply-add.
    pub fn has_native_fma(&self) -> bool {
        self.isa.has_native_fma()
    }

    /// Whether the compilation target is x86-64.
    pub fn is_x86(&self) -> bool {
        self.isa.triple().architecture == target_lexicon::Architecture::X86_64
    }

    pub fn translate_cont_bind(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        contobj: ir::Value,
        args: &[ir::Value],
    ) -> ir::Value {
        stack_switching::instructions::translate_cont_bind(self, builder, contobj, args)
    }

    pub fn translate_cont_new(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        func: ir::Value,
        arg_types: &[WasmValType],
        return_types: &[WasmValType],
    ) -> WasmResult<ir::Value> {
        stack_switching::instructions::translate_cont_new(
            self,
            builder,
            func,
            arg_types,
            return_types,
        )
    }

    pub fn translate_resume(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        type_index: u32,
        contobj: ir::Value,
        resume_args: &[ir::Value],
        resumetable: &[(u32, Option<ir::Block>)],
    ) -> WasmResult<Vec<ir::Value>> {
        stack_switching::instructions::translate_resume(
            self,
            builder,
            type_index,
            contobj,
            resume_args,
            resumetable,
        )
    }

    pub fn translate_suspend(
        &mut self,
        builder: &mut FunctionBuilder<'_>,
        tag_index: u32,
        suspend_args: &[ir::Value],
        tag_return_types: &[ir::Type],
    ) -> Vec<ir::Value> {
        stack_switching::instructions::translate_suspend(
            self,
            builder,
            tag_index,
            suspend_args,
            tag_return_types,
        )
    }

    /// Translates switch instructions.
    pub fn translate_switch(
        &mut self,
        builder: &mut FunctionBuilder,
        tag_index: u32,
        contobj: ir::Value,
        switch_args: &[ir::Value],
        return_types: &[ir::Type],
    ) -> WasmResult<Vec<ir::Value>> {
        stack_switching::instructions::translate_switch(
            self,
            builder,
            tag_index,
            contobj,
            switch_args,
            return_types,
        )
    }

    /// Parameter types of the continuation type at `index`.
    pub fn continuation_arguments(&self, index: TypeIndex) -> &[WasmValType] {
        let idx = self.module.types[index].unwrap_module_type_index();
        self.types[self.types[idx]
            .unwrap_cont()
            .clone()
            .unwrap_module_type_index()]
        .unwrap_func()
        .params()
    }

    /// Return types of the continuation type at `index`.
    pub fn continuation_returns(&self, index: TypeIndex) -> &[WasmValType] {
        let idx = self.module.types[index].unwrap_module_type_index();
        self.types[self.types[idx]
            .unwrap_cont()
            .clone()
            .unwrap_module_type_index()]
        .unwrap_func()
        .returns()
    }

    /// Parameter types of the exception/control tag at `tag_index`.
    pub fn tag_params(&self, tag_index: TagIndex) -> &[WasmValType] {
        let idx = self.module.tags[tag_index].signature;
        self.types[idx.unwrap_module_type_index()]
            .unwrap_func()
            .params()
    }

    /// Return types of the exception/control tag at `tag_index`.
    pub fn tag_returns(&self, tag_index: TagIndex) -> &[WasmValType] {
        let idx = self.module.tags[tag_index].signature;
        self.types[idx.unwrap_module_type_index()]
            .unwrap_func()
            .returns()
    }

    pub fn use_x86_blendv_for_relaxed_laneselect(&self, ty: Type) -> bool {
        self.isa.has_x86_blendv_lowering(ty)
    }

    pub fn use_x86_pmulhrsw_for_relaxed_q15mul(&self) -> bool {
        self.isa.has_x86_pmulhrsw_lowering()
    }

    pub fn use_x86_pmaddubsw_for_dot(&self) -> bool {
        self.isa.has_x86_pmaddubsw_lowering()
    }

    pub fn handle_before_return(&mut self, retvals: &[ir::Value], builder: &mut
FunctionBuilder) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            // Mirror `before_translate_function`: hook the allocator's exit.
            let func_name = self.current_func_name(builder);
            if func_name == Some("malloc") {
                self.hook_malloc_exit(builder, retvals);
            } else if func_name == Some("free") {
                self.hook_free_exit(builder);
            }
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (retvals, builder);
    }

    pub fn before_load(
        &mut self,
        builder: &mut FunctionBuilder,
        val_size: u8,
        addr: ir::Value,
        offset: u64,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            let check_load = self.builtin_functions.check_load(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let num_bytes = builder.ins().iconst(I32, val_size as i64);
            let offset_val = builder.ins().iconst(I64, offset as i64);
            builder
                .ins()
                .call(check_load, &[vmctx, num_bytes, addr, offset_val]);
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, val_size, addr, offset);
    }

    pub fn before_store(
        &mut self,
        builder: &mut FunctionBuilder,
        val_size: u8,
        addr: ir::Value,
        offset: u64,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            let check_store = self.builtin_functions.check_store(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let num_bytes = builder.ins().iconst(I32, val_size as i64);
            let offset_val = builder.ins().iconst(I64, offset as i64);
            builder
                .ins()
                .call(check_store, &[vmctx, num_bytes, addr, offset_val]);
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, val_size, addr, offset);
    }

    pub fn update_global(
        &mut self,
        builder: &mut FunctionBuilder,
        global_index: GlobalIndex,
        value: ir::Value,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck {
            if global_index.index() == 0 {
                // We are making the assumption that global 0 is the auxiliary stack pointer.
                let update_stack_pointer =
                    self.builtin_functions.update_stack_pointer(builder.func);
                let vmctx = self.vmctx_val(&mut builder.cursor());
                builder.ins().call(update_stack_pointer, &[vmctx, value]);
            }
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, global_index, value);
    }

    pub fn before_memory_grow(
        &mut self,
        builder: &mut FunctionBuilder,
        num_pages: ir::Value,
        mem_index: MemoryIndex,
    ) {
        #[cfg(feature = "wmemcheck")]
        if self.compiler.wmemcheck && mem_index.as_u32() == 0 {
            let update_mem_size = self.builtin_functions.update_mem_size(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            builder.ins().call(update_mem_size, &[vmctx, num_pages]);
        }
        #[cfg(not(feature = "wmemcheck"))]
        let _ = (builder, num_pages, mem_index);
    }

    /// If the ISA has rounding instructions, let Cranelift use them. But if
    /// not, lower to a libcall here, rather than having Cranelift do it. We
    /// can pass our libcall the vmctx pointer, which we use for stack
    /// overflow checking.
    ///
    /// This helper is generic for all rounding instructions below, both for
    /// scalar and simd types. The `clif_round` argument is the CLIF-level
    /// rounding instruction to use if the ISA has the instruction, and the
    /// `round_builtin` helper is used to determine which element-level
    /// rounding operation builtin is used.
Note that this handles the case3845/// when `value` is a vector by doing an element-wise libcall invocation.3846fn isa_round(3847&mut self,3848builder: &mut FunctionBuilder,3849value: ir::Value,3850clif_round: fn(FuncInstBuilder<'_, '_>, ir::Value) -> ir::Value,3851round_builtin: fn(&mut BuiltinFunctions, &mut Function) -> ir::FuncRef,3852) -> ir::Value {3853if self.isa.has_round() {3854return clif_round(builder.ins(), value);3855}38563857let vmctx = self.vmctx_val(&mut builder.cursor());3858let round = round_builtin(&mut self.builtin_functions, builder.func);3859let round_one = |builder: &mut FunctionBuilder, value: ir::Value| {3860let call = builder.ins().call(round, &[vmctx, value]);3861*builder.func.dfg.inst_results(call).first().unwrap()3862};38633864let ty = builder.func.dfg.value_type(value);3865if !ty.is_vector() {3866return round_one(builder, value);3867}38683869assert_eq!(ty.bits(), 128);3870let zero = builder.func.dfg.constants.insert(V128Imm([0; 16]).into());3871let mut result = builder.ins().vconst(ty, zero);3872for i in 0..u8::try_from(ty.lane_count()).unwrap() {3873let element = builder.ins().extractlane(value, i);3874let element_rounded = round_one(builder, element);3875result = builder.ins().insertlane(result, element_rounded, i);3876}3877result3878}38793880pub fn ceil_f32(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3881self.isa_round(3882builder,3883value,3884|ins, val| ins.ceil(val),3885BuiltinFunctions::ceil_f32,3886)3887}38883889pub fn ceil_f64(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3890self.isa_round(3891builder,3892value,3893|ins, val| ins.ceil(val),3894BuiltinFunctions::ceil_f64,3895)3896}38973898pub fn ceil_f32x4(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3899self.isa_round(3900builder,3901value,3902|ins, val| ins.ceil(val),3903BuiltinFunctions::ceil_f32,3904)3905}39063907pub fn ceil_f64x2(&mut self, builder: &mut FunctionBuilder, value: 
ir::Value) -> ir::Value {3908self.isa_round(3909builder,3910value,3911|ins, val| ins.ceil(val),3912BuiltinFunctions::ceil_f64,3913)3914}39153916pub fn floor_f32(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3917self.isa_round(3918builder,3919value,3920|ins, val| ins.floor(val),3921BuiltinFunctions::floor_f32,3922)3923}39243925pub fn floor_f64(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3926self.isa_round(3927builder,3928value,3929|ins, val| ins.floor(val),3930BuiltinFunctions::floor_f64,3931)3932}39333934pub fn floor_f32x4(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3935self.isa_round(3936builder,3937value,3938|ins, val| ins.floor(val),3939BuiltinFunctions::floor_f32,3940)3941}39423943pub fn floor_f64x2(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3944self.isa_round(3945builder,3946value,3947|ins, val| ins.floor(val),3948BuiltinFunctions::floor_f64,3949)3950}39513952pub fn trunc_f32(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3953self.isa_round(3954builder,3955value,3956|ins, val| ins.trunc(val),3957BuiltinFunctions::trunc_f32,3958)3959}39603961pub fn trunc_f64(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3962self.isa_round(3963builder,3964value,3965|ins, val| ins.trunc(val),3966BuiltinFunctions::trunc_f64,3967)3968}39693970pub fn trunc_f32x4(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3971self.isa_round(3972builder,3973value,3974|ins, val| ins.trunc(val),3975BuiltinFunctions::trunc_f32,3976)3977}39783979pub fn trunc_f64x2(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3980self.isa_round(3981builder,3982value,3983|ins, val| ins.trunc(val),3984BuiltinFunctions::trunc_f64,3985)3986}39873988pub fn nearest_f32(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {3989self.isa_round(3990builder,3991value,3992|ins, val| 
ins.nearest(val),
            BuiltinFunctions::nearest_f32,
        )
    }

    pub fn nearest_f64(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.nearest(val),
            BuiltinFunctions::nearest_f64,
        )
    }

    pub fn nearest_f32x4(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.nearest(val),
            BuiltinFunctions::nearest_f32,
        )
    }

    pub fn nearest_f64x2(&mut self, builder: &mut FunctionBuilder, value: ir::Value) -> ir::Value {
        self.isa_round(
            builder,
            value,
            |ins, val| ins.nearest(val),
            BuiltinFunctions::nearest_f64,
        )
    }

    pub fn swizzle(
        &mut self,
        builder: &mut FunctionBuilder,
        a: ir::Value,
        b: ir::Value,
    ) -> ir::Value {
        // On x86, swizzle would typically be compiled to `pshufb`, except
        // that that's not available on CPUs that lack SSSE3. In that case,
        // fall back to a builtin function.
        if !self.is_x86() || self.isa.has_x86_pshufb_lowering() {
            builder.ins().swizzle(a, b)
        } else {
            let swizzle = self.builtin_functions.i8x16_swizzle(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let call = builder.ins().call(swizzle, &[vmctx, a, b]);
            *builder.func.dfg.inst_results(call).first().unwrap()
        }
    }

    pub fn relaxed_swizzle(
        &mut self,
        builder: &mut FunctionBuilder,
        a: ir::Value,
        b: ir::Value,
    ) -> ir::Value {
        // As above, fall back to a builtin if we lack SSSE3.
        if !self.is_x86() || self.isa.has_x86_pshufb_lowering() {
            // Non-x86 (or deterministic mode) uses the portable semantics;
            // otherwise the relaxed x86-specific lowering is allowed.
            if !self.is_x86() || self.relaxed_simd_deterministic() {
                builder.ins().swizzle(a, b)
            } else {
                builder.ins().x86_pshufb(a, b)
            }
        } else {
            let swizzle = self.builtin_functions.i8x16_swizzle(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let call = builder.ins().call(swizzle, &[vmctx, a, b]);
            *builder.func.dfg.inst_results(call).first().unwrap()
        }
    }

    pub fn i8x16_shuffle(
        &mut self,
        builder: &mut FunctionBuilder,
        a: ir::Value,
        b: ir::Value,
        lanes: &[u8; 16],
    ) -> ir::Value {
        // As with swizzle, i8x16.shuffle would also commonly be implemented
        // with pshufb, so if we lack SSSE3, fall back to a builtin.
        if !self.is_x86() || self.isa.has_x86_pshufb_lowering() {
            let lanes = ConstantData::from(&lanes[..]);
            let mask = builder.func.dfg.immediates.push(lanes);
            builder.ins().shuffle(a, b, mask)
        } else {
            // Materialize the lane selector as a v128 constant and hand the
            // whole shuffle to the builtin.
            let lanes = builder
                .func
                .dfg
                .constants
                .insert(ConstantData::from(&lanes[..]));
            let lanes = builder.ins().vconst(I8X16, lanes);
            let i8x16_shuffle = self.builtin_functions.i8x16_shuffle(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let call = builder.ins().call(i8x16_shuffle, &[vmctx, a, b, lanes]);
            *builder.func.dfg.inst_results(call).first().unwrap()
        }
    }

    pub fn fma_f32x4(
        &mut self,
        builder: &mut FunctionBuilder,
        a: ir::Value,
        b: ir::Value,
        c: ir::Value,
    ) -> ir::Value {
        if self.has_native_fma() {
            builder.ins().fma(a, b, c)
        } else if self.relaxed_simd_deterministic() {
            // Deterministic semantics are "fused multiply and add".
            let fma = self.builtin_functions.fma_f32x4(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let call = builder.ins().call(fma, &[vmctx, a, b, c]);
            *builder.func.dfg.inst_results(call).first().unwrap()
        } else {
            // Relaxed semantics permit the unfused multiply-then-add.
            let mul = builder.ins().fmul(a, b);
            builder.ins().fadd(mul, c)
        }
    }

    pub fn fma_f64x2(
        &mut self,
        builder: &mut FunctionBuilder,
        a: ir::Value,
        b: ir::Value,
        c: ir::Value,
    ) -> ir::Value {
        if self.has_native_fma() {
            builder.ins().fma(a, b, c)
        } else if self.relaxed_simd_deterministic() {
            // Deterministic semantics are "fused multiply and add".
            let fma = self.builtin_functions.fma_f64x2(builder.func);
            let vmctx = self.vmctx_val(&mut builder.cursor());
            let call = builder.ins().call(fma, &[vmctx, a, b, c]);
            *builder.func.dfg.inst_results(call).first().unwrap()
        } else {
            let mul = builder.ins().fmul(a, b);
            builder.ins().fadd(mul, c)
        }
    }

    /// The target ISA this function is being compiled for.
    pub fn isa(&self) -> &dyn TargetIsa {
        &*self.isa
    }

    pub fn trap(&mut self, builder: &mut FunctionBuilder, trap: ir::TrapCode) {
        match (
            self.clif_instruction_traps_enabled(),
            crate::clif_trap_to_env_trap(trap),
        ) {
            // If libcall traps are disabled or there's no wasmtime-defined trap
            // code for this, then emit a native trap instruction.
            (true, _) | (_, None) => {
                builder.ins().trap(trap);
            }
            // ... otherwise with libcall traps explicitly enabled and a
            // wasmtime-based trap code invoke the libcall to raise a trap and
            // pass in our trap code. Leave a debug `unreachable` in place
            // afterwards as a defense-in-depth measure.
            (false, Some(trap)) => {
                let libcall = self.builtin_functions.trap(&mut builder.func);
                let vmctx = self.vmctx_val(&mut builder.cursor());
                let trap_code = builder.ins().iconst(I8, i64::from(trap as u8));
                builder.ins().call(libcall, &[vmctx, trap_code]);
                let raise = self.builtin_functions.raise(&mut builder.func);
                builder.ins().call(raise, &[vmctx]);
                builder.ins().trap(TRAP_INTERNAL_ASSERT);
            }
        }
    }

    pub fn trapz(&mut self, builder: &mut FunctionBuilder, value: ir::Value, trap: ir::TrapCode) {
        if self.clif_instruction_traps_enabled() {
            builder.ins().trapz(value, trap);
        } else {
            // Lower to an explicit compare + conditional libcall trap.
            let ty = builder.func.dfg.value_type(value);
            let zero = builder.ins().iconst(ty, 0);
            let cmp = builder.ins().icmp(IntCC::Equal, value, zero);
            self.conditionally_trap(builder, cmp, trap);
        }
    }

    pub fn trapnz(&mut self, builder: &mut FunctionBuilder, value: ir::Value, trap: ir::TrapCode) {
        if self.clif_instruction_traps_enabled()
{4176builder.ins().trapnz(value, trap);4177} else {4178let ty = builder.func.dfg.value_type(value);4179let zero = builder.ins().iconst(ty, 0);4180let cmp = builder.ins().icmp(IntCC::NotEqual, value, zero);4181self.conditionally_trap(builder, cmp, trap);4182}4183}41844185pub fn uadd_overflow_trap(4186&mut self,4187builder: &mut FunctionBuilder,4188lhs: ir::Value,4189rhs: ir::Value,4190trap: ir::TrapCode,4191) -> ir::Value {4192if self.clif_instruction_traps_enabled() {4193builder.ins().uadd_overflow_trap(lhs, rhs, trap)4194} else {4195let (ret, overflow) = builder.ins().uadd_overflow(lhs, rhs);4196self.conditionally_trap(builder, overflow, trap);4197ret4198}4199}42004201pub fn translate_sdiv(4202&mut self,4203builder: &mut FunctionBuilder,4204lhs: ir::Value,4205rhs: ir::Value,4206) -> ir::Value {4207self.guard_signed_divide(builder, lhs, rhs);4208builder.ins().sdiv(lhs, rhs)4209}42104211pub fn translate_udiv(4212&mut self,4213builder: &mut FunctionBuilder,4214lhs: ir::Value,4215rhs: ir::Value,4216) -> ir::Value {4217self.guard_zero_divisor(builder, rhs);4218builder.ins().udiv(lhs, rhs)4219}42204221pub fn translate_srem(4222&mut self,4223builder: &mut FunctionBuilder,4224lhs: ir::Value,4225rhs: ir::Value,4226) -> ir::Value {4227self.guard_zero_divisor(builder, rhs);4228builder.ins().srem(lhs, rhs)4229}42304231pub fn translate_urem(4232&mut self,4233builder: &mut FunctionBuilder,4234lhs: ir::Value,4235rhs: ir::Value,4236) -> ir::Value {4237self.guard_zero_divisor(builder, rhs);4238builder.ins().urem(lhs, rhs)4239}42404241pub fn translate_fcvt_to_sint(4242&mut self,4243builder: &mut FunctionBuilder,4244ty: ir::Type,4245val: ir::Value,4246) -> ir::Value {4247// NB: for now avoid translating this entire instruction to CLIF and4248// just do it in a libcall.4249if !self.clif_instruction_traps_enabled() {4250self.guard_fcvt_to_int(builder, ty, val, true);4251}4252builder.ins().fcvt_to_sint(ty, val)4253}42544255pub fn translate_fcvt_to_uint(4256&mut self,4257builder: &mut 
FunctionBuilder,4258ty: ir::Type,4259val: ir::Value,4260) -> ir::Value {4261if !self.clif_instruction_traps_enabled() {4262self.guard_fcvt_to_int(builder, ty, val, false);4263}4264builder.ins().fcvt_to_uint(ty, val)4265}42664267/// Returns whether it's acceptable to rely on traps in CLIF memory-related4268/// instructions (e.g. loads and stores).4269///4270/// This is enabled if `signals_based_traps` is `true` since signal handlers4271/// are available, but this is additionally forcibly disabled if Pulley is4272/// being targeted since the Pulley runtime doesn't catch segfaults for4273/// itself.4274pub fn clif_memory_traps_enabled(&self) -> bool {4275self.tunables.signals_based_traps && !self.is_pulley()4276}42774278/// Returns whether it's acceptable to have CLIF instructions natively trap,4279/// such as division-by-zero.4280///4281/// This enabled if `signals_based_traps` is `true` or on Pulley4282/// unconditionally since Pulley doesn't use hardware-based traps in its4283/// runtime.4284pub fn clif_instruction_traps_enabled(&self) -> bool {4285self.tunables.signals_based_traps || self.is_pulley()4286}42874288/// Returns whether loads from the null address are allowed as signals of4289/// whether to trap or not.4290pub fn load_from_zero_allowed(&self) -> bool {4291// Pulley allows loads-from-zero and otherwise this is only allowed with4292// traps + spectre mitigations.4293self.is_pulley()4294|| (self.clif_memory_traps_enabled() && self.heap_access_spectre_mitigation())4295}42964297/// Returns whether translation is happening for Pulley bytecode.4298pub fn is_pulley(&self) -> bool {4299self.isa.triple().is_pulley()4300}4301}43024303// Helper function to convert an `IndexType` to an `ir::Type`.4304//4305// Implementing From/Into trait for `IndexType` or `ir::Type` would4306// introduce an extra dependency between `wasmtime_types` and `cranelift_codegen`.4307fn index_type_to_ir_type(index_type: IndexType) -> ir::Type {4308match index_type {4309IndexType::I32 => 
I32,4310IndexType::I64 => I64,4311}4312}431343144315