Path: blob/main/crates/cranelift/src/func_environ/gc/enabled/drc.rs
1693 views
//! Compiler for the deferred reference-counting (DRC) collector and its
//! barriers.

use super::*;
use crate::translate::TargetEnvironment;
use crate::{TRAP_INTERNAL_ASSERT, func_environ::FuncEnvironment};
use cranelift_codegen::ir::condcodes::IntCC;
use cranelift_codegen::ir::{self, InstBuilder};
use cranelift_frontend::FunctionBuilder;
use smallvec::SmallVec;
use wasmtime_environ::drc::{EXCEPTION_TAG_DEFINED_OFFSET, EXCEPTION_TAG_INSTANCE_OFFSET};
use wasmtime_environ::{
    GcTypeLayouts, ModuleInternedTypeIndex, PtrSize, TypeIndex, VMGcKind, WasmHeapTopType,
    WasmHeapType, WasmRefType, WasmResult, WasmStorageType, WasmValType, drc::DrcTypeLayouts,
};

#[derive(Default)]
pub struct DrcCompiler {
    layouts: DrcTypeLayouts,
}

impl DrcCompiler {
    /// Generate code to load the given GC reference's ref count.
    ///
    /// Assumes that the given `gc_ref` is a non-null, non-i31 GC reference.
    fn load_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
    ) -> ir::Value {
        let offset = func_env.offsets.vm_drc_header_ref_count();
        let pointer = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset,
                access_size: u8::try_from(ir::types::I64.bytes()).unwrap(),
            },
        );
        builder
            .ins()
            .load(ir::types::I64, ir::MemFlags::trusted(), pointer, 0)
    }

    /// Generate code to update the given GC reference's ref count to the new
    /// value.
    ///
    /// Assumes that the given `gc_ref` is a non-null, non-i31 GC reference.
    fn store_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
        new_ref_count: ir::Value,
    ) {
        let offset = func_env.offsets.vm_drc_header_ref_count();
        let pointer = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset,
                access_size: u8::try_from(ir::types::I64.bytes()).unwrap(),
            },
        );
        builder
            .ins()
            .store(ir::MemFlags::trusted(), new_ref_count, pointer, 0);
    }

    /// Generate code to increment or decrement the given GC reference's ref
    /// count.
    ///
    /// The new ref count is returned.
    ///
    /// Assumes that the given `gc_ref` is a non-null, non-i31 GC reference.
    fn mutate_ref_count(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        gc_ref: ir::Value,
        delta: i64,
    ) -> ir::Value {
        debug_assert!(delta == -1 || delta == 1);
        let old_ref_count = self.load_ref_count(func_env, builder, gc_ref);
        let new_ref_count = builder.ins().iadd_imm(old_ref_count, delta);
        self.store_ref_count(func_env, builder, gc_ref, new_ref_count);
        new_ref_count
    }

    /// Push `gc_ref` onto the over-approximated-stack-roots list.
    ///
    /// `gc_ref` must not already be in the list.
    ///
    /// `reserved` must be the current reserved bits for this `gc_ref`.
    fn push_onto_over_approximated_stack_roots(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        reserved: ir::Value,
    ) {
        debug_assert_eq!(builder.func.dfg.value_type(gc_ref), ir::types::I32);
        debug_assert_eq!(builder.func.dfg.value_type(reserved), ir::types::I32);

        let head = self.load_over_approximated_stack_roots_head(func_env, builder);

        // Load the current first list element, which will be our new next list
        // element.
        let next = builder
            .ins()
            .load(ir::types::I32, ir::MemFlags::trusted(), head, 0);

        // Update our object's header to point to `next` and consider itself part of the list.
        self.set_next_over_approximated_stack_root(func_env, builder, gc_ref, next);
        self.set_in_over_approximated_stack_roots_bit(func_env, builder, gc_ref, reserved);

        // Increment our ref count because the list is logically holding a strong reference.
        self.mutate_ref_count(func_env, builder, gc_ref, 1);

        // Commit this object as the new head of the list.
        builder
            .ins()
            .store(ir::MemFlags::trusted(), gc_ref, head, 0);
    }

    /// Load a pointer to the first element of the DRC heap's
    /// over-approximated-stack-roots list.
    fn load_over_approximated_stack_roots_head(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
    ) -> ir::Value {
        let ptr_ty = func_env.pointer_type();
        let vmctx = func_env.vmctx(&mut builder.func);
        let vmctx = builder.ins().global_value(ptr_ty, vmctx);
        builder.ins().load(
            ptr_ty,
            ir::MemFlags::trusted().with_readonly(),
            vmctx,
            i32::from(func_env.offsets.ptr.vmctx_gc_heap_data()),
        )
    }

    /// Set the `VMDrcHeader::next_over_approximated_stack_root` field.
    fn set_next_over_approximated_stack_root(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        next: ir::Value,
    ) {
        debug_assert_eq!(builder.func.dfg.value_type(gc_ref), ir::types::I32);
        debug_assert_eq!(builder.func.dfg.value_type(next), ir::types::I32);
        let ptr = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset: func_env
                    .offsets
                    .vm_drc_header_next_over_approximated_stack_root(),
                access_size: u8::try_from(ir::types::I32.bytes()).unwrap(),
            },
        );
        builder.ins().store(ir::MemFlags::trusted(), next, ptr, 0);
    }

    /// Set the in-over-approximated-stack-roots list bit in a `VMDrcHeader`'s
    /// reserved bits.
    fn set_in_over_approximated_stack_roots_bit(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        old_reserved_bits: ir::Value,
    ) {
        let in_set_bit = builder.ins().iconst(
            ir::types::I32,
            i64::from(wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT),
        );
        let new_reserved = builder.ins().bor(old_reserved_bits, in_set_bit);
        self.set_reserved_bits(func_env, builder, gc_ref, new_reserved);
    }

    /// Update the reserved bits in a `VMDrcHeader`.
    fn set_reserved_bits(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        gc_ref: ir::Value,
        new_reserved: ir::Value,
    ) {
        let ptr = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset: func_env.offsets.vm_gc_header_reserved_bits(),
                access_size: u8::try_from(ir::types::I32.bytes()).unwrap(),
            },
        );
        builder
            .ins()
            .store(ir::MemFlags::trusted(), new_reserved, ptr, 0);
    }

    /// Write to an uninitialized field or element inside a GC object.
    fn init_field(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        field_addr: ir::Value,
        ty: WasmStorageType,
        val: ir::Value,
    ) -> WasmResult<()> {
        // Data inside GC objects is always little endian.
        let flags = ir::MemFlags::trusted().with_endianness(ir::Endianness::Little);

        match ty {
            WasmStorageType::Val(WasmValType::Ref(r))
                if r.heap_type.top() == WasmHeapTopType::Func =>
            {
                write_func_ref_at_addr(func_env, builder, r, flags, field_addr, val)?;
            }
            WasmStorageType::Val(WasmValType::Ref(r)) => {
                self.translate_init_gc_reference(func_env, builder, r, field_addr, val, flags)?;
            }
            WasmStorageType::I8 => {
                assert_eq!(builder.func.dfg.value_type(val), ir::types::I32);
                builder.ins().istore8(flags, val, field_addr, 0);
            }
            WasmStorageType::I16 => {
                assert_eq!(builder.func.dfg.value_type(val), ir::types::I32);
                builder.ins().istore16(flags, val, field_addr, 0);
            }
            WasmStorageType::Val(_) => {
                let size_of_access = wasmtime_environ::byte_size_of_wasm_ty_in_gc_heap(&ty);
                assert_eq!(builder.func.dfg.value_type(val).bytes(), size_of_access);
                builder.ins().store(flags, val, field_addr, 0);
            }
        }

        Ok(())
    }

    /// Write to an uninitialized GC reference field, initializing it.
    ///
    /// ```text
    /// *dst = new_val
    /// ```
    ///
    /// Doesn't need to do a full write barrier: we don't have an old reference
    /// that is being overwritten and needs its refcount decremented, just a new
    /// reference whose count should be incremented.
    fn translate_init_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
        let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        // Special case for references to uninhabited bottom types: see
        // `translate_write_gc_reference` for details.
        if let WasmHeapType::None = ty.heap_type {
            if ty.nullable {
                let null = builder.ins().iconst(ref_ty, 0);
                builder.ins().store(flags, null, dst, 0);
            } else {
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }
            return Ok(());
        };

        // Special case for `i31ref`s: no need for any barriers.
        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags);
        }

        // Our initialization barrier for GC references being copied out of the
        // stack and initializing a table/global/struct field/etc... is roughly
        // equivalent to the following pseudo-CLIF:
        //
        // ```
        // current_block:
        //     ...
        //     let new_val_is_null_or_i31 = ...
        //     brif new_val_is_null_or_i31, continue_block, inc_ref_block
        //
        // inc_ref_block:
        //     let ref_count = load new_val.ref_count
        //     let new_ref_count = iadd_imm ref_count, 1
        //     store new_val.ref_count, new_ref_count
        //     jump continue_block
        //
        // continue_block:
        //     store dst, new_val
        //     ...
        // ```
        //
        // This write barrier is responsible for ensuring that the new value's
        // ref count is incremented now that the table/global/struct/etc... is
        // holding onto it.

        let current_block = builder.current_block().unwrap();
        let inc_ref_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.insert_block_after(inc_ref_block, current_block);
        builder.insert_block_after(continue_block, inc_ref_block);

        // Current block: check whether the new value is non-null and
        // non-i31. If so, branch to the `inc_ref_block`.
        log::trace!("DRC initialization barrier: check if the value is null or i31");
        let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val);
        builder.ins().brif(
            new_val_is_null_or_i31,
            continue_block,
            &[],
            inc_ref_block,
            &[],
        );

        // Block to increment the ref count of the new value when it is non-null
        // and non-i31.
        builder.switch_to_block(inc_ref_block);
        builder.seal_block(inc_ref_block);
        log::trace!("DRC initialization barrier: increment the ref count of the initial value");
        self.mutate_ref_count(func_env, builder, new_val, 1);
        builder.ins().jump(continue_block, &[]);

        // Join point after we're done with the GC barrier: do the actual store
        // to initialize the field.
        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!(
            "DRC initialization barrier: finally, store into {dst:?} to initialize the field"
        );
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?;

        Ok(())
    }
}

/// Emit CLIF to call the `gc_raw_alloc` libcall.
fn emit_gc_raw_alloc(
    func_env: &mut FuncEnvironment<'_>,
    builder: &mut FunctionBuilder<'_>,
    kind: VMGcKind,
    ty: ModuleInternedTypeIndex,
    size: ir::Value,
    align: u32,
) -> ir::Value {
    let gc_alloc_raw_builtin = func_env.builtin_functions.gc_alloc_raw(builder.func);
    let vmctx = func_env.vmctx_val(&mut builder.cursor());

    let kind = builder
        .ins()
        .iconst(ir::types::I32, i64::from(kind.as_u32()));

    let ty = builder.ins().iconst(ir::types::I32, i64::from(ty.as_u32()));

    assert!(align.is_power_of_two());
    let align = builder.ins().iconst(ir::types::I32, i64::from(align));

    let call_inst = builder
        .ins()
        .call(gc_alloc_raw_builtin, &[vmctx, kind, ty, size, align]);

    let gc_ref = builder.func.dfg.first_result(call_inst);
    builder.declare_value_needs_stack_map(gc_ref);
    gc_ref
}

impl GcCompiler for DrcCompiler {
    fn layouts(&self) -> &dyn GcTypeLayouts {
        &self.layouts
    }

    fn alloc_array(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        array_type_index: TypeIndex,
        init: super::ArrayInit<'_>,
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[array_type_index].unwrap_module_type_index();
        let ptr_ty = func_env.pointer_type();

        let len_offset = gc_compiler(func_env)?.layouts().array_length_field_offset();
        let array_layout = func_env.array_layout(interned_type_index).clone();
        let base_size = array_layout.base_size;
        let align = array_layout.align;
        let len_to_elems_delta = base_size.checked_sub(len_offset).unwrap();

        // First, compute the array's total size from its base size, element
        // size, and length.
        let len = init.len(&mut builder.cursor());
        let size = emit_array_size(func_env, builder, &array_layout, len);

        // Second, now that we have the array object's total size, call the
        // `gc_alloc_raw` builtin libcall to allocate the array.
        let array_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::ArrayRef,
            interned_type_index,
            size,
            align,
        );

        // Write the array's length into the appropriate slot.
        //
        // Note: we don't need to bounds-check the GC ref access here, since we
        // trust the results of the allocation libcall.
        let base = func_env.get_gc_heap_base(builder);
        let extended_array_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), array_ref);
        let object_addr = builder.ins().iadd(base, extended_array_ref);
        let len_addr = builder.ins().iadd_imm(object_addr, i64::from(len_offset));
        let len = init.len(&mut builder.cursor());
        builder
            .ins()
            .store(ir::MemFlags::trusted(), len, len_addr, 0);

        // Finally, initialize the elements.
        let len_to_elems_delta = builder.ins().iconst(ptr_ty, i64::from(len_to_elems_delta));
        let elems_addr = builder.ins().iadd(len_addr, len_to_elems_delta);
        init.initialize(
            func_env,
            builder,
            interned_type_index,
            base_size,
            size,
            elems_addr,
            |func_env, builder, elem_ty, elem_addr, val| {
                self.init_field(func_env, builder, elem_addr, elem_ty, val)
            },
        )?;
        Ok(array_ref)
    }

    fn alloc_struct(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        struct_type_index: TypeIndex,
        field_vals: &[ir::Value],
    ) -> WasmResult<ir::Value> {
        let interned_type_index =
            func_env.module.types[struct_type_index].unwrap_module_type_index();
        let struct_layout = func_env.struct_or_exn_layout(interned_type_index);

        // Copy some stuff out of the struct layout to avoid borrowing issues.
        let struct_size = struct_layout.size;
        let struct_align = struct_layout.align;
        let field_offsets: SmallVec<[_; 8]> = struct_layout.fields.iter().copied().collect();
        assert_eq!(field_vals.len(), field_offsets.len());

        let struct_size_val = builder.ins().iconst(ir::types::I32, i64::from(struct_size));

        let struct_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::StructRef,
            interned_type_index,
            struct_size_val,
            struct_align,
        );

        // Second, initialize each of the newly-allocated struct's fields.
        //
        // Note: we don't need to bounds-check the GC ref access here, since we
        // trust the results of the allocation libcall.
        let base = func_env.get_gc_heap_base(builder);
        let extended_struct_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), struct_ref);
        let raw_ptr_to_struct = builder.ins().iadd(base, extended_struct_ref);
        initialize_struct_fields(
            func_env,
            builder,
            interned_type_index,
            raw_ptr_to_struct,
            field_vals,
            |func_env, builder, ty, field_addr, val| {
                self.init_field(func_env, builder, field_addr, ty, val)
            },
        )?;

        Ok(struct_ref)
    }

    fn alloc_exn(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder<'_>,
        tag_index: TagIndex,
        field_vals: &[ir::Value],
        instance_id: ir::Value,
        tag: ir::Value,
    ) -> WasmResult<ir::Value> {
        let interned_type_index = func_env.module.tags[tag_index]
            .exception
            .unwrap_module_type_index();
        let exn_layout = func_env.struct_or_exn_layout(interned_type_index);

        // Copy some stuff out of the exception layout to avoid borrowing issues.
        let exn_size = exn_layout.size;
        let exn_align = exn_layout.align;
        let field_offsets: SmallVec<[_; 8]> = exn_layout.fields.iter().copied().collect();
        assert_eq!(field_vals.len(), field_offsets.len());

        let exn_size_val = builder.ins().iconst(ir::types::I32, i64::from(exn_size));

        let exn_ref = emit_gc_raw_alloc(
            func_env,
            builder,
            VMGcKind::ExnRef,
            interned_type_index,
            exn_size_val,
            exn_align,
        );

        // Second, initialize each of the newly-allocated exception
        // object's fields.
        //
        // Note: we don't need to bounds-check the GC ref access here, since we
        // trust the results of the allocation libcall.
        let base = func_env.get_gc_heap_base(builder);
        let extended_exn_ref =
            uextend_i32_to_pointer_type(builder, func_env.pointer_type(), exn_ref);
        let raw_ptr_to_exn = builder.ins().iadd(base, extended_exn_ref);
        initialize_struct_fields(
            func_env,
            builder,
            interned_type_index,
            raw_ptr_to_exn,
            field_vals,
            |func_env, builder, ty, field_addr, val| {
                self.init_field(func_env, builder, field_addr, ty, val)
            },
        )?;

        // Finally, initialize the tag fields.
        let instance_id_addr = builder
            .ins()
            .iadd_imm(raw_ptr_to_exn, i64::from(EXCEPTION_TAG_INSTANCE_OFFSET));
        self.init_field(
            func_env,
            builder,
            instance_id_addr,
            WasmStorageType::Val(WasmValType::I32),
            instance_id,
        )?;
        let tag_addr = builder
            .ins()
            .iadd_imm(raw_ptr_to_exn, i64::from(EXCEPTION_TAG_DEFINED_OFFSET));
        self.init_field(
            func_env,
            builder,
            tag_addr,
            WasmStorageType::Val(WasmValType::I32),
            tag,
        )?;

        Ok(exn_ref)
    }

    fn translate_read_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        src: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<ir::Value> {
        log::trace!("translate_read_gc_reference({ty:?}, {src:?}, {flags:?})");

        assert!(ty.is_vmgcref_type());

        let (reference_type, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        // Special case for references to uninhabited bottom types: the
        // reference must either be nullable and we can just eagerly return
        // null, or we are in dynamically unreachable code and should just trap.
        if let WasmHeapType::None = ty.heap_type {
            let null = builder.ins().iconst(reference_type, 0);

            // If the `flags` can trap, then we need to do an actual load. We
            // might be relying on, e.g., this load trapping to raise an
            // out-of-bounds-table-index trap, rather than successfully loading
            // a null `noneref`.
            //
            // That said, while we will do the load, we won't use the loaded
            // value, and will still use our null constant below. This will
            // avoid an unnecessary load dependency, slightly improving the code
            // we ultimately emit. This probably doesn't matter, but it is easy
            // to do and can only improve things, so we do it.
            if flags.trap_code().is_some() {
                let _ = builder.ins().load(reference_type, flags, src, 0);
            }

            if !ty.nullable {
                // NB: Don't use an unconditional trap instruction, since that
                // is a block terminator, and we still need to integrate with
                // the rest of the surrounding code.
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }

            return Ok(null);
        };

        // Special case for `i31` references: they don't need barriers.
        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_load_gc_ref(builder, ty.heap_type, src, flags);
        }

        // Our read barrier for GC references is roughly equivalent to the
        // following pseudo-CLIF:
        //
        // ```
        // current_block:
        //     ...
        //     let gc_ref = load src
        //     let gc_ref_is_null = is_null gc_ref
        //     let gc_ref_is_i31 = ...
        //     let gc_ref_is_null_or_i31 = bor gc_ref_is_null, gc_ref_is_i31
        //     brif gc_ref_is_null_or_i31, continue_block, non_null_gc_ref_block
        //
        // non_null_gc_ref_block:
        //     let reserved = load reserved bits from gc_ref's header
        //     let in_set_bit = iconst OVER_APPROX_SET_BIT
        //     let in_set = band reserved, in_set_bit
        //     br_if in_set, continue_block, insert_block
        //
        // insert_block:
        //     let next = load over-approximated-stack-roots head from DRC heap
        //     store gc_ref to over-approximated-stack-roots head in DRC heap
        //     store next to gc_ref's header's next_over_approximated_stack_root field
        //     let new_reserved = bor reserved, in_set_bit
        //     store new_reserved to gc_ref's header's reserved bits
        //     inc_ref(gc_ref)
        //     jump continue_block
        //
        // continue_block:
        //     ...
        // ```
        //
        // This ensures that all GC references entering the Wasm stack are in
        // the over-approximated-stack-roots list.

        let current_block = builder.current_block().unwrap();
        let non_null_gc_ref_block = builder.create_block();
        let insert_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.insert_block_after(non_null_gc_ref_block, current_block);
        builder.insert_block_after(insert_block, non_null_gc_ref_block);
        builder.insert_block_after(continue_block, insert_block);

        log::trace!("DRC read barrier: load the gc reference and check for null or i31");
        let gc_ref = unbarriered_load_gc_ref(builder, ty.heap_type, src, flags)?;
        let gc_ref_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, gc_ref);
        builder.ins().brif(
            gc_ref_is_null_or_i31,
            continue_block,
            &[],
            non_null_gc_ref_block,
            &[],
        );

        // Block for when the GC reference is not null and is not an `i31ref`.
        //
        // Tests whether the object is already in the
        // over-approximated-stack-roots list or not.
        builder.switch_to_block(non_null_gc_ref_block);
        builder.seal_block(non_null_gc_ref_block);
        log::trace!(
            "DRC read barrier: check whether this object is already in the \
             over-approximated-stack-roots list"
        );
        let ptr = func_env.prepare_gc_ref_access(
            builder,
            gc_ref,
            BoundsCheck::StaticOffset {
                offset: func_env.offsets.vm_gc_header_reserved_bits(),
                access_size: u8::try_from(ir::types::I32.bytes()).unwrap(),
            },
        );
        let reserved = builder
            .ins()
            .load(ir::types::I32, ir::MemFlags::trusted(), ptr, 0);
        let in_set_bit = builder.ins().iconst(
            ir::types::I32,
            i64::from(wasmtime_environ::drc::HEADER_IN_OVER_APPROX_LIST_BIT),
        );
        let in_set = builder.ins().band(reserved, in_set_bit);
        builder
            .ins()
            .brif(in_set, continue_block, &[], insert_block, &[]);

        // Block for when the object needs to be inserted into the
        // over-approximated-stack-roots list.
        builder.switch_to_block(insert_block);
        builder.seal_block(insert_block);
        log::trace!(
            "DRC read barrier: push the object onto the over-approximated-stack-roots list"
        );
        self.push_onto_over_approximated_stack_roots(func_env, builder, gc_ref, reserved);
        builder.ins().jump(continue_block, &[]);

        // Join point after we're done with the GC barrier.
        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!("translate_read_gc_reference(..) -> {gc_ref:?}");
        Ok(gc_ref)
    }

    fn translate_write_gc_reference(
        &mut self,
        func_env: &mut FuncEnvironment<'_>,
        builder: &mut FunctionBuilder,
        ty: WasmRefType,
        dst: ir::Value,
        new_val: ir::Value,
        flags: ir::MemFlags,
    ) -> WasmResult<()> {
        assert!(ty.is_vmgcref_type());

        let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type);
        debug_assert!(needs_stack_map);

        // Special case for references to uninhabited bottom types: either the
        // reference is nullable and we can just eagerly store null into `dst`
        // or we are in unreachable code and should just trap.
        if let WasmHeapType::None = ty.heap_type {
            if ty.nullable {
                let null = builder.ins().iconst(ref_ty, 0);
                builder.ins().store(flags, null, dst, 0);
            } else {
                // NB: Don't use an unconditional trap instruction, since that
                // is a block terminator, and we still need to integrate with
                // the rest of the surrounding code.
                let zero = builder.ins().iconst(ir::types::I32, 0);
                builder.ins().trapz(zero, TRAP_INTERNAL_ASSERT);
            }
            return Ok(());
        };

        // Special case for `i31` references: they don't need barriers.
        if let WasmHeapType::I31 = ty.heap_type {
            return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags);
        }

        // Our write barrier for GC references being copied out of the stack and
        // written into a table/global/etc... is roughly equivalent to the
        // following pseudo-CLIF:
        //
        // ```
        // current_block:
        //     ...
        //     let old_val = *dst
        //     let new_val_is_null = ref.null new_val
        //     let new_val_is_i31 = ...
        //     let new_val_is_null_or_i31 = bor new_val_is_null, new_val_is_i31
        //     brif new_val_is_null_or_i31, check_old_val_block, inc_ref_block
        //
        // inc_ref_block:
        //     let ref_count = load new_val.ref_count
        //     let new_ref_count = iadd_imm ref_count, 1
        //     store new_val.ref_count, new_ref_count
        //     jump check_old_val_block
        //
        // check_old_val_block:
        //     store dst, new_val
        //     let old_val_is_null = ref.null old_val
        //     let old_val_is_i31 = ...
        //     let old_val_is_null_or_i31 = bor old_val_is_null, old_val_is_i31
        //     brif old_val_is_null_or_i31, continue_block, dec_ref_block
        //
        // dec_ref_block:
        //     let ref_count = load old_val.ref_count
        //     let new_ref_count = isub_imm ref_count, 1
        //     let old_val_needs_drop = icmp_imm eq new_ref_count, 0
        //     brif old_val_needs_drop, drop_old_val_block, store_dec_ref_block
        //
        // cold drop_old_val_block:
        //     call drop_gc_ref(old_val)
        //     jump continue_block
        //
        // store_dec_ref_block:
        //     store old_val.ref_count, new_ref_count
        //     jump continue_block
        //
        // continue_block:
        //     ...
        // ```
        //
        // This write barrier is responsible for ensuring that:
        //
        // 1. The new value's ref count is incremented now that the table is
        //    holding onto it.
        //
        // 2. The old value's ref count is decremented, and that it is dropped
        //    if the ref count reaches zero.
        //
        // We must do the increment before the decrement. If we did it in the
        // other order, then when `*dst == new_val`, we could confuse ourselves
        // by observing a zero ref count after the decrement but before it would
        // become non-zero again with the subsequent increment.
        //
        // Additionally, we take care that we don't ever call out-of-line to
        // drop the old value until all the new value has been written into
        // `dst` and its reference count has been updated. This makes sure that
        // host code has a consistent view of the world.

        let current_block = builder.current_block().unwrap();
        let inc_ref_block = builder.create_block();
        let check_old_val_block = builder.create_block();
        let dec_ref_block = builder.create_block();
        let drop_old_val_block = builder.create_block();
        let store_dec_ref_block = builder.create_block();
        let continue_block = builder.create_block();

        builder.ensure_inserted_block();
        builder.set_cold_block(drop_old_val_block);

        builder.insert_block_after(inc_ref_block, current_block);
        builder.insert_block_after(check_old_val_block, inc_ref_block);
        builder.insert_block_after(dec_ref_block, check_old_val_block);
        builder.insert_block_after(drop_old_val_block, dec_ref_block);
        builder.insert_block_after(store_dec_ref_block, drop_old_val_block);
        builder.insert_block_after(continue_block, store_dec_ref_block);

        // Load the old value and then check whether the new value is non-null
        // and non-i31.
        log::trace!("DRC write barrier: load old ref; check if new ref is null or i31");
        let old_val = unbarriered_load_gc_ref(builder, ty.heap_type, dst, flags)?;
        let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val);
        builder.ins().brif(
            new_val_is_null_or_i31,
            check_old_val_block,
            &[],
            inc_ref_block,
            &[],
        );

        // Block to increment the ref count of the new value when it is non-null
        // and non-i31.
        builder.switch_to_block(inc_ref_block);
        log::trace!("DRC write barrier: increment new ref's ref count");
        builder.seal_block(inc_ref_block);
        self.mutate_ref_count(func_env, builder, new_val, 1);
        builder.ins().jump(check_old_val_block, &[]);

        // Block to store the new value into `dst` and then check whether the
        // old value is non-null and non-i31 and therefore needs its ref count
        // decremented.
        builder.switch_to_block(check_old_val_block);
        builder.seal_block(check_old_val_block);
        log::trace!("DRC write barrier: store new ref into field; check if old ref is null or i31");
        unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?;
        let old_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, old_val);
        builder.ins().brif(
            old_val_is_null_or_i31,
            continue_block,
            &[],
            dec_ref_block,
            &[],
        );

        // Block to decrement the ref count of the old value when it is non-null
        // and non-i31.
        builder.switch_to_block(dec_ref_block);
        builder.seal_block(dec_ref_block);
        log::trace!(
            "DRC write barrier: decrement old ref's ref count and check for zero ref count"
        );
        let ref_count = self.load_ref_count(func_env, builder, old_val);
        let new_ref_count = builder.ins().iadd_imm(ref_count, -1);
        let old_val_needs_drop = builder.ins().icmp_imm(IntCC::Equal, new_ref_count, 0);
        builder.ins().brif(
            old_val_needs_drop,
            drop_old_val_block,
            &[],
            store_dec_ref_block,
            &[],
        );

        // Block to call out-of-line to drop a GC reference when its ref count
        // reaches zero.
        //
        // Note that this libcall does its own dec-ref operation, so we only
        // actually store `new_ref_count` back to the `old_val` object when
        // `new_ref_count != 0`.
        builder.switch_to_block(drop_old_val_block);
        builder.seal_block(drop_old_val_block);
        log::trace!("DRC write barrier: drop old ref with a ref count of zero");
        let drop_gc_ref_libcall = func_env.builtin_functions.drop_gc_ref(builder.func);
        let vmctx = func_env.vmctx_val(&mut builder.cursor());
        builder.ins().call(drop_gc_ref_libcall, &[vmctx, old_val]);
        builder.ins().jump(continue_block, &[]);

        // Block to store the new ref count back to `old_val` for when
        // `new_ref_count != 0`, as explained above.
        builder.switch_to_block(store_dec_ref_block);
        builder.seal_block(store_dec_ref_block);
        log::trace!("DRC write barrier: store decremented ref count into old ref");
        self.store_ref_count(func_env, builder, old_val, new_ref_count);
        builder.ins().jump(continue_block, &[]);

        // Join point after we're done with the GC barrier.
        builder.switch_to_block(continue_block);
        builder.seal_block(continue_block);
        log::trace!("DRC write barrier: finished");
        Ok(())
    }
}