GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/crates/fuzzing/src/generators/gc_ops/ops.rs
//! Operations for `gc_ops` fuzz test cases.

use crate::generators::gc_ops::types::StackType;
use crate::generators::gc_ops::{
    limits::GcOpsLimits,
    types::{CompositeType, RecGroupId, StructType, TypeId, Types},
};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use wasm_encoder::{
    CodeSection, ConstExpr, EntityType, ExportKind, ExportSection, Function, FunctionSection,
    GlobalSection, ImportSection, Instruction, Module, RefType, TableSection, TableType,
    TypeSection, ValType,
};

/// The base offsets and indices for various Wasm entities within
/// their index spaces in the encoded Wasm binary.
#[derive(Clone, Copy)]
struct WasmEncodingBases {
    struct_type_base: u32,
    typed_first_func_index: u32,
    struct_local_idx: u32,
    typed_local_base: u32,
    struct_global_idx: u32,
    typed_global_base: u32,
    struct_table_idx: u32,
    typed_table_base: u32,
}

/// A description of a Wasm module that performs a series of `externref` and
/// GC struct operations: table, global, and local accesses, struct
/// allocations, and calls into the host.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct GcOps {
    pub(crate) limits: GcOpsLimits,
    pub(crate) ops: Vec<GcOp>,
    pub(crate) types: Types,
}

impl GcOps {
    /// Serialize this module into a Wasm binary.
    ///
    /// The module requires several function imports. See this function's
    /// implementation for their exact types.
    ///
    /// The single export of the module is a function "run" that takes
    /// `self.limits.num_params` parameters of type `externref`.
    ///
    /// The "run" function does not terminate; you should run it with limited
    /// fuel. It is also not guaranteed to avoid traps: it may access its
    /// tables out of bounds.
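    ///
    /// A minimal usage sketch (illustrative only; the real callers live
    /// elsewhere in Wasmtime's fuzzing infrastructure and may differ):
    ///
    /// ```ignore
    /// let mut test_case = GcOps::default();
    /// // ...populate or mutate `test_case.ops`, e.g. via `GcOp::generate`...
    /// let wasm = test_case.to_wasm_binary(); // calls `fixup()` internally
    /// // Instantiate `wasm` with the required host imports and call its
    /// // exported "run" function with bounded fuel.
    /// ```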
    pub fn to_wasm_binary(&mut self) -> Vec<u8> {
        self.fixup();

        let mut module = Module::new();

        // Encode the types for all functions that we are using.
        let mut types = TypeSection::new();

        // 0: "gc"
        types.ty().function(
            vec![],
            // Return a bunch of stuff from `gc` so that we exercise GCing when
            // there is return pointer space allocated on the stack. This is
            // especially important because the x64 backend currently
            // dynamically adjusts the stack pointer for each call that uses
            // return pointers rather than statically allocating space in the
            // stack frame.
            vec![ValType::EXTERNREF, ValType::EXTERNREF, ValType::EXTERNREF],
        );

        // 1: "run"
        let mut params: Vec<ValType> = Vec::with_capacity(self.limits.num_params as usize);
        for _i in 0..self.limits.num_params {
            params.push(ValType::EXTERNREF);
        }
        let params_len =
            u32::try_from(params.len()).expect("params len should be within u32 range");
        let results = vec![];
        types.ty().function(params, results);

        // 2: `take_refs`
        types.ty().function(
            vec![ValType::EXTERNREF, ValType::EXTERNREF, ValType::EXTERNREF],
            vec![],
        );

        // 3: `make_refs`
        types.ty().function(
            vec![],
            vec![ValType::EXTERNREF, ValType::EXTERNREF, ValType::EXTERNREF],
        );

        // 4: `take_struct`
        types.ty().function(
            vec![ValType::Ref(RefType {
                nullable: true,
                heap_type: wasm_encoder::HeapType::Abstract {
                    shared: false,
                    ty: wasm_encoder::AbstractHeapType::Struct,
                },
            })],
            vec![],
        );

        let struct_type_base: u32 = types.len();

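        // Function types 0..=4 above cover the host imports and "run". The
        // concrete struct types are encoded next, starting at
        // `struct_type_base`, followed by one `take_struct_*`-style function
        // type per struct type (see `typed_fn_type_base` below).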
        let mut rec_groups: BTreeMap<RecGroupId, Vec<TypeId>> = self
            .types
            .rec_groups
            .iter()
            .copied()
            .map(|id| (id, Vec::new()))
            .collect();

        for (id, ty) in self.types.type_defs.iter() {
            rec_groups.entry(ty.rec_group).or_default().push(id.clone());
        }

        let encode_ty_id = |ty_id: &TypeId| -> wasm_encoder::SubType {
            let def = &self.types.type_defs[ty_id];
            match &def.composite_type {
                CompositeType::Struct(StructType {}) => wasm_encoder::SubType {
                    is_final: true,
                    supertype_idx: None,
                    composite_type: wasm_encoder::CompositeType {
                        inner: wasm_encoder::CompositeInnerType::Struct(wasm_encoder::StructType {
                            fields: Box::new([]),
                        }),
                        shared: false,
                        describes: None,
                        descriptor: None,
                    },
                },
            }
        };

        let mut struct_count = 0;

        for type_ids in rec_groups.values() {
            let members: Vec<wasm_encoder::SubType> = type_ids.iter().map(encode_ty_id).collect();
            types.ty().rec(members);
            struct_count += type_ids.len() as u32;
        }

        let typed_fn_type_base: u32 = struct_type_base + struct_count;

        for i in 0..struct_count {
            let concrete = struct_type_base + i;
            types.ty().function(
                vec![ValType::Ref(RefType {
                    nullable: true,
                    heap_type: wasm_encoder::HeapType::Concrete(concrete),
                })],
                vec![],
            );
        }

        // Import the host functions: `gc`, `take_refs`, `make_refs`, and
        // `take_struct`.
        let mut imports = ImportSection::new();
        imports.import("", "gc", EntityType::Function(0));
        imports.import("", "take_refs", EntityType::Function(2));
        imports.import("", "make_refs", EntityType::Function(3));
        imports.import("", "take_struct", EntityType::Function(4));

        // For each of our concrete struct types, define a function
        // import that takes an argument of that concrete type.
        let typed_first_func_index: u32 = imports.len();

        for i in 0..struct_count {
            let ty_idx = typed_fn_type_base + i;
            let name = format!("take_struct_{}", struct_type_base + i);
            imports.import("", &name, EntityType::Function(ty_idx));
        }

        // Define our `externref` table.
        let mut tables = TableSection::new();
        tables.table(TableType {
            element_type: RefType::EXTERNREF,
            minimum: u64::from(self.limits.table_size),
            maximum: None,
            table64: false,
            shared: false,
        });

        // Define our `structref` table.
        let struct_table_idx = tables.len();
        tables.table(TableType {
            element_type: RefType {
                nullable: true,
                heap_type: wasm_encoder::HeapType::Abstract {
                    shared: false,
                    ty: wasm_encoder::AbstractHeapType::Struct,
                },
            },
            minimum: u64::from(self.limits.table_size),
            maximum: None,
            table64: false,
            shared: false,
        });

        // Define one table per concrete struct type.
        let typed_table_base = tables.len();
        for i in 0..struct_count {
            let concrete = struct_type_base + i;
            tables.table(TableType {
                element_type: RefType {
                    nullable: true,
                    heap_type: wasm_encoder::HeapType::Concrete(concrete),
                },
                minimum: u64::from(self.limits.table_size),
                maximum: None,
                table64: false,
                shared: false,
            });
        }

        // Define our globals.
        let mut globals = GlobalSection::new();
        for _ in 0..self.limits.num_globals {
            globals.global(
                wasm_encoder::GlobalType {
                    val_type: wasm_encoder::ValType::EXTERNREF,
                    mutable: true,
                    shared: false,
                },
                &ConstExpr::ref_null(wasm_encoder::HeapType::EXTERN),
            );
        }

        // Add exactly one `(ref null struct)` global, initialized to
        // `ref.null struct`.
        let struct_global_idx = globals.len();
        globals.global(
            wasm_encoder::GlobalType {
                val_type: ValType::Ref(RefType {
                    nullable: true,
                    heap_type: wasm_encoder::HeapType::Abstract {
                        shared: false,
                        ty: wasm_encoder::AbstractHeapType::Struct,
                    },
                }),
                mutable: true,
                shared: false,
            },
            &ConstExpr::ref_null(wasm_encoder::HeapType::Abstract {
                shared: false,
                ty: wasm_encoder::AbstractHeapType::Struct,
            }),
        );

        // Add one typed `(ref null <type>)` global per struct type.
        let typed_global_base = globals.len();
        for i in 0..struct_count {
            let concrete = struct_type_base + i;
            globals.global(
                wasm_encoder::GlobalType {
                    val_type: ValType::Ref(RefType {
                        nullable: true,
                        heap_type: wasm_encoder::HeapType::Concrete(concrete),
                    }),
                    mutable: true,
                    shared: false,
                },
                &ConstExpr::ref_null(wasm_encoder::HeapType::Concrete(concrete)),
            );
        }

        // Define the "run" function export.
        let mut functions = FunctionSection::new();
        let mut exports = ExportSection::new();

        let run_defined_idx = functions.len();
        functions.function(1);
        let run_func_index = imports.len() + run_defined_idx;
        exports.export("run", ExportKind::Func, run_func_index);

        // Give ourselves one scratch local that we can use in various `GcOp`
        // implementations.
        let mut local_decls: Vec<(u32, ValType)> = vec![(1, ValType::EXTERNREF)];

        // The scratch `externref` local lives right after the params; next
        // comes one scratch `(ref null struct)` local, and then one local per
        // concrete struct type.
        let scratch_local = params_len;
        let struct_local_idx = scratch_local + 1;
        local_decls.push((
            1,
            ValType::Ref(RefType {
                nullable: true,
                heap_type: wasm_encoder::HeapType::Abstract {
                    shared: false,
                    ty: wasm_encoder::AbstractHeapType::Struct,
                },
            }),
        ));

        let typed_local_base: u32 = struct_local_idx + 1;
        for i in 0..struct_count {
            let concrete = struct_type_base + i;
            local_decls.push((
                1,
                ValType::Ref(RefType {
                    nullable: true,
                    heap_type: wasm_encoder::HeapType::Concrete(concrete),
                }),
            ));
        }

        let storage_bases = WasmEncodingBases {
            struct_type_base,
            typed_first_func_index,
            struct_local_idx,
            typed_local_base,
            struct_global_idx,
            typed_global_base,
            struct_table_idx,
            typed_table_base,
        };

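        // Build the body of "run": an infinite loop that performs each
        // generated op in sequence and then branches back to the loop header.
        // Illustrative shape of the emitted body:
        //
        //   (loop
        //     ;; ...each `GcOp` encoded by `GcOp::encode`...
        //     (br 0))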
        let mut func = Function::new(local_decls);
        func.instruction(&Instruction::Loop(wasm_encoder::BlockType::Empty));
        for op in &self.ops {
            op.encode(&mut func, scratch_local, storage_bases);
        }
        func.instruction(&Instruction::Br(0));
        func.instruction(&Instruction::End);
        func.instruction(&Instruction::End);

        let mut code = CodeSection::new();
        code.function(&func);

        module
            .section(&types)
            .section(&imports)
            .section(&functions)
            .section(&tables)
            .section(&globals)
            .section(&exports)
            .section(&code);

        module.finish()
    }

    /// Fixes this test case such that it becomes valid.
    ///
    /// This is necessary because a random mutation (e.g. removing an op in the
    /// middle of our sequence) might have made it so that subsequent ops no
    /// longer have their expected operand types on the Wasm stack.
    /// Furthermore, because we serialize and deserialize test cases, and
    /// libFuzzer will occasionally mutate those serialized bytes directly
    /// rather than use one of our custom mutations, we have no guarantee that
    /// pre-mutation test cases are even valid! Therefore, we always call this
    /// method before translating this "AST"-style representation into a raw
    /// Wasm binary.
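    ///
    /// For example (illustrative only), if a mutation leaves an op that pops
    /// three `externref`s at the front of the sequence, `fixup` inserts ops
    /// that produce those operands before it:
    ///
    /// ```ignore
    /// let mut test_case = GcOps::default();
    /// test_case.ops = vec![GcOp::TakeRefs]; // pops 3 externrefs from an empty stack
    /// test_case.fixup();
    /// // `test_case.ops` now starts with ops that push three externrefs
    /// // (e.g. `MakeRefs` or `NullExtern`) before `TakeRefs`.
    /// ```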
    pub fn fixup(&mut self) {
        self.limits.fixup();
        self.types.fixup(&self.limits);

        let mut new_ops = Vec::with_capacity(self.ops.len());
        let mut stack: Vec<StackType> = Vec::new();
        let num_types = u32::try_from(self.types.type_defs.len()).unwrap();

        let mut operand_types = Vec::new();
        for op in &self.ops {
            let Some(op) = op.fixup(&self.limits, num_types) else {
                continue;
            };

            debug_assert!(operand_types.is_empty());
            op.operand_types(&mut operand_types);
            for ty in operand_types.drain(..) {
                StackType::fixup(ty, &mut stack, &mut new_ops, num_types);
            }

            // Finally, emit the op itself (updates stack abstractly)
            let mut result_types = Vec::new();
            StackType::emit(op, &mut stack, &mut new_ops, num_types, &mut result_types);
        }

        // Drop any remaining values on the operand stack.
        for _ in 0..stack.len() {
            new_ops.push(GcOp::Drop);
        }

        log::trace!("ops after fixup: {new_ops:#?}");
        self.ops = new_ops;
    }

    /// Attempts to remove the last opcode from the sequence.
    ///
    /// Returns `true` if an opcode was successfully removed, or `false` if the
    /// list was already empty.
    pub fn pop(&mut self) -> bool {
        self.ops.pop().is_some()
    }
}

macro_rules! for_each_gc_op {
    ( $mac:ident ) => {
        $mac! {
            #[operands([])]
            #[results([ExternRef, ExternRef, ExternRef])]
            Gc,

            #[operands([])]
            #[results([ExternRef, ExternRef, ExternRef])]
            MakeRefs,

            #[operands([Some(ExternRef), Some(ExternRef), Some(ExternRef)])]
            #[results([])]
            TakeRefs,

            #[operands([])]
            #[results([ExternRef])]
            #[fixup(|limits, _num_types| {
                // Add one to make sure that out-of-bounds table accesses are
                // possible, but still rare.
                elem_index = elem_index % (limits.table_size + 1);
            })]
            TableGet { elem_index: u32 },

            #[operands([Some(ExternRef)])]
            #[results([])]
            #[fixup(|limits, _num_types| {
                // Add one to make sure that out-of-bounds table accesses are
                // possible, but still rare.
                elem_index = elem_index % (limits.table_size + 1);
            })]
            TableSet { elem_index: u32 },

            #[operands([])]
            #[results([ExternRef])]
            #[fixup(|limits, _num_types| {
                global_index = global_index.checked_rem(limits.num_globals)?;
            })]
            GlobalGet { global_index: u32 },

            #[operands([Some(ExternRef)])]
            #[results([])]
            #[fixup(|limits, _num_types| {
                global_index = global_index.checked_rem(limits.num_globals)?;
            })]
            GlobalSet { global_index: u32 },

            #[operands([])]
            #[results([ExternRef])]
            #[fixup(|limits, _num_types| {
                local_index = local_index.checked_rem(limits.num_params)?;
            })]
            LocalGet { local_index: u32 },

            #[operands([Some(ExternRef)])]
            #[results([])]
            #[fixup(|limits, _num_types| {
                local_index = local_index.checked_rem(limits.num_params)?;
            })]
            LocalSet { local_index: u32 },

            #[operands([])]
            #[results([Struct(Some(type_index))])]
            #[fixup(|_limits, num_types| {
                type_index = type_index.checked_rem(num_types)?;
            })]
            StructNew { type_index: u32 },

            #[operands([Some(Struct(None))])]
            #[results([])]
            TakeStructCall,

            #[operands([Some(Struct(Some(type_index)))])]
            #[results([])]
            #[fixup(|_limits, num_types| {
                type_index = type_index.checked_rem(num_types)?;
            })]
            TakeTypedStructCall { type_index: u32 },

            #[operands([Some(Struct(None))])]
            #[results([])]
            StructLocalSet,

            #[operands([])]
            #[results([Struct(None)])]
            StructLocalGet,

            #[operands([Some(Struct(Some(type_index)))])]
            #[results([])]
            #[fixup(|_limits, num_types| {
                type_index = type_index.checked_rem(num_types)?;
            })]
            TypedStructLocalSet { type_index: u32 },

            #[operands([])]
            #[results([Struct(Some(type_index))])]
            #[fixup(|_limits, num_types| {
                type_index = type_index.checked_rem(num_types)?;
            })]
            TypedStructLocalGet { type_index: u32 },

            #[operands([Some(Struct(None))])]
            #[results([])]
            StructGlobalSet,

            #[operands([])]
            #[results([Struct(None)])]
            StructGlobalGet,

            #[operands([Some(Struct(Some(type_index)))])]
            #[results([])]
            #[fixup(|_limits, num_types| {
                type_index = type_index.checked_rem(num_types)?;
            })]
            TypedStructGlobalSet { type_index: u32 },

            #[operands([])]
            #[results([Struct(Some(type_index))])]
            #[fixup(|_limits, num_types| {
                type_index = type_index.checked_rem(num_types)?;
            })]
            TypedStructGlobalGet { type_index: u32 },

            #[operands([Some(Struct(None))])]
            #[results([])]
            #[fixup(|limits, _num_types| {
                // Add one to make sure that out-of-bounds table accesses are
                // possible, but still rare.
                elem_index = elem_index % (limits.table_size + 1);
            })]
            StructTableSet { elem_index: u32 },

            #[operands([])]
            #[results([Struct(None)])]
            #[fixup(|limits, _num_types| {
                // Add one to make sure that out-of-bounds table accesses are
                // possible, but still rare.
                elem_index = elem_index % (limits.table_size + 1);
            })]
            StructTableGet { elem_index: u32 },

            #[operands([Some(Struct(Some(type_index)))])]
            #[results([])]
            #[fixup(|limits, num_types| {
                // Add one to make sure that out-of-bounds table accesses are
                // possible, but still rare.
                elem_index = elem_index % (limits.table_size + 1);
                type_index = type_index.checked_rem(num_types)?;
            })]
            TypedStructTableSet { elem_index: u32, type_index: u32 },

            #[operands([])]
            #[results([Struct(Some(type_index))])]
            #[fixup(|limits, num_types| {
                // Add one to make sure that out-of-bounds table accesses are
                // possible, but still rare.
                elem_index = elem_index % (limits.table_size + 1);
                type_index = type_index.checked_rem(num_types)?;
            })]
            TypedStructTableGet { elem_index: u32, type_index: u32 },

            #[operands([None])]
            #[results([])]
            Drop,

            #[operands([])]
            #[results([ExternRef])]
            NullExtern,

            #[operands([])]
            #[results([Struct(None)])]
            NullStruct,

            #[operands([])]
            #[results([Struct(Some(type_index))])]
            #[fixup(|_limits, num_types| {
                type_index = type_index.checked_rem(num_types)?;
            })]
            NullTypedStruct { type_index: u32 },
        }
    };
}

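// A note on the pattern above: `for_each_gc_op!` is a "callback" macro. It is
// invoked with the name of another macro and expands to an invocation of that
// macro with the full, attribute-annotated list of ops. A minimal sketch of
// the same pattern (hypothetical names, not part of this file):
//
//     macro_rules! for_each_color {
//         ($mac:ident) => { $mac! { Red, Green, Blue, } };
//     }
//     macro_rules! define_color_enum {
//         ($($variant:ident,)*) => { enum Color { $($variant),* } };
//     }
//     for_each_color!(define_color_enum); // expands to `enum Color { Red, Green, Blue }`
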
macro_rules! define_gc_op_variants {
    (
        $(
            $( #[$attr:meta] )*
            $op:ident $( { $( $field:ident : $field_ty:ty ),* } )? ,
        )*
    ) => {
        /// The operations that the generated "run" function can perform.
        #[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
        #[allow(missing_docs, reason = "self-describing")]
        pub enum GcOp {
            $(
                $op $( { $( $field : $field_ty ),* } )? ,
            )*
        }
    };
}
for_each_gc_op!(define_gc_op_variants);

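// The invocation above expands to roughly the following (excerpt, for
// illustration):
//
//     pub enum GcOp {
//         Gc,
//         MakeRefs,
//         TakeRefs,
//         TableGet { elem_index: u32 },
//         TableSet { elem_index: u32 },
//         // ...and so on for every op listed in `for_each_gc_op!`.
//     }
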
macro_rules! define_op_names {
    (
        $(
            $( #[$attr:meta] )*
            $op:ident $( { $( $field:ident : $field_ty:ty ),* } )? ,
        )*
    ) => {
        #[cfg(test)]
        pub(crate) const OP_NAMES: &[&str] = &[
            $(stringify!($op)),*
        ];
    }
}
for_each_gc_op!(define_op_names);

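// `OP_NAMES` expands to a `#[cfg(test)]`-only list of every variant name,
// e.g. `&["Gc", "MakeRefs", "TakeRefs", ...]`.
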
impl GcOp {
    #[cfg(test)]
    pub(crate) fn name(&self) -> &'static str {
        macro_rules! define_gc_op_name {
            (
                $(
                    $( #[$attr:meta] )*
                    $op:ident $( { $( $field:ident : $field_ty:ty ),* } )? ,
                )*
            ) => {
                match self {
                    $(
                        Self::$op $( { $($field: _),* } )? => stringify!($op),
                    )*
                }
            };
        }
        for_each_gc_op!(define_gc_op_name)
    }

    pub(crate) fn operand_types(&self, out: &mut Vec<Option<StackType>>) {
        macro_rules! define_gc_op_operand_types {
            (
                $(
                    #[operands($operands:expr)]
                    $( #[$attr:meta] )*
                    $op:ident $( { $( $field:ident : $field_ty:ty ),* } )? ,
                )*
            ) => {{
                use StackType::*;
                match self {
                    $(
                        Self::$op $( { $($field),* } )? => {
                            $(
                                $(
                                    #[allow(unused, reason = "macro code")]
                                    let $field = *$field;
                                )*
                            )?
                            let operands: [Option<StackType>; _] = $operands;
                            out.extend(operands);
                        }
                    )*
                }
            }};
        }
        for_each_gc_op!(define_gc_op_operand_types)
    }

    pub(crate) fn result_types(&self, out: &mut Vec<StackType>) {
        macro_rules! define_gc_op_result_types {
            (
                $(
                    #[operands($operands:expr)]
                    #[results($results:expr)]
                    $( #[$attr:meta] )*
                    $op:ident $( { $( $field:ident : $field_ty:ty ),* } )? ,
                )*
            ) => {{
                use StackType::*;
                match self {
                    $(
                        Self::$op $( { $($field),* } )? => {
                            $(
                                $(
                                    #[allow(unused, reason = "macro code")]
                                    let $field = *$field;
                                )*
                            )?
                            let results: [StackType; _] = $results;
                            out.extend(results);
                        }
                    )*
                }
            }};
        }
        for_each_gc_op!(define_gc_op_result_types)
    }

    pub(crate) fn fixup(&self, limits: &GcOpsLimits, num_types: u32) -> Option<Self> {
        macro_rules! define_gc_op_fixup {
            (
                $(
                    #[operands($operands:expr)]
                    #[results($results:expr)]
                    $( #[fixup(|$limits:ident, $num_types:ident| $fixup:expr)] )?
                    $op:ident $( { $( $field:ident : $field_ty:ty ),* } )? ,
                )*
            ) => {{
                match self {
                    $(
                        Self::$op $( { $($field),* } )? => {
                            $(
                                $(
                                    #[allow(unused_mut, reason = "macro code")]
                                    let mut $field = *$field;
                                )*
                                let $limits = limits;
                                let $num_types = num_types;
                                $fixup;
                            )?
                            Some(Self::$op $( { $( $field ),* } )? )
                        }
                    )*
                }
            }};
        }
        for_each_gc_op!(define_gc_op_fixup)
    }

    pub(crate) fn generate(ctx: &mut mutatis::Context) -> mutatis::Result<GcOp> {
        macro_rules! define_gc_op_generate {
            (
                $(
                    $( #[$attr:meta] )*
                    $op:ident $( { $( $field:ident : $field_ty:ty ),* } )? ,
                )*
            ) => {{
                let choices: &[fn(&mut mutatis::Context) -> mutatis::Result<GcOp>] = &[
                    $(
                        |_ctx| Ok(GcOp::$op $( {
                            $(
                                $field: {
                                    let mut mutator = <$field_ty as mutatis::DefaultMutate>::DefaultMutate::default();
                                    mutatis::Generate::<$field_ty>::generate(&mut mutator, _ctx)?
                                }
                            ),*
                        } )? ),
                    )*
                ];

                let f = *ctx.rng()
                    .choose(choices)
                    .unwrap();
                (f)(ctx)
            }};
        }
        for_each_gc_op!(define_gc_op_generate)
    }

    fn encode(&self, func: &mut Function, scratch_local: u32, encoding_bases: WasmEncodingBases) {
        // Function indices of the imported host functions (see `to_wasm_binary`).
        let gc_func_idx = 0;
        let take_refs_func_idx = 1;
        let make_refs_func_idx = 2;
        let take_structref_idx = 3;

        match *self {
            Self::Gc => {
                func.instruction(&Instruction::Call(gc_func_idx));
            }
            Self::MakeRefs => {
                func.instruction(&Instruction::Call(make_refs_func_idx));
            }
            Self::TakeRefs => {
                func.instruction(&Instruction::Call(take_refs_func_idx));
            }
            Self::TableGet { elem_index: x } => {
                func.instruction(&Instruction::I32Const(x.cast_signed()));
                func.instruction(&Instruction::TableGet(0));
            }
            Self::TableSet { elem_index: x } => {
                // The value to store is on top of the stack, but `table.set`
                // expects the index underneath it, so stash the value in the
                // scratch local, push the index, and reload the value.
                func.instruction(&Instruction::LocalSet(scratch_local));
                func.instruction(&Instruction::I32Const(x.cast_signed()));
                func.instruction(&Instruction::LocalGet(scratch_local));
                func.instruction(&Instruction::TableSet(0));
            }
            Self::GlobalGet { global_index: x } => {
                func.instruction(&Instruction::GlobalGet(x));
            }
            Self::GlobalSet { global_index: x } => {
                func.instruction(&Instruction::GlobalSet(x));
            }
            Self::LocalGet { local_index: x } => {
                func.instruction(&Instruction::LocalGet(x));
            }
            Self::LocalSet { local_index: x } => {
                func.instruction(&Instruction::LocalSet(x));
            }
            Self::Drop => {
                func.instruction(&Instruction::Drop);
            }
            Self::NullExtern => {
                func.instruction(&Instruction::RefNull(wasm_encoder::HeapType::EXTERN));
            }
            Self::NullStruct => {
                func.instruction(&Instruction::RefNull(wasm_encoder::HeapType::Abstract {
                    shared: false,
                    ty: wasm_encoder::AbstractHeapType::Struct,
                }));
            }
            Self::NullTypedStruct { type_index } => {
                func.instruction(&Instruction::RefNull(wasm_encoder::HeapType::Concrete(
                    encoding_bases.struct_type_base + type_index,
                )));
            }
            Self::StructNew { type_index: x } => {
                func.instruction(&Instruction::StructNew(encoding_bases.struct_type_base + x));
            }
            Self::TakeStructCall => {
                func.instruction(&Instruction::Call(take_structref_idx));
            }
            Self::TakeTypedStructCall { type_index: x } => {
                let f = encoding_bases.typed_first_func_index + x;
                func.instruction(&Instruction::Call(f));
            }
            Self::StructLocalGet => {
                func.instruction(&Instruction::LocalGet(encoding_bases.struct_local_idx));
            }
            Self::TypedStructLocalGet { type_index: x } => {
                func.instruction(&Instruction::LocalGet(encoding_bases.typed_local_base + x));
            }
            Self::StructLocalSet => {
                func.instruction(&Instruction::LocalSet(encoding_bases.struct_local_idx));
            }
            Self::TypedStructLocalSet { type_index: x } => {
                func.instruction(&Instruction::LocalSet(encoding_bases.typed_local_base + x));
            }
            Self::StructGlobalGet => {
                func.instruction(&Instruction::GlobalGet(encoding_bases.struct_global_idx));
            }
            Self::TypedStructGlobalGet { type_index: x } => {
                func.instruction(&Instruction::GlobalGet(
                    encoding_bases.typed_global_base + x,
                ));
            }
            Self::StructGlobalSet => {
                func.instruction(&Instruction::GlobalSet(encoding_bases.struct_global_idx));
            }
            Self::TypedStructGlobalSet { type_index: x } => {
                func.instruction(&Instruction::GlobalSet(
                    encoding_bases.typed_global_base + x,
                ));
            }
            Self::StructTableGet { elem_index } => {
                func.instruction(&Instruction::I32Const(elem_index.cast_signed()));
                func.instruction(&Instruction::TableGet(encoding_bases.struct_table_idx));
            }
            Self::TypedStructTableGet {
                elem_index,
                type_index,
            } => {
                func.instruction(&Instruction::I32Const(elem_index.cast_signed()));
                func.instruction(&Instruction::TableGet(
                    encoding_bases.typed_table_base + type_index,
                ));
            }
            Self::StructTableSet { elem_index } => {
                // Use `struct_local_idx` (a structref local) to temporarily
                // store the value before `table.set`.
                func.instruction(&Instruction::LocalSet(encoding_bases.struct_local_idx));
                func.instruction(&Instruction::I32Const(elem_index.cast_signed()));
                func.instruction(&Instruction::LocalGet(encoding_bases.struct_local_idx));
                func.instruction(&Instruction::TableSet(encoding_bases.struct_table_idx));
            }
            Self::TypedStructTableSet {
                elem_index,
                type_index,
            } => {
                func.instruction(&Instruction::LocalSet(
                    encoding_bases.typed_local_base + type_index,
                ));
                func.instruction(&Instruction::I32Const(elem_index.cast_signed()));
                func.instruction(&Instruction::LocalGet(
                    encoding_bases.typed_local_base + type_index,
                ));
                func.instruction(&Instruction::TableSet(
                    encoding_bases.typed_table_base + type_index,
                ));
            }
        }
    }
}