//! Compilation support for the component model.

use crate::{TRAP_CANNOT_LEAVE_COMPONENT, TRAP_INTERNAL_ASSERT, compiler::Compiler};
use cranelift_codegen::ir::condcodes::IntCC;
use cranelift_codegen::ir::{self, InstBuilder, MemFlags, Value};
use cranelift_codegen::isa::{CallConv, TargetIsa};
use cranelift_frontend::FunctionBuilder;
use wasmtime_environ::error::{Result, bail};
use wasmtime_environ::{
    Abi, CompiledFunctionBody, EntityRef, FuncKey, HostCall, PtrSize, TrapSentinel, Tunables,
    WasmFuncType, WasmValType, component::*, fact::PREPARE_CALL_FIXED_PARAMS,
};

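/// State used while building the Cranelift IR for a single component
/// trampoline.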
struct TrampolineCompiler<'a> {
    compiler: &'a Compiler,
    isa: &'a (dyn TargetIsa + 'static),
    builder: FunctionBuilder<'a>,
    component: &'a Component,
    types: &'a ComponentTypesBuilder,
    offsets: VMComponentOffsets<u8>,
    block0: ir::Block,
    signature: &'a WasmFuncType,
}

/// Which host function is being called; used in `translate_hostcall` below.
enum HostCallee {
    /// Call a host-lowered function specified by this index.
    Lowering(LoweredIndex),
    /// Call a host libcall, specified by this accessor.
    Libcall(GetLibcallFn),
}

type GetLibcallFn =
    fn(&dyn TargetIsa, &mut ir::Function) -> (ir::SigRef, ComponentBuiltinFunctionIndex);

impl From<LoweredIndex> for HostCallee {
    fn from(index: LoweredIndex) -> HostCallee {
        HostCallee::Lowering(index)
    }
}

impl From<GetLibcallFn> for HostCallee {
    fn from(f: GetLibcallFn) -> HostCallee {
        HostCallee::Libcall(f)
    }
}

/// How to interpret the results of a host function.
enum HostResult {
    /// The host function returns the specified sentinel, which is interpreted
    /// and translated to the real return value.
    Sentinel(TrapSentinel),

    /// The host function returns a `bool` indicating whether it succeeded or
    /// not.
    ///
    /// After the return value is interpreted, the host function will also have
    /// filled in `ptr` and `len` with wasm return values which need to be
    /// returned.
    ///
    /// If `ptr` and `len` are not specified then this must be used with
    /// `WasmArgs::ValRawList` and that ptr/len is used.
    MultiValue {
        /// The base pointer of the `ValRaw` list on the stack.
        ptr: Option<ir::Value>,
        /// The length of the `ValRaw` list on the stack.
        len: Option<ir::Value>,
    },
}

impl From<TrapSentinel> for HostResult {
    fn from(sentinel: TrapSentinel) -> HostResult {
        HostResult::Sentinel(sentinel)
    }
}

/// Different means of passing WebAssembly arguments to host calls.
#[derive(Debug, Copy, Clone)]
enum WasmArgs {
    /// All wasm arguments to the host are passed directly as values, typically
    /// through registers.
    InRegisters,

    /// All wasm arguments to the host are passed indirectly by spilling them
    /// to the stack as a sequence of contiguous `ValRaw`s.
    ValRawList,

    /// The first `n` arguments are passed in registers, but everything after
    /// that is spilled to the stack.
    InRegistersUpTo(usize),
}
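
// Illustrative sketch (with a made-up wasm signature) of how the three
// `WasmArgs` modes shape the host argument list built by `translate_hostcall`
// for a trampoline whose wasm signature is `(i32, i64) -> ()`:
//
//   InRegisters        => host(vmctx, <extra args..>, a0: i32, a1: i64)
//   ValRawList         => host(vmctx, <extra args..>, ptr: *mut ValRaw, len: 2)
//   InRegistersUpTo(1) => host(vmctx, <extra args..>, a0: i32, ptr, len: 1)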

impl<'a> TrampolineCompiler<'a> {
    fn new(
        compiler: &'a Compiler,
        func_compiler: &'a mut super::FunctionCompiler<'_>,
        component: &'a Component,
        types: &'a ComponentTypesBuilder,
        signature: &'a WasmFuncType,
    ) -> TrampolineCompiler<'a> {
        let isa = &*compiler.isa;
        let func = ir::Function::with_name_signature(
            ir::UserFuncName::user(0, 0),
            crate::wasm_call_signature(isa, signature, &compiler.tunables),
        );
        let (builder, block0) = func_compiler.builder(func);
        TrampolineCompiler {
            compiler,
            isa,
            builder,
            component,
            types,
            offsets: VMComponentOffsets::new(isa.pointer_bytes(), component),
            block0,
            signature,
        }
    }

    fn translate(&mut self, trampoline: &Trampoline) {
        self.check_may_leave(trampoline);

        match trampoline {
            Trampoline::Transcoder {
                op,
                from,
                from64,
                to,
                to64,
            } => {
                self.translate_transcode(*op, *from, *from64, *to, *to64);
            }
            Trampoline::LowerImport {
                index,
                options,
                lower_ty,
            } => {
                let pointer_type = self.isa.pointer_type();
                self.translate_hostcall(
                    HostCallee::Lowering(*index),
                    HostResult::MultiValue {
                        ptr: None,
                        len: None,
                    },
                    WasmArgs::ValRawList,
                    |me, params| {
                        let vmctx = params[0];
                        params.extend([
                            me.builder.ins().load(
                                pointer_type,
                                MemFlags::trusted(),
                                vmctx,
                                i32::try_from(me.offsets.lowering_data(*index)).unwrap(),
                            ),
                            me.index_value(*lower_ty),
                            me.index_value(*options),
                        ]);
                    },
                );
            }
            Trampoline::ResourceNew { instance, ty } => {
                // Currently this only supports resources represented by `i32`
                assert_eq!(self.signature.params()[0], WasmValType::I32);
                self.translate_libcall(
                    host::resource_new32,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ResourceRep { instance, ty } => {
                // Currently this only supports resources represented by `i32`
                assert_eq!(self.signature.returns()[0], WasmValType::I32);
                self.translate_libcall(
                    host::resource_rep32,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ResourceDrop { instance, ty } => {
                self.translate_resource_drop(*instance, *ty);
            }
            Trampoline::BackpressureInc { instance } => {
                self.translate_libcall(
                    host::backpressure_modify,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I8, 1));
                    },
                );
            }
            Trampoline::BackpressureDec { instance } => {
                self.translate_libcall(
                    host::backpressure_modify,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I8, 0));
                    },
                );
            }
            Trampoline::TaskReturn {
                instance,
                results,
                options,
            } => {
                self.translate_libcall(
                    host::task_return,
                    TrapSentinel::Falsy,
                    WasmArgs::ValRawList,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*results));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::TaskCancel { instance } => {
                self.translate_libcall(
                    host::task_cancel,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::WaitableSetNew { instance } => {
                self.translate_libcall(
                    host::waitable_set_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::WaitableSetWait { instance, options } => {
                self.translate_libcall(
                    host::waitable_set_wait,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::WaitableSetPoll { instance, options } => {
                self.translate_libcall(
                    host::waitable_set_poll,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::WaitableSetDrop { instance } => {
                self.translate_libcall(
                    host::waitable_set_drop,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::WaitableJoin { instance } => {
                self.translate_libcall(
                    host::waitable_join,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::ThreadYield {
                instance,
                cancellable,
            } => {
                self.translate_libcall(
                    host::thread_yield,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(
                            me.builder
                                .ins()
                                .iconst(ir::types::I8, i64::from(*cancellable)),
                        );
                    },
                );
            }
            Trampoline::SubtaskDrop { instance } => {
                self.translate_libcall(
                    host::subtask_drop,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::SubtaskCancel { instance, async_ } => {
                self.translate_libcall(
                    host::subtask_cancel,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::StreamNew { instance, ty } => {
                self.translate_libcall(
                    host::stream_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::StreamRead {
                instance,
                ty,
                options,
            } => {
                if let Some(info) = self.flat_stream_element_info(*ty).cloned() {
                    self.translate_libcall(
                        host::flat_stream_read,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.extend([
                                me.index_value(*instance),
                                me.index_value(*ty),
                                me.index_value(*options),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.size32)),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.align32)),
                            ]);
                        },
                    );
                } else {
                    self.translate_libcall(
                        host::stream_read,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.push(me.index_value(*instance));
                            params.push(me.index_value(*ty));
                            params.push(me.index_value(*options));
                        },
                    );
                }
            }
            Trampoline::StreamWrite {
                instance,
                ty,
                options,
            } => {
                if let Some(info) = self.flat_stream_element_info(*ty).cloned() {
                    self.translate_libcall(
                        host::flat_stream_write,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.extend([
                                me.index_value(*instance),
                                me.index_value(*ty),
                                me.index_value(*options),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.size32)),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.align32)),
                            ]);
                        },
                    );
                } else {
                    self.translate_libcall(
                        host::stream_write,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.push(me.index_value(*instance));
                            params.push(me.index_value(*ty));
                            params.push(me.index_value(*options));
                        },
                    );
                }
            }
            Trampoline::StreamCancelRead {
                instance,
                ty,
                async_,
            } => {
                self.translate_libcall(
                    host::stream_cancel_read,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::StreamCancelWrite {
                instance,
                ty,
                async_,
            } => {
                self.translate_libcall(
                    host::stream_cancel_write,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::StreamDropReadable { instance, ty } => {
                self.translate_libcall(
                    host::stream_drop_readable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::StreamDropWritable { instance, ty } => {
                self.translate_libcall(
                    host::stream_drop_writable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::FutureNew { instance, ty } => {
                self.translate_libcall(
                    host::future_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::FutureRead {
                instance,
                ty,
                options,
            } => {
                self.translate_libcall(
                    host::future_read,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::FutureWrite {
                instance,
                ty,
                options,
            } => {
                self.translate_libcall(
                    host::future_write,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::FutureCancelRead {
                instance,
                ty,
                async_,
            } => {
                self.translate_libcall(
                    host::future_cancel_read,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::FutureCancelWrite {
                instance,
                ty,
                async_,
            } => {
                self.translate_libcall(
                    host::future_cancel_write,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::FutureDropReadable { instance, ty } => {
                self.translate_libcall(
                    host::future_drop_readable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::FutureDropWritable { instance, ty } => {
                self.translate_libcall(
                    host::future_drop_writable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ErrorContextNew {
                instance,
                ty,
                options,
            } => {
                self.translate_libcall(
                    host::error_context_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::ErrorContextDebugMessage {
                instance,
                ty,
                options,
            } => {
                self.translate_libcall(
                    host::error_context_debug_message,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::ErrorContextDrop { instance, ty } => {
                self.translate_libcall(
                    host::error_context_drop,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ResourceTransferOwn => {
                self.translate_libcall(
                    host::resource_transfer_own,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ResourceTransferBorrow => {
                self.translate_libcall(
                    host::resource_transfer_borrow,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::PrepareCall { memory } => {
                self.translate_libcall(
                    host::prepare_call,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegistersUpTo(PREPARE_CALL_FIXED_PARAMS.len()),
                    |me, params| {
                        let vmctx = params[0];
                        params.push(me.load_optional_memory(vmctx, *memory));
                    },
                );
            }
            Trampoline::SyncStartCall { callback } => {
                let pointer_type = self.isa.pointer_type();
                let (values_vec_ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
                    &WasmFuncType::new(
                        Box::new([]),
                        self.signature.returns().iter().copied().collect(),
                    ),
                    &mut self.builder,
                    &[],
                );
                let values_vec_len = self.builder.ins().iconst(pointer_type, i64::from(len));
                self.translate_libcall(
                    host::sync_start,
                    HostResult::MultiValue {
                        ptr: Some(values_vec_ptr),
                        len: Some(values_vec_len),
                    },
                    WasmArgs::InRegisters,
                    |me, params| {
                        let vmctx = params[0];
                        params.push(me.load_callback(vmctx, *callback));
                        params.push(values_vec_ptr);
                        params.push(values_vec_len);
                    },
                );
            }
            Trampoline::AsyncStartCall {
                callback,
                post_return,
            } => {
                self.translate_libcall(
                    host::async_start,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        let vmctx = params[0];
                        params.extend([
                            me.load_callback(vmctx, *callback),
                            me.load_post_return(vmctx, *post_return),
                        ]);
                    },
                );
            }
            Trampoline::FutureTransfer => {
                self.translate_libcall(
                    host::future_transfer,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::StreamTransfer => {
                self.translate_libcall(
                    host::stream_transfer,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ErrorContextTransfer => {
                self.translate_libcall(
                    host::error_context_transfer,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::Trap => {
                self.translate_libcall(
                    host::trap,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::EnterSyncCall => {
                self.translate_libcall(
                    host::enter_sync_call,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ExitSyncCall => {
                self.translate_libcall(
                    host::exit_sync_call,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ContextGet { instance, slot } => {
                self.translate_libcall(
                    host::context_get,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I32, i64::from(*slot)));
                    },
                );
            }
            Trampoline::ContextSet { instance, slot } => {
                self.translate_libcall(
                    host::context_set,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I32, i64::from(*slot)));
                    },
                );
            }
            Trampoline::ThreadIndex => {
                self.translate_libcall(
                    host::thread_index,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ThreadNewIndirect {
                instance,
                start_func_table_idx,
                start_func_ty_idx,
            } => {
                self.translate_libcall(
                    host::thread_new_indirect,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.index_value(*start_func_table_idx));
                        params.push(me.index_value(*start_func_ty_idx));
                    },
                );
            }
            Trampoline::ThreadSwitchTo {
                instance,
                cancellable,
            } => {
                self.translate_libcall(
                    host::thread_switch_to,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(
                            me.builder
                                .ins()
                                .iconst(ir::types::I8, i64::from(*cancellable)),
                        );
                    },
                );
            }
            Trampoline::ThreadSuspend {
                instance,
                cancellable,
            } => {
                self.translate_libcall(
                    host::thread_suspend,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(
                            me.builder
                                .ins()
                                .iconst(ir::types::I8, i64::from(*cancellable)),
                        );
                    },
                );
            }
            Trampoline::ThreadResumeLater { instance } => {
                self.translate_libcall(
                    host::thread_resume_later,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::ThreadYieldTo {
                instance,
                cancellable,
            } => {
                self.translate_libcall(
                    host::thread_yield_to,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(
                            me.builder
                                .ins()
                                .iconst(ir::types::I8, i64::from(*cancellable)),
                        );
                    },
                );
            }
        }
    }

    /// Determine whether the specified type can be optimized as a stream
    /// payload by lifting and lowering with a simple `memcpy`.
    ///
    /// Any type containing only "flat", primitive data for which all bit
    /// patterns are valid (i.e. no pointers, handles, bools, or chars) should
    /// qualify for this optimization, but it's also okay to conservatively
    /// return `None` here; the fallback slow path will always work -- it just
    /// won't be as efficient.
    fn flat_stream_element_info(&self, ty: TypeStreamTableIndex) -> Option<&CanonicalAbiInfo> {
        let payload = self.types[self.types[ty].ty].payload;
        match payload {
            None => Some(&CanonicalAbiInfo::ZERO),
            Some(
                // Note that we exclude `Bool` and `Char` from this list because
                // not all bit patterns are valid for those types.
                payload @ (InterfaceType::S8
                | InterfaceType::U8
                | InterfaceType::S16
                | InterfaceType::U16
                | InterfaceType::S32
                | InterfaceType::U32
                | InterfaceType::S64
                | InterfaceType::U64
                | InterfaceType::Float32
                | InterfaceType::Float64),
            ) => Some(self.types.canonical_abi(&payload)),
            // TODO: Recursively check for other "flat" types (i.e. those without pointers or handles),
            // e.g. `record`s, `variant`s, etc. which contain only flat types.
            _ => None,
        }
    }
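
    // For example (illustrative): a `stream<u32>` payload can use the `memcpy`
    // fast path above, while `stream<string>` (pointers into linear memory)
    // and `stream<bool>` (not all bit patterns valid) fall back to the generic
    // `stream_read`/`stream_write` libcalls.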

    /// Helper function to spill this function's wasm arguments `args` into a
    /// stack-allocated array.
    fn store_wasm_arguments(&mut self, args: &[Value]) -> (Value, Value) {
        let pointer_type = self.isa.pointer_type();

        let (ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
            self.signature,
            &mut self.builder,
            args,
        );
        let len = self.builder.ins().iconst(pointer_type, i64::from(len));
        (ptr, len)
    }
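
    // Note that the `len` returned above is the number of spilled `ValRaw`
    // slots, materialized as a pointer-sized constant so it can be passed
    // straight through as a host-call argument.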

    /// Convenience wrapper around `translate_hostcall` to enable type inference
    /// on the `get_libcall` parameter here.
    fn translate_libcall(
        &mut self,
        get_libcall: GetLibcallFn,
        host_result: impl Into<HostResult>,
        wasm_args: WasmArgs,
        extra_host_args: impl FnOnce(&mut Self, &mut Vec<ir::Value>),
    ) {
        self.translate_hostcall(
            HostCallee::Libcall(get_libcall),
            host_result.into(),
            wasm_args,
            extra_host_args,
        )
    }

    /// Translates an invocation of a host function and interprets the result.
    ///
    /// This is intended to be a relatively narrow waist which most intrinsics
    /// go through. The configuration supported here is:
    ///
    /// * `host_callee` - what's being called, either a libcall or a lowered
    ///   function
    /// * `host_result` - how to interpret the return value to see if it's a
    ///   trap
    /// * `wasm_args` - how to pass wasm args to the host, either in registers
    ///   or on the stack
    /// * `extra_host_args` - a closure used to push extra arguments just before
    ///   the wasm arguments are forwarded.
    fn translate_hostcall(
        &mut self,
        host_callee: HostCallee,
        host_result: impl Into<HostResult>,
        wasm_args: WasmArgs,
        extra_host_args: impl FnOnce(&mut Self, &mut Vec<ir::Value>),
    ) {
        let pointer_type = self.isa.pointer_type();

        // Load all parameters in an ABI-agnostic fashion, of which the
        // `VMComponentContext` will be the first.
        let params = self.abi_load_params();
        let vmctx = params[0];
        let wasm_params = &params[2..];

        // Start building up arguments to the host. The first is always the
        // vmctx. After is whatever `extra_host_args` appends, and then finally
        // is what `WasmArgs` specifies.
        let mut host_args = vec![vmctx];
        extra_host_args(self, &mut host_args);
        let mut val_raw_ptr = None;
        let mut val_raw_len = None;
        match wasm_args {
            // Wasm params are passed through as values themselves.
            WasmArgs::InRegisters => host_args.extend(wasm_params.iter().copied()),

            // Wasm params are spilled and then the ptr/len is passed.
            WasmArgs::ValRawList => {
                let (ptr, len) = self.store_wasm_arguments(wasm_params);
                val_raw_ptr = Some(ptr);
                val_raw_len = Some(len);
                host_args.push(ptr);
                host_args.push(len);
            }

            // A mixture of the above two.
            WasmArgs::InRegistersUpTo(n) => {
                let (values_vec_ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
                    &WasmFuncType::new(
                        self.signature.params().iter().skip(n).copied().collect(),
                        Box::new([]),
                    ),
                    &mut self.builder,
                    &wasm_params[n..],
                );
                let values_vec_len = self.builder.ins().iconst(pointer_type, i64::from(len));

                host_args.extend(wasm_params[..n].iter().copied());
                host_args.push(values_vec_ptr);
                host_args.push(values_vec_len);
            }
        }

        // Next perform the actual invocation of the host with `host_args`.
        let call = match host_callee {
            HostCallee::Libcall(get_libcall) => self.call_libcall(vmctx, get_libcall, &host_args),
            HostCallee::Lowering(index) => {
                // Load host function pointer from the vmcontext and then call that
                // indirect function pointer with the list of arguments.
                let host_fn = self.builder.ins().load(
                    pointer_type,
                    MemFlags::trusted(),
                    vmctx,
                    i32::try_from(self.offsets.lowering_callee(index)).unwrap(),
                );
                let host_sig = {
                    let mut sig = ir::Signature::new(CallConv::triple_default(self.isa.triple()));
                    for param in host_args.iter() {
                        let ty = self.builder.func.dfg.value_type(*param);
                        sig.params.push(ir::AbiParam::new(ty));
                    }
                    // return value is a bool whether a trap was raised or not
                    sig.returns.push(ir::AbiParam::new(ir::types::I8));
                    self.builder.import_signature(sig)
                };
                self.compiler.call_indirect_host(
                    &mut self.builder,
                    HostCall::ComponentLowerImport,
                    host_sig,
                    host_fn,
                    &host_args,
                )
            }
        };

        // Acquire the result of this function (if any) and interpret it
        // according to `host_result`.
        //
        // Note that all match arms here end with `abi_store_results` which
        // accounts for the ABI of this function when storing results.
        let result = self.builder.func.dfg.inst_results(call).get(0).copied();
        let result_ty = result.map(|v| self.builder.func.dfg.value_type(v));
        let expected = self.signature.returns();
        match host_result.into() {
            HostResult::Sentinel(TrapSentinel::NegativeOne) => {
                assert_eq!(expected.len(), 1);
                let (result, result_ty) = (result.unwrap(), result_ty.unwrap());
                let result = match (result_ty, expected[0]) {
                    (ir::types::I64, WasmValType::I32) => {
                        self.raise_if_negative_one_and_truncate(result)
                    }
                    (ir::types::I64, WasmValType::I64) | (ir::types::I32, WasmValType::I32) => {
                        self.raise_if_negative_one(result)
                    }
                    other => panic!("unsupported NegativeOne combo {other:?}"),
                };
                self.abi_store_results(&[result]);
            }
            HostResult::Sentinel(TrapSentinel::Falsy) => {
                assert_eq!(expected.len(), 0);
                self.raise_if_host_trapped(result.unwrap());
                self.abi_store_results(&[]);
            }
            HostResult::Sentinel(_) => todo!("support additional return types if/when necessary"),

            HostResult::MultiValue { ptr, len } => {
                let ptr = ptr.or(val_raw_ptr).unwrap();
                let len = len.or(val_raw_len).unwrap();
                self.raise_if_host_trapped(result.unwrap());
                let results = self.compiler.load_values_from_array(
                    self.signature.returns(),
                    &mut self.builder,
                    ptr,
                    len,
                );
                self.abi_store_results(&results);
            }
        }
    }
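
    // Illustrative sketch of what the above synthesizes for a `LowerImport`
    // trampoline: with `WasmArgs::ValRawList` the lowered host function ends
    // up invoked roughly as
    //
    //   host_fn(vmctx, lowering_data, lower_ty, options, val_raw_ptr, val_raw_len) -> i8
    //
    // where the returned `i8` indicates success and is checked via
    // `raise_if_host_trapped` before results are loaded back out of the
    // `ValRaw` array.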

    fn index_value(&mut self, index: impl EntityRef) -> ir::Value {
        self.builder
            .ins()
            .iconst(ir::types::I32, i64::try_from(index.index()).unwrap())
    }

    fn translate_resource_drop(
        &mut self,
        instance: RuntimeComponentInstanceIndex,
        resource: TypeResourceTableIndex,
    ) {
        let args = self.abi_load_params();
        let vmctx = args[0];
        let caller_vmctx = args[1];
        let pointer_type = self.isa.pointer_type();

        // The arguments this shim passes along to the libcall are:
        //
        // * the vmctx
        // * the calling component instance index
        // * a constant value for this `ResourceDrop` intrinsic
        // * the wasm handle index to drop
        let mut host_args = Vec::new();
        host_args.push(vmctx);
        host_args.push(
            self.builder
                .ins()
                .iconst(ir::types::I32, i64::from(instance.as_u32())),
        );
        host_args.push(
            self.builder
                .ins()
                .iconst(ir::types::I32, i64::from(resource.as_u32())),
        );
        host_args.push(args[2]);

        let call = self.call_libcall(vmctx, host::resource_drop, &host_args);

        // Immediately raise a trap if requested by the host
        let should_run_destructor =
            self.raise_if_negative_one(self.builder.func.dfg.inst_results(call)[0]);

        let resource_ty = self.types[resource].unwrap_concrete_ty();
        let resource_def = self
            .component
            .defined_resource_index(resource_ty)
            .map(|idx| {
                self.component
                    .initializers
                    .iter()
                    .filter_map(|i| match i {
                        GlobalInitializer::Resource(r) if r.index == idx => Some(r),
                        _ => None,
                    })
                    .next()
                    .unwrap()
            });
        let has_destructor = match resource_def {
            Some(def) => def.dtor.is_some(),
            None => true,
        };
        // Synthesize the following:
        //
        //     ...
        //     brif should_run_destructor, run_destructor_block, return_block
        //
        // run_destructor_block:
        //     ;; test may_leave, but only if the component instances
        //     ;; differ
        //     flags = load.i32 vmctx+$instance_flags_offset
        //     masked = band flags, $FLAG_MAY_LEAVE
        //     trapz masked, $TRAP_CANNOT_LEAVE_COMPONENT
        //
        //     ;; set may_block to false, saving the old value to restore
        //     ;; later, but only if the component instances differ and
        //     ;; concurrency is enabled
        //     old_may_block = load.i32 vmctx+$may_block_offset
        //     store 0, vmctx+$may_block_offset
        //
        //     ;; call enter_sync_call, but only if the component instances
        //     ;; differ and concurrency is enabled
        //     ...
        //
        //     ;; ============================================================
        //     ;; this is conditionally emitted based on whether the resource
        //     ;; has a destructor or not, and can be statically omitted
        //     ;; because that information is known at compile time here.
        //     rep = ushr.i64 rep, 1
        //     rep = ireduce.i32 rep
        //     dtor = load.ptr vmctx+$offset
        //     func_addr = load.ptr dtor+$offset
        //     callee_vmctx = load.ptr dtor+$offset
        //     call_indirect func_addr, callee_vmctx, vmctx, rep
        //     ;; ============================================================
        //
        //     ;; restore old value of may_block
        //     store old_may_block, vmctx+$may_block_offset
        //
        //     ;; if needed, call exit_sync_call
        //     ...
        //
        //     ;; if needed, restore the old value of may_block
        //     store old_may_block, vmctx+$may_block_offset
        //
        //     jump return_block
        //
        // return_block:
        //     return
        //
        // This will decode `should_run_destructor` and run the destructor
        // funcref if one is specified for this resource. Note that not all
        // resources have destructors, hence the null check.
        self.builder.ensure_inserted_block();
        let current_block = self.builder.current_block().unwrap();
        let run_destructor_block = self.builder.create_block();
        self.builder
            .insert_block_after(run_destructor_block, current_block);
        let return_block = self.builder.create_block();
        self.builder
            .insert_block_after(return_block, run_destructor_block);

        self.builder.ins().brif(
            should_run_destructor,
            run_destructor_block,
            &[],
            return_block,
            &[],
        );

        let trusted = ir::MemFlags::trusted().with_readonly();

        self.builder.switch_to_block(run_destructor_block);

        // If this is a component-defined resource, the `may_leave` flag must be
        // checked. Additionally, if concurrency is enabled, the `may_block`
        // field must be updated and `enter_sync_call` called. Note though that
        // all of that may be elided if the resource table resides in the same
        // component instance that defined the resource as the component is
        // calling itself.
        let old_may_block = if let Some(def) = resource_def {
            if self.types[resource].unwrap_concrete_instance() != def.instance {
                self.check_may_leave_instance(self.types[resource].unwrap_concrete_instance());

                if self.compiler.tunables.concurrency_support {
                    // Stash the old value of `may_block` and then set it to false.
                    let old_may_block = self.builder.ins().load(
                        ir::types::I32,
                        trusted,
                        vmctx,
                        i32::try_from(self.offsets.task_may_block()).unwrap(),
                    );
                    let zero = self.builder.ins().iconst(ir::types::I32, i64::from(0));
                    self.builder.ins().store(
                        ir::MemFlags::trusted(),
                        zero,
                        vmctx,
                        i32::try_from(self.offsets.task_may_block()).unwrap(),
                    );

                    // Call `enter_sync_call`
                    //
                    // FIXME: Apply the optimizations described in #12311.
                    let host_args = vec![
                        vmctx,
                        self.builder
                            .ins()
                            .iconst(ir::types::I32, i64::from(instance.as_u32())),
                        self.builder.ins().iconst(ir::types::I32, i64::from(0)),
                        self.builder
                            .ins()
                            .iconst(ir::types::I32, i64::from(def.instance.as_u32())),
                    ];
                    let call = self.call_libcall(vmctx, host::enter_sync_call, &host_args);
                    let result = self.builder.func.dfg.inst_results(call).get(0).copied();
                    self.raise_if_host_trapped(result.unwrap());

                    Some(old_may_block)
                } else {
                    None
                }
            } else {
                None
            }
        } else {
            None
        };

        // Conditionally emit destructor-execution code based on whether we
        // statically know that a destructor exists or not.
        if has_destructor {
            let rep = self.builder.ins().ushr_imm(should_run_destructor, 1);
            let rep = self.builder.ins().ireduce(ir::types::I32, rep);
            let index = self.types[resource].unwrap_concrete_ty();
            // NB: despite the vmcontext storing nullable funcrefs for function
            // pointers we know this is statically never null due to the
            // `has_destructor` check above.
            let dtor_func_ref = self.builder.ins().load(
                pointer_type,
                trusted,
                vmctx,
                i32::try_from(self.offsets.resource_destructor(index)).unwrap(),
            );
            if self.compiler.emit_debug_checks {
                self.builder
                    .ins()
                    .trapz(dtor_func_ref, TRAP_INTERNAL_ASSERT);
            }
            let func_addr = self.builder.ins().load(
                pointer_type,
                trusted,
                dtor_func_ref,
                i32::from(self.offsets.ptr.vm_func_ref_wasm_call()),
            );
            let callee_vmctx = self.builder.ins().load(
                pointer_type,
                trusted,
                dtor_func_ref,
                i32::from(self.offsets.ptr.vm_func_ref_vmctx()),
            );

            let sig = crate::wasm_call_signature(self.isa, self.signature, &self.compiler.tunables);
            let sig_ref = self.builder.import_signature(sig);

            // NB: note that the "caller" vmctx here is the caller of this
            // intrinsic itself, not the `VMComponentContext`. This effectively
            // takes ourselves out of the chain here but that's ok since the
            // caller is only used for store/limits and that same info is
            // stored, but elsewhere, in the component context.
            self.builder.ins().call_indirect(
                sig_ref,
                func_addr,
                &[callee_vmctx, caller_vmctx, rep],
            );
        }

        if let Some(old_may_block) = old_may_block {
            // Call `exit_sync_call`
            //
            // FIXME: Apply the optimizations described in #12311.
            let call = self.call_libcall(vmctx, host::exit_sync_call, &[vmctx]);
            let result = self.builder.func.dfg.inst_results(call).get(0).copied();
            self.raise_if_host_trapped(result.unwrap());

            // Restore the old value of `may_block`
            self.builder.ins().store(
                ir::MemFlags::trusted(),
                old_may_block,
                vmctx,
                i32::try_from(self.offsets.task_may_block()).unwrap(),
            );
        }

        self.builder.ins().jump(return_block, &[]);
        self.builder.seal_block(run_destructor_block);

        self.builder.switch_to_block(return_block);
        self.builder.seal_block(return_block);
        self.abi_store_results(&[]);
    }

    fn load_optional_memory(
        &mut self,
        vmctx: ir::Value,
        memory: Option<RuntimeMemoryIndex>,
    ) -> ir::Value {
        match memory {
            Some(idx) => self.load_memory(vmctx, idx),
            None => self.builder.ins().iconst(self.isa.pointer_type(), 0),
        }
    }

    fn load_memory(&mut self, vmctx: ir::Value, memory: RuntimeMemoryIndex) -> ir::Value {
        self.builder.ins().load(
            self.isa.pointer_type(),
            MemFlags::trusted(),
            vmctx,
            i32::try_from(self.offsets.runtime_memory(memory)).unwrap(),
        )
    }

    fn load_callback(
        &mut self,
        vmctx: ir::Value,
        callback: Option<RuntimeCallbackIndex>,
    ) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        match callback {
            Some(idx) => self.builder.ins().load(
                pointer_type,
                MemFlags::trusted(),
                vmctx,
                i32::try_from(self.offsets.runtime_callback(idx)).unwrap(),
            ),
            None => self.builder.ins().iconst(pointer_type, 0),
        }
    }

    fn load_post_return(
        &mut self,
        vmctx: ir::Value,
        post_return: Option<RuntimePostReturnIndex>,
    ) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        match post_return {
            Some(idx) => self.builder.ins().load(
                pointer_type,
                MemFlags::trusted(),
                vmctx,
                i32::try_from(self.offsets.runtime_post_return(idx)).unwrap(),
            ),
            None => self.builder.ins().iconst(pointer_type, 0),
        }
    }

    /// Loads a host function pointer for a libcall stored at the `offset`
    /// provided in the libcalls array.
    ///
    /// The offset is calculated in the `host` module below.
    fn load_libcall(
        &mut self,
        vmctx: ir::Value,
        index: ComponentBuiltinFunctionIndex,
    ) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        // First load the pointer to the builtins structure which is static
        // per-process.
        let builtins_array = self.builder.ins().load(
            pointer_type,
            MemFlags::trusted().with_readonly(),
            vmctx,
            i32::try_from(self.offsets.builtins()).unwrap(),
        );
        // Next load the function pointer at `offset` and return that.
        self.builder.ins().load(
            pointer_type,
            MemFlags::trusted().with_readonly(),
            builtins_array,
            i32::try_from(index.index() * u32::from(self.offsets.ptr.size())).unwrap(),
        )
    }
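
    // For example, on a 64-bit target (`self.offsets.ptr.size()` == 8) the
    // function pointer for builtin index 3 is loaded from byte offset
    // 3 * 8 = 24 within the builtins array.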

    /// Get a function's parameters regardless of the ABI in use.
    ///
    /// This emits code to load the parameters from the array-call ABI's values
    /// vector, if necessary.
    fn abi_load_params(&mut self) -> Vec<ir::Value> {
        self.builder.func.dfg.block_params(self.block0).to_vec()
    }

    /// Emit code to return the given result values, regardless of the ABI in use.
    fn abi_store_results(&mut self, results: &[ir::Value]) {
        self.builder.ins().return_(results);
    }

    fn raise_if_host_trapped(&mut self, succeeded: ir::Value) {
        let caller_vmctx = self.builder.func.dfg.block_params(self.block0)[1];
        self.compiler
            .raise_if_host_trapped(&mut self.builder, caller_vmctx, succeeded);
    }

    fn raise_if_transcode_trapped(&mut self, amount_copied: ir::Value) {
        let pointer_type = self.isa.pointer_type();
        let minus_one = self.builder.ins().iconst(pointer_type, -1);
        let succeeded = self
            .builder
            .ins()
            .icmp(IntCC::NotEqual, amount_copied, minus_one);
        self.raise_if_host_trapped(succeeded);
    }

    fn raise_if_negative_one_and_truncate(&mut self, ret: ir::Value) -> ir::Value {
        let ret = self.raise_if_negative_one(ret);
        self.builder.ins().ireduce(ir::types::I32, ret)
    }

    fn raise_if_negative_one(&mut self, ret: ir::Value) -> ir::Value {
        let result_ty = self.builder.func.dfg.value_type(ret);
        let minus_one = self.builder.ins().iconst(result_ty, -1);
        let succeeded = self.builder.ins().icmp(IntCC::NotEqual, ret, minus_one);
        self.raise_if_host_trapped(succeeded);
        ret
    }
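
    // Illustrative summary of the `TrapSentinel::NegativeOne` convention
    // handled above: the host returns -1 to signal a trap and any other value
    // as the real result, so e.g. an `i64` host return carrying a wasm-level
    // `i32` is first checked against -1 and then truncated via
    // `raise_if_negative_one_and_truncate`.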

    fn call_libcall(
        &mut self,
        vmctx: ir::Value,
        get_libcall: GetLibcallFn,
        args: &[ir::Value],
    ) -> ir::Inst {
        let (host_sig, index) = get_libcall(self.isa, &mut self.builder.func);
        let host_fn = self.load_libcall(vmctx, index);
        self.compiler
            .call_indirect_host(&mut self.builder, index, host_sig, host_fn, args)
    }

    fn check_may_leave(&mut self, trampoline: &Trampoline) {
        let instance = match trampoline {
            // These intrinsics explicitly do not check the may-leave flag.
            Trampoline::ResourceRep { .. }
            | Trampoline::ThreadIndex
            | Trampoline::BackpressureInc { .. }
            | Trampoline::BackpressureDec { .. }
            | Trampoline::ContextGet { .. }
            | Trampoline::ContextSet { .. } => return,

            // Intrinsics used in adapters generated by FACT that aren't called
            // directly from guest wasm, so no check is needed.
            Trampoline::ResourceTransferOwn
            | Trampoline::ResourceTransferBorrow
            | Trampoline::PrepareCall { .. }
            | Trampoline::SyncStartCall { .. }
            | Trampoline::AsyncStartCall { .. }
            | Trampoline::FutureTransfer
            | Trampoline::StreamTransfer
            | Trampoline::ErrorContextTransfer
            | Trampoline::Trap
            | Trampoline::EnterSyncCall
            | Trampoline::ExitSyncCall
            | Trampoline::Transcoder { .. } => return,

            Trampoline::LowerImport { options, .. } => self.component.options[*options].instance,

            Trampoline::ResourceNew { instance, .. }
            | Trampoline::ResourceDrop { instance, .. }
            | Trampoline::TaskReturn { instance, .. }
            | Trampoline::TaskCancel { instance }
            | Trampoline::WaitableSetNew { instance }
            | Trampoline::WaitableSetWait { instance, .. }
            | Trampoline::WaitableSetPoll { instance, .. }
            | Trampoline::WaitableSetDrop { instance }
            | Trampoline::WaitableJoin { instance }
            | Trampoline::ThreadYield { instance, .. }
            | Trampoline::ThreadSwitchTo { instance, .. }
            | Trampoline::ThreadNewIndirect { instance, .. }
            | Trampoline::ThreadSuspend { instance, .. }
            | Trampoline::ThreadResumeLater { instance }
            | Trampoline::ThreadYieldTo { instance, .. }
            | Trampoline::SubtaskDrop { instance }
            | Trampoline::SubtaskCancel { instance, .. }
            | Trampoline::ErrorContextNew { instance, .. }
            | Trampoline::ErrorContextDebugMessage { instance, .. }
            | Trampoline::ErrorContextDrop { instance, .. }
            | Trampoline::StreamNew { instance, .. }
            | Trampoline::StreamRead { instance, .. }
            | Trampoline::StreamWrite { instance, .. }
            | Trampoline::StreamCancelRead { instance, .. }
            | Trampoline::StreamCancelWrite { instance, .. }
            | Trampoline::StreamDropReadable { instance, .. }
            | Trampoline::StreamDropWritable { instance, .. }
            | Trampoline::FutureNew { instance, .. }
            | Trampoline::FutureRead { instance, .. }
            | Trampoline::FutureWrite { instance, .. }
            | Trampoline::FutureCancelRead { instance, .. }
            | Trampoline::FutureCancelWrite { instance, .. }
            | Trampoline::FutureDropReadable { instance, .. }
            | Trampoline::FutureDropWritable { instance, .. } => *instance,
        };

        self.check_may_leave_instance(instance)
    }

    fn check_may_leave_instance(&mut self, instance: RuntimeComponentInstanceIndex) {
        let vmctx = self.builder.func.dfg.block_params(self.block0)[0];

        let flags = self.builder.ins().load(
            ir::types::I32,
            ir::MemFlags::trusted(),
            vmctx,
            i32::try_from(self.offsets.instance_flags(instance)).unwrap(),
        );
        let may_leave_bit = self
            .builder
            .ins()
            .band_imm(flags, i64::from(FLAG_MAY_LEAVE));
        self.builder
            .ins()
            .trapz(may_leave_bit, TRAP_CANNOT_LEAVE_COMPONENT);
    }
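
    // The helper above emits roughly the following CLIF (illustrative value
    // numbers and symbolic offsets):
    //
    //   v1 = load.i32 notrap aligned v0+$instance_flags_offset
    //   v2 = band_imm v1, $FLAG_MAY_LEAVE
    //   trapz v2, $TRAP_CANNOT_LEAVE_COMPONENT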
}

impl ComponentCompiler for Compiler {
    fn compile_trampoline(
        &self,
        component: &ComponentTranslation,
        types: &ComponentTypesBuilder,
        key: FuncKey,
        abi: Abi,
        _tunables: &Tunables,
        symbol: &str,
    ) -> Result<CompiledFunctionBody> {
        let (abi2, trampoline_index) = key.unwrap_component_trampoline();
        debug_assert_eq!(abi, abi2);
        let sig = types[component.component.trampolines[trampoline_index]].unwrap_func();

        match abi {
            // Fall through to the trampoline compiler.
            Abi::Wasm => {}

            // Implement the array-abi trampoline in terms of calling the
            // wasm-abi trampoline.
            Abi::Array => {
                let offsets =
                    VMComponentOffsets::new(self.isa.pointer_bytes(), &component.component);
                return Ok(self.array_to_wasm_trampoline(
                    key,
                    FuncKey::ComponentTrampoline(Abi::Wasm, trampoline_index),
                    sig,
                    symbol,
                    offsets.vm_store_context(),
                    wasmtime_environ::component::VMCOMPONENT_MAGIC,
                )?);
            }

            Abi::Patchable => unreachable!(
                "We should not be compiling a patchable-ABI trampoline for a component function"
            ),
        }

        let mut compiler = self.function_compiler();
        let mut c = TrampolineCompiler::new(self, &mut compiler, &component.component, types, sig);

        // If we are crossing the Wasm-to-native boundary, we need to save the
        // exit FP and return address for stack walking purposes. However, we
        // always debug assert that our vmctx is a component context, regardless
        // of whether we are actually crossing that boundary, because it should
        // always hold.
        let vmctx = c.builder.block_params(c.block0)[0];
        let pointer_type = self.isa.pointer_type();
        self.debug_assert_vmctx_kind(
            &mut c.builder,
            vmctx,
            wasmtime_environ::component::VMCOMPONENT_MAGIC,
        );
        let vm_store_context = c.builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            vmctx,
            i32::try_from(c.offsets.vm_store_context()).unwrap(),
        );
        super::save_last_wasm_exit_fp_and_pc(
            &mut c.builder,
            pointer_type,
            &c.offsets.ptr,
            vm_store_context,
        );

        c.translate(&component.trampolines[trampoline_index]);
        c.builder.finalize();
        compiler.cx.abi = Some(abi);

        Ok(CompiledFunctionBody {
            code: super::box_dyn_any_compiler_context(Some(compiler.cx)),
            needs_gc_heap: false,
        })
    }

    fn compile_intrinsic(
        &self,
        _tunables: &Tunables,
        component: &ComponentTranslation,
        types: &ComponentTypesBuilder,
        intrinsic: UnsafeIntrinsic,
        abi: Abi,
        symbol: &str,
    ) -> Result<CompiledFunctionBody> {
        let wasm_func_ty = WasmFuncType::new(
            intrinsic.core_params().into(),
            intrinsic.core_results().into(),
        );

        match abi {
            // Fall through to the trampoline compiler.
            Abi::Wasm => {}

            // Implement the array-abi trampoline in terms of calling the
            // wasm-abi trampoline.
            Abi::Array => {
                let offsets =
                    VMComponentOffsets::new(self.isa.pointer_bytes(), &component.component);
                return Ok(self.array_to_wasm_trampoline(
                    FuncKey::UnsafeIntrinsic(abi, intrinsic),
                    FuncKey::UnsafeIntrinsic(Abi::Wasm, intrinsic),
                    &wasm_func_ty,
                    symbol,
                    offsets.vm_store_context(),
                    wasmtime_environ::component::VMCOMPONENT_MAGIC,
                )?);
            }

            Abi::Patchable => {
                unreachable!(
                    "We should not be compiling a patchable trampoline for a component intrinsic"
                )
            }
        }

        let mut compiler = self.function_compiler();
        let mut c = TrampolineCompiler::new(
            self,
            &mut compiler,
            &component.component,
            &types,
            &wasm_func_ty,
        );

        match intrinsic {
            UnsafeIntrinsic::U8NativeLoad
            | UnsafeIntrinsic::U16NativeLoad
            | UnsafeIntrinsic::U32NativeLoad
            | UnsafeIntrinsic::U64NativeLoad => c.translate_load_intrinsic(intrinsic)?,
            UnsafeIntrinsic::U8NativeStore
            | UnsafeIntrinsic::U16NativeStore
            | UnsafeIntrinsic::U32NativeStore
            | UnsafeIntrinsic::U64NativeStore => c.translate_store_intrinsic(intrinsic)?,
            UnsafeIntrinsic::StoreDataAddress => {
                let [callee_vmctx, _caller_vmctx] = *c.abi_load_params() else {
                    unreachable!()
                };
                let pointer_type = self.isa.pointer_type();

                // Load the `*mut VMStoreContext` out of our vmctx.
                let store_ctx = c.builder.ins().load(
                    pointer_type,
                    ir::MemFlags::trusted()
                        .with_readonly()
                        .with_alias_region(Some(ir::AliasRegion::Vmctx))
                        .with_can_move(),
                    callee_vmctx,
                    i32::try_from(c.offsets.vm_store_context()).unwrap(),
                );

                // Load the `*mut T` out of the `VMStoreContext`.
                let data_address = c.builder.ins().load(
                    pointer_type,
                    ir::MemFlags::trusted()
                        .with_readonly()
                        .with_alias_region(Some(ir::AliasRegion::Vmctx))
                        .with_can_move(),
                    store_ctx,
                    i32::from(c.offsets.ptr.vmstore_context_store_data()),
                );

                // Zero-extend the address if we are on a 32-bit architecture.
                let data_address = match pointer_type.bits() {
                    32 => c.builder.ins().uextend(ir::types::I64, data_address),
                    64 => data_address,
                    p => bail!("unsupported architecture: no support for {p}-bit pointers"),
                };

                c.abi_store_results(&[data_address]);
            }
        }

        c.builder.finalize();
        compiler.cx.abi = Some(abi);

        Ok(CompiledFunctionBody {
            code: super::box_dyn_any_compiler_context(Some(compiler.cx)),
            needs_gc_heap: false,
        })
    }
}

macro_rules! unsafe_intrinsic_clif_params_results {
    (
        $(
            $symbol:expr => $variant:ident : $ctor:ident ( $( $param:ident : $param_ty:ident ),* ) $( -> $result_ty:ident )? ;
        )*
    ) => {
        fn unsafe_intrinsic_clif_params(intrinsic: UnsafeIntrinsic) -> &'static [ir::types::Type] {
            match intrinsic {
                $(
                    UnsafeIntrinsic::$variant => &[ $( unsafe_intrinsic_clif_params_results!(@clif_type $param_ty) ),* ],
                )*
            }
        }

        fn unsafe_intrinsic_clif_results(intrinsic: UnsafeIntrinsic) -> &'static [ir::types::Type] {
            match intrinsic {
                $(
                    UnsafeIntrinsic::$variant => &[ $( unsafe_intrinsic_clif_params_results!(@clif_type $result_ty) )? ],
                )*
            }
        }
    };

    (@clif_type u8) => { ir::types::I8 };
    (@clif_type u16) => { ir::types::I16 };
    (@clif_type u32) => { ir::types::I32 };
    (@clif_type u64) => { ir::types::I64 };
}
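
// Hypothetical expansion sketch: an intrinsic declared in
// `for_each_unsafe_intrinsic!` along the lines of
//
//   "u32-native-load" => U32NativeLoad : u32_native_load(ptr: u64) -> u32;
//
// would contribute `UnsafeIntrinsic::U32NativeLoad => &[ir::types::I64]` to
// `unsafe_intrinsic_clif_params` and `... => &[ir::types::I32]` to
// `unsafe_intrinsic_clif_results`. (The exact declaration syntax lives in
// `wasmtime_environ` and is only sketched here.)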

wasmtime_environ::for_each_unsafe_intrinsic!(unsafe_intrinsic_clif_params_results);

impl TrampolineCompiler<'_> {
    fn translate_transcode(
        &mut self,
        op: Transcode,
        from: RuntimeMemoryIndex,
        from64: bool,
        to: RuntimeMemoryIndex,
        to64: bool,
    ) {
        let pointer_type = self.isa.pointer_type();
        let vmctx = self.builder.func.dfg.block_params(self.block0)[0];

        // Determine the static signature of the host libcall for this transcode
        // operation and additionally calculate the static offset within the
        // transcode libcalls array.
        let get_libcall = match op {
            Transcode::Copy(FixedEncoding::Utf8) => host::utf8_to_utf8,
            Transcode::Copy(FixedEncoding::Utf16) => host::utf16_to_utf16,
            Transcode::Copy(FixedEncoding::Latin1) => host::latin1_to_latin1,
            Transcode::Latin1ToUtf16 => host::latin1_to_utf16,
            Transcode::Latin1ToUtf8 => host::latin1_to_utf8,
            Transcode::Utf16ToCompactProbablyUtf16 => host::utf16_to_compact_probably_utf16,
            Transcode::Utf16ToCompactUtf16 => host::utf16_to_compact_utf16,
            Transcode::Utf16ToLatin1 => host::utf16_to_latin1,
            Transcode::Utf16ToUtf8 => host::utf16_to_utf8,
            Transcode::Utf8ToCompactUtf16 => host::utf8_to_compact_utf16,
            Transcode::Utf8ToLatin1 => host::utf8_to_latin1,
            Transcode::Utf8ToUtf16 => host::utf8_to_utf16,
        };

        // Load the base pointers for the from/to linear memories.
        let from_base = self.load_runtime_memory_base(vmctx, from);
        let to_base = self.load_runtime_memory_base(vmctx, to);

        let mut args = Vec::new();
        args.push(vmctx);

        let uses_retptr = match op {
            Transcode::Utf16ToUtf8
            | Transcode::Latin1ToUtf8
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1 => true,
            _ => false,
        };

        // Most transcoders share roughly the same signature despite doing very
        // different things internally, so most libcalls are lumped together
        // here.
        match op {
            Transcode::Copy(_)
            | Transcode::Latin1ToUtf16
            | Transcode::Utf16ToCompactProbablyUtf16
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1
            | Transcode::Utf8ToUtf16 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
            }

            Transcode::Utf16ToUtf8 | Transcode::Latin1ToUtf8 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
                args.push(self.len_param(3, to64));
            }

            Transcode::Utf8ToCompactUtf16 | Transcode::Utf16ToCompactUtf16 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
                args.push(self.len_param(3, to64));
                args.push(self.len_param(4, to64));
            }
        };
        if uses_retptr {
            let slot = self
                .builder
                .func
                .create_sized_stack_slot(ir::StackSlotData::new(
                    ir::StackSlotKind::ExplicitSlot,
                    pointer_type.bytes(),
                    0,
                ));
            args.push(self.builder.ins().stack_addr(pointer_type, slot, 0));
        }
        let call = self.call_libcall(vmctx, get_libcall, &args);
        let mut results = self.builder.func.dfg.inst_results(call).to_vec();
        if uses_retptr {
            results.push(self.builder.ins().load(
                pointer_type,
                ir::MemFlags::trusted(),
                *args.last().unwrap(),
                0,
            ));
        }
        let mut raw_results = Vec::new();

        // Like the arguments the results are fairly similar across libcalls, so
        // they're lumped into various buckets here.
        match op {
            Transcode::Copy(_) | Transcode::Latin1ToUtf16 => {
                self.raise_if_host_trapped(results[0]);
            }

            Transcode::Utf8ToUtf16
            | Transcode::Utf16ToCompactProbablyUtf16
            | Transcode::Utf8ToCompactUtf16
            | Transcode::Utf16ToCompactUtf16 => {
                self.raise_if_transcode_trapped(results[0]);
                raw_results.push(self.cast_from_pointer(results[0], to64));
            }

            Transcode::Latin1ToUtf8
            | Transcode::Utf16ToUtf8
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1 => {
                self.raise_if_transcode_trapped(results[0]);
                raw_results.push(self.cast_from_pointer(results[0], from64));
                raw_results.push(self.cast_from_pointer(results[1], to64));
            }
        };

        self.builder.ins().return_(&raw_results);
    }

    // Helper function to cast an input parameter to the host pointer type.
    fn len_param(&mut self, param: usize, is64: bool) -> ir::Value {
        let val = self.builder.func.dfg.block_params(self.block0)[2 + param];
        self.cast_to_pointer(val, is64)
    }

    // Helper function to interpret an input parameter as a pointer into
    // linear memory. This will cast the input parameter to the host integer
    // type and then add that value to the base.
    //
    // Note that bounds-checking happens in adapter modules, and this
    // trampoline is simply calling the host libcall.
    fn ptr_param(&mut self, param: usize, is64: bool, base: ir::Value) -> ir::Value {
        let val = self.len_param(param, is64);
        self.builder.ins().iadd(base, val)
    }
1879
1880
// Helper function to cast a core wasm input to a host pointer type
1881
// which will go into the host libcall.
1882
fn cast_to_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value {
1883
let pointer_type = self.isa.pointer_type();
1884
let host64 = pointer_type == ir::types::I64;
1885
if is64 == host64 {
1886
val
1887
} else if !is64 {
1888
assert!(host64);
1889
self.builder.ins().uextend(pointer_type, val)
1890
} else {
1891
assert!(!host64);
1892
self.builder.ins().ireduce(pointer_type, val)
1893
}
1894
}
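
    // For example, on a 64-bit host a 32-bit wasm length is zero-extended
    // with `uextend`, while on a 32-bit host a 64-bit wasm value is
    // truncated with `ireduce`.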

    // Helper to cast a host pointer integer type to the destination type.
    fn cast_from_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value {
        let host64 = self.isa.pointer_type() == ir::types::I64;
        if is64 == host64 {
            val
        } else if !is64 {
            assert!(host64);
            self.builder.ins().ireduce(ir::types::I32, val)
        } else {
            assert!(!host64);
            self.builder.ins().uextend(ir::types::I64, val)
        }
    }

    fn load_runtime_memory_base(&mut self, vmctx: ir::Value, mem: RuntimeMemoryIndex) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        let from_vmmemory_definition = self.load_memory(vmctx, mem);
        self.builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            from_vmmemory_definition,
            i32::from(self.offsets.ptr.vmmemory_definition_base()),
        )
    }

    fn translate_load_intrinsic(&mut self, intrinsic: UnsafeIntrinsic) -> Result<()> {
        // Emit code for a native-load intrinsic.
        debug_assert_eq!(intrinsic.core_params(), &[WasmValType::I64]);
        debug_assert_eq!(intrinsic.core_results().len(), 1);

        let wasm_ty = intrinsic.core_results()[0];
        let clif_ty = unsafe_intrinsic_clif_results(intrinsic)[0];

        let [_callee_vmctx, _caller_vmctx, pointer] = *self.abi_load_params() else {
            unreachable!()
        };

        // Truncate the pointer, if necessary.
        debug_assert_eq!(self.builder.func.dfg.value_type(pointer), ir::types::I64);
        let pointer = match self.isa.pointer_bits() {
            32 => self.builder.ins().ireduce(ir::types::I32, pointer),
            64 => pointer,
            p => bail!("unsupported architecture: no support for {p}-bit pointers"),
        };

        // Do the load!
        let mut value = self
            .builder
            .ins()
            .load(clif_ty, ir::MemFlags::trusted(), pointer, 0);

        // Extend the value, if necessary. When implementing the
        // `u8-native-load` intrinsic, for example, we will load a Cranelift
        // value of type `i8` but we need to extend it to an `i32` because
        // Wasm doesn't have an `i8` core value type.
        let wasm_clif_ty = crate::value_type(self.isa, wasm_ty);
        if clif_ty != wasm_clif_ty {
            assert!(clif_ty.bytes() < wasm_clif_ty.bytes());
            // NB: all of our unsafe intrinsics for native loads are
            // unsigned, so we always zero-extend.
            value = self.builder.ins().uextend(wasm_clif_ty, value);
        }

        self.abi_store_results(&[value]);
        Ok(())
    }
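
    // Illustrative IR shape (not verbatim): for `u8-native-load` on a 64-bit
    // host this emits roughly a trusted `load.i8` of the native address
    // followed by a `uextend` to `i32`, which is returned as the wasm value.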

    fn translate_store_intrinsic(&mut self, intrinsic: UnsafeIntrinsic) -> Result<()> {
        debug_assert!(intrinsic.core_results().is_empty());
        debug_assert!(matches!(intrinsic.core_params(), [WasmValType::I64, _]));

        let wasm_ty = intrinsic.core_params()[1];
        let clif_ty = unsafe_intrinsic_clif_params(intrinsic)[1];

        let [_callee_vmctx, _caller_vmctx, pointer, mut value] = *self.abi_load_params() else {
            unreachable!()
        };

        // Truncate the pointer, if necessary.
        debug_assert_eq!(self.builder.func.dfg.value_type(pointer), ir::types::I64);
        let pointer = match self.isa.pointer_bits() {
            32 => self.builder.ins().ireduce(ir::types::I32, pointer),
            64 => pointer,
            p => bail!("unsupported architecture: no support for {p}-bit pointers"),
        };

        // Truncate the value, if necessary. For example, with
        // `u8-native-store` we will be given an `i32` from Wasm (because
        // core Wasm does not have an 8-bit integer value type) and we need
        // to reduce that into an `i8`.
        let wasm_ty = crate::value_type(self.isa, wasm_ty);
        if clif_ty != wasm_ty {
            assert!(clif_ty.bytes() < wasm_ty.bytes());
            value = self.builder.ins().ireduce(clif_ty, value);
        }

        // Do the store!
        self.builder
            .ins()
            .store(ir::MemFlags::trusted(), value, pointer, 0);

        self.abi_store_results(&[]);
        Ok(())
    }
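
    // Illustrative IR shape (not verbatim): `u8-native-store` narrows the
    // wasm `i32` operand with `ireduce` to `i8` and emits a single trusted
    // `store` to the native address.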
}

/// Module with macro-generated contents that will return the signature and
/// offset for each of the host transcoder functions.
///
/// Note that a macro is used here to keep this in sync with the actual
/// transcoder functions themselves which are also defined via a macro.
mod host {
    use cranelift_codegen::ir::{self, AbiParam};
    use cranelift_codegen::isa::{CallConv, TargetIsa};
    use wasmtime_environ::component::ComponentBuiltinFunctionIndex;

    macro_rules! define {
        (
            $(
                $( #[$attr:meta] )*
                $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
            )*
        ) => {
            $(
                pub(super) fn $name(isa: &dyn TargetIsa, func: &mut ir::Function) -> (ir::SigRef, ComponentBuiltinFunctionIndex) {
                    let pointer_type = isa.pointer_type();
                    let sig = build_sig(
                        isa,
                        func,
                        &[$( define!(@ty pointer_type $param) ),*],
                        &[$( define!(@ty pointer_type $result) ),*],
                    );

                    return (sig, ComponentBuiltinFunctionIndex::$name())
                }
            )*
        };

        (@ty $ptr:ident size) => ($ptr);
        (@ty $ptr:ident ptr_u8) => ($ptr);
        (@ty $ptr:ident ptr_u16) => ($ptr);
        (@ty $ptr:ident ptr_size) => ($ptr);
        (@ty $ptr:ident bool) => (ir::types::I8);
        (@ty $ptr:ident u8) => (ir::types::I8);
        (@ty $ptr:ident u32) => (ir::types::I32);
        (@ty $ptr:ident u64) => (ir::types::I64);
        (@ty $ptr:ident vmctx) => ($ptr);
    }
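
    // For illustration, with a hypothetical builtin declaration of the shape
    //
    //     utf8_to_utf16(vmctx: vmctx, src: ptr_u8, src_len: size, dst: ptr_u16) -> size;
    //
    // the macro would generate a `utf8_to_utf16` function above that imports
    // an all-pointer-sized signature and pairs it with the corresponding
    // `ComponentBuiltinFunctionIndex`.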

    wasmtime_environ::foreach_builtin_component_function!(define);

    fn build_sig(
        isa: &dyn TargetIsa,
        func: &mut ir::Function,
        params: &[ir::Type],
        returns: &[ir::Type],
    ) -> ir::SigRef {
        let mut sig = ir::Signature {
            params: params.iter().map(|ty| AbiParam::new(*ty)).collect(),
            returns: returns.iter().map(|ty| AbiParam::new(*ty)).collect(),
            call_conv: CallConv::triple_default(isa.triple()),
        };

        // When declaring the signature of a host function we must respect the
        // platform's default ABI, which is where argument extension of
        // params/results may come into play.
        let extension = isa.default_argument_extension();
        for arg in sig.params.iter_mut().chain(sig.returns.iter_mut()) {
            if arg.value_type.is_int() {
                arg.extension = extension;
            }
        }
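
        // For example, ABIs such as s390x's require narrow integer arguments
        // to be extended to full register width, so small integer params and
        // returns here may be marked for extension; pointer-sized values are
        // already register-width and unaffected.
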
        func.import_signature(sig)
    }
}