GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/crates/cranelift/src/compiler/component.rs
//! Compilation support for the component model.

use crate::{
    TRAP_ALWAYS, TRAP_CANNOT_ENTER, TRAP_INTERNAL_ASSERT,
    compiler::{Abi, Compiler},
};
use anyhow::Result;
use cranelift_codegen::ir::condcodes::IntCC;
use cranelift_codegen::ir::{self, InstBuilder, MemFlags, Value};
use cranelift_codegen::isa::{CallConv, TargetIsa};
use cranelift_frontend::FunctionBuilder;
use wasmtime_environ::{CompiledFunctionBody, component::*};
use wasmtime_environ::{
    EntityRef, HostCall, ModuleInternedTypeIndex, PtrSize, TrapSentinel, Tunables, WasmFuncType,
    WasmValType,
};
use wasmtime_environ::{FuncKey, fact::PREPARE_CALL_FIXED_PARAMS};

struct TrampolineCompiler<'a> {
    compiler: &'a Compiler,
    isa: &'a (dyn TargetIsa + 'static),
    builder: FunctionBuilder<'a>,
    component: &'a Component,
    types: &'a ComponentTypesBuilder,
    offsets: VMComponentOffsets<u8>,
    abi: Abi,
    block0: ir::Block,
    signature: ModuleInternedTypeIndex,
    tunables: &'a Tunables,
}

/// What host functions can be called, used in `translate_hostcall` below.
enum HostCallee {
    /// Call a host-lowered function specified by this index.
    Lowering(LoweredIndex),
    /// Call a host libcall, specified by this accessor.
    Libcall(GetLibcallFn),
}

type GetLibcallFn =
    fn(&dyn TargetIsa, &mut ir::Function) -> (ir::SigRef, ComponentBuiltinFunctionIndex);

impl From<LoweredIndex> for HostCallee {
    fn from(index: LoweredIndex) -> HostCallee {
        HostCallee::Lowering(index)
    }
}

impl From<GetLibcallFn> for HostCallee {
    fn from(f: GetLibcallFn) -> HostCallee {
        HostCallee::Libcall(f)
    }
}

/// How to interpret the results of a host function.
enum HostResult {
    /// The host function has no results.
    None,

    /// The host function returns the sentinel specified, which is interpreted
    /// and translated to the real return value.
    Sentinel(TrapSentinel),

    /// The host function returns a `bool` indicating whether it succeeded or
    /// not.
    ///
    /// After the return value is interpreted, the host function will also have
    /// filled in `ptr` and `len` with wasm return values which need to be
    /// returned.
    ///
    /// If `ptr` and `len` are not specified then this must be used with
    /// `WasmArgs::ValRawList`, and that ptr/len pair is used instead.
    MultiValue {
        /// The base pointer of the `ValRaw` list on the stack.
        ptr: Option<ir::Value>,
        /// The length of the `ValRaw` list on the stack.
        len: Option<ir::Value>,
    },
}
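
// As an illustrative sketch (not emitted verbatim): decoding
// `Sentinel(TrapSentinel::NegativeOne)` for a libcall returning `i64` whose
// wasm-level result is `i32` looks roughly like:
//
//   v1 = call $libcall(...)     ;; i64 host result
//   v2 = iconst.i64 -1
//   v3 = icmp ne v1, v2         ;; "succeeded" flag
//   raise_if_host_trapped(v3)
//   v4 = ireduce.i32 v1
//   return v4
//
// See `raise_if_negative_one_and_truncate` below for the actual translation.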

impl From<TrapSentinel> for HostResult {
    fn from(sentinel: TrapSentinel) -> HostResult {
        HostResult::Sentinel(sentinel)
    }
}

/// Different means of passing WebAssembly arguments to host calls.
#[derive(Debug, Copy, Clone)]
enum WasmArgs {
    /// All wasm arguments to the host are passed directly as values, typically
    /// through registers.
    InRegisters,

    /// All wasm arguments to the host are passed indirectly by spilling them
    /// to the stack as a sequence of contiguous `ValRaw`s.
    ValRawList,

    /// The first `n` arguments are passed in registers, but everything after
    /// that is spilled to the stack.
    InRegistersUpTo(usize),
}
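
// An illustrative sketch (wasm param names `a`/`b` are hypothetical) of the
// host argument lists produced by each mode:
//
//   InRegisters:        host(vmctx, <extra args>, a, b)
//   ValRawList:         host(vmctx, <extra args>, ptr, len)    ;; a/b spilled
//   InRegistersUpTo(1): host(vmctx, <extra args>, a, ptr, len) ;; b spilled
//
// where `ptr`/`len` describe a stack-allocated `ValRaw` array; see
// `translate_hostcall` below for the actual construction.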

impl<'a> TrampolineCompiler<'a> {
    fn new(
        compiler: &'a Compiler,
        func_compiler: &'a mut super::FunctionCompiler<'_>,
        component: &'a Component,
        types: &'a ComponentTypesBuilder,
        index: TrampolineIndex,
        abi: Abi,
        tunables: &'a Tunables,
    ) -> TrampolineCompiler<'a> {
        let isa = &*compiler.isa;
        let signature = component.trampolines[index];
        let ty = types[signature].unwrap_func();
        let func = ir::Function::with_name_signature(
            ir::UserFuncName::user(0, 0),
            match abi {
                Abi::Wasm => crate::wasm_call_signature(isa, ty, &compiler.tunables),
                Abi::Array => crate::array_call_signature(isa),
            },
        );
        let (builder, block0) = func_compiler.builder(func);
        TrampolineCompiler {
            compiler,
            isa,
            builder,
            component,
            types,
            offsets: VMComponentOffsets::new(isa.pointer_bytes(), component),
            abi,
            block0,
            signature,
            tunables,
        }
    }

    fn translate(&mut self, trampoline: &Trampoline) {
        match trampoline {
            Trampoline::Transcoder {
                op,
                from,
                from64,
                to,
                to64,
            } => {
                match self.abi {
                    Abi::Wasm => {
                        self.translate_transcode(*op, *from, *from64, *to, *to64);
                    }
                    // Transcoders can only actually be called by Wasm, so let's assert
                    // that here.
                    Abi::Array => {
                        self.builder.ins().trap(TRAP_INTERNAL_ASSERT);
                    }
                }
            }
            Trampoline::LowerImport {
                index,
                options,
                lower_ty,
            } => {
                let pointer_type = self.isa.pointer_type();
                self.translate_hostcall(
                    HostCallee::Lowering(*index),
                    HostResult::MultiValue {
                        ptr: None,
                        len: None,
                    },
                    WasmArgs::ValRawList,
                    |me, params| {
                        let vmctx = params[0];
                        params.extend([
                            me.builder.ins().load(
                                pointer_type,
                                MemFlags::trusted(),
                                vmctx,
                                i32::try_from(me.offsets.lowering_data(*index)).unwrap(),
                            ),
                            me.index_value(*lower_ty),
                            me.index_value(*options),
                        ]);
                    },
                );
            }
            Trampoline::AlwaysTrap => {
                if self.tunables.signals_based_traps {
                    self.builder.ins().trap(TRAP_ALWAYS);
                    return;
                }
                self.translate_libcall(
                    host::trap,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        let code = wasmtime_environ::Trap::AlwaysTrapAdapter as u8;
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(code)));
                    },
                );
            }
            Trampoline::ResourceNew(ty) => {
                // Currently this only supports resources represented by `i32`
                assert_eq!(
                    self.types[self.signature].unwrap_func().params()[0],
                    WasmValType::I32
                );
                self.translate_libcall(
                    host::resource_new32,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ResourceRep(ty) => {
                // Currently this only supports resources represented by `i32`
                assert_eq!(
                    self.types[self.signature].unwrap_func().returns()[0],
                    WasmValType::I32
                );
                self.translate_libcall(
                    host::resource_rep32,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ResourceDrop(ty) => {
                self.translate_resource_drop(*ty);
            }
            Trampoline::BackpressureSet { instance } => {
                self.translate_libcall(
                    host::backpressure_set,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::TaskReturn { results, options } => {
                self.translate_libcall(
                    host::task_return,
                    TrapSentinel::Falsy,
                    WasmArgs::ValRawList,
                    |me, params| {
                        params.push(me.index_value(*results));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::TaskCancel { instance } => {
                self.translate_libcall(
                    host::task_cancel,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::WaitableSetNew { instance } => {
                self.translate_libcall(
                    host::waitable_set_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::WaitableSetWait { options } => {
                self.translate_libcall(
                    host::waitable_set_wait,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::WaitableSetPoll { options } => {
                self.translate_libcall(
                    host::waitable_set_poll,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::WaitableSetDrop { instance } => {
                self.translate_libcall(
                    host::waitable_set_drop,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::WaitableJoin { instance } => {
                self.translate_libcall(
                    host::waitable_join,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::Yield { async_ } => {
                self.translate_libcall(
                    host::yield_,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::SubtaskDrop { instance } => {
                self.translate_libcall(
                    host::subtask_drop,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                    },
                );
            }
            Trampoline::SubtaskCancel { instance, async_ } => {
                self.translate_libcall(
                    host::subtask_cancel,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*instance));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::StreamNew { ty } => {
                self.translate_libcall(
                    host::stream_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::StreamRead { ty, options } => {
                if let Some(info) = self.flat_stream_element_info(*ty).cloned() {
                    self.translate_libcall(
                        host::flat_stream_read,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.extend([
                                me.index_value(*ty),
                                me.index_value(*options),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.size32)),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.align32)),
                            ]);
                        },
                    );
                } else {
                    self.translate_libcall(
                        host::stream_read,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.push(me.index_value(*ty));
                            params.push(me.index_value(*options));
                        },
                    );
                }
            }
            Trampoline::StreamWrite { ty, options } => {
                if let Some(info) = self.flat_stream_element_info(*ty).cloned() {
                    self.translate_libcall(
                        host::flat_stream_write,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.extend([
                                me.index_value(*ty),
                                me.index_value(*options),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.size32)),
                                me.builder
                                    .ins()
                                    .iconst(ir::types::I32, i64::from(info.align32)),
                            ]);
                        },
                    );
                } else {
                    self.translate_libcall(
                        host::stream_write,
                        TrapSentinel::NegativeOne,
                        WasmArgs::InRegisters,
                        |me, params| {
                            params.push(me.index_value(*ty));
                            params.push(me.index_value(*options));
                        },
                    );
                }
            }
            Trampoline::StreamCancelRead { ty, async_ } => {
                self.translate_libcall(
                    host::stream_cancel_read,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::StreamCancelWrite { ty, async_ } => {
                self.translate_libcall(
                    host::stream_cancel_write,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::StreamDropReadable { ty } => {
                self.translate_libcall(
                    host::stream_drop_readable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::StreamDropWritable { ty } => {
                self.translate_libcall(
                    host::stream_drop_writable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::FutureNew { ty } => {
                self.translate_libcall(
                    host::future_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::FutureRead { ty, options } => {
                self.translate_libcall(
                    host::future_read,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::FutureWrite { ty, options } => {
                self.translate_libcall(
                    host::future_write,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::FutureCancelRead { ty, async_ } => {
                self.translate_libcall(
                    host::future_cancel_read,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::FutureCancelWrite { ty, async_ } => {
                self.translate_libcall(
                    host::future_cancel_write,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                        params.push(me.builder.ins().iconst(ir::types::I8, i64::from(*async_)));
                    },
                );
            }
            Trampoline::FutureDropReadable { ty } => {
                self.translate_libcall(
                    host::future_drop_readable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::FutureDropWritable { ty } => {
                self.translate_libcall(
                    host::future_drop_writable,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ErrorContextNew { ty, options } => {
                self.translate_libcall(
                    host::error_context_new,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::ErrorContextDebugMessage { ty, options } => {
                self.translate_libcall(
                    host::error_context_debug_message,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                        params.push(me.index_value(*options));
                    },
                );
            }
            Trampoline::ErrorContextDrop { ty } => {
                self.translate_libcall(
                    host::error_context_drop,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.index_value(*ty));
                    },
                );
            }
            Trampoline::ResourceTransferOwn => {
                self.translate_libcall(
                    host::resource_transfer_own,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ResourceTransferBorrow => {
                self.translate_libcall(
                    host::resource_transfer_borrow,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ResourceEnterCall => {
                self.translate_libcall(
                    host::resource_enter_call,
                    HostResult::None,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ResourceExitCall => {
                self.translate_libcall(
                    host::resource_exit_call,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::PrepareCall { memory } => {
                self.translate_libcall(
                    host::prepare_call,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegistersUpTo(PREPARE_CALL_FIXED_PARAMS.len()),
                    |me, params| {
                        let vmctx = params[0];
                        params.push(me.load_optional_memory(vmctx, *memory));
                    },
                );
            }
            Trampoline::SyncStartCall { callback } => {
                let pointer_type = self.isa.pointer_type();
                let wasm_func_ty = &self.types[self.signature].unwrap_func();
                let (values_vec_ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
                    &WasmFuncType::new(
                        Box::new([]),
                        wasm_func_ty.returns().iter().copied().collect(),
                    ),
                    &mut self.builder,
                    &[],
                );
                let values_vec_len = self.builder.ins().iconst(pointer_type, i64::from(len));
                self.translate_libcall(
                    host::sync_start,
                    HostResult::MultiValue {
                        ptr: Some(values_vec_ptr),
                        len: Some(values_vec_len),
                    },
                    WasmArgs::InRegisters,
                    |me, params| {
                        let vmctx = params[0];
                        params.push(me.load_callback(vmctx, *callback));
                        params.push(values_vec_ptr);
                        params.push(values_vec_len);
                    },
                );
            }
            Trampoline::AsyncStartCall {
                callback,
                post_return,
            } => {
                self.translate_libcall(
                    host::async_start,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        let vmctx = params[0];
                        params.extend([
                            me.load_callback(vmctx, *callback),
                            me.load_post_return(vmctx, *post_return),
                        ]);
                    },
                );
            }
            Trampoline::FutureTransfer => {
                self.translate_libcall(
                    host::future_transfer,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::StreamTransfer => {
                self.translate_libcall(
                    host::stream_transfer,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ErrorContextTransfer => {
                self.translate_libcall(
                    host::error_context_transfer,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |_, _| {},
                );
            }
            Trampoline::ContextGet(i) => {
                self.translate_libcall(
                    host::context_get,
                    TrapSentinel::NegativeOne,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.builder.ins().iconst(ir::types::I32, i64::from(*i)));
                    },
                );
            }
            Trampoline::ContextSet(i) => {
                self.translate_libcall(
                    host::context_set,
                    TrapSentinel::Falsy,
                    WasmArgs::InRegisters,
                    |me, params| {
                        params.push(me.builder.ins().iconst(ir::types::I32, i64::from(*i)));
                    },
                );
            }
        }
    }

    /// Determine whether the specified type can be optimized as a stream
    /// payload by lifting and lowering with a simple `memcpy`.
    ///
    /// Any type containing only "flat", primitive data (i.e. no pointers or
    /// handles) should qualify for this optimization, but it's also okay to
    /// conservatively return `None` here; the fallback slow path will always
    /// work -- it just won't be as efficient.
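    ///
    /// For example (illustrative): a `stream<u8>` payload qualifies, and its
    /// `CanonicalAbiInfo` reports a size and alignment of one byte each, while
    /// a `stream<string>` payload does not qualify since strings involve
    /// pointers into linear memory and take the generic libcall path instead.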
    fn flat_stream_element_info(&self, ty: TypeStreamTableIndex) -> Option<&CanonicalAbiInfo> {
        let payload = self.types[self.types[ty].ty].payload;
        match payload {
            None => Some(&CanonicalAbiInfo::ZERO),
            Some(
                payload @ (InterfaceType::Bool
                | InterfaceType::S8
                | InterfaceType::U8
                | InterfaceType::S16
                | InterfaceType::U16
                | InterfaceType::S32
                | InterfaceType::U32
                | InterfaceType::S64
                | InterfaceType::U64
                | InterfaceType::Float32
                | InterfaceType::Float64
                | InterfaceType::Char),
            ) => Some(self.types.canonical_abi(&payload)),
            // TODO: Recursively check for other "flat" types (i.e. those without pointers or handles),
            // e.g. `record`s, `variant`s, etc. which contain only flat types.
            _ => None,
        }
    }

    /// Helper function to spill the wasm arguments `args` of this function
    /// into a stack-allocated array.
    fn store_wasm_arguments(&mut self, args: &[Value]) -> (Value, Value) {
        let pointer_type = self.isa.pointer_type();
        let wasm_func_ty = &self.types[self.signature].unwrap_func();

        match self.abi {
            // For the wasm ABI a stack array needs to be allocated and these
            // arguments are stored into it.
            Abi::Wasm => {
                let (ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
                    wasm_func_ty,
                    &mut self.builder,
                    args,
                );
                let len = self.builder.ins().iconst(pointer_type, i64::from(len));
                (ptr, len)
            }

            // For the array ABI all arguments are already on the stack, so
            // forward along that pointer/len.
            Abi::Array => {
                let params = self.builder.func.dfg.block_params(self.block0);
                (params[2], params[3])
            }
        }
    }

    /// Convenience wrapper around `translate_hostcall` to enable type inference
    /// on the `get_libcall` parameter here.
    fn translate_libcall(
        &mut self,
        get_libcall: GetLibcallFn,
        host_result: impl Into<HostResult>,
        wasm_args: WasmArgs,
        extra_host_args: impl FnOnce(&mut Self, &mut Vec<ir::Value>),
    ) {
        self.translate_hostcall(
            HostCallee::Libcall(get_libcall),
            host_result.into(),
            wasm_args,
            extra_host_args,
        )
    }

    /// Translates an invocation of a host function and interprets the result.
    ///
    /// This is intended to be a relatively narrow waist which most intrinsics
    /// go through. The configuration supported here is:
    ///
    /// * `host_callee` - what's being called, either a libcall or a lowered
    ///   function
    /// * `host_result` - how to interpret the return value to see if it's a
    ///   trap
    /// * `wasm_args` - how to pass wasm args to the host, either in registers
    ///   or on the stack
    /// * `extra_host_args` - a closure used to push extra arguments just before
    ///   the wasm arguments are forwarded.
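    ///
    /// As an illustrative sketch (not the verbatim output), a libcall taking
    /// one extra index argument with `WasmArgs::InRegisters` and
    /// `TrapSentinel::Falsy` compiles to roughly:
    ///
    /// ```text
    /// v0 = iconst.i32 <index>                      ;; extra_host_args
    /// v1 = call $libcall(vmctx, v0, <wasm params>) ;; returns i8 "succeeded"
    /// raise_if_host_trapped(v1)
    /// return
    /// ```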
    fn translate_hostcall(
        &mut self,
        host_callee: HostCallee,
        host_result: impl Into<HostResult>,
        wasm_args: WasmArgs,
        extra_host_args: impl FnOnce(&mut Self, &mut Vec<ir::Value>),
    ) {
        let pointer_type = self.isa.pointer_type();
        let wasm_func_ty = self.types[self.signature].unwrap_func();

        // Load all parameters in an ABI-agnostic fashion, of which the
        // `VMComponentContext` will be the first.
        let params = self.abi_load_params();
        let vmctx = params[0];
        let wasm_params = &params[2..];

        // Start building up arguments to the host. The first is always the
        // vmctx. After is whatever `extra_host_args` appends, and then finally
        // is what `WasmArgs` specifies.
        let mut host_args = vec![vmctx];
        extra_host_args(self, &mut host_args);
        let mut val_raw_ptr = None;
        let mut val_raw_len = None;
        match wasm_args {
            // Wasm params are passed through as values themselves.
            WasmArgs::InRegisters => host_args.extend(wasm_params.iter().copied()),

            // Wasm params are spilled and then the ptr/len is passed.
            WasmArgs::ValRawList => {
                let (ptr, len) = self.store_wasm_arguments(wasm_params);
                val_raw_ptr = Some(ptr);
                val_raw_len = Some(len);
                host_args.push(ptr);
                host_args.push(len);
            }

            // A mixture of the above two.
            WasmArgs::InRegistersUpTo(n) => {
                let (values_vec_ptr, len) = self.compiler.allocate_stack_array_and_spill_args(
                    &WasmFuncType::new(
                        wasm_func_ty.params().iter().skip(n).copied().collect(),
                        Box::new([]),
                    ),
                    &mut self.builder,
                    &wasm_params[n..],
                );
                let values_vec_len = self.builder.ins().iconst(pointer_type, i64::from(len));

                host_args.extend(wasm_params[..n].iter().copied());
                host_args.push(values_vec_ptr);
                host_args.push(values_vec_len);
            }
        }

        // Next perform the actual invocation of the host with `host_args`.
        let call = match host_callee {
            HostCallee::Libcall(get_libcall) => self.call_libcall(vmctx, get_libcall, &host_args),
            HostCallee::Lowering(index) => {
                // Load host function pointer from the vmcontext and then call that
                // indirect function pointer with the list of arguments.
                let host_fn = self.builder.ins().load(
                    pointer_type,
                    MemFlags::trusted(),
                    vmctx,
                    i32::try_from(self.offsets.lowering_callee(index)).unwrap(),
                );
                let host_sig = {
                    let mut sig = ir::Signature::new(CallConv::triple_default(self.isa.triple()));
                    for param in host_args.iter() {
                        let ty = self.builder.func.dfg.value_type(*param);
                        sig.params.push(ir::AbiParam::new(ty));
                    }
                    // return value is a bool whether a trap was raised or not
                    sig.returns.push(ir::AbiParam::new(ir::types::I8));
                    self.builder.import_signature(sig)
                };
                self.compiler.call_indirect_host(
                    &mut self.builder,
                    HostCall::ComponentLowerImport,
                    host_sig,
                    host_fn,
                    &host_args,
                )
            }
        };

        // Acquire the result of this function (if any) and interpret it
        // according to `host_result`.
        //
        // Note that all match arms here end with `abi_store_results`, which
        // accounts for the ABI of this function when storing results.
        let result = self.builder.func.dfg.inst_results(call).get(0).copied();
        let result_ty = result.map(|v| self.builder.func.dfg.value_type(v));
        let expected = wasm_func_ty.returns();
        match host_result.into() {
            HostResult::Sentinel(TrapSentinel::NegativeOne) => {
                assert_eq!(expected.len(), 1);
                let (result, result_ty) = (result.unwrap(), result_ty.unwrap());
                let result = match (result_ty, expected[0]) {
                    (ir::types::I64, WasmValType::I32) => {
                        self.raise_if_negative_one_and_truncate(result)
                    }
                    (ir::types::I64, WasmValType::I64) | (ir::types::I32, WasmValType::I32) => {
                        self.raise_if_negative_one(result)
                    }
                    other => panic!("unsupported NegativeOne combo {other:?}"),
                };
                self.abi_store_results(&[result]);
            }
            HostResult::Sentinel(TrapSentinel::Falsy) => {
                assert_eq!(expected.len(), 0);
                self.raise_if_host_trapped(result.unwrap());
                self.abi_store_results(&[]);
            }
            HostResult::Sentinel(_) => todo!("support additional return types if/when necessary"),
            HostResult::None => {
                assert!(result.is_none());
                self.abi_store_results(&[]);
            }

            HostResult::MultiValue { ptr, len } => {
                let ptr = ptr.or(val_raw_ptr).unwrap();
                let len = len.or(val_raw_len).unwrap();
                self.raise_if_host_trapped(result.unwrap());
                let results = self.compiler.load_values_from_array(
                    wasm_func_ty.returns(),
                    &mut self.builder,
                    ptr,
                    len,
                );
                self.abi_store_results(&results);
            }
        }
    }

    fn index_value(&mut self, index: impl EntityRef) -> ir::Value {
        self.builder
            .ins()
            .iconst(ir::types::I32, i64::try_from(index.index()).unwrap())
    }

    fn translate_resource_drop(&mut self, resource: TypeResourceTableIndex) {
        let args = self.abi_load_params();
        let vmctx = args[0];
        let caller_vmctx = args[1];
        let pointer_type = self.isa.pointer_type();

        // The arguments this shim passes along to the libcall are:
        //
        // * the vmctx
        // * a constant value for this `ResourceDrop` intrinsic
        // * the wasm handle index to drop
        let mut host_args = Vec::new();
        host_args.push(vmctx);
        host_args.push(
            self.builder
                .ins()
                .iconst(ir::types::I32, i64::from(resource.as_u32())),
        );
        host_args.push(args[2]);

        let call = self.call_libcall(vmctx, host::resource_drop, &host_args);

        // Immediately raise a trap if requested by the host
        let should_run_destructor =
            self.raise_if_negative_one(self.builder.func.dfg.inst_results(call)[0]);

        let resource_ty = self.types[resource].ty;
        let resource_def = self
            .component
            .defined_resource_index(resource_ty)
            .map(|idx| {
                self.component
                    .initializers
                    .iter()
                    .filter_map(|i| match i {
                        GlobalInitializer::Resource(r) if r.index == idx => Some(r),
                        _ => None,
                    })
                    .next()
                    .unwrap()
            });
        let has_destructor = match resource_def {
            Some(def) => def.dtor.is_some(),
            None => true,
        };
        // Synthesize the following:
        //
        //      ...
        //      brif should_run_destructor, run_destructor_block, return_block
        //
        //    run_destructor_block:
        //      ;; test may_enter, but only if the component instances
        //      ;; differ
        //      flags = load.i32 vmctx+$offset
        //      masked = band flags, $FLAG_MAY_ENTER
        //      trapz masked, CANNOT_ENTER_CODE
        //
        //      ;; ============================================================
        //      ;; this is conditionally emitted based on whether the resource
        //      ;; has a destructor or not, and can be statically omitted
        //      ;; because that information is known at compile time here.
        //      rep = ushr.i64 rep, 1
        //      rep = ireduce.i32 rep
        //      dtor = load.ptr vmctx+$offset
        //      func_addr = load.ptr dtor+$offset
        //      callee_vmctx = load.ptr dtor+$offset
        //      call_indirect func_addr, callee_vmctx, vmctx, rep
        //      ;; ============================================================
        //
        //      jump return_block
        //
        //    return_block:
        //      return
        //
        // This will decode `should_run_destructor` and run the destructor
        // funcref if one is specified for this resource. Note that not all
        // resources have destructors, hence the null check.
        self.builder.ensure_inserted_block();
        let current_block = self.builder.current_block().unwrap();
        let run_destructor_block = self.builder.create_block();
        self.builder
            .insert_block_after(run_destructor_block, current_block);
        let return_block = self.builder.create_block();
        self.builder
            .insert_block_after(return_block, run_destructor_block);

        self.builder.ins().brif(
            should_run_destructor,
            run_destructor_block,
            &[],
            return_block,
            &[],
        );

        let trusted = ir::MemFlags::trusted().with_readonly();

        self.builder.switch_to_block(run_destructor_block);

        // If this is a defined resource within the component itself then a
        // check needs to be emitted for the `may_enter` flag. Note though
        // that this check can be elided if the resource table resides in
        // the same component instance that defined the resource, as the
        // component is calling itself.
        if let Some(def) = resource_def {
            if self.types[resource].instance != def.instance {
                let flags = self.builder.ins().load(
                    ir::types::I32,
                    trusted,
                    vmctx,
                    i32::try_from(self.offsets.instance_flags(def.instance)).unwrap(),
                );
                let masked = self
                    .builder
                    .ins()
                    .band_imm(flags, i64::from(FLAG_MAY_ENTER));
                self.builder.ins().trapz(masked, TRAP_CANNOT_ENTER);
            }
        }

        // Conditionally emit destructor-execution code based on whether we
        // statically know that a destructor exists or not.
        if has_destructor {
            let rep = self.builder.ins().ushr_imm(should_run_destructor, 1);
            let rep = self.builder.ins().ireduce(ir::types::I32, rep);
            let index = self.types[resource].ty;
            // NB: despite the vmcontext storing nullable funcrefs for function
            // pointers we know this is statically never null due to the
            // `has_destructor` check above.
            let dtor_func_ref = self.builder.ins().load(
                pointer_type,
                trusted,
                vmctx,
                i32::try_from(self.offsets.resource_destructor(index)).unwrap(),
            );
            if self.compiler.emit_debug_checks {
                self.builder
                    .ins()
                    .trapz(dtor_func_ref, TRAP_INTERNAL_ASSERT);
            }
            let func_addr = self.builder.ins().load(
                pointer_type,
                trusted,
                dtor_func_ref,
                i32::from(self.offsets.ptr.vm_func_ref_wasm_call()),
            );
            let callee_vmctx = self.builder.ins().load(
                pointer_type,
                trusted,
                dtor_func_ref,
                i32::from(self.offsets.ptr.vm_func_ref_vmctx()),
            );

            let sig = crate::wasm_call_signature(
                self.isa,
                &self.types[self.signature].unwrap_func(),
                &self.compiler.tunables,
            );
            let sig_ref = self.builder.import_signature(sig);

            // NB: note that the "caller" vmctx here is the caller of this
            // intrinsic itself, not the `VMComponentContext`. This effectively
            // takes ourselves out of the chain here but that's ok since the
            // caller is only used for store/limits and that same info is
            // stored, but elsewhere, in the component context.
            self.builder.ins().call_indirect(
                sig_ref,
                func_addr,
                &[callee_vmctx, caller_vmctx, rep],
            );
        }
        self.builder.ins().jump(return_block, &[]);
        self.builder.seal_block(run_destructor_block);

        self.builder.switch_to_block(return_block);
        self.builder.seal_block(return_block);
        self.abi_store_results(&[]);
    }

    fn load_optional_memory(
        &mut self,
        vmctx: ir::Value,
        memory: Option<RuntimeMemoryIndex>,
    ) -> ir::Value {
        match memory {
            Some(idx) => self.load_memory(vmctx, idx),
            None => self.builder.ins().iconst(self.isa.pointer_type(), 0),
        }
    }

    fn load_memory(&mut self, vmctx: ir::Value, memory: RuntimeMemoryIndex) -> ir::Value {
        self.builder.ins().load(
            self.isa.pointer_type(),
            MemFlags::trusted(),
            vmctx,
            i32::try_from(self.offsets.runtime_memory(memory)).unwrap(),
        )
    }

    fn load_callback(
        &mut self,
        vmctx: ir::Value,
        callback: Option<RuntimeCallbackIndex>,
    ) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        match callback {
            Some(idx) => self.builder.ins().load(
                pointer_type,
                MemFlags::trusted(),
                vmctx,
                i32::try_from(self.offsets.runtime_callback(idx)).unwrap(),
            ),
            None => self.builder.ins().iconst(pointer_type, 0),
        }
    }

    fn load_post_return(
        &mut self,
        vmctx: ir::Value,
        post_return: Option<RuntimePostReturnIndex>,
    ) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        match post_return {
            Some(idx) => self.builder.ins().load(
                pointer_type,
                MemFlags::trusted(),
                vmctx,
                i32::try_from(self.offsets.runtime_post_return(idx)).unwrap(),
            ),
            None => self.builder.ins().iconst(pointer_type, 0),
        }
    }

    /// Loads a host function pointer for a libcall stored at the `offset`
    /// provided in the libcalls array.
    ///
    /// The offset is calculated in the `host` module below.
    fn load_libcall(
        &mut self,
        vmctx: ir::Value,
        index: ComponentBuiltinFunctionIndex,
    ) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        // First load the pointer to the builtins structure which is static
        // per-process.
        let builtins_array = self.builder.ins().load(
            pointer_type,
            MemFlags::trusted().with_readonly(),
            vmctx,
            i32::try_from(self.offsets.builtins()).unwrap(),
        );
        // Next load the function pointer at `offset` and return that.
        self.builder.ins().load(
            pointer_type,
            MemFlags::trusted().with_readonly(),
            builtins_array,
            i32::try_from(index.index() * u32::from(self.offsets.ptr.size())).unwrap(),
        )
    }

    fn abi_load_params(&mut self) -> Vec<ir::Value> {
        let mut block0_params = self.builder.func.dfg.block_params(self.block0).to_vec();
        match self.abi {
            // Wasm and native ABIs pass parameters as normal function
            // parameters.
            Abi::Wasm => block0_params,

            // The array ABI passes a pointer/length as the 3rd/4th arguments
            // and those are used to load the actual wasm parameters.
            Abi::Array => {
                let results = self.compiler.load_values_from_array(
                    self.types[self.signature].unwrap_func().params(),
                    &mut self.builder,
                    block0_params[2],
                    block0_params[3],
                );
                block0_params.truncate(2);
                block0_params.extend(results);
                block0_params
            }
        }
    }
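
    // For reference: in both ABIs the raw block0 parameters begin with
    // `(vmctx, caller_vmctx, ...)`; the array ABI additionally carries a
    // `(values_ptr, values_len)` pair as the 3rd/4th parameters, which is why
    // `block0_params[2]`/`[3]` are treated as the `ValRaw` array in the
    // helpers above and below.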

    fn abi_store_results(&mut self, results: &[ir::Value]) {
        match self.abi {
            // Wasm/native ABIs return values as usual.
            Abi::Wasm => {
                self.builder.ins().return_(results);
            }

            // The array ABI stores all results in the pointer/length passed
            // as arguments to this function, which contractually are required
            // to have enough space for the results.
            Abi::Array => {
                let block0_params = self.builder.func.dfg.block_params(self.block0);
                let (ptr, len) = (block0_params[2], block0_params[3]);
                self.compiler.store_values_to_array(
                    &mut self.builder,
                    self.types[self.signature].unwrap_func().returns(),
                    results,
                    ptr,
                    len,
                );
                let true_value = self.builder.ins().iconst(ir::types::I8, 1);
                self.builder.ins().return_(&[true_value]);
            }
        }
    }

    fn raise_if_host_trapped(&mut self, succeeded: ir::Value) {
        let caller_vmctx = self.builder.func.dfg.block_params(self.block0)[1];
        self.compiler
            .raise_if_host_trapped(&mut self.builder, caller_vmctx, succeeded);
    }

    fn raise_if_transcode_trapped(&mut self, amount_copied: ir::Value) {
        let pointer_type = self.isa.pointer_type();
        let minus_one = self.builder.ins().iconst(pointer_type, -1);
        let succeeded = self
            .builder
            .ins()
            .icmp(IntCC::NotEqual, amount_copied, minus_one);
        self.raise_if_host_trapped(succeeded);
    }

    fn raise_if_negative_one_and_truncate(&mut self, ret: ir::Value) -> ir::Value {
        let ret = self.raise_if_negative_one(ret);
        self.builder.ins().ireduce(ir::types::I32, ret)
    }

    fn raise_if_negative_one(&mut self, ret: ir::Value) -> ir::Value {
        let result_ty = self.builder.func.dfg.value_type(ret);
        let minus_one = self.builder.ins().iconst(result_ty, -1);
        let succeeded = self.builder.ins().icmp(IntCC::NotEqual, ret, minus_one);
        self.raise_if_host_trapped(succeeded);
        ret
    }

    fn call_libcall(
        &mut self,
        vmctx: ir::Value,
        get_libcall: GetLibcallFn,
        args: &[ir::Value],
    ) -> ir::Inst {
        let (host_sig, index) = get_libcall(self.isa, &mut self.builder.func);
        let host_fn = self.load_libcall(vmctx, index);
        self.compiler
            .call_indirect_host(&mut self.builder, index, host_sig, host_fn, args)
    }
}

impl ComponentCompiler for Compiler {
    fn compile_trampoline(
        &self,
        component: &ComponentTranslation,
        types: &ComponentTypesBuilder,
        key: FuncKey,
        tunables: &Tunables,
        _symbol: &str,
    ) -> Result<AllCallFunc<CompiledFunctionBody>> {
        let compile = |abi: Abi| -> Result<_> {
            let mut compiler = self.function_compiler();
            let mut c = TrampolineCompiler::new(
                self,
                &mut compiler,
                &component.component,
                types,
                key.unwrap_component_trampoline(),
                abi,
                tunables,
            );

            // If we are crossing the Wasm-to-native boundary, we need to save
            // the exit FP and return address for stack walking purposes.
            // However, we always debug assert that our vmctx is a component
            // context, regardless of whether we are actually crossing that
            // boundary, because it should always hold.
            let vmctx = c.builder.block_params(c.block0)[0];
            let pointer_type = self.isa.pointer_type();
            self.debug_assert_vmctx_kind(
                &mut c.builder,
                vmctx,
                wasmtime_environ::component::VMCOMPONENT_MAGIC,
            );
            if let Abi::Wasm = abi {
                let vm_store_context = c.builder.ins().load(
                    pointer_type,
                    MemFlags::trusted(),
                    vmctx,
                    i32::try_from(c.offsets.vm_store_context()).unwrap(),
                );
                super::save_last_wasm_exit_fp_and_pc(
                    &mut c.builder,
                    pointer_type,
                    &c.offsets.ptr,
                    vm_store_context,
                );
            }

            c.translate(&component.trampolines[key.unwrap_component_trampoline()]);
            c.builder.finalize();
            compiler.cx.abi = Some(abi);

            Ok(CompiledFunctionBody {
                code: super::box_dyn_any_compiler_context(Some(compiler.cx)),
                needs_gc_heap: false,
            })
        };

        Ok(AllCallFunc {
            wasm_call: compile(Abi::Wasm)?,
            array_call: compile(Abi::Array)?,
        })
    }
}

impl TrampolineCompiler<'_> {
    fn translate_transcode(
        &mut self,
        op: Transcode,
        from: RuntimeMemoryIndex,
        from64: bool,
        to: RuntimeMemoryIndex,
        to64: bool,
    ) {
        let pointer_type = self.isa.pointer_type();
        let vmctx = self.builder.func.dfg.block_params(self.block0)[0];

        // Determine the static signature of the host libcall for this
        // transcode operation and additionally calculate the static offset
        // within the transcode libcalls array.
        let get_libcall = match op {
            Transcode::Copy(FixedEncoding::Utf8) => host::utf8_to_utf8,
            Transcode::Copy(FixedEncoding::Utf16) => host::utf16_to_utf16,
            Transcode::Copy(FixedEncoding::Latin1) => host::latin1_to_latin1,
            Transcode::Latin1ToUtf16 => host::latin1_to_utf16,
            Transcode::Latin1ToUtf8 => host::latin1_to_utf8,
            Transcode::Utf16ToCompactProbablyUtf16 => host::utf16_to_compact_probably_utf16,
            Transcode::Utf16ToCompactUtf16 => host::utf16_to_compact_utf16,
            Transcode::Utf16ToLatin1 => host::utf16_to_latin1,
            Transcode::Utf16ToUtf8 => host::utf16_to_utf8,
            Transcode::Utf8ToCompactUtf16 => host::utf8_to_compact_utf16,
            Transcode::Utf8ToLatin1 => host::utf8_to_latin1,
            Transcode::Utf8ToUtf16 => host::utf8_to_utf16,
        };

        // Load the base pointers for the from/to linear memories.
        let from_base = self.load_runtime_memory_base(vmctx, from);
        let to_base = self.load_runtime_memory_base(vmctx, to);

        let mut args = Vec::new();
        args.push(vmctx);

        let uses_retptr = match op {
            Transcode::Utf16ToUtf8
            | Transcode::Latin1ToUtf8
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1 => true,
            _ => false,
        };

        // Most transcoders share roughly the same signature despite doing very
        // different things internally, so most libcalls are lumped together
        // here.
        match op {
            Transcode::Copy(_)
            | Transcode::Latin1ToUtf16
            | Transcode::Utf16ToCompactProbablyUtf16
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1
            | Transcode::Utf8ToUtf16 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
            }

            Transcode::Utf16ToUtf8 | Transcode::Latin1ToUtf8 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
                args.push(self.len_param(3, to64));
            }

            Transcode::Utf8ToCompactUtf16 | Transcode::Utf16ToCompactUtf16 => {
                args.push(self.ptr_param(0, from64, from_base));
                args.push(self.len_param(1, from64));
                args.push(self.ptr_param(2, to64, to_base));
                args.push(self.len_param(3, to64));
                args.push(self.len_param(4, to64));
            }
        };
        if uses_retptr {
            let slot = self
                .builder
                .func
                .create_sized_stack_slot(ir::StackSlotData::new(
                    ir::StackSlotKind::ExplicitSlot,
                    pointer_type.bytes(),
                    0,
                ));
            args.push(self.builder.ins().stack_addr(pointer_type, slot, 0));
        }
        let call = self.call_libcall(vmctx, get_libcall, &args);
        let mut results = self.builder.func.dfg.inst_results(call).to_vec();
        if uses_retptr {
            results.push(self.builder.ins().load(
                pointer_type,
                ir::MemFlags::trusted(),
                *args.last().unwrap(),
                0,
            ));
        }
        let mut raw_results = Vec::new();

        // Like the arguments, the results are fairly similar across libcalls,
        // so they're lumped into various buckets here.
        match op {
            Transcode::Copy(_) | Transcode::Latin1ToUtf16 => {
                self.raise_if_host_trapped(results[0]);
            }

            Transcode::Utf8ToUtf16
            | Transcode::Utf16ToCompactProbablyUtf16
            | Transcode::Utf8ToCompactUtf16
            | Transcode::Utf16ToCompactUtf16 => {
                self.raise_if_transcode_trapped(results[0]);
                raw_results.push(self.cast_from_pointer(results[0], to64));
            }

            Transcode::Latin1ToUtf8
            | Transcode::Utf16ToUtf8
            | Transcode::Utf8ToLatin1
            | Transcode::Utf16ToLatin1 => {
                self.raise_if_transcode_trapped(results[0]);
                raw_results.push(self.cast_from_pointer(results[0], from64));
                raw_results.push(self.cast_from_pointer(results[1], to64));
            }
        };

        self.builder.ins().return_(&raw_results);
    }

    // Helper function to cast an input parameter to the host pointer type.
    fn len_param(&mut self, param: usize, is64: bool) -> ir::Value {
        let val = self.builder.func.dfg.block_params(self.block0)[2 + param];
        self.cast_to_pointer(val, is64)
    }

    // Helper function to interpret an input parameter as a pointer into
    // linear memory. This will cast the input parameter to the host integer
    // type and then add that value to the base.
    //
    // Note that bounds-checking happens in adapter modules, and this
    // trampoline is simply calling the host libcall.
    fn ptr_param(&mut self, param: usize, is64: bool, base: ir::Value) -> ir::Value {
        let val = self.len_param(param, is64);
        self.builder.ins().iadd(base, val)
    }

    // Helper function to cast a core wasm input to a host pointer type
    // which will go into the host libcall.
    fn cast_to_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        let host64 = pointer_type == ir::types::I64;
        if is64 == host64 {
            val
        } else if !is64 {
            assert!(host64);
            self.builder.ins().uextend(pointer_type, val)
        } else {
            assert!(!host64);
            self.builder.ins().ireduce(pointer_type, val)
        }
    }
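
    // Summary of the casts performed by `cast_to_pointer` (above) and
    // `cast_from_pointer` (below), where "host64" means the host pointer
    // type is `i64`:
    //
    //   wasm width == host width -> value passed through unchanged
    //   to_pointer,   wasm i32 on a 64-bit host -> uextend to i64
    //   to_pointer,   wasm i64 on a 32-bit host -> ireduce to i32
    //   from_pointer, host i64 to a wasm i32    -> ireduce to i32
    //   from_pointer, host i32 to a wasm i64    -> uextend to i64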

    // Helper to cast a host pointer integer type to the destination type.
    fn cast_from_pointer(&mut self, val: ir::Value, is64: bool) -> ir::Value {
        let host64 = self.isa.pointer_type() == ir::types::I64;
        if is64 == host64 {
            val
        } else if !is64 {
            assert!(host64);
            self.builder.ins().ireduce(ir::types::I32, val)
        } else {
            assert!(!host64);
            self.builder.ins().uextend(ir::types::I64, val)
        }
    }

    fn load_runtime_memory_base(&mut self, vmctx: ir::Value, mem: RuntimeMemoryIndex) -> ir::Value {
        let pointer_type = self.isa.pointer_type();
        let from_vmmemory_definition = self.load_memory(vmctx, mem);
        self.builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            from_vmmemory_definition,
            i32::from(self.offsets.ptr.vmmemory_definition_base()),
        )
    }
}

/// Module with macro-generated contents that will return the signature and
/// offset for each of the host transcoder functions.
///
/// Note that a macro is used here to keep this in sync with the actual
/// transcoder functions themselves which are also defined via a macro.
mod host {
    use cranelift_codegen::ir::{self, AbiParam};
    use cranelift_codegen::isa::{CallConv, TargetIsa};
    use wasmtime_environ::component::ComponentBuiltinFunctionIndex;

    macro_rules! define {
        (
            $(
                $( #[$attr:meta] )*
                $name:ident( $( $pname:ident: $param:ident ),* ) $( -> $result:ident )?;
            )*
        ) => {
            $(
                pub(super) fn $name(isa: &dyn TargetIsa, func: &mut ir::Function) -> (ir::SigRef, ComponentBuiltinFunctionIndex) {
                    let pointer_type = isa.pointer_type();
                    let sig = build_sig(
                        isa,
                        func,
                        &[$( define!(@ty pointer_type $param) ),*],
                        &[$( define!(@ty pointer_type $result) ),*],
                    );

                    return (sig, ComponentBuiltinFunctionIndex::$name())
                }
            )*
        };

        (@ty $ptr:ident size) => ($ptr);
        (@ty $ptr:ident ptr_u8) => ($ptr);
        (@ty $ptr:ident ptr_u16) => ($ptr);
        (@ty $ptr:ident ptr_size) => ($ptr);
        (@ty $ptr:ident bool) => (ir::types::I8);
        (@ty $ptr:ident u8) => (ir::types::I8);
        (@ty $ptr:ident u32) => (ir::types::I32);
        (@ty $ptr:ident u64) => (ir::types::I64);
        (@ty $ptr:ident vmctx) => ($ptr);
    }
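
    // Illustrative (hypothetical) expansion of `define!` for a builtin
    // declared as `trap(vmctx: vmctx, code: u8) -> bool`:
    //
    //   pub(super) fn trap(isa: &dyn TargetIsa, func: &mut ir::Function)
    //       -> (ir::SigRef, ComponentBuiltinFunctionIndex)
    //   {
    //       let pointer_type = isa.pointer_type();
    //       let sig = build_sig(
    //           isa,
    //           func,
    //           &[pointer_type, ir::types::I8], // vmctx, code: u8
    //           &[ir::types::I8],               // -> bool
    //       );
    //       return (sig, ComponentBuiltinFunctionIndex::trap())
    //   }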

    wasmtime_environ::foreach_builtin_component_function!(define);

    fn build_sig(
        isa: &dyn TargetIsa,
        func: &mut ir::Function,
        params: &[ir::Type],
        returns: &[ir::Type],
    ) -> ir::SigRef {
        let mut sig = ir::Signature {
            params: params.iter().map(|ty| AbiParam::new(*ty)).collect(),
            returns: returns.iter().map(|ty| AbiParam::new(*ty)).collect(),
            call_conv: CallConv::triple_default(isa.triple()),
        };

        // Once we're declaring the signature of a host function we must respect
        // the default ABI of the platform which is where argument extension of
        // params/results may come into play.
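        // For example (illustrative): on targets whose calling convention
        // requires narrow integer arguments to be extended to full register
        // width, such as s390x, an `i32` param or return here gets its
        // extension attribute set accordingly.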
        let extension = isa.default_argument_extension();
        for arg in sig.params.iter_mut().chain(sig.returns.iter_mut()) {
            if arg.value_type.is_int() {
                arg.extension = extension;
            }
        }
        func.import_signature(sig)
    }
}