// bytecodealliance/wasmtime: tests/all/memory.rs

use rayon::prelude::*;
use std::sync::atomic::{AtomicU32, Ordering::SeqCst};
use std::time::Duration;
use wasmtime::*;
use wasmtime_test_macros::wasmtime_test;

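// Builds a module importing a memory and exporting one load function per
// combination of access width, constant offset, and load instruction. Export
// names encode the access as "<width> <offset> v<n>"; for example (derived
// from the format strings below, shown here only for illustration):
//
//   (func (export "4 1 v0") (param i32)
//     local.get 0
//     i32.load offset=1
//     drop)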
fn module(engine: &Engine) -> Result<Module> {
    let mut wat = format!("(module\n");
    wat.push_str("(import \"\" \"\" (memory 0))\n");
    for i in 0..=33 {
        let offset = if i == 0 {
            0
        } else if i == 33 {
            !0
        } else {
            1u32 << (i - 1)
        };

        for (width, instr) in [
            (1, &["i32.load8_s"][..]),
            (2, &["i32.load16_s"]),
            (4, &["i32.load" /*, "f32.load"*/]),
            (8, &["i64.load" /*, "f64.load"*/]),
            #[cfg(not(any(target_arch = "s390x", target_arch = "riscv64")))]
            (16, &["v128.load"]),
        ]
        .iter()
        {
            for (j, instr) in instr.iter().enumerate() {
                wat.push_str(&format!(
                    "(func (export \"{width} {offset} v{j}\") (param i32)\n"
                ));
                wat.push_str("local.get 0\n");
                wat.push_str(instr);
                wat.push_str(&format!(" offset={offset}\n"));
                wat.push_str("drop\n)");
            }
        }
    }
    wat.push_str(")");
    Module::new(engine, &wat)
}

struct TestFunc {
    width: u32,
    offset: u32,
    func: TypedFunc<u32, ()>,
}

fn find_funcs(store: &mut Store<()>, instance: &Instance) -> Vec<TestFunc> {
    let list = instance
        .exports(&mut *store)
        .map(|export| {
            let name = export.name();
            let mut parts = name.split_whitespace();
            (
                parts.next().unwrap().parse().unwrap(),
                parts.next().unwrap().parse().unwrap(),
                export.into_func().unwrap(),
            )
        })
        .collect::<Vec<_>>();
    list.into_iter()
        .map(|(width, offset, func)| TestFunc {
            width,
            offset,
            func: func.typed(&store).unwrap(),
        })
        .collect()
}

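// Invokes every exported load function with `addr` and checks the result
// against the current memory size: a trap is expected whenever the accessed
// range `[addr + offset, addr + offset + width)` (computed in u64 to avoid
// wraparound) reaches or passes the end of the memory's data, and success is
// expected otherwise.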
fn test_traps(store: &mut Store<()>, funcs: &[TestFunc], addr: u32, mem: &Memory) {
    let mem_size = mem.data_size(&store) as u64;
    for func in funcs {
        let result = func.func.call(&mut *store, addr);
        let base = u64::from(func.offset) + u64::from(addr);
        let range = base..base + u64::from(func.width);
        if range.start >= mem_size || range.end >= mem_size {
            assert!(
                result.is_err(),
                "access at {}+{}+{} succeeded but should have failed when memory has {} bytes",
                addr,
                func.offset,
                func.width,
                mem_size
            );
        } else {
            assert!(result.is_ok());
        }
    }
}

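// Exercises bounds checks across a matrix of engine configurations (three
// memory reservations, three guard sizes, and guard-before-memory on/off,
// i.e. 18 engines, checked in parallel), probing addresses 0, 65536, and
// u32::MAX against both a bounded and an unbounded memory, before and after
// growing by one page.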
#[wasmtime_test(wasm_features(simd))]
#[cfg_attr(miri, ignore)]
fn offsets_static_dynamic_oh_my(config: &mut Config) -> Result<()> {
    const GB: u64 = 1 << 30;
    const MB: u64 = 1 << 20;

    let mut engines = Vec::new();
    let sizes = if cfg!(target_pointer_width = "32") {
        [0, 10 * MB, 20 * MB]
    } else {
        [0, 1 * GB, 4 * GB]
    };
    for &memory_reservation in sizes.iter() {
        for &guard_size in sizes.iter() {
            for &guard_before_linear_memory in [true, false].iter() {
                config.memory_reservation(memory_reservation);
                config.memory_guard_size(guard_size);
                config.guard_before_linear_memory(guard_before_linear_memory);
                config.cranelift_debug_verifier(true);
                engines.push(Engine::new(&config)?);
            }
        }
    }

    engines.par_iter().try_for_each(|engine| {
        let module = module(&engine)?;

        for (min, max) in [(1, Some(2)), (1, None)].iter() {
            let mut store = Store::new(&engine, ());
            let mem = Memory::new(&mut store, MemoryType::new(*min, *max)).unwrap();
            let instance = Instance::new(&mut store, &module, &[mem.into()]).unwrap();
            let funcs = find_funcs(&mut store, &instance);

            test_traps(&mut store, &funcs, 0, &mem);
            test_traps(&mut store, &funcs, 65536, &mem);
            test_traps(&mut store, &funcs, u32::MAX, &mem);

            mem.grow(&mut store, 1).unwrap();

            test_traps(&mut store, &funcs, 0, &mem);
            test_traps(&mut store, &funcs, 65536, &mem);
            test_traps(&mut store, &funcs, u32::MAX, &mem);
        }
        Ok::<_, wasmtime::Error>(())
    })?;

    Ok(())
}

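// Checks that the configured guard regions are actually unmapped: faulting
// accesses are expected immediately before and after both a static
// (max-bounded) and a dynamic (unbounded) memory, and the property must hold
// after growing each memory as well.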
#[test]
#[cfg_attr(miri, ignore)]
#[cfg_attr(asan, ignore)]
fn guards_present() -> Result<()> {
    const GUARD_SIZE: u64 = 65536;

    let mut config = Config::new();
    config.memory_reservation(1 << 20);
    config.memory_guard_size(GUARD_SIZE);
    config.guard_before_linear_memory(true);
    let engine = Engine::new(&config)?;
    let mut store = Store::new(&engine, ());
    let static_mem = Memory::new(&mut store, MemoryType::new(1, Some(2)))?;
    let dynamic_mem = Memory::new(&mut store, MemoryType::new(1, None))?;

    let assert_guards = |store: &Store<()>| unsafe {
        // guards before
        println!("check pre-static-mem");
        assert_faults(static_mem.data_ptr(&store).offset(-(GUARD_SIZE as isize)));
        println!("check pre-dynamic-mem");
        assert_faults(dynamic_mem.data_ptr(&store).offset(-(GUARD_SIZE as isize)));

        // guards after
        println!("check post-static-mem");
        assert_faults(
            static_mem
                .data_ptr(&store)
                .add(static_mem.data_size(&store)),
        );
        println!("check post-dynamic-mem");
        assert_faults(
            dynamic_mem
                .data_ptr(&store)
                .add(dynamic_mem.data_size(&store)),
        );
    };
    assert_guards(&store);
    // static memory should start with the second page unmapped
    unsafe {
        assert_faults(static_mem.data_ptr(&store).add(65536));
    }
    println!("growing");
    static_mem.grow(&mut store, 1).unwrap();
    dynamic_mem.grow(&mut store, 1).unwrap();
    assert_guards(&store);

    Ok(())
}

#[wasmtime_test]
#[cfg_attr(miri, ignore)]
#[cfg_attr(asan, ignore)]
fn guards_present_pooling(config: &mut Config) -> Result<()> {
    const GUARD_SIZE: u64 = 65536;

    let mut pool = crate::small_pool_config();
    pool.total_memories(2)
        .max_memory_size(10 << 16)
        .memory_protection_keys(Enabled::No);
    config.memory_reservation(1 << 20);
    config.memory_guard_size(GUARD_SIZE);
    config.guard_before_linear_memory(true);
    config.allocation_strategy(InstanceAllocationStrategy::Pooling(pool));
    let engine = Engine::new(&config)?;

    let mut store = Store::new(&engine, ());

    let mem1 = {
        let m = Module::new(&engine, "(module (memory (export \"\") 1 2))")?;
        Instance::new(&mut store, &m, &[])?
            .get_memory(&mut store, "")
            .unwrap()
    };
    let mem2 = {
        let m = Module::new(&engine, "(module (memory (export \"\") 1))")?;
        Instance::new(&mut store, &m, &[])?
            .get_memory(&mut store, "")
            .unwrap()
    };

    unsafe fn assert_guards(store: &Store<()>, mem: &Memory) {
        unsafe {
            // guards before
            println!("check pre-mem");
            assert_faults(mem.data_ptr(&store).offset(-(GUARD_SIZE as isize)));

            // unmapped just after memory
            println!("check mem");
            assert_faults(mem.data_ptr(&store).add(mem.data_size(&store)));

            // guards after memory
            println!("check post-mem");
            assert_faults(mem.data_ptr(&store).add(1 << 20));
        }
    }
    unsafe {
        assert_guards(&store, &mem1);
        assert_guards(&store, &mem2);
        println!("growing");
        mem1.grow(&mut store, 1).unwrap();
        mem2.grow(&mut store, 1).unwrap();
        assert_guards(&store, &mem1);
        assert_guards(&store, &mem2);
    }

    Ok(())
}

#[wasmtime_test]
#[cfg_attr(miri, ignore)]
#[cfg_attr(asan, ignore)]
#[cfg(target_arch = "x86_64")] // only platform with mpk
fn guards_present_pooling_mpk(config: &mut Config) -> Result<()> {
    if !wasmtime::PoolingAllocationConfig::are_memory_protection_keys_available() {
        println!("skipping `guards_present_pooling_mpk` test; mpk is not supported");
        return Ok(());
    }

    const GUARD_SIZE: u64 = 65536;
    let mut pool = crate::small_pool_config();
    pool.total_memories(4)
        .max_memory_size(10 << 16)
        .memory_protection_keys(Enabled::Yes)
        .max_memory_protection_keys(2);
    config.memory_reservation(1 << 20);
    config.memory_guard_size(GUARD_SIZE);
    config.guard_before_linear_memory(true);
    config.allocation_strategy(InstanceAllocationStrategy::Pooling(pool));
    let engine = Engine::new(&config)?;

    let mut store = Store::new(&engine, ());

    let mem1 = {
        let m = Module::new(&engine, "(module (memory (export \"\") 1 2))")?;
        Instance::new(&mut store, &m, &[])?
            .get_memory(&mut store, "")
            .unwrap()
    };
    let mem2 = {
        let m = Module::new(&engine, "(module (memory (export \"\") 1))")?;
        Instance::new(&mut store, &m, &[])?
            .get_memory(&mut store, "")
            .unwrap()
    };

    unsafe fn assert_guards(store: &Store<()>, mem: &Memory) {
        unsafe {
            // guards before
            println!("check pre-mem");
            assert_faults(mem.data_ptr(&store).offset(-(GUARD_SIZE as isize)));

            // unmapped just after memory
            println!("check mem");
            assert_faults(mem.data_ptr(&store).add(mem.data_size(&store)));

            // guards after memory
            println!("check post-mem");
            assert_faults(mem.data_ptr(&store).add(1 << 20));
        }
    }
    unsafe {
        assert_guards(&store, &mem1);
        assert_guards(&store, &mem2);
        println!("growing");
        mem1.grow(&mut store, 1).unwrap();
        mem2.grow(&mut store, 1).unwrap();
        assert_guards(&store, &mem1);
        assert_guards(&store, &mem2);
    }

    Ok(())
}

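// Asserts that accessing `ptr` faults. On Unix this forks a child process
// which writes to the pointer; the parent then asserts that the child was
// killed by a signal. On Windows, `VirtualQuery` is used to assert that the
// page is mapped with `PAGE_NOACCESS`.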
unsafe fn assert_faults(ptr: *mut u8) {
    use std::io::Error;
    #[cfg(unix)]
    unsafe {
        // There's probably a faster way to do this, but, uh, when in Rome?
        match libc::fork() {
            0 => {
                *ptr = 4;

                std::process::exit(0);
            }
            -1 => panic!("failed to fork: {}", Error::last_os_error()),
            n => {
                let mut status = 0;
                assert!(
                    libc::waitpid(n, &mut status, 0) == n,
                    "failed to wait: {}",
                    Error::last_os_error()
                );
                assert!(libc::WIFSIGNALED(status));
            }
        }
    }
    #[cfg(windows)]
    unsafe {
        use windows_sys::Win32::System::Memory::*;

        let mut info = std::mem::MaybeUninit::uninit();
        let r = VirtualQuery(
            ptr as *const _,
            info.as_mut_ptr(),
            std::mem::size_of_val(&info),
        );
        if r == 0 {
            panic!("failed to VirtualQuery: {}", Error::last_os_error());
        }
        let info = info.assume_init();
        assert_eq!(info.AllocationProtect, PAGE_NOACCESS);
    }
}

// Disable this test on s390x because the large allocation may actually
// succeed; the whole 64-bit address space is available on that platform.
#[test]
#[cfg(not(target_arch = "s390x"))]
fn massive_64_bit_still_limited() -> Result<()> {
    // Creating a 64-bit memory which exceeds the limits of the address space
    // should still send a request to the `ResourceLimiter` to ensure that it
    // gets at least some chance to see that OOM was requested.
    let mut config = Config::new();
    config.wasm_memory64(true);
    let engine = Engine::new(&config)?;

    let mut store = Store::new(&engine, MyLimiter { hit: false });
    store.limiter(|x| x);
    let ty = MemoryType::new64(1 << 46, None);
    assert!(Memory::new(&mut store, ty).is_err());
    assert!(store.data().hit);

    return Ok(());

    struct MyLimiter {
        hit: bool,
    }

    impl ResourceLimiter for MyLimiter {
        fn memory_growing(
            &mut self,
            _current: usize,
            _request: usize,
            _max: Option<usize>,
        ) -> Result<bool> {
            self.hit = true;
            Ok(true)
        }
        fn table_growing(
            &mut self,
            _current: usize,
            _request: usize,
            _max: Option<usize>,
        ) -> Result<bool> {
            unreachable!()
        }
    }
}

#[wasmtime_test]
#[cfg_attr(miri, ignore)]
fn tiny_static_heap(config: &mut Config) -> Result<()> {
    // The size of the memory in the module below exactly matches the static
    // memory size limit in the configuration. This is intended specifically
    // to test that a load of every valid address of the memory passes
    // bounds checks in Cranelift, to help weed out any off-by-one bugs.
    config.memory_reservation(1 << 16);
    let engine = Engine::new(&config)?;
    let mut store = Store::new(&engine, ());

    let module = Module::new(
        &engine,
        r#"
            (module
                (memory 1 1)
                (func (export "run")
                    (local $i i32)

                    (loop
                        (if (i32.eq (local.get $i) (i32.const 65536))
                            (then (return)))
                        (drop (i32.load8_u (local.get $i)))
                        (local.set $i (i32.add (local.get $i) (i32.const 1)))
                        br 0
                    )
                )
            )
        "#,
    )?;

    let i = Instance::new(&mut store, &module, &[])?;
    let f = i.get_typed_func::<(), ()>(&mut store, "run")?;
    f.call(&mut store, ())?;
    Ok(())
}

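// With `memory_may_move(false)` and a 5-page reservation, an unbounded memory
// can grow up to the reservation but any growth past it must fail.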
#[test]
fn static_forced_max() -> Result<()> {
    let mut config = Config::new();
    config.memory_reservation(5 << 16);
    config.memory_may_move(false);
    let engine = Engine::new(&config)?;
    let mut store = Store::new(&engine, ());

    let mem = Memory::new(&mut store, MemoryType::new(0, None))?;
    mem.grow(&mut store, 5).unwrap();
    assert!(mem.grow(&mut store, 1).is_err());
    Ok(())
}

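// With `memory_reservation_for_growth` set to five extra wasm pages, growth
// should be satisfied in place (same base pointer) until that slack is used
// up; the next grow then relocates the memory, after which the same pattern
// repeats.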
#[wasmtime_test]
fn dynamic_extra_growth_unchanged_pointer(config: &mut Config) -> Result<()> {
    const EXTRA_PAGES: u64 = 5;
    config.memory_reservation(0);
    // 5 wasm pages extra
    config.memory_reservation_for_growth(EXTRA_PAGES * (1 << 16));
    let engine = Engine::new(&config)?;
    let mut store = Store::new(&engine, ());

    fn assert_behaves_well(store: &mut Store<()>, mem: &Memory) -> Result<()> {
        let ptr = mem.data_ptr(&store);

        // Each growth here should retain the same base pointer: the memory
        // shouldn't get moved.
        for _ in 0..EXTRA_PAGES {
            mem.grow(&mut *store, 1)?;
            assert_eq!(ptr, mem.data_ptr(&store));
        }

        // Growth afterwards, though, is forced to move the pointer.
        mem.grow(&mut *store, 1)?;
        let new_ptr = mem.data_ptr(&store);
        assert_ne!(ptr, new_ptr);

        for _ in 0..EXTRA_PAGES - 1 {
            mem.grow(&mut *store, 1)?;
            assert_eq!(new_ptr, mem.data_ptr(&store));
        }
        Ok(())
    }

    let mem = Memory::new(&mut store, MemoryType::new(10, None))?;
    assert_behaves_well(&mut store, &mem)?;

    let module = Module::new(&engine, r#"(module (memory (export "mem") 10))"#)?;
    let instance = Instance::new(&mut store, &module, &[])?;
    let mem = instance.get_memory(&mut store, "mem").unwrap();
    assert_behaves_well(&mut store, &mem)?;

    let module = Module::new(
        &engine,
        r#"
            (module
                (memory (export "mem") 10)
                (data (i32.const 0) ""))
        "#,
    )?;
    let instance = Instance::new(&mut store, &module, &[])?;
    let mem = instance.get_memory(&mut store, "mem").unwrap();
    assert_behaves_well(&mut store, &mem)?;

    Ok(())
}

// This test exercises trying to create memories of the maximum 64-bit memory
// size of `1 << 48` pages (i.e. 2^64 bytes with 64 KiB pages). This should
// always fail, but in the process of determining that failure we shouldn't
// hit any overflows or similar issues (checked via debug-mode tests).
#[wasmtime_test]
fn memory64_maximum_minimum(config: &mut Config) -> Result<()> {
    config.wasm_memory64(true);
    let engine = Engine::new(&config)?;
    let mut store = Store::new(&engine, ());

    assert!(
        MemoryTypeBuilder::default()
            .memory64(true)
            .min(1 << 48)
            .build()
            .is_err()
    );

    let module = Module::new(
        &engine,
        format!(r#"(module (import "" "" (memory i64 {})))"#, 1u64 << 48),
    )?;
    let mem_ty = module
        .imports()
        .next()
        .unwrap()
        .ty()
        .unwrap_memory()
        .clone();
    assert!(Memory::new(&mut store, mem_ty).is_err());

    let module = Module::new(
        &engine,
        &format!(
            r#"
                (module
                    (memory i64 {})
                )
            "#,
            1u64 << 48,
        ),
    )?;
    assert!(Instance::new(&mut store, &module, &[]).is_err());

    let module = Module::new(
        &engine,
        &format!(
            r#"
                (module
                    (memory i64 {})
                    (data (i64.const 0) "")
                )
            "#,
            1u64 << 48,
        ),
    )?;
    assert!(Instance::new(&mut store, &module, &[]).is_err());

    Ok(())
}

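// Basic `SharedMemory` sanity checks: only shared, maximum-bounded memory
// types may back a `SharedMemory`, and the host-side atomic operations report
// the expected misaligned/out-of-bounds traps, mismatches, and timeouts.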
#[test]
fn shared_memory_basics() -> Result<()> {
    let engine = Engine::default();
    assert!(SharedMemory::new(&engine, MemoryType::new(1, None)).is_err());
    assert!(SharedMemory::new(&engine, MemoryType::new(1, Some(1))).is_err());
    assert!(SharedMemory::new(&engine, MemoryType::new64(1, None)).is_err());
    assert!(SharedMemory::new(&engine, MemoryType::new64(1, Some(1))).is_err());
    assert!(
        MemoryTypeBuilder::default()
            .shared(true)
            .min(1)
            .max(Some(0))
            .build()
            .is_err()
    );

    let memory = SharedMemory::new(&engine, MemoryType::shared(1, 1))?;
    assert!(memory.ty().is_shared());
    assert_eq!(memory.ty().minimum(), 1);
    assert_eq!(memory.ty().maximum(), Some(1));
    assert_eq!(memory.size(), 1);
    assert_eq!(memory.data_size(), 65536);
    assert_eq!(memory.data().len(), 65536);
    assert!(memory.grow(1).is_err());

    // misaligned
    assert_eq!(memory.atomic_notify(1, 100), Err(Trap::HeapMisaligned));
    assert_eq!(
        memory.atomic_wait32(1, 100, None),
        Err(Trap::HeapMisaligned)
    );
    assert_eq!(
        memory.atomic_wait64(1, 100, None),
        Err(Trap::HeapMisaligned)
    );

    // oob
    assert_eq!(
        memory.atomic_notify(1 << 20, 100),
        Err(Trap::MemoryOutOfBounds)
    );
    assert_eq!(
        memory.atomic_wait32(1 << 20, 100, None),
        Err(Trap::MemoryOutOfBounds)
    );
    assert_eq!(
        memory.atomic_wait64(1 << 20, 100, None),
        Err(Trap::MemoryOutOfBounds)
    );

    // ok
    assert_eq!(memory.atomic_notify(8, 100), Ok(0));
    assert_eq!(memory.atomic_wait32(8, 1, None), Ok(WaitResult::Mismatch));
    assert_eq!(memory.atomic_wait64(8, 1, None), Ok(WaitResult::Mismatch));

    // timeout
    let near_future = Duration::new(0, 100);
    assert_eq!(
        memory.atomic_wait32(8, 0, Some(near_future)),
        Ok(WaitResult::TimedOut)
    );
    assert_eq!(
        memory.atomic_wait64(8, 0, Some(near_future)),
        Ok(WaitResult::TimedOut)
    );

    Ok(())
}

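// Hammers a `SharedMemory` from multiple host threads: a simple lock (a flag
// at offset 4, with `atomic_wait32`/`atomic_notify` on address 0 used for
// backoff and wakeup) guards a shared counter at offset 0, and the final
// count must equal THREADS * COUNT.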
#[test]
#[cfg_attr(miri, ignore)]
fn shared_memory_wait_notify() -> Result<()> {
    const THREADS: usize = 8;
    const COUNT: usize = 100_000;

    let engine = Engine::default();
    let memory = SharedMemory::new(&engine, MemoryType::shared(1, 1))?;
    let data = unsafe { AtomicU32::from_ptr(memory.data().as_ptr().cast_mut().cast()) };
    let locked = unsafe { AtomicU32::from_ptr(memory.data().as_ptr().add(4).cast_mut().cast()) };

    // Note that `SeqCst` is used here to avoid having to reason carefully
    // about weaker orderings; it also more closely mirrors what's happening in
    // wasm.
    let lock = || {
        while locked.swap(1, SeqCst) == 1 {
            memory.atomic_wait32(0, 1, None).unwrap();
        }
    };
    let unlock = || {
        locked.store(0, SeqCst);
        memory.atomic_notify(0, 1).unwrap();
    };

    std::thread::scope(|s| {
        for _ in 0..THREADS {
            s.spawn(|| {
                for _ in 0..COUNT {
                    lock();
                    let next = data.load(SeqCst) + 1;
                    data.store(next, SeqCst);
                    unlock();
                }
            });
        }
    });

    assert_eq!(data.load(SeqCst), (THREADS * COUNT) as u32);

    Ok(())
}

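// A data segment whose i32 offset has the sign bit set (0x8000_0000) is
// negative when interpreted as signed but a valid unsigned address;
// instantiating a memory large enough to contain it (65536 pages here) must
// succeed.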
#[wasmtime_test]
#[cfg_attr(miri, ignore)]
#[cfg(target_pointer_width = "64")] // requires large VM reservation
fn init_with_negative_segment(cfg: &mut Config) -> Result<()> {
    let engine = Engine::new(cfg)?;
    let module = Module::new(
        &engine,
        r#"
            (module
                (memory 65536)
                (data (i32.const 0x8000_0000) "x")
            )
        "#,
    )?;
    let mut store = Store::new(&engine, ());
    Instance::new(&mut store, &module, &[])?;
    Ok(())
}

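// A `memory_reservation` that isn't a multiple of the host page size should
// still be usable for creating memories.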
#[test]
fn non_page_aligned_static_memory() -> Result<()> {
    let mut config = Config::new();
    config.memory_reservation(100_000);
    config.memory_may_move(false);
    let engine = Engine::new(&config)?;
    let ty = MemoryType::new(1, None);
    Memory::new(&mut Store::new(&engine, ()), ty)?;
    Ok(())
}

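// With the custom-page-sizes proposal a page size of 2^0 = 1 byte can be
// requested via `MemoryTypeBuilder::page_size_log2`, so the minimum, maximum,
// and size of the memory below are all counted in bytes rather than 64 KiB
// pages.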
#[test]
fn new_memory_with_custom_page_size() -> Result<()> {
    let engine = Engine::default();
    let mut store = Store::new(&engine, ());

    let ty = MemoryTypeBuilder::default()
        .page_size_log2(0)
        .min(4096)
        .max(Some(9000))
        .build()?;

    let mem = Memory::new(&mut store, ty)?;
    assert_eq!(mem.data_size(&store), 4096);
    assert_eq!(mem.size(&store), 4096);

    mem.grow(&mut store, 9000 - 4096)?;
    assert_eq!(mem.data_size(&store), 9000);
    assert_eq!(mem.size(&store), 9000);

    assert!(mem.grow(&mut store, 1).is_err());
    assert_eq!(mem.data_size(&store), 9000);
    assert_eq!(mem.size(&store), 9000);

    Ok(())
}

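// A memory declared in wasm with `(pagesize 1)` should be reflected through
// `Memory::ty` with a page size of 1 (page_size_log2 of 0) and its limits
// reported verbatim.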
#[wasmtime_test]
#[cfg_attr(miri, ignore)]
fn get_memory_type_with_custom_page_size_from_wasm(config: &mut Config) -> Result<()> {
    config.wasm_custom_page_sizes(true);
    let engine = Engine::new(&config)?;
    let mut store = Store::new(&engine, ());

    let module = Module::new(
        &engine,
        r#"
            (module
                (memory (export "memory") 1 0xffffffff (pagesize 1))
            )
        "#,
    )?;

    let instance = Instance::new(&mut store, &module, &[])?;
    let memory = instance.get_memory(&mut store, "memory").unwrap();
    let mem_ty = memory.ty(&store);

    assert_eq!(mem_ty.minimum(), 1);
    assert_eq!(mem_ty.maximum(), Some(0xffffffff));
    assert_eq!(mem_ty.page_size(), 1);
    assert_eq!(mem_ty.page_size_log2(), 0);

    Ok(())
}

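// All memory-related knobs set to zero (no guards, no reservation, no growth
// slack) should still yield a working engine and a zero-sized memory.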
#[wasmtime_test]
fn configure_zero(config: &mut Config) -> Result<()> {
    config.guard_before_linear_memory(false);
    config.memory_guard_size(0);
    config.memory_reservation(0);
    config.memory_reservation_for_growth(0);
    let engine = Engine::new(&config)?;
    let mut store = Store::new(&engine, ());

    let ty = MemoryType::new(0, None);
    let memory = Memory::new(&mut store, ty)?;
    assert_eq!(memory.data_size(&store), 0);

    Ok(())
}