Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bytecodealliance
GitHub Repository: bytecodealliance/wasmtime
Path: blob/main/crates/wasi-nn/examples/classification-example/src/main.rs
1692 views
1
use std::convert::TryInto;
2
use std::fs;
3
use wasi_nn;
4
5
pub fn main() {
6
let xml = fs::read_to_string("fixture/model.xml").unwrap();
7
println!("Read graph XML, first 50 characters: {}", &xml[..50]);
8
9
let weights = fs::read("fixture/model.bin").unwrap();
10
println!("Read graph weights, size in bytes: {}", weights.len());
11
12
let graph = unsafe {
13
wasi_nn::load(
14
&[&xml.into_bytes(), &weights],
15
wasi_nn::GRAPH_ENCODING_OPENVINO,
16
wasi_nn::EXECUTION_TARGET_CPU,
17
)
18
.unwrap()
19
};
20
println!("Loaded graph into wasi-nn with ID: {}", graph);
21
22
let context = unsafe { wasi_nn::init_execution_context(graph).unwrap() };
23
println!("Created wasi-nn execution context with ID: {}", context);
24
25
// Load a tensor that precisely matches the graph input tensor (see
26
// `fixture/frozen_inference_graph.xml`).
27
let tensor_data = fs::read("fixture/tensor.bgr").unwrap();
28
println!("Read input tensor, size in bytes: {}", tensor_data.len());
29
let tensor = wasi_nn::Tensor {
30
dimensions: &[1, 3, 224, 224],
31
r#type: wasi_nn::TENSOR_TYPE_F32,
32
data: &tensor_data,
33
};
34
unsafe {
35
wasi_nn::set_input(context, 0, tensor).unwrap();
36
}
37
38
// Execute the inference.
39
unsafe {
40
wasi_nn::compute(context).unwrap();
41
}
42
println!("Executed graph inference");
43
44
// Retrieve the output.
45
let mut output_buffer = vec![0f32; 1001];
46
unsafe {
47
wasi_nn::get_output(
48
context,
49
0,
50
&mut output_buffer[..] as *mut [f32] as *mut u8,
51
(output_buffer.len() * 4).try_into().unwrap(),
52
)
53
.unwrap();
54
}
55
println!(
56
"Found results, sorted top 5: {:?}",
57
&sort_results(&output_buffer)[..5]
58
)
59
}
60
61
// Sort the buffer of probabilities. The graph places the match probability for each class at the
// index for that class (e.g. the probability of class 42 is placed at buffer[42]). Here we convert
// to a wrapping InferenceResult and sort the results. It is unclear why the MobileNet output
// indices are "off by one" but the `.skip(1)` below seems necessary to get results that make sense
// (e.g. 763 = "revolver" vs 762 = "restaurant")
fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
    let mut results: Vec<InferenceResult> = buffer
        .iter()
        .skip(1)
        .enumerate()
        .map(|(c, p)| InferenceResult(c, *p))
        .collect();
    // `f32::total_cmp` provides a total order, so sorting cannot panic even if
    // the model emits a NaN probability (the previous
    // `partial_cmp(..).unwrap()` would). Reversed operands sort descending.
    results.sort_by(|a, b| b.1.total_cmp(&a.1));
    results
}

// A wrapper for class ID and match probabilities.
#[derive(Debug, PartialEq)]
struct InferenceResult(usize, f32);
80
81