Enable unit test for wasi-nn WinML backend.
This test was disabled because the GitHub Actions Windows Server image
doesn't include the desktop experience. However, a standalone WinML
binary can be downloaded from the ONNX Runtime project.

The wasi-nn WinML backend and the ONNX Runtime backend now share the
same test code, since they accept the same input and are expected to
produce the same result.

This change also makes the wasi-nn WinML backend a default feature.

prtest:full
jianjunz committed Apr 23, 2024
1 parent bfc9f31 commit ed75bca
Showing 10 changed files with 173 additions and 103 deletions.
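Both backends are now exercised by the same test body: load an ONNX model, feed it a preprocessed 1x3x224x224 F32 tensor, and check the top classification result. A minimal sketch of that shared flow, assuming the wasi-nn Rust bindings' high-level GraphBuilder API (the function name and model path are illustrative, not the exact code in the test binary):

use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding, TensorType};

// Run one classification pass. `input_bytes` is a 1x3x224x224 F32 tensor
// serialized as bytes (see the preprocessing code in the diff below).
fn classify(model_path: &str, input_bytes: &[u8]) -> anyhow::Result<Vec<f32>> {
    let model = std::fs::read(model_path)?;
    // Both backends are expected to accept the same ONNX model.
    let graph = GraphBuilder::new(GraphEncoding::Onnx, ExecutionTarget::CPU)
        .build_from_bytes([&model])?;
    let mut context = graph.init_execution_context()?;
    context.set_input(0, TensorType::F32, &[1, 3, 224, 224], input_bytes)?;
    context.compute()?;
    // Read back the 1000 class scores.
    let mut output = vec![0f32; 1000];
    context.get_output(0, &mut output[..])?;
    Ok(output)
}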
9 changes: 9 additions & 0 deletions .github/workflows/main.yml
@@ -480,6 +480,15 @@ jobs:
- uses: abrown/install-openvino-action@v8
if: runner.arch == 'X64'

# Install WinML for testing the wasi-nn WinML backend. WinML is only available
# on Windows clients and Windows Server with the desktop experience enabled.
# The GitHub Actions Windows Server image doesn't have the desktop experience
# enabled, so we download the standalone library from the ONNX Runtime project.
- uses: nuget/setup-nuget@v2
if: matrix.os == 'windows-latest'
- run: nuget install Microsoft.AI.MachineLearning
if: matrix.os == 'windows-latest'

# Fix an ICE for now in gcc when compiling zstd with debuginfo (??)
- run: echo CFLAGS=-g0 >> $GITHUB_ENV
if: matrix.target == 'x86_64-pc-windows-gnu'
80 changes: 80 additions & 0 deletions Cargo.lock

Diff not shown for this generated file.

3 changes: 3 additions & 0 deletions crates/test-programs/Cargo.toml
@@ -20,3 +20,6 @@ futures = { workspace = true, default-features = false, features = ['alloc'] }
url = { workspace = true }
sha2 = "0.10.2"
base64 = "0.21.0"
# image and ndarray are used by nn_image_classification_onnx for image preprocessing.
image = { version = "0.24.6", default-features = false, features = ["jpeg"] }
ndarray = "0.15.3"
67 changes: 62 additions & 5 deletions crates/test-programs/src/bin/nn_image_classification_onnx.rs
@@ -1,4 +1,6 @@
use anyhow::Result;
use image::{DynamicImage, RgbImage};
use ndarray::Array;
use std::fs;
use wasi_nn::*;

@@ -17,8 +19,16 @@ pub fn main() -> Result<()> {

// Prepare the WASI-NN tensor - tensor data is always a byte vector.
// Load an image and preprocess it into a tensor that matches the graph input.
let data = fs::read("fixture/tensor.bgr").unwrap();
let data = fs::read("fixture/dog.jpg").unwrap();
println!("[ONNX] Read input tensor, size in bytes: {}", data.len());
let data = preprocess(
data.as_slice(),
224,
224,
&[0.485, 0.456, 0.406],
&[0.229, 0.224, 0.225],
);

context.set_input(0, wasi_nn::TensorType::F32, &[1, 3, 224, 224], &data)?;
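// The dimensions are NCHW: batch size 1, 3 channels, 224x224 pixels.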

// Execute the inferencing
@@ -28,10 +38,22 @@ pub fn main() -> Result<()> {
// Retrieve the output.
let mut output_buffer = vec![0f32; 1000];
context.get_output(0, &mut output_buffer[..])?;
println!(
"[ONNX] Found results, sorted top 5: {:?}",
&sort_results(&output_buffer)[..5]
);

// Post-processing.
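// A softmax turns the raw scores into probabilities:
//   p_i = exp(x_i) / sum_j exp(x_j)
// so the top-5 entries printed below can be read as probabilities.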
let output_shape = [1, 1000, 1, 1];
let output_tensor = Array::from_shape_vec(output_shape, output_buffer).unwrap();

let exp_output = output_tensor.mapv(|x| x.exp());
let sum_exp_output = exp_output.sum_axis(ndarray::Axis(1));
let softmax_output = exp_output / &sum_exp_output;

let sorted = sort_results(&softmax_output.into_raw_vec());

println!("[ONNX] Found results, sorted top 5: {:?}", &sorted[..5]);

// Index 207 is curly-coated retriever.
// https://github.com/onnx/models/blob/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/synset.txt#L207
assert_eq!(sorted[0].0, 207);

Ok(())
}
@@ -50,6 +72,41 @@ fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
results
}

// Decode the given image bytes, resize the image to height x width, normalize each
// channel with the given mean and standard deviation, and convert the pixel
// precision to FP32. The resulting planar (CHW) FP32 byte vector is then returned.
fn preprocess(image: &[u8], height: u32, width: u32, mean: &[f32], std: &[f32]) -> Vec<u8> {
let dyn_img: DynamicImage = image::load_from_memory(image).unwrap().resize_exact(
width,
height,
image::imageops::Triangle,
);
let rgb_img: RgbImage = dyn_img.to_rgb8();

// Get an array of the pixel values
let raw_u8_arr: &[u8] = &rgb_img.as_raw()[..];

// Create an array to hold the f32 value of those pixels
let bytes_required = raw_u8_arr.len() * 4;
let mut u8_f32_arr: Vec<u8> = vec![0; bytes_required];

// Read the number as a f32 and break it into u8 bytes
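// The source pixels are interleaved HWC (R, G, B, R, G, B, ...); the destination
// is planar CHW, so channel `i % 3` of pixel `i / 3` lands in the plane starting
// at byte offset `(raw_u8_arr.len() / 3) * 4 * channel`.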
for i in 0..raw_u8_arr.len() {
let u8_f32: f32 = raw_u8_arr[i] as f32;
let rgb_iter = i % 3;

// Normalize the pixel
let norm_u8_f32: f32 = (u8_f32 / 255.0 - mean[rgb_iter]) / std[rgb_iter];

// Convert it to u8 bytes and write it with new shape
let u8_bytes = norm_u8_f32.to_ne_bytes();
for j in 0..4 {
u8_f32_arr[(raw_u8_arr.len() * 4 * rgb_iter / 3) + (i / 3) * 4 + j] = u8_bytes[j];
}
}

return u8_f32_arr;
}

// A wrapper for class ID and match probabilities.
#[derive(Debug, PartialEq)]
struct InferenceResult(usize, f32);
58 changes: 0 additions & 58 deletions crates/test-programs/src/bin/nn_image_classification_winml.rs

This file was deleted.

2 changes: 1 addition & 1 deletion crates/wasi-nn/Cargo.toml
@@ -50,7 +50,7 @@ wasi-common = { workspace = true, features = ["sync"] }
wasmtime = { workspace = true, features = ["cranelift"] }

[features]
default = ["openvino"]
default = ["openvino", "winml"]
# openvino is available on all platforms; it requires OpenVINO to be installed.
openvino = ["dep:openvino"]
# onnx is available on all platforms.
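Since winml is now a default feature, the backend is compiled in whenever the crate is built with default features, though it is only functional on Windows. A hypothetical sketch of how such a gate is typically expressed; the module layout shown is illustrative, not necessarily this crate's actual structure:

// Compile the WinML backend only when the feature is enabled and the
// target is Windows; other backends keep their own feature gates.
#[cfg(all(feature = "winml", target_os = "windows"))]
pub mod winml;

#[cfg(feature = "openvino")]
pub mod openvino;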
