diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index f75d7812859e..3ba1b221ea80 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -678,6 +678,9 @@ jobs:
       matrix:
         feature: ["openvino", "onnx"]
         os: ["ubuntu-latest", "windows-latest"]
+        include:
+          - os: windows-latest
+            feature: winml
     name: Test wasi-nn (${{ matrix.feature }}, ${{ matrix.os }})
     runs-on: ${{ matrix.os }}
     needs: determine
@@ -692,6 +695,15 @@
       - uses: abrown/install-openvino-action@v8
         if: runner.arch == 'X64'

+      # Install WinML for testing the wasi-nn WinML backend. WinML is only
+      # available on Windows clients and on Windows Server with the desktop
+      # experience enabled. The GitHub Actions Windows Server image does not
+      # enable it, so we download the standalone library from the ONNX Runtime project.
+      - uses: nuget/setup-nuget@v2
+        if: (matrix.os == 'windows-latest') && (matrix.feature == 'winml')
+      - run: nuget install Microsoft.AI.MachineLearning
+        if: (matrix.os == 'windows-latest') && (matrix.feature == 'winml')
+
       # Install Rust targets.
       - run: rustup target add wasm32-wasi
diff --git a/crates/test-programs/src/bin/nn_image_classification_onnx.rs b/crates/test-programs/src/bin/nn_image_classification_onnx.rs
index 30d20330f39f..df8f961c7516 100644
--- a/crates/test-programs/src/bin/nn_image_classification_onnx.rs
+++ b/crates/test-programs/src/bin/nn_image_classification_onnx.rs
@@ -8,10 +8,13 @@ pub fn main() -> Result<()> {
         .expect("the model file to be mapped to the fixture directory");
     let graph = GraphBuilder::new(GraphEncoding::Onnx, ExecutionTarget::CPU)
         .build_from_bytes([&model])?;
-    let tensor = fs::read("fixture/tensor.bgr")
+    let tensor = fs::read("fixture/000000062808.rgb")
         .expect("the tensor file to be mapped to the fixture directory");
     let results = classify(graph, tensor)?;
     let top_five = &sort_results(&results)[..5];
+    // 963 is "meat loaf, meatloaf":
+    // https://github.com/onnx/models/blob/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/synset.txt#L963
+    assert_eq!(top_five[0].class_id(), 963);
     println!("found results, sorted top 5: {:?}", top_five);
     Ok(())
 }
diff --git a/crates/test-programs/src/nn.rs b/crates/test-programs/src/nn.rs
index cf25e4b8979e..b7cc0e562230 100644
--- a/crates/test-programs/src/nn.rs
+++ b/crates/test-programs/src/nn.rs
@@ -42,12 +42,15 @@ pub fn classify(graph: Graph, tensor: Vec<u8>) -> Result<Vec<f32>> {
 /// placing the match probability for each class at the index for that class
 /// (the probability of class `N` is stored at `probabilities[N]`).
 pub fn sort_results(probabilities: &[f32]) -> Vec<InferenceResult> {
+    let probabilities_iter = probabilities.iter();
+
     // It is unclear why the MobileNet output indices are "off by one" but the
     // `.skip(1)` below seems necessary to get results that make sense (e.g. 763
     // = "revolver" vs 762 = "restaurant").
-    let mut results: Vec<InferenceResult> = probabilities
-        .iter()
-        .skip(1)
+    #[cfg(feature = "openvino")]
+    let probabilities_iter = probabilities_iter.skip(1);
+
+    let mut results: Vec<InferenceResult> = probabilities_iter
        .enumerate()
        .map(|(c, p)| InferenceResult(c, *p))
        .collect();
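Worth noting for the `sort_results` hunk: `Iterator::skip` takes the iterator by value and returns a new iterator, so the cfg-gated skip must re-bind the result under the `#[cfg]` attribute rather than discard it. A minimal standalone sketch of this shadowing pattern (a hypothetical function, not code from this change):

```rust
/// Sketch of the cfg-gated iterator shadowing used in `sort_results`:
/// `skip(1)` compensates for the backend-specific "off by one" only when
/// the `openvino` feature is enabled.
fn indexed_probabilities(probabilities: &[f32]) -> Vec<(usize, f32)> {
    let iter = probabilities.iter();

    // `skip` consumes `iter`; re-binding keeps the skipped iterator in scope.
    #[cfg(feature = "openvino")]
    let iter = iter.skip(1);

    iter.enumerate().map(|(i, p)| (i, *p)).collect()
}
```

Because each `#[cfg]` branch produces its own binding, the code after the shadowing compiles against either `Iter<'_, f32>` or `Skip<Iter<'_, f32>>` without boxing.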
diff --git a/crates/wasi-nn/Cargo.toml b/crates/wasi-nn/Cargo.toml
index 1b8dc95d99d6..43dba88519c8 100644
--- a/crates/wasi-nn/Cargo.toml
+++ b/crates/wasi-nn/Cargo.toml
@@ -50,7 +50,7 @@ wasi-common = { workspace = true, features = ["sync"] }
 wasmtime = { workspace = true, features = ["cranelift"] }

 [features]
-default = ["openvino"]
+default = ["openvino", "winml"]
 # openvino is available on all platforms, it requires openvino installed.
 openvino = ["dep:openvino"]
 # onnx is available on all platforms.
diff --git a/crates/wasi-nn/src/testing.rs b/crates/wasi-nn/src/testing.rs
index 51b543e15a95..da0360898da5 100644
--- a/crates/wasi-nn/src/testing.rs
+++ b/crates/wasi-nn/src/testing.rs
@@ -8,7 +8,12 @@
 #[allow(unused_imports)]
 use anyhow::{anyhow, Context, Result};
-use std::{env, fs, path::Path, path::PathBuf, process::Command, sync::Mutex};
+use std::{
+    env, fs,
+    path::{Path, PathBuf},
+    process::Command,
+    sync::Mutex,
+};

 #[cfg(all(feature = "winml", target_arch = "x86_64", target_os = "windows"))]
 use windows::AI::MachineLearning::{LearningModelDevice, LearningModelDeviceKind};
@@ -50,13 +55,12 @@ pub fn check() -> Result<()> {
     #[cfg(feature = "openvino")]
     check_openvino_artifacts_are_available()?;

-    #[cfg(feature = "onnx")]
+    #[cfg(any(feature = "onnx", all(feature = "winml", target_os = "windows")))]
     check_onnx_artifacts_are_available()?;

-    #[cfg(all(feature = "winml", target_arch = "x86_64", target_os = "windows"))]
+    #[cfg(all(feature = "winml", target_os = "windows"))]
     {
         check_winml_is_available()?;
-        check_winml_artifacts_are_available()?;
     }
     Ok(())
 }
@@ -108,7 +112,7 @@ fn check_openvino_artifacts_are_available() -> Result<()> {
     Ok(())
 }

-#[cfg(all(feature = "winml", target_arch = "x86_64", target_os = "windows"))]
+#[cfg(all(feature = "winml", target_os = "windows"))]
 fn check_winml_is_available() -> Result<()> {
     match std::panic::catch_unwind(|| {
         println!(
@@ -121,27 +125,19 @@ fn check_winml_is_available() -> Result<()> {
     }
 }

-#[cfg(feature = "onnx")]
+#[cfg(any(feature = "onnx", all(feature = "winml", target_os = "windows")))]
 fn check_onnx_artifacts_are_available() -> Result<()> {
     let _exclusively_retrieve_artifacts = ARTIFACTS.lock().unwrap();
-    const OPENVINO_BASE_URL: &str =
-        "https://github.com/intel/openvino-rs/raw/main/crates/openvino/tests/fixtures/mobilenet";
     const ONNX_BASE_URL: &str =
-        "https://github.com/onnx/models/raw/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/mobilenet/model/mobilenetv2-7.onnx?download=";
+        "https://github.com/onnx/models/raw/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/mobilenet/model/mobilenetv2-10.onnx?download=";
     let artifacts_dir = artifacts_dir();
     if !artifacts_dir.is_dir() {
         fs::create_dir(&artifacts_dir)?;
     }
-    for (from, to) in [
-        (
-            [OPENVINO_BASE_URL, "tensor-1x224x224x3-f32.bgr"].join("/"),
-            "tensor.bgr",
-        ),
-        (ONNX_BASE_URL.to_string(), "model.onnx"),
-    ] {
+    for (from, to) in [(ONNX_BASE_URL.to_string(), "model.onnx")] {
         let local_path = artifacts_dir.join(to);
         if !local_path.is_file() {
             download(&from, &local_path).with_context(|| "unable to retrieve test artifact")?;
@@ -149,31 +145,14 @@ fn check_onnx_artifacts_are_available() -> Result<()> {
             println!("> using cached artifact: {}", local_path.display())
         }
     }
-    Ok(())
-}
-
-#[cfg(all(feature = "winml", target_arch = "x86_64", target_os = "windows"))]
-fn check_winml_artifacts_are_available() -> Result<()> {
-    let _exclusively_retrieve_artifacts = ARTIFACTS.lock().unwrap();
-    let artifacts_dir = artifacts_dir();
-    if !artifacts_dir.is_dir() {
-        fs::create_dir(&artifacts_dir)?;
-    }
-    const MODEL_URL: &str="https://github.com/onnx/models/raw/5faef4c33eba0395177850e1e31c4a6a9e634c82/vision/classification/mobilenet/model/mobilenetv2-12.onnx";
-    for (from, to) in [(MODEL_URL, "model.onnx")] {
-        let local_path = artifacts_dir.join(to);
-        if !local_path.is_file() {
-            download(&from, &local_path).with_context(|| "unable to retrieve test artifact")?;
-        } else {
-            println!("> using cached artifact: {}", local_path.display())
-        }
-    }
-    // kitten.rgb is converted from https://github.com/microsoft/Windows-Machine-Learning/blob/master/SharedContent/media/kitten_224.png?raw=true.
-    let tensor_path = env::current_dir()?
+    // Copy image from source tree to artifact directory.
+    let image_path = env::current_dir()?
         .join("tests")
         .join("fixtures")
-        .join("kitten.rgb");
-    fs::copy(tensor_path, artifacts_dir.join("kitten.rgb"))?;
+        .join("000000062808.rgb");
+    let dest_path = artifacts_dir.join("000000062808.rgb");
+    fs::copy(&image_path, &dest_path)?;
     Ok(())
 }
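For context on `check_winml_is_available`, which is only partially visible in the hunk above: it probes WinML by attempting to create a `LearningModelDevice` (the bindings imported at the top of the file) inside `catch_unwind`, since device creation fails on hosts without the WinML runtime. A hedged sketch of such a probe, with illustrative error messages rather than the crate's actual output:

```rust
#[cfg(all(feature = "winml", target_os = "windows"))]
fn winml_probe() -> anyhow::Result<()> {
    use windows::AI::MachineLearning::{LearningModelDevice, LearningModelDeviceKind};

    // Device creation is the cheapest operation that exercises the WinML
    // runtime; wrap it in catch_unwind so a missing runtime surfaces as an
    // error instead of aborting the test harness.
    let result = std::panic::catch_unwind(|| {
        LearningModelDevice::Create(LearningModelDeviceKind::Default)
    });
    match result {
        Ok(Ok(_device)) => Ok(()),
        Ok(Err(e)) => Err(anyhow::anyhow!("failed to create WinML device: {e}")),
        Err(_) => Err(anyhow::anyhow!("WinML runtime is not available on this host")),
    }
}
```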
diff --git a/crates/wasi-nn/tests/all.rs b/crates/wasi-nn/tests/all.rs
index 06be156079d6..2c41f17db2e3 100644
--- a/crates/wasi-nn/tests/all.rs
+++ b/crates/wasi-nn/tests/all.rs
@@ -97,10 +97,10 @@ fn nn_image_classification_named() {
 #[cfg_attr(not(all(feature = "winml", target_os = "windows")), ignore)]
 #[test]
 fn nn_image_classification_winml() {
-    #[cfg(feature = "winml")]
+    #[cfg(all(feature = "winml", target_os = "windows"))]
     {
         let backend = Backend::from(backend::winml::WinMLBackend::default());
-        run(NN_IMAGE_CLASSIFICATION_WINML, backend, true).unwrap()
+        run(NN_IMAGE_CLASSIFICATION_ONNX, backend, true).unwrap()
     }
 }
diff --git a/crates/wasi-nn/tests/fixtures/000000062808.rgb b/crates/wasi-nn/tests/fixtures/000000062808.rgb
new file mode 100644
index 000000000000..ac79a904fb92
Binary files /dev/null and b/crates/wasi-nn/tests/fixtures/000000062808.rgb differ
diff --git a/crates/wasi-nn/tests/fixtures/kitten.rgb b/crates/wasi-nn/tests/fixtures/kitten.rgb
deleted file mode 100644
index d39e9f9e20cb..000000000000
Binary files a/crates/wasi-nn/tests/fixtures/kitten.rgb and /dev/null differ
diff --git a/crates/wasi-nn/tests/fixtures/readme.md b/crates/wasi-nn/tests/fixtures/readme.md
new file mode 100644
index 000000000000..7001167b666c
--- /dev/null
+++ b/crates/wasi-nn/tests/fixtures/readme.md
@@ -0,0 +1,9 @@
+000000062808.rgb was generated from the ImageNet image 000000062808.jpg. It
+was preprocessed by the following Python code, using the helpers from
+https://github.com/onnx/models/blob/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/imagenet_preprocess.py
+
+```
+image = mxnet.image.imread('000000062808.jpg')
+image = preprocess_mxnet(image)
+image.asnumpy().tofile('000000062808.rgb')
+```
\ No newline at end of file
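Since `tofile` dumps the raw `float32` values of the preprocessed tensor with no header (1x3x224x224 NCHW, assuming the usual MobileNetV2 input shape), the fixture is a 602,112-byte file. A hypothetical host-side helper, assuming the little-endian layout numpy produces on the x86 CI hosts, showing how those bytes map back to `f32`s:

```rust
use std::fs;

/// Hypothetical helper (not part of this change): reinterpret the raw
/// fixture bytes as the f32 values of a 1x3x224x224 NCHW tensor.
fn read_rgb_fixture(path: &str) -> std::io::Result<Vec<f32>> {
    let bytes = fs::read(path)?;
    // 1 * 3 * 224 * 224 f32 values, 4 bytes each = 602112 bytes.
    assert_eq!(bytes.len(), 3 * 224 * 224 * 4, "unexpected fixture size");
    Ok(bytes
        .chunks_exact(4)
        .map(|b| f32::from_le_bytes([b[0], b[1], b[2], b[3]]))
        .collect())
}
```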