From 548eb7f1e356ffc9d78e344afdf2717e1965fd78 Mon Sep 17 00:00:00 2001 From: Aria Ghora Prabono Date: Fri, 25 Aug 2023 18:03:54 +0900 Subject: [PATCH 1/3] feat: implement HardSigmoid operation --- wonnx/src/compiler.rs | 16 +++- .../templates/snippets/activation_scalar.wgsl | 9 +++ wonnx/templates/snippets/activation_vec.wgsl | 9 +++ wonnx/tests/hard_sigmoid.rs | 78 +++++++++++++++++++ 4 files changed, 108 insertions(+), 4 deletions(-) create mode 100644 wonnx/tests/hard_sigmoid.rs diff --git a/wonnx/src/compiler.rs b/wonnx/src/compiler.rs index ba757c20..c81a3e6b 100644 --- a/wonnx/src/compiler.rs +++ b/wonnx/src/compiler.rs @@ -739,13 +739,21 @@ pub fn compile( } } op @ ("Relu" | "Sigmoid" | "Softsign" | "Softplus" | "Clip" | "Celu" | "Elu" - | "LeakyRelu") => { - let alpha = if op == "LeakyRelu" { - node.get_attribute_value("alpha", Some(0.01))? + | "LeakyRelu" | "HardSigmoid") => { + let alpha = match op { + "LeakyRelu" => node.get_attribute_value("alpha", Some(0.01))?, + "HardSigmoid" => node.get_attribute_value("alpha", Some(0.2))?, + _ => node.get_attribute_value("alpha", Some(1.0))?, + }; + + let beta = if op == "HardSigmoid" { + node.get_attribute_value("beta", Some(0.5))? } else { - node.get_attribute_value("alpha", Some(1.0))? + node.get_attribute_value("beta", Some(1.0))? 
}; + context.insert("alpha", &alpha); + context.insert("beta", &beta); if op == "Clip" { let min: Vec = diff --git a/wonnx/templates/snippets/activation_scalar.wgsl b/wonnx/templates/snippets/activation_scalar.wgsl index d3ab618b..19df0721 100644 --- a/wonnx/templates/snippets/activation_scalar.wgsl +++ b/wonnx/templates/snippets/activation_scalar.wgsl @@ -38,6 +38,15 @@ {{ scalar_type }}({{ alpha }}) * (exp(input_vec) - {{ scalar_type }}(1)) ); +{%- elif activation_type == "HardSigmoid" -%} + {{ activation_output }} = max( + {{ scalar_type }}(0), + min( + {{ scalar_type }}(1), + {{ scalar_type }}({{ alpha }}) * {{ activation_input }} + {{ scalar_type }}({{ beta }}) + ) + ); + {%- elif activation_output != activation_input -%} {{ activation_output }} = {{ activation_input }}; diff --git a/wonnx/templates/snippets/activation_vec.wgsl b/wonnx/templates/snippets/activation_vec.wgsl index c9a0a38b..c1e183dc 100644 --- a/wonnx/templates/snippets/activation_vec.wgsl +++ b/wonnx/templates/snippets/activation_vec.wgsl @@ -46,6 +46,15 @@ {{ activation_output }} = max({{ activation_input }}, Vec4(Scalar(), Scalar(), Scalar(), Scalar())) + min({{ scalar_type }}({{ alpha }}) * {{ activation_input }}, Vec4(Scalar(), Scalar(), Scalar(), Scalar())); +{%- elif activation_type == "HardSigmoid" -%} + {{ activation_output }} = max( + Vec4(Scalar(), Scalar(), Scalar(), Scalar()), + min( + Vec4({{ scalar_type }}(1), {{ scalar_type }}(1), {{ scalar_type }}(1), {{ scalar_type }}(1)), + {{ scalar_type }}({{ alpha }}) * {{ activation_input }} + {{ scalar_type }}({{ beta }}) + ) + ); + {%- elif activation_output != activation_input -%} {{ activation_output }} = {{ activation_input }}; diff --git a/wonnx/tests/hard_sigmoid.rs b/wonnx/tests/hard_sigmoid.rs new file mode 100644 index 00000000..0af4ea88 --- /dev/null +++ b/wonnx/tests/hard_sigmoid.rs @@ -0,0 +1,78 @@ +use std::{collections::HashMap, convert::TryInto}; +use wonnx::utils::{attribute, graph, model, node, tensor}; +mod common; + 
+/// Test HardSigmoid node with default alpha and beta +/// https://github.com/onnx/onnx/blob/main/docs/Operators.md#examples-68 +#[test] +fn test_hard_sigmoid_default() { + let input_data = [-2.0, -1.0, 1.0, 2.0]; + let shape = vec![2, 2]; + + let (default_alpha, default_beta) = (0.2, 0.5); + let expected_output_data: Vec = input_data + .iter() + .map(|x| x * default_alpha + default_beta) + .collect(); + + let mut model_input = HashMap::new(); + model_input.insert("X".to_string(), input_data.as_slice().into()); + + let node = node(vec!["X"], vec!["Y"], "hard_sigmoid", "HardSigmoid", vec![]); + + let model = model(graph( + vec![tensor("X", &shape)], + vec![tensor("Y", &shape)], + vec![], + vec![], + vec![node], + )); + + let session = + pollster::block_on(wonnx::Session::from_model(model)).expect("Session did not create"); + + let output = pollster::block_on(session.run(&model_input)).unwrap(); + let output_data: &[f32] = (&output["Y"]).try_into().unwrap(); + + common::assert_eq_vector(output_data, expected_output_data.as_slice()); +} + +/// Test HardSigmoid node with predefined alpha and beta +/// https://github.com/onnx/onnx/blob/main/docs/Operators.md#examples-68 +#[test] +fn test_hard_sigmoid() { + let input_data: Vec = vec![-1.0, 0.0, 1.0]; + let shape = vec![1, 3]; + + let mut model_input = HashMap::new(); + model_input.insert("X".to_string(), input_data.as_slice().into()); + + let alpha = attribute("alpha", 0.5); + let beta = attribute("beta", 0.6); + + let node = node( + vec!["X"], + vec!["Y"], + "hard_sigmoid", + "HardSigmoid", + vec![alpha, beta], + ); + + let model = model(graph( + vec![tensor("X", &shape)], + vec![tensor("Y", &shape)], + vec![], + vec![], + vec![node], + )); + + let session = + pollster::block_on(wonnx::Session::from_model(model)).expect("Session did not create"); + + let output = pollster::block_on(session.run(&model_input)).unwrap(); + println!("{:?}", output); + + let expected_output = &[0.1, 0.6, 1.0]; + let output_data: &[f32] = 
(&output["Y"]).try_into().unwrap(); + common::assert_eq_vector(output_data, expected_output); +} From 33f2e8873bf188359248536b81ffdd841e37cf06 Mon Sep 17 00:00:00 2001 From: Aria Ghora Prabono Date: Mon, 28 Aug 2023 14:43:46 +0900 Subject: [PATCH 2/3] chore: enable hardsigmoid tests for onnx backend Other changes: - Update repo README.md to indicate support for hardsigmoid - Rename `hard_sigmoid` to `hardsigmoid` in a few places to be consistent with onnx standard --- README.md | 2 +- wonnx-py/tests/test_onnx_backend.py | 2 +- wonnx/tests/{hard_sigmoid.rs => hardsigmoid.rs} | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) rename wonnx/tests/{hard_sigmoid.rs => hardsigmoid.rs} (97%) diff --git a/README.md b/README.md index 5c9b430d..c132704a 100644 --- a/README.md +++ b/README.md @@ -265,7 +265,7 @@ fn test_matmul_square_matrix() { |GlobalMaxPool|1| |Greater|13, 9, 7, 1|✅| |GridSample|16| -|HardSigmoid|6, 1| +|HardSigmoid|6, 1|✅|✅| |Hardmax|13, 11, 1| |Identity|16, 14, 13, 1|✅|✅| |If|16, 13, 11, 1| diff --git a/wonnx-py/tests/test_onnx_backend.py b/wonnx-py/tests/test_onnx_backend.py index a0dd935a..c52c2ae8 100644 --- a/wonnx-py/tests/test_onnx_backend.py +++ b/wonnx-py/tests/test_onnx_backend.py @@ -124,7 +124,6 @@ def do_enforce_test_coverage_safelist(model): # type: (ModelProto) -> bool backend_test = onnx.backend.test.BackendTest(DummyBackend, __name__) - backend_test.include(f"test_constant_cpu") backend_test.include(f"test_conv_[a-z,_]*") backend_test.include(f"test_Conv2d[a-z,_]*") @@ -146,6 +145,7 @@ def do_enforce_test_coverage_safelist(model): # type: (ModelProto) -> bool backend_test.include(f"test_shape_[a-z,_]*") backend_test.include(f"test_size_[a-z,_]*") backend_test.include(f"test_celu_[a-z,_]*") +backend_test.include(f"test_hardsigmoid_[a-z,_]*") # For these we only test the default version, as we don't support the bool type backend_test.include(f"test_prelu_broadcast_cpu$") diff --git a/wonnx/tests/hard_sigmoid.rs 
b/wonnx/tests/hardsigmoid.rs similarity index 97% rename from wonnx/tests/hard_sigmoid.rs rename to wonnx/tests/hardsigmoid.rs index 0af4ea88..206c5111 100644 --- a/wonnx/tests/hard_sigmoid.rs +++ b/wonnx/tests/hardsigmoid.rs @@ -5,7 +5,7 @@ mod common; /// Test HardSigmoid node with default alpha and beta /// https://github.com/onnx/onnx/blob/main/docs/Operators.md#examples-68 #[test] -fn test_hard_sigmoid_default() { +fn test_hardsigmoid_default() { let input_data = [-2.0, -1.0, 1.0, 2.0]; let shape = vec![2, 2]; @@ -40,7 +40,7 @@ fn test_hard_sigmoid_default() { /// Test HardSigmoid node with predefined alpha and beta /// https://github.com/onnx/onnx/blob/main/docs/Operators.md#examples-68 #[test] -fn test_hard_sigmoid() { +fn test_hardsigmoid() { let input_data: Vec = vec![-1.0, 0.0, 1.0]; let shape = vec![1, 3]; From 95e7b3884b20ae2d274d2eeb5923e140bc6a1997 Mon Sep 17 00:00:00 2001 From: Aria Ghora Prabono Date: Wed, 8 Nov 2023 10:23:46 +0700 Subject: [PATCH 3/3] chore: disable HardSigmoid tests until CastLike is implemented --- wonnx-py/tests/test_onnx_backend.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/wonnx-py/tests/test_onnx_backend.py b/wonnx-py/tests/test_onnx_backend.py index c52c2ae8..7b608872 100644 --- a/wonnx-py/tests/test_onnx_backend.py +++ b/wonnx-py/tests/test_onnx_backend.py @@ -145,7 +145,9 @@ def do_enforce_test_coverage_safelist(model): # type: (ModelProto) -> bool backend_test.include(f"test_shape_[a-z,_]*") backend_test.include(f"test_size_[a-z,_]*") backend_test.include(f"test_celu_[a-z,_]*") -backend_test.include(f"test_hardsigmoid_[a-z,_]*") + +# Disabled until CastLike is implemented +# backend_test.include(f"test_hardsigmoid_[a-z,_]*") # For these we only test the default version, as we don't support the bool type backend_test.include(f"test_prelu_broadcast_cpu$") @@ -162,15 +164,15 @@ def do_enforce_test_coverage_safelist(model): # type: (ModelProto) -> bool # Disable tests for 
ReduceSum because ReduceSum accepts the 'axes' list as input instead of as an attribute, and the test # case sets the 'axes' input dynamically, which we don't support (yet?). # backend_test.include(f"test_reduce_sum_[a-z,_]*") -#backend_test.include(f"test_reduce_mean_[a-z,_]*") -#backend_test.include(f"test_reduce_l1_[a-z,_]*") -#backend_test.include(f"test_reduce_l2_[a-z,_]*") -#backend_test.include(f"test_reduce_min_[a-z,_]*") -#backend_test.include(f"test_reduce_prod_[a-z,_]*") -#backend_test.include(f"test_reduce_sum_square_[a-z,_]*") -#backend_test.include(f"test_reduce_max_[a-z,_]*") -#backend_test.include(f"test_reduce_log_sum_[a-z,_]*") -#backend_test.include(f"test_reduce_log_sum_exp_[a-z,_]*") +# backend_test.include(f"test_reduce_mean_[a-z,_]*") +# backend_test.include(f"test_reduce_l1_[a-z,_]*") +# backend_test.include(f"test_reduce_l2_[a-z,_]*") +# backend_test.include(f"test_reduce_min_[a-z,_]*") +# backend_test.include(f"test_reduce_prod_[a-z,_]*") +# backend_test.include(f"test_reduce_sum_square_[a-z,_]*") +# backend_test.include(f"test_reduce_max_[a-z,_]*") +# backend_test.include(f"test_reduce_log_sum_[a-z,_]*") +# backend_test.include(f"test_reduce_log_sum_exp_[a-z,_]*") # Takes dynamic input, we don't support that yet # backend_test.include(f"test_constantofshape_[a-z,_]*")