diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644
index 00000000..323c394a
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,13 @@
+Copyright (c) 2021 Xavier Tao, Tommy van der Vorst & WONNX contributors
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
\ No newline at end of file
diff --git a/LICENSE b/LICENSE-MIT
similarity index 100%
rename from LICENSE
rename to LICENSE-MIT
diff --git a/README.md b/README.md
index a54fd0ea..54ec77ab 100644
--- a/README.md
+++ b/README.md
@@ -434,3 +434,27 @@ supported in the following cases:
 
 Constant folding is performed as part of shape inference, unless disabled (from the CLI pass `--no-fold-constants` to disable).
 This is done in order to support models that dynamically calculate shapes using operators such as `Shape`/`Squeeze`/`Unsqueeze` depending on dynamically set dimension parameters (e.g. batch size).
+
+## License
+
+Licensed under either of
+ * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
+ * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+at your option.
+
+Except for the following files:
+
+* `data/models`:
+  * `mobilenetv2-7.onnx`: [source](https://github.com/onnx/models/blob/main/vision/classification/mobilenet/model/mobilenetv2-7.onnx), Apache-2.0 license only.
+  * `squeezenet-labels.txt`: [source](https://github.com/onnx/models/blob/main/vision/classification/synset.txt), Apache-2.0 license only.
+
+* `data/images`:
+  * `pelican.jpeg`: [source](https://en.wikipedia.org/wiki/Pelican#/media/File:Pelikan_Walvis_Bay.jpg), (C) Rui Ornelas, [CC-BY 2.0](https://creativecommons.org/licenses/by/2.0/).
+  * `bald_eagle.jpeg`: [source](https://en.wikipedia.org/wiki/Bald_eagle#/media/File:Bald-Eagle-9114-cropped.jpg), (C) David R. Tribble, [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/)
+
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you shall be dual licensed as above, without any
+additional terms or conditions.
\ No newline at end of file
diff --git a/data/images/bald_eagle.jpeg b/data/images/bald_eagle.jpeg
index 7d3e344e..4d1ff228 100644
Binary files a/data/images/bald_eagle.jpeg and b/data/images/bald_eagle.jpeg differ
diff --git a/data/images/italian_greyhound.jpeg b/data/images/italian_greyhound.jpeg
deleted file mode 100644
index 30a57b8c..00000000
Binary files a/data/images/italian_greyhound.jpeg and /dev/null differ
diff --git a/data/images/pelican.jpeg b/data/images/pelican.jpeg
index 8d2bb6ac..b8ac39d9 100644
Binary files a/data/images/pelican.jpeg and b/data/images/pelican.jpeg differ
diff --git a/wonnx-py/tests/test_onnx_backend.py b/wonnx-py/tests/test_onnx_backend.py
index 688acd84..5f46e489 100644
--- a/wonnx-py/tests/test_onnx_backend.py
+++ b/wonnx-py/tests/test_onnx_backend.py
@@ -1,4 +1,4 @@
-# SPDX-License-Identifier: Apache-2.0
+# SPDX-License-Identifier: MIT OR Apache-2.0
 
 import itertools
 import os
diff --git a/wonnx-py/tests/test_specific_op.py b/wonnx-py/tests/test_specific_op.py
index d8c1a64f..12ab6110 100644
--- a/wonnx-py/tests/test_specific_op.py
+++ b/wonnx-py/tests/test_specific_op.py
@@ -1,4 +1,4 @@
-# SPDX-License-Identifier: Apache-2.0
+# SPDX-License-Identifier: MIT OR Apache-2.0
 import onnx.backend.test
 
 pytest_plugins = ("onnx.backend.test.report",)
diff --git a/wonnx/src/compiler.rs b/wonnx/src/compiler.rs
index 3c66f66e..fe9a3fcf 100644
--- a/wonnx/src/compiler.rs
+++ b/wonnx/src/compiler.rs
@@ -83,11 +83,6 @@ lazy_static! {
         include_str!("../templates/matrix/transpose.wgsl"),
     )
     .unwrap();
-    tera.add_raw_template(
-        "matrix/lrn.wgsl",
-        include_str!("../templates/matrix/lrn.wgsl"),
-    )
-    .unwrap();
     tera.add_raw_template(
         "pool/aggregate.wgsl",
         include_str!("../templates/pool/aggregate.wgsl"),
@@ -1326,38 +1321,6 @@ pub fn compile(
                 threads: (ceil(output_lengths[0], 256) as _, 1, 1),
             }
         }
-        "LRN" => {
-            // https://github.com/onnx/onnx/blob/main/docs/Operators.md#lrn
-            let alpha = node.get_attribute_value("alpha", Some(0.0001))?;
-            let beta = node.get_attribute_value("beta", Some(0.75))?;
-            let bias = node.get_attribute_value("bias", Some(1.0))?;
-            let size = node.get_attribute_value("size", Some(1))?;
-
-            context.insert("alpha", &alpha);
-            context.insert("beta", &beta);
-            context.insert("bias", &bias);
-            context.insert("size", &size);
-
-            let left_size = f64::floor((size - 1) as f64 / 2.0) as u32;
-            let right_size = f64::ceil((size - 1) as f64 / 2.0) as u32;
-
-            context.insert("left_size", &left_size);
-            context.insert("right_size", &right_size);
-
-            let (x_threads, workgroup_size_x) = workgroup_size(
-                output_lengths[0],
-                MAX_COMPUTE_WORKGROUPS_PER_DIMENSION,
-                MAX_WORKGROUP_SIZE_X,
-            )?;
-            context.insert("workgroup_size_x", &workgroup_size_x);
-            context.insert("i_chunks", &input_chunks);
-
-            NodeTemplate {
-                scalar_type: agreed_type(input_shapes, output_shapes)?,
-                template: "matrix/lrn.wgsl",
-                threads: (x_threads, 1, 1),
-            }
-        }
         op => return Err(CompileError::UnimplementedOp(op.to_string())),
     };
 
diff --git a/wonnx/templates/matrix/lrn.wgsl b/wonnx/templates/matrix/lrn.wgsl
deleted file mode 100644
index c3517ab2..00000000
--- a/wonnx/templates/matrix/lrn.wgsl
+++ /dev/null
@@ -1,23 +0,0 @@
-{%- include "structs.wgsl" -%}
-
-@group(0) @binding(0)
-var<storage, read> input_0: Array;
-
-@group(0) @binding(1)
-var<storage, read_write> output_0: Array;
-
-@compute @workgroup_size({{ workgroup_size_x }})
-fn main(@builtin(global_invocation_id) global_id: vec3<u32>) {
-    let c = global_id.x;
-    //let chunk_start = {{ i_chunks[0][1] }}u * c;
-    let start = (c / {{ i_shape[0][1] }}u) * {{ i_shape[0][1] }}u;
-    let end = start + {{ i_shape[0][1] - 1 }}u;
-
-    var square_sum: Scalar = Scalar();
-    for (var i = max(start, c - {{left_size}}u); i <= min(end, c + {{right_size}}u); i++) {
-        let I = input_0.data[i];
-        square_sum += I * I;
-    }
-
-    output_0.data[c] = input_0.data[ c ] / pow({{ scalar_type }}({{ bias }}) + ({{ scalar_type }}({{ alpha }}) / {{ scalar_type }}({{ size }})) * square_sum, {{ scalar_type }}({{ beta }}));
-}
diff --git a/wonnx/tests/localresponsenormalization.rs b/wonnx/tests/localresponsenormalization.rs
deleted file mode 100644
index 8f4202fa..00000000
--- a/wonnx/tests/localresponsenormalization.rs
+++ /dev/null
@@ -1,61 +0,0 @@
-use std::{collections::HashMap, convert::TryInto};
-use wonnx::utils::{attribute, graph, model, node, tensor};
-mod common;
-
-#[test]
-fn local_response_normalization() {
-    let mut input_data = HashMap::new();
-
-    let batches = 1;
-    let width_height: usize = 3;
-    let channels: usize = 4;
-    let data: Vec<f32> = [
-        1., 1., 2., 4., 2., 2., 1., 2., 3., 1., 2., 1., 4., 2., 3., 5., 3., 3., 2., 2., 6., 2., 3.,
-        1., 7., 3., 4., 2., 8., 4., 3., 2., 9., 3., 4., 4.,
-    ]
-    .to_vec();
-
-    let shape = vec![
-        batches as i64,
-        channels as i64,
-        width_height as i64,
-        width_height as i64,
-    ];
-    input_data.insert("X".to_string(), data.as_slice().into());
-
-    let bn_model = model(graph(
-        vec![tensor("X", &shape)], // input
-        vec![tensor("Y", &shape)], // output
-        vec![],                    // infos
-        vec![],                    // intializers
-        // nodes
-        vec![node(
-            vec!["X"],
-            vec!["Y"],
-            "lrn",
-            "LRN",
-            vec![
-                attribute("alpha", 1.0),
-                attribute("beta", 1.0),
-                attribute("bias", 0.0),
-                attribute("size", 2),
-            ],
-        )],
-    ));
-
-    // LOGIC
-    let session =
-        pollster::block_on(wonnx::Session::from_model(bn_model)).expect("Session did not create");
-
-    let result = pollster::block_on(session.run(&input_data)).unwrap();
-    let out_y = &result["Y"];
-
-    common::assert_eq_vector(
-        out_y.try_into().unwrap(),
-        &[
-            1.0, 0.4, 0.2, 0.5, 0.5, 0.8, 0.4, 1.0, 0.6, 0.4, 0.8, 2.0, 0.4, 0.30769232, 0.1764706,
-            0.39999998, 0.33333334, 0.4615385, 0.5, 1.0, 0.3, 0.30769232, 0.6, 2.0, 0.2413793,
-            0.24, 0.4, 1.0, 0.2, 0.32, 0.4615385, 1.0, 0.2, 0.24, 0.25, 0.5,
-        ],
-    );
-}