Commit adfe479a authored by Tomasz Dołbniak, committed by Scott Cyphers

[ONNX] Sign operator support (#2412)

* [ONNX] Add support for ONNX sinh op

* [ONNX] Test the sinh op and improve an error msg about not supported op version

* [ONNX] Fix the failing tests by moving sinh to opset 1

* [ONNX] Apply clang-format to fix failing CI builds

* [ONNX] Add and test ONNX cosh op

* [ONNX] Add and test ONNX cosh op

* [ONNX] Add and test ONNX sign op

* [ONNX] Test the sign operator with more challenging values

* [ONNX] Update of supported ops documentation table
parent 5b63a3c7
......@@ -124,6 +124,7 @@ add_library(onnx_import STATIC
op/shape.hpp
op/shape.cpp
op/sigmoid.hpp
op/sign.hpp
op/sin.hpp
op/sinh.hpp
op/size.cpp
......
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once

#include <memory>

#include "core/node.hpp"
#include "ngraph/node_vector.hpp"
#include "ngraph/op/sign.hpp"

namespace ngraph
{
    namespace onnx_import
    {
        namespace op
        {
            namespace set_1
            {
                inline NodeVector sign(const Node& node)
                {
                    return {std::make_shared<ngraph::op::Sign>(node.get_ng_inputs().at(0))};
                }

            } // namespace set_1

        } // namespace op

    } // namespace onnx_import

} // namespace ngraph
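ONNX `Sign` (and the `ngraph::op::Sign` node the translator above builds) computes the elementwise signum: -1 for negative inputs, 0 for zero, and +1 for positive inputs, with infinities mapping to ±1. A minimal standalone sketch of that semantics (illustration only, not nGraph code):

#include <cmath>
#include <iostream>
#include <vector>

// Elementwise signum: -1 for negative values, 0 for zero, +1 for positive values.
// Infinities map to -1/+1, matching the values checked by the model_sign test below.
std::vector<float> sign(const std::vector<float>& input)
{
    std::vector<float> output;
    output.reserve(input.size());
    for (float value : input)
    {
        output.push_back((value > 0.0f) - (value < 0.0f));
    }
    return output;
}

int main()
{
    for (float s : sign({-INFINITY, -3.141592f, 0.0f, 2.71828f, INFINITY}))
    {
        std::cout << s << " "; // prints: -1 -1 0 1 1
    }
}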
......@@ -35,6 +35,7 @@ opset versions starting from `1` to `6` and to the latest opset version.
| Conv | 1- |
| ConvTranspose | 1- |
| Cos | 7- |
| Cosh | 9- |
| Div | 1-6-7- |
| Dropout | 1-6-7- | Only for inference.
| Elu | 1-6- |
......@@ -79,7 +80,9 @@ opset versions starting from `1` to `6` and to the latest opset version.
| Selu | 1-6- |
| Shape | 1- |
| Sigmoid | 1-6- |
| Sign | 9- |
| Sin | 7- |
| Sinh | 9- |
| Size | 1- |
| Slice | 1- |
| Softmax | 1- |
......@@ -151,7 +154,4 @@ opset versions starting from `1` to `6` and to the latest opset version.
| InstanceNormalization | - | | 436 | Just an equation. For per-channel computation, a _slice/op/concat_ pattern may need to be used. |
| Shrink | (9) | | 449 | Just an easy equation. |
| TopK | - | | 327 | Use nGraph `TopK`. |
| Cosh | (9) | | 448 | Use nGraph `Cosh`. |
| Sign | (9) | | 448 | Use nGraph `Sign`. |
| Sinh | (9) | | 448 | Use nGraph `Sinh`. |
| Where | (9) | | 448 | Use nGraph `Select`. |
......@@ -80,6 +80,7 @@
#include "op/selu.hpp"
#include "op/shape.hpp"
#include "op/sigmoid.hpp"
#include "op/sign.hpp"
#include "op/sin.hpp"
#include "op/sinh.hpp"
#include "op/size.hpp"
......@@ -275,6 +276,7 @@ namespace ngraph
REGISTER_OPERATOR("Selu", 1, selu);
REGISTER_OPERATOR("Shape", 1, shape);
REGISTER_OPERATOR("Sigmoid", 1, sigmoid);
REGISTER_OPERATOR("Sign", 1, sign);
REGISTER_OPERATOR("Sin", 1, sin);
REGISTER_OPERATOR("Sinh", 1, sinh);
REGISTER_OPERATOR("Size", 1, size);
......
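The `REGISTER_OPERATOR` calls above bind an ONNX op name and the opset version from which a translator applies to the corresponding `set_1` function. The macro itself is not part of this diff, so as a rough, hypothetical sketch of the dispatch-table idea (all names below are illustrative, not the importer's real internals):

#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <utility>

// Stand-in for onnx_import::Node; illustrative only.
struct OnnxNode
{
    std::string op_type;
};

using Translator = std::function<std::string(const OnnxNode&)>;

// Key: (ONNX op name, first opset version this translator applies to).
std::map<std::pair<std::string, int>, Translator> registry;

void register_operator(const std::string& name, int version, Translator fn)
{
    registry[{name, version}] = std::move(fn);
}

int main()
{
    register_operator("Sign", 1, [](const OnnxNode& node) {
        return "would build ngraph::op::Sign for " + node.op_type;
    });

    std::cout << registry[{"Sign", 1}](OnnxNode{"Sign"}) << "\n";
}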
[Binary file: the new test model `onnx/sign.onnx` referenced by the test below; a minimal serialized ONNX model (producer `ngraph ONNXImporter`, graph `sign_graph`) containing a single `Sign` node mapping input `x` to output `y`.]
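Because the committed model is a binary artifact, its contents cannot be reviewed in the diff. For readers who want to reproduce an equivalent single-node model, a hedged sketch using the generated ONNX protobuf C++ API follows; the field names come from onnx.proto, while the include path, tensor shape, and producer string are assumptions rather than details taken from the committed file:

#include <fstream>
#include <onnx/onnx_pb.h> // assumption: generated ONNX protobuf headers are available under this path

int main()
{
    onnx::ModelProto model;
    model.set_producer_name("example");       // the committed file reports "ngraph ONNXImporter"
    model.add_opset_import()->set_version(9); // Sign was introduced in ONNX opset 9

    onnx::GraphProto* graph = model.mutable_graph();
    graph->set_name("sign_graph");

    // A single Sign node: y = sign(x).
    onnx::NodeProto* node = graph->add_node();
    node->set_op_type("Sign");
    node->add_input("x");
    node->add_output("y");

    // Declare x and y as rank-1 float tensors (length 5 matches the test inputs; assumption).
    onnx::ValueInfoProto* x = graph->add_input();
    x->set_name("x");
    auto* x_type = x->mutable_type()->mutable_tensor_type();
    x_type->set_elem_type(onnx::TensorProto::FLOAT);
    x_type->mutable_shape()->add_dim()->set_dim_value(5);

    onnx::ValueInfoProto* y = graph->add_output();
    y->set_name("y");
    auto* y_type = y->mutable_type()->mutable_tensor_type();
    y_type->set_elem_type(onnx::TensorProto::FLOAT);
    y_type->mutable_shape()->add_dim()->set_dim_value(5);

    std::ofstream out("sign.onnx", std::ios::binary | std::ios::trunc);
    return model.SerializeToOstream(&out) ? 0 : 1;
}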
......@@ -1951,3 +1951,21 @@ TEST(onnx_${BACKEND_NAME}, model_initializer_wo_input)
    Outputs output{execute(function, inputs, "${BACKEND_NAME}")};
    EXPECT_TRUE(test::all_close_f(expected_output, output.front()));
}

TEST(onnx_${BACKEND_NAME}, model_sign)
{
    auto function =
        onnx_import::import_onnx_model(file_util::path_join(SERIALIZED_ZOO, "onnx/sign.onnx"));

    Inputs inputs{std::vector<float>{-std::numeric_limits<float>::infinity(),
                                     -3.141592f,
                                     0.0f,
                                     2.71828f,
                                     std::numeric_limits<float>::infinity()}};

    Outputs expected_outputs{std::vector<float>{-1.0f, -1.0f, 0.0f, 1.0f, 1.0f}};

    Outputs outputs{execute<float>(function, inputs, "${BACKEND_NAME}")};
    EXPECT_TRUE(test::all_close_f(expected_outputs.front(), outputs.front()));
}