Commit 6beb6732 authored by Tomasz Dołbniak's avatar Tomasz Dołbniak Committed by Robert Kimball

[ONNX] Add hyperbolic functions support (#2402)

* [ONNX] Add support for ONNX sinh op

* [ONNX] Test the sinh op and improve an error msg about an unsupported op version

* [ONNX] Fix the failing tests by moving sinh to opset 1

* [ONNX] Apply clang-format to fix failing CI builds

* [ONNX] Add and test ONNX cosh op

* [ONNX] Add and test ONNX cosh op
parent e327fe57
...@@ -55,6 +55,7 @@ add_library(onnx_import STATIC ...@@ -55,6 +55,7 @@ add_library(onnx_import STATIC
op/clip.cpp op/clip.cpp
op/clip.hpp op/clip.hpp
op/cos.hpp op/cos.hpp
op/cosh.hpp
op/concat.cpp op/concat.cpp
op/concat.hpp op/concat.hpp
op/constant.cpp op/constant.cpp
...@@ -122,6 +123,7 @@ add_library(onnx_import STATIC ...@@ -122,6 +123,7 @@ add_library(onnx_import STATIC
op/shape.cpp op/shape.cpp
op/sigmoid.hpp op/sigmoid.hpp
op/sin.hpp op/sin.hpp
op/sinh.hpp
op/size.cpp op/size.cpp
op/size.hpp op/size.hpp
op/slice.cpp op/slice.cpp
......
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include <memory>
#include "core/node.hpp"
#include "ngraph/node_vector.hpp"
#include "ngraph/op/cosh.hpp"
namespace ngraph
{
    namespace onnx_import
    {
        namespace op
        {
            namespace set_1
            {
                /// \brief Converts an ONNX Cosh operation into an nGraph Cosh node.
                ///
                /// \param node The ONNX node object representing this operation.
                /// \return A vector holding a single nGraph node: the hyperbolic
                ///         cosine applied to the operation's first input.
                inline NodeVector cosh(const Node& node)
                {
                    const auto data = node.get_ng_inputs().at(0);
                    return {std::make_shared<ngraph::op::Cosh>(data)};
                }
            } // namespace set_1
        }     // namespace op
    }         // namespace onnx_import
} // namespace ngraph
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include <memory>
#include "core/node.hpp"
#include "ngraph/node_vector.hpp"
#include "ngraph/op/sinh.hpp"
namespace ngraph
{
    namespace onnx_import
    {
        namespace op
        {
            namespace set_1
            {
                /// \brief Converts an ONNX Sinh operation into an nGraph Sinh node.
                ///
                /// \param node The ONNX node object representing this operation.
                /// \return A vector holding a single nGraph node: the hyperbolic
                ///         sine applied to the operation's first input.
                inline NodeVector sinh(const Node& node)
                {
                    const auto data = node.get_ng_inputs().at(0);
                    return {std::make_shared<ngraph::op::Sinh>(data)};
                }
            } // namespace set_1
        }     // namespace op
    }         // namespace onnx_import
} // namespace ngraph
...@@ -40,6 +40,7 @@ ...@@ -40,6 +40,7 @@
#include "op/conv.hpp" #include "op/conv.hpp"
#include "op/conv_transpose.hpp" #include "op/conv_transpose.hpp"
#include "op/cos.hpp" #include "op/cos.hpp"
#include "op/cosh.hpp"
#include "op/depth_to_space.hpp" #include "op/depth_to_space.hpp"
#include "op/div.hpp" #include "op/div.hpp"
#include "op/elu.hpp" #include "op/elu.hpp"
...@@ -80,6 +81,7 @@ ...@@ -80,6 +81,7 @@
#include "op/shape.hpp" #include "op/shape.hpp"
#include "op/sigmoid.hpp" #include "op/sigmoid.hpp"
#include "op/sin.hpp" #include "op/sin.hpp"
#include "op/sinh.hpp"
#include "op/size.hpp" #include "op/size.hpp"
#include "op/slice.hpp" #include "op/slice.hpp"
#include "op/softmax.hpp" #include "op/softmax.hpp"
...@@ -219,6 +221,7 @@ namespace ngraph ...@@ -219,6 +221,7 @@ namespace ngraph
REGISTER_OPERATOR("Conv", 1, conv); REGISTER_OPERATOR("Conv", 1, conv);
REGISTER_OPERATOR("ConvTranspose", 1, conv_transpose); REGISTER_OPERATOR("ConvTranspose", 1, conv_transpose);
REGISTER_OPERATOR("Cos", 1, cos); REGISTER_OPERATOR("Cos", 1, cos);
REGISTER_OPERATOR("Cosh", 1, cosh);
REGISTER_OPERATOR("DepthToSpace", 1, depth_to_space); REGISTER_OPERATOR("DepthToSpace", 1, depth_to_space);
REGISTER_OPERATOR("Div", 1, div); REGISTER_OPERATOR("Div", 1, div);
REGISTER_OPERATOR("Div", 7, div); REGISTER_OPERATOR("Div", 7, div);
...@@ -273,6 +276,7 @@ namespace ngraph ...@@ -273,6 +276,7 @@ namespace ngraph
REGISTER_OPERATOR("Shape", 1, shape); REGISTER_OPERATOR("Shape", 1, shape);
REGISTER_OPERATOR("Sigmoid", 1, sigmoid); REGISTER_OPERATOR("Sigmoid", 1, sigmoid);
REGISTER_OPERATOR("Sin", 1, sin); REGISTER_OPERATOR("Sin", 1, sin);
REGISTER_OPERATOR("Sinh", 1, sinh);
REGISTER_OPERATOR("Size", 1, size); REGISTER_OPERATOR("Size", 1, size);
REGISTER_OPERATOR("Slice", 1, slice); REGISTER_OPERATOR("Slice", 1, slice);
REGISTER_OPERATOR("Softmax", 1, softmax); REGISTER_OPERATOR("Softmax", 1, softmax);
......
...@@ -51,7 +51,8 @@ namespace ngraph ...@@ -51,7 +51,8 @@ namespace ngraph
UnsupportedVersion(const std::string& name, UnsupportedVersion(const std::string& name,
std::int64_t version, std::int64_t version,
const std::string& domain) const std::string& domain)
: ngraph_error{(domain.empty() ? "" : domain + ".") + name + ":" + : ngraph_error{"Unsupported operator version: " +
(domain.empty() ? "" : domain + ".") + name + ":" +
std::to_string(version)} std::to_string(version)}
{ {
} }
......
ngraph ONNXImporter:D
xy"Cosh
cosh_graphZ
x


b
y


B
\ No newline at end of file
ngraph ONNXImporter:D
xy"Sinh
sinh_graphZ
x


b
y


B
\ No newline at end of file
...@@ -1863,3 +1863,29 @@ TEST(onnx_${BACKEND_NAME}, model_top_k) ...@@ -1863,3 +1863,29 @@ TEST(onnx_${BACKEND_NAME}, model_top_k)
EXPECT_TRUE(test::all_close_f(expected_values_output, values_output)); EXPECT_TRUE(test::all_close_f(expected_values_output, values_output));
EXPECT_TRUE(test::all_close(expected_indices_output, indices_output)); EXPECT_TRUE(test::all_close(expected_indices_output, indices_output));
} }
TEST(onnx_${BACKEND_NAME}, model_sinh)
{
    // Import the serialized ONNX Sinh model from the test zoo.
    const auto model_path = file_util::path_join(SERIALIZED_ZOO, "onnx/sinh.onnx");
    auto function = onnx_import::import_onnx_model(model_path);

    // sinh(-1) == -sinh(1) (odd function) and sinh(0) == 0.
    Inputs inputs{std::vector<float>{-1.0f, 0.0f, 1.0f}};
    Outputs expected_outputs{std::vector<float>{-1.1752012f, 0.f, 1.1752012f}};

    Outputs outputs{execute(function, inputs, "${BACKEND_NAME}")};
    EXPECT_TRUE(test::all_close_f(expected_outputs.front(), outputs.front()));
}
TEST(onnx_${BACKEND_NAME}, model_cosh)
{
    // Import the serialized ONNX Cosh model from the test zoo.
    const auto model_path = file_util::path_join(SERIALIZED_ZOO, "onnx/cosh.onnx");
    auto function = onnx_import::import_onnx_model(model_path);

    // cosh(-1) == cosh(1) (even function) and cosh(0) == 1.
    Inputs inputs{std::vector<float>{-1.0f, 0.0f, 1.0f}};
    Outputs expected_outputs{std::vector<float>{1.54308069f, 1.f, 1.54308069f}};

    Outputs outputs{execute(function, inputs, "${BACKEND_NAME}")};
    EXPECT_TRUE(test::all_close_f(expected_outputs.front(), outputs.front()));
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment