Commit 09abff6d authored by Ewa Tusień, committed by Sang Ik Lee

[ONNX] Remove Reciprocal FusedOp and move definition to ONNX importer. (#4155)

* Removed Reciprocal FusedOp and moved definition to ONNX importer.

* Removed unnecessary changes.

* Added test.

* Undo unnecessary changes.
Co-authored-by: Sang Ik Lee <sang.ik.lee@intel.com>
parent 1597487e
......@@ -401,8 +401,6 @@ set (SRC
op/fused/partial_slice.hpp
op/fused/prelu.cpp
op/fused/prelu.hpp
op/fused/reciprocal.cpp
op/fused/reciprocal.hpp
op/fused/rnn_cell.cpp
op/fused/rnn_cell.hpp
op/fused/scale_shift.cpp
......
......@@ -17,11 +17,9 @@
#include <memory>
#include <vector>
#include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "ngraph/shape.hpp"
#include "default_opset.hpp"
#include "ngraph/op/constant.hpp"
#include "reciprocal.hpp"
namespace ngraph
{
namespace onnx_import
......@@ -34,7 +32,9 @@ namespace ngraph
{
auto data = node.get_ng_inputs().at(0);
return {std::make_shared<ngraph::opset0::Reciprocal>(data)};
auto one_node = default_opset::Constant::create(
data->get_element_type(), data->get_shape(), {1});
return {std::make_shared<default_opset::Divide>(one_node, data)};
}
} // namespace set_1
......
......@@ -186,7 +186,6 @@ NGRAPH_OP(RNNCell, ngraph::op::v0, 0)
NGRAPH_OP(ROIPooling, ngraph::op::v0, 0)
NGRAPH_OP(RandomUniform, ngraph::op, 0)
NGRAPH_OP(Range, ngraph::op, 0)
NGRAPH_OP(Reciprocal, ngraph::op, 0)
NGRAPH_OP(Recv, ngraph::op::v0, 0)
NGRAPH_OP(ReduceMax, ngraph::op::v1, 1)
NGRAPH_OP(ReduceLogicalAnd, ngraph::op::v1, 1)
......
......@@ -107,7 +107,6 @@
#include "ngraph/op/fused/normalize_l2.hpp"
#include "ngraph/op/fused/partial_slice.hpp"
#include "ngraph/op/fused/prelu.hpp"
#include "ngraph/op/fused/reciprocal.hpp"
#include "ngraph/op/fused/rnn_cell.hpp"
#include "ngraph/op/fused/scale_shift.hpp"
#include "ngraph/op/fused/scatter_nd.hpp"
......
......@@ -165,7 +165,6 @@ NGRAPH_OP(QuantizedDotBias, ngraph::op)
NGRAPH_OP(RandomUniform, ngraph::op)
NGRAPH_OP(Recv, ngraph::op)
NGRAPH_OP(Range, ngraph::op)
NGRAPH_OP(Reciprocal, ngraph::op)
NGRAPH_OP(Relu, ngraph::op)
NGRAPH_OP(ReluBackprop, ngraph::op)
NGRAPH_OP(ReplaceSlice, ngraph::op)
......
......@@ -1514,11 +1514,6 @@ protected:
throw unsupported_op("Unsupported op '" + node.description() + "'");
break;
}
case OP_TYPEID::Reciprocal:
{
throw unsupported_op("Unsupported op '" + node.description() + "'");
break;
}
case OP_TYPEID::Relu:
{
size_t element_count = shape_size(node.get_output_shape(0));
......
......@@ -2507,11 +2507,6 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
node = make_shared<op::Range>(args[0], args[1], args[2]);
break;
}
case OP_TYPEID::Reciprocal:
{
node = make_shared<op::Reciprocal>(args[0]);
break;
}
case OP_TYPEID::ReduceMean_v1:
{
auto keep_dims = node_js.at("keep_dims").get<bool>();
......@@ -4278,8 +4273,6 @@ json JSONSerializer::serialize_node(const Node& n)
}
case OP_TYPEID::Range: { break;
}
case OP_TYPEID::Reciprocal: { break;
}
case OP_TYPEID::Recv:
{
auto tmp = static_cast<const op::Recv*>(&n);
......
......@@ -183,20 +183,6 @@ NGRAPH_TEST(${BACKEND_NAME}, prelu)
EXPECT_EQ(expected, read_vector<float>(result0));
}
NGRAPH_TEST(${BACKEND_NAME}, reciprocal)
{
    // Elementwise reciprocal: for each input element x, expect 1 / x.
    const Shape data_shape{3, 2};
    const auto arg = make_shared<op::Parameter>(element::f32, data_shape);
    const auto recip = make_shared<op::Reciprocal>(arg);
    const auto func = make_shared<Function>(NodeVector{recip}, ParameterVector{arg});

    auto test_case = test::NgraphTestCase(func, "${BACKEND_NAME}");
    test_case.add_input(vector<float>{1, 2, 3, 4, 5, 6});
    test_case.add_expected_output(
        Shape{3, 2}, vector<float>{1.0f, 1 / 2.0f, 1 / 3.0f, 1 / 4.0f, 1 / 5.0f, 1 / 6.0f});
    test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, hardsigmoid)
{
const Shape shape{2, 7};
......
ir_version: 3
producer_name: "backend-test"
graph {
node {
input: "x"
output: "y"
op_type: "Reciprocal"
}
name: "test_reciprocal"
input {
name: "x"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 3
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 3
}
dim {
dim_value: 2
}
}
}
}
}
}
opset_import {
version: 9
}
......@@ -1831,3 +1831,16 @@ NGRAPH_TEST(onnx_${BACKEND_NAME}, model_gatherND_float)
test_case.run();
}
NGRAPH_TEST(onnx_${BACKEND_NAME}, model_reciprocal)
{
    // Import the ONNX Reciprocal model and verify y[i] == 1 / x[i] on a 3x2 tensor.
    const auto function = onnx_import::import_onnx_model(
        file_util::path_join(SERIALIZED_ZOO, "onnx/reciprocal.prototxt"));

    auto test_case = ngraph::test::NgraphTestCase(function, "${BACKEND_NAME}");
    test_case.add_input<float>({1.f, 2.f, 3.f, 4.f, 5.f, 6.f});
    test_case.add_expected_output<float>(
        Shape{3, 2}, {1.f, 1 / 2.f, 1 / 3.f, 1 / 4.f, 1 / 5.f, 1 / 6.f});
    test_case.run();
}
......@@ -1059,15 +1059,6 @@ namespace
EXPECT_FALSE(node.is_binary_elementwise_logical());
}
// Verifies the category flags of op::Reciprocal: it is none of the
// elementwise arithmetic/comparison/logical categories (consistent with it
// being declared under op/fused/ elsewhere in this commit — it decomposes
// into other ops rather than being a primitive elementwise op).
void op_is_Reciprocal()
{
    // Default-constructed node is sufficient: the category predicates do not
    // depend on inputs or shapes.
    op::Reciprocal node;
    EXPECT_FALSE(node.is_unary_elementwise_arithmetic());
    EXPECT_FALSE(node.is_binary_elementwise_arithmetic());
    EXPECT_FALSE(node.is_binary_elementwise_comparison());
    EXPECT_FALSE(node.is_binary_elementwise_logical());
}
void op_is_Relu()
{
op::Relu node;
......
......@@ -40,11 +40,3 @@ TEST(type_prop, unary_arithmetic_bad_argument_element_types)
FAIL() << "Deduced type check failed for unexpected reason";
}
}
// Type propagation for Reciprocal: the output must carry the input's element
// type and shape unchanged.
TEST(type_prop, reciprocal)
{
    const auto param = make_shared<op::Parameter>(element::f32, Shape{2, 3, 4});
    // Renamed from the misleading 'pad' (copy-paste from a Pad test) so the
    // variable matches the op under test.
    const auto reciprocal = make_shared<op::Reciprocal>(param);
    EXPECT_EQ(reciprocal->get_element_type(), element::f32);
    EXPECT_EQ(reciprocal->get_shape(), (Shape{2, 3, 4}));
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment