Unverified Commit 7f779a1f authored by Scott Cyphers's avatar Scott Cyphers Committed by GitHub

Merge branch 'master' into ayzhuang/cf_gather

parents 6ed9e990 f7ea45e6
......@@ -213,6 +213,8 @@ set (SRC
op/strided_slice.hpp
op/floor.cpp
op/floor.hpp
op/floor_mod.cpp
op/floor_mod.hpp
op/gather.cpp
op/gather.hpp
op/gather_nd.cpp
......@@ -373,6 +375,8 @@ set (SRC
op/fused/partial_slice.hpp
op/fused/prelu.cpp
op/fused/prelu.hpp
op/fused/reciprocal.cpp
op/fused/reciprocal.hpp
op/fused/rnn_cell.cpp
op/fused/rnn_cell.hpp
op/fused/scale_shift.cpp
......
......@@ -17,8 +17,7 @@
#include <memory>
#include <vector>
#include "ngraph/op/constant.hpp"
#include "ngraph/op/divide.hpp"
#include "ngraph/op/fused/reciprocal.hpp"
#include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/shape.hpp"
......@@ -36,11 +35,7 @@ namespace ngraph
{
auto data = node.get_ng_inputs().at(0);
std::shared_ptr<ngraph::Node> one_node = std::make_shared<ngraph::op::Constant>(
data->get_element_type(), Shape{}, std::vector<double>{1});
one_node = ngraph::op::make_broadcast_node(one_node, data->get_shape());
return {one_node / data};
return {std::make_shared<ngraph::op::Reciprocal>(data)};
}
} // namespace set_1
......
......@@ -130,6 +130,7 @@ namespace ngraph
#include "ngraph/op/experimental/tile.hpp"
#include "ngraph/op/experimental/transpose.hpp"
#include "ngraph/op/floor.hpp"
#include "ngraph/op/floor_mod.hpp"
#include "ngraph/op/fused/clamp.hpp"
#include "ngraph/op/fused/conv_fused.hpp"
#include "ngraph/op/fused/depth_to_space.hpp"
......@@ -151,6 +152,7 @@ namespace ngraph
#include "ngraph/op/fused/normalize_l2.hpp"
#include "ngraph/op/fused/partial_slice.hpp"
#include "ngraph/op/fused/prelu.hpp"
#include "ngraph/op/fused/reciprocal.hpp"
#include "ngraph/op/fused/rnn_cell.hpp"
#include "ngraph/op/fused/scale_shift.hpp"
#include "ngraph/op/fused/selu.hpp"
......
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "ngraph/op/floor_mod.hpp"
using namespace std;
using namespace ngraph;
// Out-of-line definition required for the in-class static constexpr type_info member.
constexpr NodeTypeInfo op::v1::FloorMod::type_info;

// Constructs a FloorMod node over two inputs; shape/type validation and
// inference run immediately via constructor_validate_and_infer_types().
op::v1::FloorMod::FloorMod(const Output<Node>& arg0,
                           const Output<Node>& arg1,
                           const AutoBroadcastSpec& auto_broadcast)
    : BinaryElementwiseArithmetic(arg0, arg1, auto_broadcast)
{
    constructor_validate_and_infer_types();
}
shared_ptr<Node> op::v1::FloorMod::copy_with_new_args(const NodeVector& new_args) const
{
    // A clone must be handed exactly as many arguments as this node's arity.
    check_new_args_count(this, new_args);
    const auto& dividend = new_args.at(0);
    const auto& divisor = new_args.at(1);
    return make_shared<FloorMod>(dividend, divisor, get_autob());
}
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include <memory>
#include "ngraph/op/util/binary_elementwise_arithmetic.hpp"
namespace ngraph
{
    namespace op
    {
        namespace v1
        {
            /// \brief Elementwise FloorMod operation.
            ///
            class FloorMod : public util::BinaryElementwiseArithmetic
            {
            public:
                NGRAPH_API
                static constexpr NodeTypeInfo type_info{"FloorMod", 1};
                const NodeTypeInfo& get_type_info() const override { return type_info; }
                /// \brief Constructs an uninitialized FloorMod operation.
                FloorMod() = default;

                /// \brief Constructs a FloorMod operation.
                ///
                /// \param arg0 Output that produces the first input tensor.<br>
                /// `[d0, ...]`
                /// \param arg1 Output that produces the second input tensor.<br>
                /// `[d0, ...]`
                /// \param auto_broadcast Auto broadcast specification
                ///
                /// Output `[d0, ...]`
                ///
                FloorMod(const Output<Node>& arg0,
                         const Output<Node>& arg1,
                         const AutoBroadcastSpec& auto_broadcast = AutoBroadcastType::NUMPY);

                /// \brief Clones this node with the given replacement arguments.
                std::shared_ptr<Node> copy_with_new_args(const NodeVector& new_args) const override;
            };
        } // namespace v1

        using v1::FloorMod;
    } // namespace op
} // namespace ngraph
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "ngraph/op/fused/reciprocal.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/divide.hpp"
using namespace std;
using namespace ngraph;
// Out-of-line definition required for the in-class static constexpr type_info member.
constexpr NodeTypeInfo op::Reciprocal::type_info;

// Constructs a Reciprocal fused op over a single input; validation and shape
// inference run immediately via constructor_validate_and_infer_types().
op::Reciprocal::Reciprocal(const Output<Node>& data)
    : FusedOp({data})
{
    constructor_validate_and_infer_types();
}
NodeVector op::Reciprocal::decompose_op() const
{
    // Reciprocal is lowered to an elementwise division: 1 / x.
    const auto x = input_value(0);
    const auto ones = op::Constant::create(x.get_element_type(), x.get_shape(), {1});
    auto quotient = make_shared<op::v1::Divide>(ones, x);
    return {quotient};
}
shared_ptr<Node> op::Reciprocal::copy_with_new_args(const NodeVector& new_args) const
{
    // A clone must be handed exactly one argument, matching this op's arity.
    check_new_args_count(this, new_args);
    const auto& replacement_data = new_args.at(0);
    return make_shared<Reciprocal>(replacement_data);
}
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include "ngraph/node.hpp"
#include "ngraph/op/op.hpp"
#include "ngraph/op/util/fused_op.hpp"
namespace ngraph
{
    namespace op
    {
        /// \brief Reciprocal operation
        /// f(x) = 1 / x
        ///
        /// Fused op: decompose_op() lowers it to a Constant(1) divided
        /// elementwise by the input.
        class Reciprocal : public ngraph::op::util::FusedOp
        {
        public:
            NGRAPH_API
            static constexpr NodeTypeInfo type_info{"Reciprocal", 0};
            const NodeTypeInfo& get_type_info() const override { return type_info; }
            /// \brief Constructs an uninitialized Reciprocal operation.
            Reciprocal() = default;

            /// \brief Constructs a Reciprocal operation.
            ///
            /// \param data Input tensor
            Reciprocal(const Output<Node>& data);

            /// \brief Lowers this op to `1 / data` using Constant and Divide.
            virtual NodeVector decompose_op() const override;

            /// \brief Clones this node with the given replacement arguments.
            virtual std::shared_ptr<Node>
                copy_with_new_args(const NodeVector& new_args) const override;
        };
    } // namespace op
} // namespace ngraph
......@@ -48,6 +48,7 @@ NGRAPH_OP(NormalizeL2, ngraph::op)
NGRAPH_OP(PartialSlice, ngraph::op)
NGRAPH_OP(PartialSliceBackprop, ngraph::op)
NGRAPH_OP(PRelu, ngraph::op)
NGRAPH_OP(Reciprocal, ngraph::op)
NGRAPH_OP(RNNCell, ngraph::op)
NGRAPH_OP(ScaleShift, ngraph::op)
NGRAPH_OP(Selu, ngraph::op)
......
......@@ -94,6 +94,7 @@ NGRAPH_OP(Equal, ngraph::op)
NGRAPH_OP(Erf, ngraph::op)
NGRAPH_OP(Exp, ngraph::op)
NGRAPH_OP(Floor, ngraph::op)
NGRAPH_OP(FloorMod, ngraph::op)
NGRAPH_OP(Gather, ngraph::op)
NGRAPH_OP(GatherND, ngraph::op)
NGRAPH_OP(GenerateMask, ngraph::op)
......
......@@ -1867,6 +1867,7 @@ private:
case OP_TYPEID::DynPad:
case OP_TYPEID::Tile:
case OP_TYPEID::DynReplaceSlice:
case OP_TYPEID::FloorMod:
throw unsupported_op("Unsupported op '" + node.description() + "'");
#if defined(__GNUC__) && !(__GNUC__ == 4 && __GNUC_MINOR__ == 8)
#pragma GCC diagnostic pop
......
......@@ -70,6 +70,7 @@
#include "ngraph/op/experimental/tile.hpp"
#include "ngraph/op/experimental/transpose.hpp"
#include "ngraph/op/floor.hpp"
#include "ngraph/op/floor_mod.hpp"
#include "ngraph/op/fused/clamp.hpp"
#include "ngraph/op/fused/conv_fused.hpp"
#include "ngraph/op/fused/depth_to_space.hpp"
......@@ -91,6 +92,7 @@
#include "ngraph/op/fused/normalize_l2.hpp"
#include "ngraph/op/fused/partial_slice.hpp"
#include "ngraph/op/fused/prelu.hpp"
#include "ngraph/op/fused/reciprocal.hpp"
#include "ngraph/op/fused/rnn_cell.hpp"
#include "ngraph/op/fused/scale_shift.hpp"
#include "ngraph/op/fused/selu.hpp"
......@@ -1543,6 +1545,12 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
node = make_shared<op::Floor>(args[0]);
break;
}
case OP_TYPEID::FloorMod:
{
node = make_shared<op::FloorMod>(
args[0], args[1], read_auto_broadcast(node_js, "auto_broadcast"));
break;
}
case OP_TYPEID::Gather:
{
if (op_version == 0)
......@@ -2379,6 +2387,11 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
node = make_shared<op::Range>(args[0], args[1], args[2]);
break;
}
case OP_TYPEID::Reciprocal:
{
node = make_shared<op::Reciprocal>(args[0]);
break;
}
case OP_TYPEID::Relu:
{
node = make_shared<op::Relu>(args[0]);
......@@ -3383,6 +3396,15 @@ json JSONSerializer::serialize_node(const Node& n)
}
case OP_TYPEID::Floor: { break;
}
case OP_TYPEID::FloorMod:
{
auto tmp = static_cast<const op::FloorMod*>(&n);
if (tmp->get_autob().m_type != op::AutoBroadcastType::NONE)
{
node["auto_broadcast"] = write_auto_broadcast(tmp->get_autob());
}
break;
}
case OP_TYPEID::Gather:
{
if (op_version == 0)
......@@ -3945,6 +3967,8 @@ json JSONSerializer::serialize_node(const Node& n)
}
case OP_TYPEID::Range: { break;
}
case OP_TYPEID::Reciprocal: { break;
}
case OP_TYPEID::Relu: { break;
}
case OP_TYPEID::ReluBackprop: { break;
......
......@@ -95,6 +95,20 @@ NGRAPH_TEST(${BACKEND_NAME}, prelu)
EXPECT_EQ(expected, read_vector<float>(result0));
}
NGRAPH_TEST(${BACKEND_NAME}, reciprocal)
{
    // Reciprocal computes 1/x elementwise; check a 3x2 tensor of 1..6.
    const Shape data_shape{3, 2};
    const auto param = make_shared<op::Parameter>(element::f32, data_shape);
    const auto recip = make_shared<op::Reciprocal>(param);
    const auto function = make_shared<Function>(NodeVector{recip}, ParameterVector{param});

    auto test_case = test::NgraphTestCase(function, "${BACKEND_NAME}");
    test_case.add_input(vector<float>{1, 2, 3, 4, 5, 6});
    test_case.add_expected_output(
        Shape{3, 2}, vector<float>{1.0f, 1 / 2.0f, 1 / 3.0f, 1 / 4.0f, 1 / 5.0f, 1 / 6.0f});
    test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, hardsigmoid)
{
Shape shape{2, 7};
......
......@@ -40,3 +40,11 @@ TEST(type_prop, unary_arithmetic_bad_argument_element_types)
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, reciprocal)
{
    // Reciprocal is elementwise: the output must keep the input's element
    // type and shape. (Local was previously named `pad`, a copy-paste
    // artifact from the Pad type-prop test — renamed for clarity.)
    auto param = make_shared<op::Parameter>(element::f32, Shape{2, 3, 4});
    auto reciprocal = make_shared<op::Reciprocal>(param);
    EXPECT_EQ(reciprocal->get_element_type(), element::f32);
    EXPECT_EQ(reciprocal->get_shape(), (Shape{2, 3, 4}));
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment