Commit c831dc71 authored by Tomasz Socha, committed by Scott Cyphers

[FUSED] Change alpha attribute in Elu from dynamic to static (#3332)

* [FUSED] Change alpha attribute in Elu from dynamic to static

* Fix python API

* Fix failing python test

* Remove unnecessary test

* Fix merge artifact

* Review fix I

* Review Fix PyAPI
parent 64e975c6
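The net effect of the change, as a minimal before/after construction sketch (variable names are illustrative, not taken from the diff):

    // Before this commit, alpha entered the graph as a second input node:
    //   auto alpha_node = std::make_shared<ngraph::op::Constant>(
    //       ngraph::element::f32, ngraph::Shape{}, std::vector<float>{0.5f});
    //   auto elu = std::make_shared<ngraph::op::Elu>(data, alpha_node);
    // After it, alpha is a static attribute baked in at construction:
    auto data = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{3, 2});
    auto elu = std::make_shared<ngraph::op::Elu>(data, 0.5);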
@@ -62,7 +62,7 @@ def constant(value, dtype=None, name=None):  # type: (NumericData, NumericType,
 @nameable_op
-def elu(data, alpha, name=None):  # type: (NodeInput, NodeInput, str) -> Node
+def elu(data, alpha, name=None):  # type: (NodeInput, NumericType, str) -> Node
     """Perform Exponential Linear Unit operation element-wise on data from input node.

     Computes exponential linear: alpha * (exp(data) - 1) if < 0, data otherwise.
@@ -72,11 +72,11 @@ def elu(data, alpha, name=None):  # type: (NodeInput, NodeInput, str) -> Node
     <http://arxiv.org/abs/1511.07289>`_

     :param data: Input tensor. One of: input node, array or scalar.
-    :param alpha: Multiplier for negative values. One of: input node or scalar value.
+    :param alpha: Scalar multiplier for negative values.
     :param name: Optional output node name.
     :return: The new node performing an ELU operation on its input data element-wise.
     """
-    return Elu(as_node(data), as_node(alpha))
+    return Elu(as_node(data), alpha)


 @nameable_op
...
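As a quick check of the docstring's formula with the scalar alpha now accepted: alpha = 0.5 and data = -2 give 0.5 * (exp(-2) - 1) ≈ -0.432332, and data = -1 gives ≈ -0.316060, while non-negative inputs pass through unchanged; these are exactly the values asserted in the backend tests further down.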
@@ -26,5 +26,5 @@ void regclass_pyngraph_op_Elu(py::module m)
 {
     py::class_<ngraph::op::Elu, std::shared_ptr<ngraph::op::Elu>, ngraph::op::Op> elu(m, "Elu");
     elu.doc() = "ngraph.impl.op.Elu wraps ngraph::op::Elu";
-    elu.def(py::init<const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&>());
+    elu.def(py::init<const std::shared_ptr<ngraph::Node>&, const double>());
 }
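On the Python side this means the binding now matches a plain float for alpha: a call like `Elu(data_node, 1.0)` resolves against `py::init<const std::shared_ptr<ngraph::Node>&, const double>()`, while passing a wrapped Node for alpha no longer matches any overload and fails at call time.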
@@ -19,25 +19,6 @@ import ngraph as ng
 from test.ngraph.util import get_runtime


-def test_elu_operator_with_parameters():
-    runtime = get_runtime()
-
-    data_shape = [2, 2]
-    alpha_shape = [2]
-
-    parameter_data = ng.parameter(data_shape, name='Data', dtype=np.float32)
-    parameter_alpha = ng.parameter(alpha_shape, name='Alpha', dtype=np.float32)
-
-    model = ng.elu(parameter_data, parameter_alpha)
-    computation = runtime.computation(model, parameter_data, parameter_alpha)
-
-    value_data = np.array([[-5, 1], [-2, 3]], dtype=np.float32)
-    value_alpha = np.array([3, 3], dtype=np.float32)
-
-    result = computation(value_data, value_alpha)
-    expected = np.array([[-2.9797862, 1.], [-2.5939941, 3.]], dtype=np.float32)
-    assert np.allclose(result, expected)
-
-
 def test_elu_operator_with_scalar_and_array():
     runtime = get_runtime()
...
@@ -35,11 +35,7 @@ namespace ngraph
                     auto data = node.get_ng_inputs().at(0);
                     double alpha = node.get_attribute_value<double>("alpha", 1);

-                    std::shared_ptr<ngraph::Node> alpha_node =
-                        std::make_shared<ngraph::op::Constant>(
-                            data->get_element_type(), Shape{}, std::vector<double>{alpha});
-
-                    return NodeVector{std::make_shared<ngraph::op::Elu>(data, alpha_node)};
+                    return NodeVector{std::make_shared<ngraph::op::Elu>(data, alpha)};
                 }

             } // namespace set_1
...
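The importer change removes the intermediate Constant node entirely: the ONNX `alpha` attribute (default 1.0 per the ONNX operator spec, supplied by the second argument of `get_attribute_value`) now flows straight into the op constructor, condensed:

    auto data = node.get_ng_inputs().at(0);
    double alpha = node.get_attribute_value<double>("alpha", 1); // 1.0 when the attribute is absent
    return NodeVector{std::make_shared<ngraph::op::Elu>(data, alpha)};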
@@ -17,6 +17,7 @@
 #include "ngraph/builder/make_constant.hpp"
 #include "ngraph/op/add.hpp"
+#include "ngraph/op/constant.hpp"
 #include "ngraph/op/exp.hpp"
 #include "ngraph/op/maximum.hpp"
 #include "ngraph/op/minimum.hpp"
@@ -29,8 +30,9 @@ using namespace ngraph;
 const string op::Elu::type_name{"Elu"};

-op::Elu::Elu(const Output<Node>& data, const Output<Node>& alpha)
-    : FusedOp({data, alpha})
+op::Elu::Elu(const Output<Node>& data, const double alpha)
+    : FusedOp({data})
+    , m_alpha{alpha}
 {
     constructor_validate_and_infer_types();
 }
@@ -38,7 +40,8 @@ op::Elu::Elu(const Output<Node>& data, const Output<Node>& alpha)
 NodeVector op::Elu::decompose_op() const
 {
     auto data = input_value(0);
-    auto alpha_node = input_value(1);
+    shared_ptr<Node> alpha_node =
+        make_shared<op::Constant>(data.get_element_type(), Shape{}, vector<double>{m_alpha});

     alpha_node = ngraph::op::numpy_style_broadcast(alpha_node, data.get_shape());
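The hunk cuts off before the arithmetic, but a decomposition consistent with the headers included above (exp, maximum, minimum) and with both backend tests below, including the negative-alpha case, is

    elu(x) = max(x, 0) + alpha * (exp(min(x, 0)) - 1)

e.g. alpha = -1 and x = -2 give -1 * (exp(-2) - 1) ≈ 0.864665, matching elu_negative_alpha. This is a sketch inferred from the test vectors, not read from the collapsed lines.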
@@ -53,9 +56,6 @@ NodeVector op::Elu::decompose_op() const
 shared_ptr<Node> op::Elu::copy_with_new_args(const NodeVector& new_args) const
 {
-    if (new_args.size() != 2)
-    {
-        throw ngraph_error("Incorrect number of new arguments");
-    }
-    return make_shared<Elu>(new_args.at(0), new_args.at(1));
+    check_new_args_count(this, new_args);
+    return make_shared<Elu>(new_args.at(0), m_alpha);
 }
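With alpha gone from the inputs, `check_new_args_count` replaces the hand-rolled size check (it appears to validate the argument count against the node's now-single input), and the clone re-applies the stored `m_alpha` rather than expecting a second node. A hedged cloning sketch (`new_data` hypothetical):

    ngraph::NodeVector new_args{new_data};
    auto clone = elu->copy_with_new_args(new_args);
    // std::static_pointer_cast<ngraph::op::Elu>(clone)->get_alpha() == elu->get_alpha()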
@@ -39,12 +39,16 @@ namespace ngraph
                 ///
                 /// \param data Input tensor
                 /// \param alpha Multiplier for negative values
-                Elu(const Output<Node>& data, const Output<Node>& alpha);
+                Elu(const Output<Node>& data, const double alpha);

                 virtual NodeVector decompose_op() const override;

                 virtual std::shared_ptr<Node>
                     copy_with_new_args(const NodeVector& new_args) const override;
+
+                double get_alpha() const { return m_alpha; }
+            private:
+                const double m_alpha;
             };
-    }
-}
+    } // namespace op
+} // namespace ngraph
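Two consequences of the header change: `m_alpha` is a const member, so the value is fixed for the node's lifetime and clones must route it back through the constructor, and `get_alpha()` gives passes and the serializer read access without inspecting a second input. A minimal accessor sketch:

    auto data = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32, ngraph::Shape{2, 4});
    auto elu = std::make_shared<ngraph::op::Elu>(data, 0.5);
    double a = elu->get_alpha(); // 0.5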
@@ -1170,7 +1170,8 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
         }
         case OP_TYPEID::Elu:
         {
-            node = make_shared<op::Elu>(args[0], args[1]);
+            auto alpha = node_js.at("alpha").get<double>();
+            node = make_shared<op::Elu>(args[0], alpha);
             break;
         }
         case OP_TYPEID::EmbeddingLookup:
@@ -2375,7 +2376,11 @@ json JSONSerializer::serialize_node(const Node& n)
         node["ellipsis_mask"] = tmp->get_ellipsis_mask();
         break;
     }
-    case OP_TYPEID::Elu: { break;
+    case OP_TYPEID::Elu:
+    {
+        auto tmp = dynamic_cast<const op::Elu*>(&n);
+        node["alpha"] = tmp->get_alpha();
+        break;
     }
     case OP_TYPEID::EmbeddingLookup: { break;
     }
...
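Together the two serializer hunks give Elu a stable JSON round trip: serialization now records the attribute (an "alpha" key alongside the node's other fields, exact layout assumed), and deserialization reads it back with `node_js.at("alpha").get<double>()` before reconstructing the node, so a graph saved with alpha = 0.5 reloads with `get_alpha() == 0.5`. Previously the Elu case serialized no attributes at all and the value survived only as a separate Constant input.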
@@ -42,13 +42,11 @@ static string s_manifest = "${MANIFEST}";
 NGRAPH_TEST(${BACKEND_NAME}, elu)
 {
     auto A = make_shared<op::Parameter>(element::f32, Shape{3, 2});
-    auto B = make_shared<op::Parameter>(element::f32, Shape{});
-    auto elu = make_shared<op::Elu>(A, B);
-    auto function = make_shared<Function>(NodeVector{elu}, ParameterVector{A, B});
+    auto elu = make_shared<op::Elu>(A, 0.5f);
+    auto function = make_shared<Function>(NodeVector{elu}, ParameterVector{A});

     auto test_case = test::NgraphTestCase(function, "${BACKEND_NAME}");
     test_case.add_input(vector<float>{-2.f, 3.f, -2.f, 1.f, -1.f, 0.f});
-    test_case.add_input(vector<float>{0.5f});
     test_case.add_expected_output(
         vector<float>{-0.432332358f, 3.f, -0.432332358f, 1.f, -0.316060279f, 0.f});
     test_case.run();
@@ -57,13 +55,11 @@ NGRAPH_TEST(${BACKEND_NAME}, elu)
 NGRAPH_TEST(${BACKEND_NAME}, elu_negative_alpha)
 {
     auto A = make_shared<op::Parameter>(element::f32, Shape{3, 2});
-    auto B = make_shared<op::Parameter>(element::f32, Shape{});
-    auto elu = make_shared<op::Elu>(A, B);
-    auto function = make_shared<Function>(NodeVector{elu}, ParameterVector{A, B});
+    auto elu = make_shared<op::Elu>(A, -1.f);
+    auto function = make_shared<Function>(NodeVector{elu}, ParameterVector{A});

     auto test_case = test::NgraphTestCase(function, "${BACKEND_NAME}");
     test_case.add_input(vector<float>{-2.f, 3.f, -2.f, 1.f, -1.f, 0.f});
-    test_case.add_input(vector<float>{-1.f});
     test_case.add_expected_output(
         vector<float>{0.864664717f, 3.f, 0.864664717f, 1.f, 0.632120559f, 0.f});
     test_case.run();
...
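The expected values follow from the decomposition sketched earlier: with alpha = -1, inputs -2 and -1 give -1 * (exp(-2) - 1) ≈ 0.864665 and -1 * (exp(-1) - 1) ≈ 0.632121, while positive inputs and 0 pass through unchanged, matching the vector above.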
@@ -25,8 +25,7 @@ TEST(type_prop, elu)
 {
     Shape data_shape{2, 4};
     auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    auto alpha = make_shared<op::Parameter>(element::f32, Shape{});
-    auto elu = make_shared<op::Elu>(data, alpha);
+    auto elu = make_shared<op::Elu>(data, 1);
     ASSERT_EQ(elu->get_element_type(), element::f32);
     ASSERT_EQ(elu->get_shape(), data_shape);
 }