Commit 28a96f83 authored by Tomasz Dołbniak, committed by Scott Cyphers

[SPEC] HardSigmoid adjustments (#3857)

* Construct HardSigmoid with alpha and beta as inputs

* Switch to the new HardSigmoid constructor entirely

* Broadcast with numpy style in hard sigmoid

* Python bindings adjustment to the new constructor

* Different way of creating constants

* Accept scalars instead of 1D vectors for alpha and beta

* Adjust the python tests to the new HardSigmoid constructor

* Use v1 ops in fused HardSigmoid

* Relax the static shape requirement for alpha and beta

* Fix merge
parent 0d688830
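
For reference, HardSigmoid computes the element-wise, piecewise-linear approximation of the logistic sigmoid

    y = min(max(alpha * x + beta, 0), 1)

which is what the tests and the decomposition below implement; after this change, alpha and beta enter the graph as scalar inputs instead of float attributes baked into the op.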
@@ -27,5 +27,7 @@ void regclass_pyngraph_op_HardSigmoid(py::module m)
     py::class_<ngraph::op::HardSigmoid, std::shared_ptr<ngraph::op::HardSigmoid>, ngraph::op::Op>
         hardsigmoid(m, "HardSigmoid");
     hardsigmoid.doc() = "ngraph.impl.op.HardSigmoid wraps ngraph::op::HardSigmoid";
-    hardsigmoid.def(py::init<const std::shared_ptr<ngraph::Node>&, float&, float&>());
+    hardsigmoid.def(py::init<const std::shared_ptr<ngraph::Node>&,
+                             const std::shared_ptr<ngraph::Node>&,
+                             const std::shared_ptr<ngraph::Node>&>());
 }

@@ -351,17 +351,19 @@ def test_hard_sigmoid_operator():
     runtime = get_runtime()
     data_shape = [3]

-    alpha = np.float32(0.5)
-    beta = np.float32(0.6)
+    alpha_value = np.float32(0.5)
+    beta_value = np.float32(0.6)
     data_value = np.array([-1, 0, 1], dtype=np.float32)

     parameter_data = ng.parameter(data_shape, name='Data', dtype=np.float32)
+    parameter_alpha = ng.parameter([], name='Alpha', dtype=np.float32)
+    parameter_beta = ng.parameter([], name='Beta', dtype=np.float32)

-    model = ng.hard_sigmoid(parameter_data, alpha, beta)
-    computation = runtime.computation(model, parameter_data)
+    model = ng.hard_sigmoid(parameter_data, parameter_alpha, parameter_beta)
+    computation = runtime.computation(model, parameter_data, parameter_alpha, parameter_beta)

-    result = computation(data_value)
+    result = computation(data_value, alpha_value, beta_value)

     expected = [0.1, 0.6, 1.]
     assert np.allclose(result, expected)

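A quick check of the expected values: with alpha = 0.5 and beta = 0.6, min(max(0.5 * -1 + 0.6, 0), 1) = 0.1, min(max(0.5 * 0 + 0.6, 0), 1) = 0.6, and min(max(0.5 * 1 + 0.6, 0), 1) = min(1.1, 1) = 1.0 — matching expected = [0.1, 0.6, 1.].
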
@@ -17,6 +17,7 @@
 #include <memory>

 #include "hard_sigmoid.hpp"
+#include "ngraph/op/constant.hpp"
 #include "ngraph/op/fused/hard_sigmoid.hpp"

 using namespace ngraph::op;

@@ -31,10 +32,17 @@ namespace ngraph
         {
             NodeVector hard_sigmoid(const Node& node)
             {
-                auto data = node.get_ng_inputs().at(0);
+                const auto data = node.get_ng_inputs().at(0);

-                double alpha = node.get_attribute_value<double>("alpha", 0.2);
-                double beta = node.get_attribute_value<double>("beta", 0.5);
+                const auto alpha = Constant::create<double>(
+                    data->get_element_type(),
+                    Shape{},
+                    std::vector<double>{node.get_attribute_value<double>("alpha", 0.2)});
+
+                const auto beta = Constant::create<double>(
+                    data->get_element_type(),
+                    Shape{},
+                    std::vector<double>{node.get_attribute_value<double>("beta", 0.5)});

                 return {std::make_shared<ngraph::op::HardSigmoid>(data, alpha, beta)};
             }

@@ -29,41 +29,73 @@ using namespace ngraph;

 constexpr NodeTypeInfo op::HardSigmoid::type_info;

-op::HardSigmoid::HardSigmoid(const Output<Node>& data, float alpha, float beta)
-    : FusedOp({data})
-    , m_alpha(alpha)
-    , m_beta(beta)
+op::HardSigmoid::HardSigmoid(const Output<Node>& data,
+                             const Output<Node>& alpha,
+                             const Output<Node>& beta)
+    : FusedOp({data, alpha, beta})
 {
     constructor_validate_and_infer_types();
 }

+void op::HardSigmoid::pre_validate_and_infer_types()
+{
+    const auto& alpha_pshape = get_input_partial_shape(1);
+    const auto& beta_pshape = get_input_partial_shape(2);
+
+    if (alpha_pshape.is_static())
+    {
+        const auto alpha_shape = alpha_pshape.to_shape();
+
+        NODE_VALIDATION_CHECK(this,
+                              is_scalar(alpha_shape),
+                              "A scalar is expected for the 'alpha' input. Got: ",
+                              alpha_shape);
+    }
+
+    if (beta_pshape.is_static())
+    {
+        const auto beta_shape = beta_pshape.to_shape();
+
+        NODE_VALIDATION_CHECK(this,
+                              is_scalar(beta_shape),
+                              "A scalar is expected for the 'beta' input. Got: ",
+                              beta_shape);
+    }
+
+    const auto& data_et = input(0).get_element_type();
+    const auto& alpha_et = input(1).get_element_type();
+    const auto& beta_et = input(2).get_element_type();
+
+    NODE_VALIDATION_CHECK(
+        this,
+        data_et == alpha_et && data_et == beta_et,
+        "The element types of both alpha and beta inputs must match the data input type.");
+}
+
 NodeVector op::HardSigmoid::decompose_op() const
 {
-    auto data = input_value(0);
-    auto data_shape = data.get_shape();
-    size_t elem_count = shape_size(data_shape);
-
-    std::shared_ptr<ngraph::Node> alpha_node = ngraph::op::Constant::create<float>(
-        data.get_element_type(), data_shape, std::vector<float>(elem_count, m_alpha));
-
-    std::shared_ptr<ngraph::Node> beta_node = ngraph::op::Constant::create<float>(
-        data.get_element_type(), data_shape, std::vector<float>(elem_count, m_beta));
-
-    std::shared_ptr<ngraph::Node> one_node = ngraph::op::Constant::create<float>(
-        data.get_element_type(), data_shape, std::vector<float>(elem_count, 1.0));
-
-    std::shared_ptr<ngraph::Node> zero_node = ngraph::op::Constant::create<float>(
-        data.get_element_type(), data_shape, std::vector<float>(elem_count, 0.0));
-
-    return {std::make_shared<op::Minimum>(
-        std::make_shared<op::Maximum>(alpha_node * data + beta_node, zero_node), one_node)};
+    const auto data = input_value(0);
+
+    const auto one_node =
+        ngraph::op::Constant::create<float>(data.get_element_type(), data.get_shape(), {1.0f});
+
+    const auto zero_node =
+        ngraph::op::Constant::create<float>(data.get_element_type(), data.get_shape(), {0.0f});
+
+    const auto alpha_node = input_value(1).get_node_shared_ptr();
+    const auto beta_node = input_value(2).get_node_shared_ptr();
+
+    std::shared_ptr<Node> alpha_x_plus_beta =
+        std::make_shared<op::v1::Multiply>(alpha_node, data, AutoBroadcastType::NUMPY);
+
+    alpha_x_plus_beta =
+        std::make_shared<op::v1::Add>(alpha_x_plus_beta, beta_node, AutoBroadcastType::NUMPY);
+
+    return {std::make_shared<op::v1::Minimum>(
+        std::make_shared<op::v1::Maximum>(alpha_x_plus_beta, zero_node), one_node)};
 }

 shared_ptr<Node> op::HardSigmoid::copy_with_new_args(const NodeVector& new_args) const
 {
-    if (new_args.size() != 1)
-    {
-        throw ngraph_error("Incorrect number of new arguments");
-    }
-
-    return make_shared<HardSigmoid>(new_args.at(0), m_alpha, m_beta);
+    check_new_args_count(this, new_args);
+
+    return make_shared<HardSigmoid>(new_args.at(0), new_args.at(1), new_args.at(2));
 }

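Note that the decomposition now leans on the NUMPY broadcast specification passed to v1::Multiply and v1::Add: the rank-0 alpha and beta inputs broadcast against data of any shape (Shape{} against, say, Shape{2, 7} yields Shape{2, 7}), so there is no longer any need to materialize full-size constant tensors holding shape_size(data_shape) copies of each value, as the old code did.
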
@@ -34,23 +34,21 @@ namespace ngraph
             static constexpr NodeTypeInfo type_info{"HardSigmoid", 0};
             const NodeTypeInfo& get_type_info() const override { return type_info; }
             HardSigmoid() = default;
             /// \brief Constructs a HardSigmoid operation.
             ///
             /// \param data Input tensor.
-            /// \param[in] alpha The alpha parameter.
-            /// \param[in] beta The beta parameter.
+            /// \param[in] alpha A scalar value representing the alpha parameter.
+            /// \param[in] beta A scalar value representing the beta parameter.
             ///
-            HardSigmoid(const Output<Node>& data, float alpha, float beta);
+            HardSigmoid(const Output<Node>& data,
+                        const Output<Node>& alpha,
+                        const Output<Node>& beta);

+            virtual void pre_validate_and_infer_types() override;
             virtual NodeVector decompose_op() const override;
             virtual std::shared_ptr<Node>
                 copy_with_new_args(const NodeVector& new_args) const override;
-
-            float get_alpha() const { return m_alpha; }
-            float get_beta() const { return m_beta; }
-        private:
-            float m_alpha;
-            float m_beta;
         };
     }
 }

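As a usage sketch (not part of the commit), a caller might wrap plain floats into the scalar constants this new signature expects; make_hard_sigmoid below is a hypothetical helper, and the 0.2f/0.5f defaults merely echo the ONNX defaults from the importer above:

    #include "ngraph/op/constant.hpp"
    #include "ngraph/op/fused/hard_sigmoid.hpp"

    using namespace ngraph;

    // Hypothetical helper: builds the rank-0 (Shape{}) Constant nodes that
    // pre_validate_and_infer_types() accepts, matching data's element type.
    std::shared_ptr<op::HardSigmoid> make_hard_sigmoid(const Output<Node>& data,
                                                       float alpha_f = 0.2f,
                                                       float beta_f = 0.5f)
    {
        const auto alpha = op::Constant::create<float>(data.get_element_type(), Shape{}, {alpha_f});
        const auto beta = op::Constant::create<float>(data.get_element_type(), Shape{}, {beta_f});
        return std::make_shared<op::HardSigmoid>(data, alpha, beta);
    }
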
@@ -20,6 +20,7 @@
 #include <unordered_map>

 #include "activation_functions.hpp"
+#include "ngraph/op/constant.hpp"
 #include "ngraph/op/fused/hard_sigmoid.hpp"
 #include "ngraph/op/relu.hpp"
 #include "ngraph/op/sigmoid.hpp"

@@ -45,7 +46,10 @@ static shared_ptr<Node> relu(const shared_ptr<Node>& arg, float /* alpha */, float /* beta */)

 static shared_ptr<Node> hardsigmoid(const shared_ptr<Node>& arg, float alpha, float beta)
 {
-    return make_shared<op::HardSigmoid>(arg, alpha, beta);
+    const auto alpha_node = op::Constant::create<float>(arg->get_element_type(), Shape{}, {alpha});
+    const auto beta_node = op::Constant::create<float>(arg->get_element_type(), Shape{}, {beta});
+
+    return make_shared<op::HardSigmoid>(arg, alpha_node, beta_node);
 }

 op::util::ActivationFunction::ActivationFunction(ActivationFunctionType f, float alpha, float beta)

@@ -1798,9 +1798,7 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
     }
     case OP_TYPEID::HardSigmoid_v1:
    {
-        auto alpha = node_js.at("alpha").get<float>();
-        auto beta = node_js.at("beta").get<float>();
-        node = make_shared<op::HardSigmoid>(args[0], alpha, beta);
+        node = make_shared<op::HardSigmoid>(args[0], args[1], args[2]);
         break;
     }
     case OP_TYPEID::Interpolate_v1: { break;
@@ -3703,12 +3701,7 @@ json JSONSerializer::serialize_node(const Node& n)
         node["output_shape"] = tmp->get_output_shape();
         break;
     }
-    case OP_TYPEID::HardSigmoid_v1:
-    {
-        auto tmp = static_cast<const op::HardSigmoid*>(&n);
-        node["alpha"] = tmp->get_alpha();
-        node["beta"] = tmp->get_beta();
-        break;
+    case OP_TYPEID::HardSigmoid_v1: { break;
     }
     case OP_TYPEID::Interpolate_v1: { break;
     }

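Because alpha and beta are now ordinary graph inputs (args[1] and args[2] on the deserializer side), HardSigmoid_v1 carries no attributes of its own anymore, and its serialize case reduces to a bare break.
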
@@ -111,11 +111,15 @@ NGRAPH_TEST(${BACKEND_NAME}, reciprocal)

 NGRAPH_TEST(${BACKEND_NAME}, hardsigmoid)
 {
-    Shape shape{2, 7};
-    float alpha = 0.125f;
-    float beta = 0.642f;
+    const Shape shape{2, 7};
+    const float alpha_f = 0.125f;
+    const float beta_f = 0.642f;

-    auto A = make_shared<op::Parameter>(element::f32, shape);
+    const auto A = make_shared<op::Parameter>(element::f32, shape);
+    const auto alpha = op::Constant::create<float>(A->get_element_type(), Shape{}, {alpha_f});
+    const auto beta = op::Constant::create<float>(A->get_element_type(), Shape{}, {beta_f});
+
     auto hardsigmoid = make_shared<op::HardSigmoid>(A, alpha, beta);
     auto f0 = make_shared<Function>(NodeVector{hardsigmoid}, ParameterVector{A});

@@ -137,7 +141,7 @@ NGRAPH_TEST(${BACKEND_NAME}, hardsigmoid)
         numeric_limits<float>::min() / 16.f,
         -numeric_limits<float>::min() / 16.f};

-    auto impl = [alpha, beta](float val) { return min(max(alpha * val + beta, 0.f), 1.f); };
+    auto impl = [alpha_f, beta_f](float val) { return min(max(alpha_f * val + beta_f, 0.f), 1.f); };

     vector<float> expected_output;
     transform(begin(input_data), end(input_data), back_inserter(expected_output), impl);

@@ -23,11 +23,12 @@ using namespace ngraph;

 TEST(type_prop, hardsigmoid)
 {
-    Shape data_shape{3, 5};
-    float alpha = 0.1;
-    float beta = 1.2;
-
-    auto P = make_shared<op::Parameter>(element::f32, data_shape);
-    auto H = make_shared<op::HardSigmoid>(P, alpha, beta);
+    const Shape data_shape{3, 5};
+
+    const auto P = make_shared<op::Parameter>(element::f32, data_shape);
+    const auto alpha = op::Constant::create<float>(P->get_element_type(), Shape{}, {0.1f});
+    const auto beta = op::Constant::create<float>(P->get_element_type(), Shape{}, {1.2f});
+    const auto H = make_shared<op::HardSigmoid>(P, alpha, beta);
+
     ASSERT_EQ(H->get_element_type(), element::f32);
     ASSERT_EQ(H->get_shape(), data_shape);
 }