Commit c732705f authored by mbencer

Removed unnecessary reshape, added additional asserts

parent 090f16ec
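
For context, a minimal standalone sketch (not part of the commit) of the semantics that decompose_op now implements directly on the original input rank, without the old reshape-to-4D round trip: divide the input by its eps-biased L2 norm taken over the reduction axes. The function name, the 2-D layout, and the single reduction axis are illustrative assumptions.

// Reference sketch of NormalizeL2 with EpsMode::ADD, normalizing a
// rows x cols matrix along axis 1. Illustrative only, not nGraph code.
#include <cmath>
#include <cstddef>
#include <vector>

std::vector<float> normalize_l2_axis1(const std::vector<float>& data,
                                      std::size_t rows,
                                      std::size_t cols,
                                      float eps)
{
    std::vector<float> out(data.size());
    for (std::size_t r = 0; r < rows; ++r)
    {
        // Sum of squares over the reduction axis, biased by eps (EpsMode::ADD).
        float sum_sq = 0.f;
        for (std::size_t c = 0; c < cols; ++c)
        {
            const float v = data[r * cols + c];
            sum_sq += v * v;
        }
        const float norm = std::sqrt(sum_sq + eps);
        // Divide each element of the row by the row's L2 norm.
        for (std::size_t c = 0; c < cols; ++c)
        {
            out[r * cols + c] = data[r * cols + c] / norm;
        }
    }
    return out;
}

With EpsMode::MAX the bias would instead be max(sum_sq, eps) under the square root, which is what the builder::BiasMode::MAX branch in decompose_op selects.
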
......@@ -42,55 +42,66 @@ op::NormalizeL2::NormalizeL2(const Output<Node>& data,
void op::NormalizeL2::pre_validate_and_infer_types()
{
const auto& data_pshape = get_input_partial_shape(0);
auto axes_node = input(1).get_source_output().get_node_shared_ptr();
const auto& input_pshape = get_input_partial_shape(0);
const auto& axes_pshape = get_input_partial_shape(1);
NODE_VALIDATION_CHECK(this, data_pshape.is_static(), "Input data must be static.");
NODE_VALIDATION_CHECK(this, axes_pshape.is_static(), "Input axes must be static.");
const auto& input_rank = input_pshape.rank();
const auto& axes_rank = axes_pshape.rank();
NODE_VALIDATION_CHECK(this,
static_cast<size_t>(axes_pshape.rank()) == 1,
"Input axes must have rank equals 1 (axes shape: ",
axes_pshape,
").");
axes_node->is_constant(),
"doesn't support 'axes' input of other type than a Constant.");
if (axes_rank.is_static())
{
NODE_VALIDATION_CHECK(this,
static_cast<size_t>(axes_pshape.rank()) == 1,
"Input axes must have rank equals 1 (axes shape: ",
axes_pshape,
").");
if (input_rank.is_static())
{
const auto reduction_axes = get_reduction_axes();
for (auto axis : reduction_axes)
{
NODE_VALIDATION_CHECK(this,
axis < size_t(input_rank),
"Reduction axis (",
axis,
") is out of bounds ",
"(argument shape: ",
input_pshape,
")");
}
}
}
}
NodeVector op::NormalizeL2::decompose_op() const
AxisSet op::NormalizeL2::get_reduction_axes() const
{
Output<Node> data{input_value(0)};
const Shape input_shape{data.get_shape()};
// Reshape to 4D tensor.
if (input_shape.size() < 4)
AxisSet axes;
auto axes_input_node = input_value(1).get_node_shared_ptr();
if (auto const_op = dynamic_pointer_cast<op::Constant>(axes_input_node))
{
Shape data_shape(4 - input_shape.size(), 1);
copy(begin(input_shape), end(input_shape), back_inserter(data_shape));
data = builder::reshape(data, data_shape);
axes = const_op->get_axis_set_val();
}
return axes;
}
auto axes_node = input(1).get_source_output().get_node_shared_ptr();
NODE_VALIDATION_CHECK(this,
axes_node->is_constant(),
"doesn't support 'axes' input of other type than a Constant.");
NodeVector op::NormalizeL2::decompose_op() const
{
Output<Node> data{input_value(0)};
const Shape input_shape{data.get_shape()};
// Calculate norm over axes indicated by axes input param
auto axes_constant = dynamic_pointer_cast<op::Constant>(axes_node);
auto axes_vector = axes_constant->get_vector<size_t>();
AxisSet reduction_axes{axes_vector};
AxisSet reduction_axes = get_reduction_axes();
// Calculate l2 norm across axes determined by axes input
auto builder_bias_mode =
(m_eps_mode == EpsMode::MAX) ? builder::BiasMode::MAX : builder::BiasMode::ADD;
Output<Node> norm = builder::l2_norm(data, reduction_axes, m_eps, builder_bias_mode);
norm = numpy_style_broadcast(norm, data.get_shape());
data = data / norm;
// get back original input tensor rank
if (input_shape.size() < 4)
{
data = builder::reshape(data, input_shape);
}
data = make_shared<op::Divide>(data, norm, AutoBroadcastSpec(AutoBroadcastType::NUMPY));
return as_node_vector({data});
}
......
......@@ -54,6 +54,7 @@ namespace ngraph
EpsMode get_eps_mode() const { return m_eps_mode; }
virtual NodeVector decompose_op() const override;
virtual void pre_validate_and_infer_types() override;
AxisSet get_reduction_axes() const;
virtual std::shared_ptr<Node>
copy_with_new_args(const NodeVector& new_args) const override;
......
......@@ -69,13 +69,14 @@ gather_4d_indices_no_axis_2d_input
gemm
gemm_broadcast_input_C
normalize_across_chw_4d
normalize_across_empty_axes_input
normalize_across_h_4d
normalize_across_1axis_5d
normalize_across_123axes_5d
normalize_across_chw_4d_max_bias
normalize_across_chw_3d
normalize_across_chw_2d
normalize_across_hw_4d
normalize_invalid_input_tensor_rank
normalize_axes_input_not_constant
normalize_invalid_axes_rank
normalize_output_shape_across_chw
normalize_axes_out_of_bounds
hardsigmoid
model_erf
model_erf_int32
......
......@@ -582,7 +582,7 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_chw_4d)
{
Shape data_shape{1, 2, 3, 4};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
const auto axes = make_shared<op::Constant>(element::i64, Shape{3}, vector<int64_t>{1, 2, 3});
float eps{1e-6f};
auto eps_mode = op::EpsMode::ADD;
......@@ -605,73 +605,11 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_chw_4d)
test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
}
NGRAPH_TEST(${BACKEND_NAME}, normalize_across_chw_3d)
{
Shape data_shape{2, 3, 4};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
float eps{1e-6f};
auto eps_mode = op::EpsMode::ADD;
auto normalize = make_shared<op::NormalizeL2>(data, axes, eps, eps_mode);
auto function = make_shared<Function>(NodeVector{normalize}, ParameterVector{data});
auto test_case = test::NgraphTestCase(function, "${BACKEND_NAME}");
vector<float> input_data(shape_size(data_shape));
iota(begin(input_data), end(input_data), 1);
test_case.add_input<float>(input_data);
test_case.add_expected_output<float>(
data_shape, {0.01428571f, 0.02857143f, 0.04285714f, 0.05714286f, 0.07142857f, 0.08571429f,
0.1f, 0.11428571f, 0.12857144f, 0.14285715f, 0.15714286f, 0.17142858f,
0.18571429f, 0.2f, 0.21428572f, 0.22857143f, 0.24285714f, 0.25714287f,
0.27142859f, 0.2857143f, 0.30000001f, 0.31428573f, 0.32857144f, 0.34285715f});
test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
}
NGRAPH_TEST(${BACKEND_NAME}, normalize_across_chw_2d)
{
Shape data_shape{3, 4};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
float eps{1e-6f};
auto eps_mode = op::EpsMode::ADD;
auto normalize = make_shared<op::NormalizeL2>(data, axes, eps, eps_mode);
auto function = make_shared<Function>(NodeVector{normalize}, ParameterVector{data});
auto test_case = test::NgraphTestCase(function, "${BACKEND_NAME}");
vector<float> input_data(shape_size(data_shape));
iota(begin(input_data), end(input_data), 1);
test_case.add_input<float>(input_data);
test_case.add_expected_output<float>(data_shape,
{0.03922323f,
0.07844646f,
0.11766968f,
0.15689291f,
0.19611613f,
0.23533936f,
0.2745626f,
0.31378582f,
0.35300905f,
0.39223227f,
0.43145549f,
0.47067872f});
test_case.run();
}
NGRAPH_TEST(${BACKEND_NAME}, normalize_across_empty_axes_input)
{
Shape data_shape{1, 2, 3, 4};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
const auto axes = make_shared<op::Constant>(element::u64, Shape{0}, vector<int64_t>{});
const auto axes = make_shared<op::Constant>(element::i64, Shape{0}, vector<int64_t>{});
float eps{1e-6f};
auto eps_mode = op::EpsMode::ADD;
......@@ -695,7 +633,7 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_h_4d)
{
Shape data_shape{1, 2, 3, 4};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
const auto axes = make_shared<op::Constant>(element::u64, Shape{1}, vector<int64_t>{1});
const auto axes = make_shared<op::Constant>(element::i64, Shape{1}, vector<int64_t>{1});
float eps{1e-6f};
auto eps_mode = op::EpsMode::ADD;
......@@ -721,7 +659,7 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_1axis_5d)
{
Shape data_shape{1, 2, 2, 2, 3};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
const auto axes = make_shared<op::Constant>(element::u64, Shape{1}, vector<int64_t>{1});
const auto axes = make_shared<op::Constant>(element::i64, Shape{1}, vector<int64_t>{1});
float eps{1e-6f};
auto eps_mode = op::EpsMode::ADD;
......@@ -747,7 +685,7 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_123axes_5d)
{
Shape data_shape{1, 2, 2, 2, 3};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
const auto axes = make_shared<op::Constant>(element::i64, Shape{3}, vector<int64_t>{1, 2, 3});
float eps{1e-6f};
auto eps_mode = op::EpsMode::ADD;
......@@ -773,7 +711,7 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_chw_4d_max_bias)
{
Shape data_shape{1, 2, 3, 4};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
const auto axes = make_shared<op::Constant>(element::i64, Shape{3}, vector<int64_t>{1, 2, 3});
float eps{5000};
auto eps_mode = op::EpsMode::MAX;
......
......@@ -21,11 +21,37 @@
using namespace std;
using namespace ngraph;
TEST(type_prop, normalize_axes_input_not_constant)
{
Shape data_shape{1, 2, 3, 4};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
auto axes = make_shared<op::Parameter>(element::u64, Shape{1});
float eps{1e-6f};
auto eps_mode = op::EpsMode::ADD;
try
{
auto normalize = make_shared<op::NormalizeL2>(data, axes, eps, eps_mode);
// Should have thrown, so fail if it didn't
FAIL() << "Invalid input tensor rank.";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("doesn't support 'axes' input of other type than a Constant."));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, normalize_invalid_axes_rank)
{
Shape data_shape{1, 2, 3, 4};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
auto axes = make_shared<op::Parameter>(element::u64, Shape{1, 2});
const auto axes = make_shared<op::Constant>(element::i64, Shape{1, 2}, vector<int64_t>{1, 2});
float eps{1e-6f};
auto eps_mode = op::EpsMode::ADD;
......@@ -45,15 +71,26 @@ TEST(type_prop, normalize_invalid_axes_rank)
}
}
TEST(type_prop, normalize_output_shape_across_chw)
TEST(type_prop, normalize_axes_out_of_bounds)
{
Shape data_shape{2, 3, 4};
Shape data_shape{1, 2, 3, 4};
auto data = make_shared<op::Parameter>(element::f32, data_shape);
const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
const auto axes = make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{3, 4});
float eps{1e-6f};
auto eps_mode = op::EpsMode::ADD;
auto normalize = make_shared<op::NormalizeL2>(data, axes, eps, eps_mode);
EXPECT_EQ(normalize->get_element_type(), element::f32);
EXPECT_EQ(normalize->get_shape(), (Shape{2, 3, 4}));
try
{
auto normalize = make_shared<op::NormalizeL2>(data, axes, eps, eps_mode);
// Should have thrown, so fail if it didn't
FAIL() << "Invalid input tensor rank.";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), std::string("Reduction axis ("));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}