Commit 66c72c18 authored by Mateusz Bencer, committed by Sang Ik Lee

[SPEC] Implement MaxPool:v1 and AvgPool:v1 downgrade pass (#3788)

* Implemented downgrade pass

* Added v1 support in ONNX importer

* Clang style applied

* Pooling factory refactor

* Removed unused variables

* Clang style applied

* Fixed avg pool bprop UT
parent d2e652f6
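
For reference while reading the diff: the downgrade maps v1 pooling attributes back onto their v0 counterparts, and two of the conversions are easy to misread. In isolation they amount to the following (v1_node is an illustrative op::v1::AvgPool pointer, not a name from the patch):

const bool ceil_mode = static_cast<bool>(v1_node->get_rounding_type()); // RoundingType::FLOOR -> false, CEIL -> true
const bool include_padding_in_avg_computation = !v1_node->get_exclude_pad(); // the flag's sense is inverted between opsets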
......@@ -28,7 +28,7 @@ namespace ngraph
{
NodeVector average_pool(const Node& node)
{
return pooling::PoolingFactory(node).make_pooling_op<ngraph::op::AvgPool>();
return pooling::PoolingFactory(node).make_avg_pool();
}
} // namespace set_1
......
......@@ -28,8 +28,7 @@ namespace ngraph
{
NodeVector global_average_pool(const Node& node)
{
return pooling::GlobalPoolingFactory(node)
.make_pooling_op<ngraph::op::AvgPool>();
return pooling::GlobalPoolingFactory(node).make_avg_pool();
}
} // namespace set_1
......
......@@ -28,8 +28,7 @@ namespace ngraph
{
NodeVector global_max_pool(const Node& node)
{
return pooling::GlobalPoolingFactory(node)
.make_pooling_op<ngraph::op::MaxPool>();
return pooling::GlobalPoolingFactory(node).make_max_pool();
}
} // namespace set_1
......
......@@ -29,8 +29,7 @@ namespace ngraph
{
NodeVector max_pool(const Node& node)
{
auto max_pool =
pooling::PoolingFactory(node).make_pooling_op<ngraph::op::MaxPool>();
auto max_pool = pooling::PoolingFactory(node).make_max_pool();
max_pool.emplace_back(std::make_shared<NullNode>()); // Indices (optional)
return max_pool;
}
......
......@@ -41,18 +41,29 @@ namespace ngraph
m_padding_above = Shape{std::begin(padding_above), std::end(padding_above)};
}
template <>
NodeVector PoolingFactory::make_pooling_op<ngraph::op::AvgPool>() const
NodeVector PoolingFactory::make_avg_pool() const
{
bool count_include_pad =
m_onnx_node.get_attribute_value<std::int64_t>("count_include_pad", 0);
return {std::make_shared<ngraph::op::AvgPool>(m_inputs.at(0),
m_kernel_shape,
m_strides,
m_padding_below,
m_padding_above,
count_include_pad,
m_auto_pad)};
return {std::make_shared<ngraph::op::v1::AvgPool>(m_inputs.at(0),
m_strides,
m_padding_below,
m_padding_above,
m_kernel_shape,
!count_include_pad,
op::RoundingType::FLOOR,
m_auto_pad)};
}
NodeVector PoolingFactory::make_max_pool() const
{
return {std::make_shared<ngraph::op::v1::MaxPool>(m_inputs.at(0),
m_strides,
m_padding_below,
m_padding_above,
m_kernel_shape,
op::RoundingType::FLOOR,
m_auto_pad)};
}
GlobalPoolingFactory::GlobalPoolingFactory(const Node& node)
......
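
Note the constructor reshuffle: the v0 ops took the window shape first, while the v1 ops take strides and paddings before the kernel. A minimal standalone sketch of the v1::AvgPool construction above, assuming only the public nGraph headers (the shapes are illustrative):

#include <memory>
#include "ngraph/ngraph.hpp"

auto data = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32,
                                                    ngraph::Shape{1, 3, 6, 9});
// exclude_pad is the negation of ONNX count_include_pad: when true, padded
// elements do not contribute to the average.
auto avg = std::make_shared<ngraph::op::v1::AvgPool>(data,
                                                     ngraph::Strides{1, 1},           // strides
                                                     ngraph::Shape{0, 0},             // pads_begin
                                                     ngraph::Shape{0, 0},             // pads_end
                                                     ngraph::Shape{2, 2},             // kernel
                                                     true,                            // exclude_pad
                                                     ngraph::op::RoundingType::FLOOR,
                                                     ngraph::op::PadType::EXPLICIT);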
......@@ -21,6 +21,7 @@
#include "core/node.hpp"
#include "ngraph/node.hpp"
#include "ngraph/op/avg_pool.hpp"
#include "ngraph/op/max_pool.hpp"
#include "ngraph/op/op.hpp"
#include "ngraph/op/util/attr_types.hpp"
#include "ngraph/shape.hpp"
......@@ -51,23 +52,16 @@ namespace ngraph
virtual ~PoolingFactory() = default;
///
/// \brief Creates a sub-graph representing appropriate ONNX operation.
/// \brief Creates average pooling ONNX operation.
/// \return Vector of output nodes.
///
/// \tparam NgraphOperator nGraph operator class type used to build ONNX
/// operation.
NodeVector make_avg_pool() const;
///
/// \brief Creates max pooling ONNX operation.
/// \return Vector of output nodes.
///
template <typename NgraphOperator>
NodeVector make_pooling_op() const
{
return {std::make_shared<NgraphOperator>(m_inputs.at(0),
m_kernel_shape,
m_strides,
m_padding_below,
m_padding_above,
m_auto_pad)};
}
NodeVector make_max_pool() const;
protected:
Node m_onnx_node;
......@@ -80,10 +74,6 @@ namespace ngraph
ngraph::op::PadType m_auto_pad;
};
// AvgPool accepts some additional parameters thus we have specialization for it.
template <>
NodeVector PoolingFactory::make_pooling_op<ngraph::op::AvgPool>() const;
///
/// \brief Factory class which generates sub-graphs for ONNX 'global' pooling
/// operators.
......
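
A note on the refactor itself: the templated make_pooling_op only worked because the v0 AvgPool and MaxPool constructors shared a common argument prefix, with an out-of-line specialization bolted on for AvgPool's extra attributes. The v1 constructors no longer line up (AvgPool alone takes exclude_pad, and both take a rounding type), so two named factory methods are simpler than a template plus specialization.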
......@@ -23,6 +23,7 @@
#include "ngraph/op/constant.hpp"
#include "ngraph/op/convolution.hpp"
#include "ngraph/op/get_output_element.hpp"
#include "ngraph/op/max_pool.hpp"
#include "ngraph/op/pad.hpp"
#include "ngraph/op/product.hpp"
#include "ngraph/op/reduce_prod.hpp"
......@@ -91,6 +92,57 @@ bool pass::Opset0Downgrade::run_on_node(shared_ptr<Node> node)
#endif
switch (get_typeid(node))
{
case OP_TYPEID::AvgPool:
{
const auto tmp = as_type_ptr<op::v1::AvgPool>(node);
auto const input_arg = node->input(0).get_source_output();
const auto ceil_mode = static_cast<bool>(tmp->get_rounding_type());
const auto include_padding_in_avg_computation = !tmp->get_exclude_pad();
const auto pad_type = tmp->get_auto_pad();
const auto padding_below = tmp->get_pads_begin();
const auto padding_above = tmp->get_pads_end();
const auto window_movement_strides = tmp->get_strides();
const auto window_shape = tmp->get_kernel();
auto replacement_node = make_shared<op::v0::AvgPool>(input_arg,
window_shape,
window_movement_strides,
padding_below,
padding_above,
include_padding_in_avg_computation,
pad_type,
ceil_mode);
replace_node(node, replacement_node);
modified = true;
break;
}
case OP_TYPEID::AvgPoolBackprop:
{
const auto tmp = as_type_ptr<op::v1::AvgPoolBackprop>(node);
NGRAPH_CHECK(node->input_value(1).get_node_shared_ptr()->is_constant());
const auto forward_arg_shape =
static_pointer_cast<op::Constant>(node->input_value(1).get_node_shared_ptr())
->get_shape_val();
const auto delta = node->input(0).get_source_output();
const auto include_padding_in_avg_computation = !tmp->get_exclude_pad();
const auto padding_below = tmp->get_pads_begin();
const auto padding_above = tmp->get_pads_end();
const auto window_movement_strides = tmp->get_strides();
const auto window_shape = tmp->get_kernel();
auto replacement_node =
make_shared<op::v0::AvgPoolBackprop>(forward_arg_shape,
delta,
window_shape,
window_movement_strides,
padding_below,
padding_above,
include_padding_in_avg_computation);
replace_node(node, replacement_node);
modified = true;
break;
}
case OP_TYPEID::Broadcast:
{
auto tmp = dynamic_cast<const op::v1::Broadcast*>(node.get());
......@@ -178,6 +230,66 @@ bool pass::Opset0Downgrade::run_on_node(shared_ptr<Node> node)
modified = true;
break;
}
case OP_TYPEID::MaxPool:
{
auto tmp = as_type_ptr<op::v1::MaxPool>(node);
auto const input_arg = node->input(0).get_source_output();
auto ceil_mode = static_cast<bool>(tmp->get_rounding_type());
auto pad_type = tmp->get_auto_pad();
auto padding_below = tmp->get_pads_begin();
auto padding_above = tmp->get_pads_end();
auto window_movement_strides = tmp->get_strides();
auto window_shape = tmp->get_kernel();
auto replacement_node = make_shared<op::v0::MaxPool>(input_arg,
window_shape,
window_movement_strides,
padding_below,
padding_above,
pad_type,
ceil_mode);
replace_node(node, replacement_node);
modified = true;
break;
}
case OP_TYPEID::MaxPoolBackprop:
{
const auto tmp = as_type_ptr<op::v1::MaxPoolBackprop>(node);
const auto padding_below = tmp->get_pads_begin();
const auto padding_above = tmp->get_pads_end();
const auto window_movement_strides = tmp->get_strides();
const auto window_shape = tmp->get_kernel();
const auto arg_forward = node->input(0).get_source_output();
const auto delta = node->input(1).get_source_output();
shared_ptr<Node> replacement_node;
if (node->get_inputs().size() == 3)
{
const auto result_forward = node->input(2).get_source_output();
replacement_node = make_shared<op::v0::MaxPoolBackprop>(arg_forward,
delta,
result_forward,
window_shape,
window_movement_strides,
padding_below,
padding_above);
}
else
{
replacement_node = make_shared<op::v0::MaxPoolBackprop>(arg_forward,
delta,
window_movement_strides,
window_shape,
padding_below,
padding_above);
}
replace_node(node, replacement_node);
modified = true;
break;
}
case OP_TYPEID::Pad:
{
auto tmp = as_type_ptr<op::v1::Pad>(node);
......@@ -290,31 +402,6 @@ bool pass::Opset0Downgrade::run_on_node(shared_ptr<Node> node)
modified = true;
break;
}
case OP_TYPEID::AvgPoolBackprop:
{
auto tmp = dynamic_cast<const op::v1::AvgPoolBackprop*>(node.get());
NGRAPH_CHECK(node->input_value(1).get_node_shared_ptr()->is_constant());
auto forward_arg_shape =
static_pointer_cast<op::Constant>(node->input_value(1).get_node_shared_ptr())
->get_shape_val();
auto exclude_pad = tmp->get_exclude_pad();
auto pads_begin = tmp->get_pads_begin();
auto pads_end = tmp->get_pads_end();
auto strides = tmp->get_strides();
auto kernel = tmp->get_kernel();
auto replacement_node =
make_shared<op::v0::AvgPoolBackprop>(forward_arg_shape,
node->input(0).get_source_output(),
kernel,
strides,
pads_begin,
pads_end,
exclude_pad);
replace_node(node, replacement_node);
modified = true;
break;
}
default: break;
}
#if defined(__clang__)
......
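
The new cases are driven exactly like the existing opset transforms. A self-contained usage sketch mirroring the tests below (shapes are illustrative):

#include <memory>
#include "ngraph/ngraph.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/opset0_downgrade.hpp"

auto arg = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32,
                                                   ngraph::Shape{1, 3, 6, 9});
auto pool = std::make_shared<ngraph::op::v1::MaxPool>(arg,
                                                      ngraph::Strides{1, 1},          // strides
                                                      ngraph::Shape{0, 0},            // pads_begin
                                                      ngraph::Shape{0, 0},            // pads_end
                                                      ngraph::Shape{2, 2},            // kernel
                                                      ngraph::op::RoundingType::FLOOR,
                                                      ngraph::op::PadType::EXPLICIT);
auto f = std::make_shared<ngraph::Function>(ngraph::NodeVector{pool},
                                            ngraph::ParameterVector{arg});

ngraph::pass::Manager pass_manager;
pass_manager.register_pass<ngraph::pass::Opset0Downgrade>();
pass_manager.run_passes(f); // the graph now holds an equivalent op::v0::MaxPool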
......@@ -79,7 +79,6 @@ set(SRC
opset_pass/softmax_opset_pass.cpp
opset_pass/sum_opset_pass.cpp
opset_pass/topk_opset_pass.cpp
opset_pass/avg_pool_opset_pass.cpp
partial_shape.cpp
pass.cpp
pass_liveness.cpp
......
......@@ -1374,7 +1374,7 @@ NGRAPH_TEST(${BACKEND_NAME}, avg_pool_bprop_2d_2channel_2image_dyn_shape)
padding_below,
padding_above,
window_shape,
false);
true);
auto f = make_shared<Function>(NodeVector{avg_pool_bprop},
ParameterVector{delta, forward_arg_shape});
......
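
The flipped literal above is the "Fixed avg pool bprop UT" item from the commit message: the test builds op::v1::AvgPoolBackprop, whose final argument is exclude_pad rather than v0's include_padding_in_avg_computation, so preserving the original behavior (padding excluded from the average) requires true, not false.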
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/op/util/attr_types.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/opset0_downgrade.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(opset_transform, opset1_avgpool_downgrade_pass)
{
Shape window_shape{2, 2};
auto window_movement_strides = Strides{1, 1};
Shape padding_below{0, 0};
Shape padding_above{0, 0};
auto delta = make_shared<op::Parameter>(element::f32, Shape{2, 2, 2, 2});
auto forward_arg_shape = op::Constant::create<int64_t>(element::i64, Shape{4}, {2, 2, 3, 3});
auto avgpool_v1 = make_shared<op::v1::AvgPoolBackprop>(delta,
forward_arg_shape,
window_movement_strides,
padding_below,
padding_above,
window_shape,
false);
auto f = make_shared<Function>(NodeVector{avgpool_v1}, ParameterVector{delta});
ngraph::pass::Manager pass_manager;
pass_manager.register_pass<pass::Opset0Downgrade>();
pass_manager.run_passes(f);
auto avgpool_v0 = static_pointer_cast<op::v0::AvgPoolBackprop>(
f->get_results().at(0)->input_value(0).get_node_shared_ptr());
EXPECT_EQ(avgpool_v0->description(), "AvgPoolBackprop");
EXPECT_EQ(avgpool_v0->get_version(), 0);
EXPECT_EQ(avgpool_v0->get_forward_arg_shape(), (Shape{2, 2, 3, 3}));
}
......@@ -3,6 +3,7 @@
#include "ngraph/ngraph.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/opset0_downgrade.hpp"
#include "ngraph/pass/opset1_upgrade.hpp"
#include "util/test_control.hpp"
#include "util/type_prop.hpp"
......@@ -79,3 +80,161 @@ TEST(opset_transform, opset1_maxpool_upgrade_pass)
EXPECT_EQ(max_pool_v1_node->get_rounding_type(), op::RoundingType::FLOOR);
EXPECT_EQ(max_pool_v1_node->get_auto_pad(), pad_mode);
}
TEST(opset_transform, opset1_avgpool_downgrade_pass)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6, 9});
Shape padding_below{1, 0};
Shape padding_above{0, 1};
Strides window_movement_strides{1, 1};
Shape window_shape{3, 3};
bool exclude_pad = false;
auto rounding_type = op::RoundingType::FLOOR;
op::PadType auto_pad = op::PadType::EXPLICIT;
auto avgpool_v1 = make_shared<op::v1::AvgPool>(arg,
window_movement_strides,
padding_below,
padding_above,
window_shape,
exclude_pad,
rounding_type,
auto_pad);
auto result = make_shared<op::Result>(avgpool_v1);
auto f = make_shared<Function>(ResultVector{result}, ParameterVector{arg});
ngraph::pass::Manager pass_manager;
pass_manager.register_pass<pass::Opset0Downgrade>();
pass_manager.run_passes(f);
auto avgpool_s0_result = f->get_results().at(0);
auto node = avgpool_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto avg_pool_v0_node = static_pointer_cast<op::v0::AvgPool>(node);
EXPECT_EQ(avg_pool_v0_node->description(), "AvgPool");
EXPECT_EQ(avg_pool_v0_node->get_version(), 0);
EXPECT_EQ(avg_pool_v0_node->get_padding_below(), padding_below);
EXPECT_EQ(avg_pool_v0_node->get_padding_above(), padding_above);
EXPECT_EQ(avg_pool_v0_node->get_window_movement_strides(), window_movement_strides);
EXPECT_EQ(avg_pool_v0_node->get_window_shape(), window_shape);
EXPECT_EQ(avg_pool_v0_node->get_ceil_mode(), false);
EXPECT_EQ(avg_pool_v0_node->get_include_padding_in_avg_computation(), !exclude_pad);
EXPECT_EQ(avg_pool_v0_node->get_pad_type(), auto_pad);
}
TEST(opset_transform, opset1_maxpool_downgrade_pass)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6, 9});
Shape padding_below{1, 0};
Shape padding_above{0, 1};
Strides window_movement_strides{1, 1};
Shape window_shape{3, 3};
auto rounding_type = op::RoundingType::FLOOR;
op::PadType pad_type = op::PadType::EXPLICIT;
auto maxpool_v1 = make_shared<op::v1::MaxPool>(arg,
window_movement_strides,
padding_below,
padding_above,
window_shape,
rounding_type,
pad_type);
auto result = make_shared<op::Result>(maxpool_v1);
auto f = make_shared<Function>(ResultVector{result}, ParameterVector{arg});
ngraph::pass::Manager pass_manager;
pass_manager.register_pass<pass::Opset0Downgrade>();
pass_manager.run_passes(f);
auto maxpool_s0_result = f->get_results().at(0);
auto node = maxpool_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto max_pool_v0_node = static_pointer_cast<op::v0::MaxPool>(node);
EXPECT_EQ(max_pool_v0_node->description(), "MaxPool");
EXPECT_EQ(max_pool_v0_node->get_version(), 0);
EXPECT_EQ(max_pool_v0_node->get_padding_below(), padding_below);
EXPECT_EQ(max_pool_v0_node->get_padding_above(), padding_above);
EXPECT_EQ(max_pool_v0_node->get_window_movement_strides(), window_movement_strides);
EXPECT_EQ(max_pool_v0_node->get_window_shape(), window_shape);
EXPECT_EQ(max_pool_v0_node->get_ceil_mode(), false);
EXPECT_EQ(max_pool_v0_node->get_pad_type(), pad_type);
}
TEST(opset_transform, opset1_avgpool_backprop_downgrade_pass)
{
auto delta = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6, 9});
auto forward_arg_shape =
op::Constant::create(element::i64, Shape{4}, vector<int64_t>{1, 3, 7, 10});
Shape padding_below{1, 0};
Shape padding_above{0, 1};
Strides window_movement_strides{1, 1};
Shape window_shape{3, 3};
bool exclude_pad = false;
auto avgpool_backprop_v1 = make_shared<op::v1::AvgPoolBackprop>(delta,
forward_arg_shape,
window_movement_strides,
padding_below,
padding_above,
window_shape,
exclude_pad);
auto result = make_shared<op::Result>(avgpool_backprop_v1);
auto f = make_shared<Function>(ResultVector{result}, ParameterVector{delta});
ngraph::pass::Manager pass_manager;
pass_manager.register_pass<pass::Opset0Downgrade>();
pass_manager.run_passes(f);
auto avgpool_backprop_s0_result = f->get_results().at(0);
auto node = avgpool_backprop_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto avg_pool_backprop_v0_node = static_pointer_cast<op::v0::AvgPoolBackprop>(node);
EXPECT_EQ(avg_pool_backprop_v0_node->description(), "AvgPoolBackprop");
EXPECT_EQ(avg_pool_backprop_v0_node->get_version(), 0);
EXPECT_EQ(avg_pool_backprop_v0_node->get_padding_below(), padding_below);
EXPECT_EQ(avg_pool_backprop_v0_node->get_padding_above(), padding_above);
EXPECT_EQ(avg_pool_backprop_v0_node->get_window_movement_strides(), window_movement_strides);
EXPECT_EQ(avg_pool_backprop_v0_node->get_window_shape(), window_shape);
EXPECT_EQ(avg_pool_backprop_v0_node->get_forward_arg_shape(), Shape({1, 3, 7, 10}));
EXPECT_EQ(avg_pool_backprop_v0_node->get_include_padding_in_avg_computation(), !exclude_pad);
}
TEST(opset_transform, opset1_maxpool_backprop_downgrade_pass)
{
auto arg_forward = make_shared<op::Parameter>(element::f32, Shape{1, 3, 7, 10});
auto delta = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6, 9});
auto result_forward = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6, 9});
Shape padding_below{1, 0};
Shape padding_above{0, 1};
Strides window_movement_strides{1, 1};
Shape window_shape{3, 3};
auto max_pool_backprop_v1 = make_shared<op::v1::MaxPoolBackprop>(arg_forward,
delta,
result_forward,
window_movement_strides,
padding_below,
padding_above,
window_shape);
auto result = make_shared<op::Result>(max_pool_backprop_v1);
auto f = make_shared<Function>(ResultVector{result},
ParameterVector{arg_forward, delta, result_forward});
ngraph::pass::Manager pass_manager;
pass_manager.register_pass<pass::Opset0Downgrade>();
pass_manager.run_passes(f);
auto max_pool_backprop_s0_result = f->get_results().at(0);
auto node = max_pool_backprop_s0_result->input(0).get_source_output().get_node_shared_ptr();
auto max_pool_backprop_v0_node = static_pointer_cast<op::v0::MaxPoolBackprop>(node);
EXPECT_EQ(max_pool_backprop_v0_node->description(), "MaxPoolBackprop");
EXPECT_EQ(max_pool_backprop_v0_node->get_version(), 0);
EXPECT_EQ(max_pool_backprop_v0_node->get_padding_below(), padding_below);
EXPECT_EQ(max_pool_backprop_v0_node->get_padding_above(), padding_above);
EXPECT_EQ(max_pool_backprop_v0_node->get_window_movement_strides(), window_movement_strides);
EXPECT_EQ(max_pool_backprop_v0_node->get_window_shape(), window_shape);
}