Commit 20b4ce27 authored by Nishant Patel, committed by Scott Cyphers

AvgPoolBprop dynamic shape support (#3774)

* AvgPoolBprop dynamic shape support

* Add opset_transform test

* Call the correct shape relevance function

* Remove unused variable
parent ccf1ab8a
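In essence, `op::v1::AvgPoolBackprop` now takes the forward-argument shape as a second input node instead of a constructor attribute, so the shape can be supplied at runtime. A minimal sketch of the revised construction pattern, mirroring the backend test added further down (the helper name is hypothetical):

```cpp
#include "ngraph/ngraph.hpp"

using namespace ngraph;

// Hypothetical helper: the forward-argument shape arrives at runtime as an
// i64 vector, so the output shape stays dynamic until execution.
std::shared_ptr<Function> make_dyn_avg_pool_bprop()
{
    auto delta = std::make_shared<op::Parameter>(element::f32, Shape{2, 2, 2, 2});
    auto forward_arg_shape =
        std::make_shared<op::Parameter>(element::i64, PartialShape{Dimension::dynamic()});

    auto bprop = std::make_shared<op::v1::AvgPoolBackprop>(delta,
                                                           forward_arg_shape,
                                                           Strides{1, 1}, // strides
                                                           Shape{0, 0},   // pads_begin
                                                           Shape{0, 0},   // pads_end
                                                           Shape{2, 2},   // kernel
                                                           false);        // exclude_pad
    return std::make_shared<Function>(NodeVector{bprop},
                                      ParameterVector{delta, forward_arg_shape});
}
```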
src/ngraph/op/avg_pool.cpp
@@ -558,15 +558,14 @@ shared_ptr<Node> op::v1::AvgPool::copy_with_new_args(const NodeVector& new_args)
 constexpr NodeTypeInfo op::v1::AvgPoolBackprop::type_info;
 
-op::v1::AvgPoolBackprop::AvgPoolBackprop(const Shape& forward_arg_shape,
-                                         const Output<Node>& delta,
+op::v1::AvgPoolBackprop::AvgPoolBackprop(const Output<Node>& delta,
+                                         const Output<Node>& forward_arg_shape,
                                          const Strides& strides,
                                          const Shape& pads_begin,
                                          const Shape& pads_end,
                                          const Shape& kernel,
                                          bool exclude_pad)
-    : Op(check_single_output_args({delta.get_node_shared_ptr()}))
-    , m_forward_arg_shape(forward_arg_shape)
+    : Op({delta, forward_arg_shape})
     , m_kernel(kernel)
     , m_strides(strides)
     , m_pads_begin(pads_begin)
@@ -576,6 +575,16 @@ op::v1::AvgPoolBackprop::AvgPoolBackprop(const Shape& forward_arg_shape,
     constructor_validate_and_infer_types();
 }
 
+const Shape op::v1::AvgPoolBackprop::get_forward_arg_shape() const
+{
+    Shape shape;
+    if (auto const_op = as_type<op::Constant>(input_value(1).get_node()))
+    {
+        shape = const_op->get_shape_val();
+    }
+    return shape;
+}
+
 void op::v1::AvgPoolBackprop::validate_and_infer_types()
 {
     // infer_batched_forward_pooling wants CoordinateDiffs for these, while the pooling ops for
@@ -583,8 +592,15 @@ void op::v1::AvgPoolBackprop::validate_and_infer_types()
     CoordinateDiff pads_begin(m_pads_begin.begin(), m_pads_begin.end());
     CoordinateDiff pads_end(m_pads_end.begin(), m_pads_end.end());
 
+    PartialShape forward_arg_shape{PartialShape::dynamic()};
+
+    if (input_value(1).get_node_shared_ptr()->is_constant())
+    {
+        forward_arg_shape = get_forward_arg_shape();
+    }
+
     PartialShape forward_result_shape = infer_batched_pooling_forward(
-        this, m_forward_arg_shape, pads_begin, pads_end, m_kernel, m_strides, m_exclude_pad);
+        this, forward_arg_shape, pads_begin, pads_end, m_kernel, m_strides, m_exclude_pad);
 
     const PartialShape& delta_shape = get_input_partial_shape(0);
@@ -598,17 +614,8 @@ void op::v1::AvgPoolBackprop::validate_and_infer_types()
                           delta_shape,
                           ").");
 
-    set_output_type(0, get_input_element_type(0), m_forward_arg_shape);
-}
-
-const Shape& op::v1::AvgPoolBackprop::get_forward_arg_shape() const
-{
-    return m_forward_arg_shape;
-}
-
-void op::v1::AvgPoolBackprop::set_forward_arg_shape(const Shape& forward_arg_shape)
-{
-    m_forward_arg_shape = forward_arg_shape;
+    set_input_is_relevant_to_shape(1);
+    set_output_type(0, get_input_element_type(0), forward_arg_shape);
 }
 
 const Shape& op::v1::AvgPoolBackprop::get_kernel() const
@@ -664,8 +671,8 @@ void op::v1::AvgPoolBackprop::set_exclude_pad(bool exclude_pad)
 shared_ptr<Node> op::v1::AvgPoolBackprop::copy_with_new_args(const NodeVector& new_args) const
 {
     check_new_args_count(this, new_args);
-    return make_shared<v1::AvgPoolBackprop>(m_forward_arg_shape,
-                                            new_args.at(0),
+    return make_shared<v1::AvgPoolBackprop>(new_args.at(0),
+                                            new_args.at(1),
                                             m_strides,
                                             m_pads_begin,
                                             m_pads_end,
@@ -683,9 +690,8 @@ void op::v1::AvgPool::generate_adjoints(autodiff::Adjoints& adjoints, const Node
     auto delta = deltas.at(0);
 
     auto operand = input_value(0);
-    auto& operand_shape = get_input_shape(0);
     auto backprop = make_shared<op::v1::AvgPoolBackprop>(
-        operand_shape, delta, m_strides, m_pads_begin, m_pads_end, m_kernel, m_exclude_pad);
+        delta, input_value(1), m_strides, m_pads_begin, m_pads_end, m_kernel, m_exclude_pad);
     adjoints.add_delta(operand, backprop);
 }
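The shape-inference consequence of the hunks above: when input 1 is an `op::Constant`, `get_forward_arg_shape()` folds it and the output shape is inferred statically; otherwise the output stays fully dynamic, and `set_input_is_relevant_to_shape(1)` records that output shapes must be recomputed whenever that input changes. A short sketch of both cases, assuming a standard nGraph build (the function name is hypothetical):

```cpp
#include "ngraph/check.hpp"
#include "ngraph/ngraph.hpp"

using namespace ngraph;

void shape_inference_demo()
{
    auto delta = std::make_shared<op::Parameter>(element::f32, Shape{2, 2, 2, 2});

    // Constant shape input: validate_and_infer_types() folds it, so the
    // output partial shape is the static Shape{2, 2, 3, 3}.
    auto const_shape = op::Constant::create<int64_t>(element::i64, Shape{4}, {2, 2, 3, 3});
    auto static_bprop = std::make_shared<op::v1::AvgPoolBackprop>(
        delta, const_shape, Strides{1, 1}, Shape{0, 0}, Shape{0, 0}, Shape{2, 2}, false);
    NGRAPH_CHECK(static_bprop->get_output_partial_shape(0).is_static());

    // Parameter shape input: the is_constant() branch is skipped and the
    // output partial shape remains fully dynamic.
    auto dyn_shape =
        std::make_shared<op::Parameter>(element::i64, PartialShape{Dimension::dynamic()});
    auto dyn_bprop = std::make_shared<op::v1::AvgPoolBackprop>(
        delta, dyn_shape, Strides{1, 1}, Shape{0, 0}, Shape{0, 0}, Shape{2, 2}, false);
    NGRAPH_CHECK(dyn_bprop->get_output_partial_shape(0).is_dynamic());
}
```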
src/ngraph/op/avg_pool.hpp
@@ -327,8 +327,8 @@ namespace ngraph
             static constexpr NodeTypeInfo type_info{"AvgPoolBackprop", 1};
             const NodeTypeInfo& get_type_info() const override { return type_info; }
             AvgPoolBackprop() = default;
-            AvgPoolBackprop(const Shape& forward_arg_shape,
-                            const Output<Node>& delta,
+            AvgPoolBackprop(const Output<Node>& delta,
+                            const Output<Node>& forward_arg_shape,
                             const Strides& strides,
                             const Shape& pads_begin,
                             const Shape& pads_end,
@@ -341,8 +341,7 @@ namespace ngraph
             virtual std::shared_ptr<Node>
                 copy_with_new_args(const NodeVector& new_args) const override;
 
-            const Shape& get_forward_arg_shape() const;
-            void set_forward_arg_shape(const Shape& forward_arg_shape);
+            const Shape get_forward_arg_shape() const;
             const Shape& get_kernel() const;
             void set_kernel(const Shape& kernel);
             const Strides& get_strides() const;
@@ -355,7 +354,6 @@ namespace ngraph
             void set_exclude_pad(bool exclude_pad);
 
         protected:
-            Shape m_forward_arg_shape;
             Shape m_kernel;
             Strides m_strides;
             Shape m_pads_begin;
src/ngraph/pass/opset0_downgrade.cpp
@@ -16,6 +16,7 @@
 #include "ngraph/pass/opset0_downgrade.hpp"
 #include "ngraph/graph_util.hpp"
 #include "ngraph/node.hpp"
+#include "ngraph/op/avg_pool.hpp"
 #include "ngraph/op/broadcast.hpp"
 #include "ngraph/op/constant.hpp"
 #include "ngraph/op/get_output_element.hpp"
@@ -214,6 +215,31 @@ bool pass::Opset0Downgrade::run_on_node(shared_ptr<Node> node)
         modified = true;
         break;
     }
+    case OP_TYPEID::AvgPoolBackprop:
+    {
+        auto tmp = dynamic_cast<const op::v1::AvgPoolBackprop*>(node.get());
+        NGRAPH_CHECK(node->input_value(1).get_node_shared_ptr()->is_constant());
+        auto forward_arg_shape =
+            static_pointer_cast<op::Constant>(node->input_value(1).get_node_shared_ptr())
+                ->get_shape_val();
+        auto exclude_pad = tmp->get_exclude_pad();
+        auto pads_begin = tmp->get_pads_begin();
+        auto pads_end = tmp->get_pads_end();
+        auto strides = tmp->get_strides();
+        auto kernel = tmp->get_kernel();
+        auto replacement_node =
+            make_shared<op::v0::AvgPoolBackprop>(forward_arg_shape,
+                                                 node->input(0).get_source_output(),
+                                                 kernel,
+                                                 strides,
+                                                 pads_begin,
+                                                 pads_end,
+                                                 exclude_pad);
+        replace_node(node, replacement_node);
+        modified = true;
+        break;
+    }
     default: break;
     }
 #if defined(__clang__)
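Because `op::v0::AvgPoolBackprop` stores the forward shape as a static attribute, the downgrade case above can only fire when the shape input is an `op::Constant`; the `NGRAPH_CHECK` aborts the pass otherwise. A hypothetical sketch of that failure mode:

```cpp
#include "ngraph/ngraph.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/opset0_downgrade.hpp"

using namespace ngraph;

void downgrade_requires_constant_shape()
{
    auto delta = std::make_shared<op::Parameter>(element::f32, Shape{2, 2, 2, 2});
    auto dyn_shape =
        std::make_shared<op::Parameter>(element::i64, PartialShape{Dimension::dynamic()});
    auto bprop = std::make_shared<op::v1::AvgPoolBackprop>(
        delta, dyn_shape, Strides{1, 1}, Shape{0, 0}, Shape{0, 0}, Shape{2, 2}, false);
    auto f = std::make_shared<Function>(NodeVector{bprop}, ParameterVector{delta, dyn_shape});

    pass::Manager pass_manager;
    pass_manager.register_pass<pass::Opset0Downgrade>();
    try
    {
        pass_manager.run_passes(f); // NGRAPH_CHECK fails: input 1 is not constant
    }
    catch (const CheckFailure&)
    {
        // Expected: the v1 op cannot be lowered without a static shape.
    }
}
```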
src/ngraph/pass/opset1_upgrade.cpp
@@ -130,8 +130,8 @@ bool pass::Opset1Upgrade::run_on_node(shared_ptr<Node> node)
         auto kernel = tmp->get_window_shape();
 
         auto replacement_node =
-            make_shared<op::v1::AvgPoolBackprop>(tmp->get_forward_arg_shape(),
-                                                 node->input(0).get_source_output(),
+            make_shared<op::v1::AvgPoolBackprop>(node->input(0).get_source_output(),
+                                                 node->input(1).get_source_output(),
                                                  strides,
                                                  pads_begin,
                                                  pads_end,
src/ngraph/runtime/dynamic/dynamic_backend.cpp
@@ -16,6 +16,7 @@
 #include "ngraph/runtime/dynamic/dynamic_backend.hpp"
 #include "ngraph/graph_util.hpp"
+#include "ngraph/op/avg_pool.hpp"
 #include "ngraph/op/broadcast.hpp"
 #include "ngraph/op/experimental/dyn_broadcast.hpp"
 #include "ngraph/op/experimental/dyn_replace_slice.hpp"
@@ -88,7 +89,7 @@ bool is_dynamic_op(const std::shared_ptr<Node>& op)
     return is_type<op::Transpose>(op) || is_type<op::DynBroadcast>(op) ||
            is_type<op::DynReplaceSlice>(op) || is_type<op::DynSlice>(op) ||
            is_type<op::v1::Reshape>(op) || is_type<op::DynReshape>(op) || is_type<op::Range>(op) ||
-           is_type<op::v1::Broadcast>(op);
+           is_type<op::v1::AvgPoolBackprop>(op) || is_type<op::v1::Broadcast>(op);
 }
 
 // Helper for a vile hack in DynamicExecutable::call. See body of that function for details.
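Listing `op::v1::AvgPoolBackprop` in `is_dynamic_op` routes functions containing it through the dynamic wrapper backend (`runtime::Backend::create(name, /*dynamic=*/true)`), which defers final compilation until concrete values for the shape input are available; the `avg_pool_bprop_2d_2channel_2image_dyn_shape` test below exercises exactly this path.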
src/ngraph/serializer.cpp
@@ -871,14 +871,13 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
         }
         if (op_version == 1)
         {
-            auto forward_arg_shape = node_js.at("forward_arg_shape").get<vector<size_t>>();
             auto kernel = node_js.at("kernel").get<vector<size_t>>();
             auto strides = node_js.at("strides").get<vector<size_t>>();
             auto pads_begin = node_js.at("pads_begin").get<vector<size_t>>();
             auto pads_end = node_js.at("pads_end").get<vector<size_t>>();
             auto exclude_pad = get_or_default<bool>(node_js, "exclude_pad", true);
             node = make_shared<op::v1::AvgPoolBackprop>(
-                forward_arg_shape, args[0], strides, pads_begin, pads_end, kernel, exclude_pad);
+                args[0], args[1], strides, pads_begin, pads_end, kernel, exclude_pad);
         }
         break;
     }
test/CMakeLists.txt
@@ -79,6 +79,7 @@ set(SRC
     opset_pass/softmax_opset_pass.cpp
     opset_pass/sum_opset_pass.cpp
    opset_pass/topk_opset_pass.cpp
+    opset_pass/avg_pool_opset_pass.cpp
     partial_shape.cpp
     pass.cpp
     pass_liveness.cpp
test/backend/avg_pool.in.cpp
@@ -1357,5 +1357,80 @@ NGRAPH_TEST_P(${BACKEND_NAME}, avg_pool_3d_params, avg_pool_3d_uneven_strided_pa
     }
 }
 
+NGRAPH_TEST(${BACKEND_NAME}, avg_pool_bprop_2d_2channel_2image_dyn_shape)
+{
+    Shape window_shape{2, 2};
+    auto window_movement_strides = Strides{1, 1};
+    Shape padding_below{0, 0};
+    Shape padding_above{0, 0};
+    Shape shape_d{2, 2, 2, 2};
+    auto delta = make_shared<op::Parameter>(element::f32, shape_d);
+    auto forward_arg_shape =
+        make_shared<op::Parameter>(element::i64, PartialShape{Dimension::dynamic()});
+
+    auto avg_pool_bprop = make_shared<op::v1::AvgPoolBackprop>(delta,
+                                                               forward_arg_shape,
+                                                               window_movement_strides,
+                                                               padding_below,
+                                                               padding_above,
+                                                               window_shape,
+                                                               false);
+
+    auto f = make_shared<Function>(NodeVector{avg_pool_bprop},
+                                   ParameterVector{delta, forward_arg_shape});
+
+    auto backend = runtime::Backend::create("${BACKEND_NAME}", true);
+    auto ex = backend->compile(f);
+    auto t_r = backend->create_dynamic_tensor(element::f32, PartialShape::dynamic());
+
+    vector<int64_t> shapes = {2, 2, 3, 3};
+
+    // Create some tensors for input/output
+    auto deltas = backend->create_tensor(element::f32, shape_d);
+    copy_data(deltas,
+              test::NDArray<float, 4>({{{{0.3, 0.3}, // img 0 chan 0
+                                         {0.3, 0.3}},
+                                        {{0.2, 0.2}, // img 0 chan 1
+                                         {0.2, 0.2}}},
+                                       {{{0.1, 0.1}, // img 1 chan 0
+                                         {0.1, 0.1}},
+                                        {{0.4, 0.4}, // img 1 chan 1
+                                         {0.4, 0.4}}}})
+                  .get_vector());
+
+    auto forward_shape = backend->create_tensor(element::i64, Shape{shapes.size()});
+    copy_data(forward_shape, shapes);
+
+    float denom = 2 * 2;
+
+    ex->call_with_validate({t_r}, {deltas, forward_shape});
+    ASSERT_EQ(t_r->get_shape(), (Shape{2, 2, 3, 3}));
+    EXPECT_TRUE(test::all_close_f(
+        (test::NDArray<float, 4>({{{{0.3f / denom, 0.6f / denom, 0.3f / denom}, // img 0 chan 0
+                                    {0.6f / denom, 1.2f / denom, 0.6f / denom},
+                                    {0.3f / denom, 0.6f / denom, 0.3f / denom}},
+                                   {{0.2f / denom, 0.4f / denom, 0.2f / denom}, // img 0 chan 1
+                                    {0.4f / denom, 0.8f / denom, 0.4f / denom},
+                                    {0.2f / denom, 0.4f / denom, 0.2f / denom}}},
+                                  {{{0.1f / denom, 0.2f / denom, 0.1f / denom}, // img 1 chan 0
+                                    {0.2f / denom, 0.4f / denom, 0.2f / denom},
+                                    {0.1f / denom, 0.2f / denom, 0.1f / denom}},
+                                   {{0.4f / denom, 0.8f / denom, 0.4f / denom}, // img 1 chan 1
+                                    {0.8f / denom, 1.6f / denom, 0.8f / denom},
+                                    {0.4f / denom, 0.8f / denom, 0.4f / denom}}}})
+             .get_vector()),
+        read_vector<float>(t_r),
+        MIN_FLOAT_TOLERANCE_BITS));
+}
+
 // avg_pool_3d case generation
 NGRAPH_INSTANTIATE_TEST_CASE_P(${BACKEND_NAME}, include_pad, avg_pool_3d_params, testing::Bool());
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/op/util/attr_types.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/opset0_downgrade.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(opset_transform, opset1_avgpool_downgrade_pass)
{
Shape window_shape{2, 2};
auto window_movement_strides = Strides{1, 1};
Shape padding_below{0, 0};
Shape padding_above{0, 0};
auto delta = make_shared<op::Parameter>(element::f32, Shape{2, 2, 2, 2});
auto forward_arg_shape = op::Constant::create<int64_t>(element::i64, Shape{4}, {2, 2, 3, 3});
auto avgpool_v1 = make_shared<op::v1::AvgPoolBackprop>(delta,
forward_arg_shape,
window_movement_strides,
padding_below,
padding_above,
window_shape,
false);
auto f = make_shared<Function>(NodeVector{avgpool_v1}, ParameterVector{delta});
ngraph::pass::Manager pass_manager;
pass_manager.register_pass<pass::Opset0Downgrade>();
pass_manager.run_passes(f);
auto avgpool_v0 = static_pointer_cast<op::v0::AvgPoolBackprop>(
f->get_results().at(0)->input_value(0).get_node_shared_ptr());
EXPECT_EQ(avgpool_v0->description(), "AvgPoolBackprop");
EXPECT_EQ(avgpool_v0->get_version(), 0);
EXPECT_EQ(avgpool_v0->get_forward_arg_shape(), (Shape{2, 2, 3, 3}));
}