Commit 81c0ef79 authored by Jaikrishnan Menon's avatar Jaikrishnan Menon Committed by Scott Cyphers

Fuse zero-padded convolution backprop filters (#828)

* CPU: Fuse zero-padded convolution backprop filters

* CPU: Add a testcase for zero-padded convolution backprop filters fusion
parent 1adb84a1
......@@ -337,7 +337,8 @@ static bool
const std::shared_ptr<ngraph::op::Constant>& pad_value_op,
const std::shared_ptr<ngraph::Node>& pad_input,
const std::shared_ptr<ngraph::op::Pad>& matched_pad,
const std::shared_ptr<ngraph::op::Convolution>& matched_conv,
const ngraph::CoordinateDiff& padding_below,
const ngraph::CoordinateDiff& padding_above,
size_t batch_index,
size_t channel_index)
{
......@@ -366,8 +367,7 @@ static bool
}
// Only match convolutions with no padding specification
if (matched_conv->get_padding_below() != ngraph::CoordinateDiff(2) ||
matched_conv->get_padding_above() != ngraph::CoordinateDiff(2))
if (padding_below != ngraph::CoordinateDiff(2) || padding_above != ngraph::CoordinateDiff(2))
{
return false;
}
......@@ -437,7 +437,8 @@ void ngraph::runtime::cpu::pass::CPUFusion::construct_zero_padded_reshaped_conv(
pad_value_op,
pattern_map[pad_input],
matched_pad,
matched_conv,
matched_conv->get_padding_below(),
matched_conv->get_padding_above(),
input_order[0],
input_order[1]))
{
......@@ -502,7 +503,8 @@ void ngraph::runtime::cpu::pass::CPUFusion::construct_zero_padded_conv()
pad_value_op,
pattern_map[pad_input],
matched_pad,
matched_conv,
matched_conv->get_padding_below(),
matched_conv->get_padding_above(),
0,
1))
{
......@@ -532,6 +534,73 @@ void ngraph::runtime::cpu::pass::CPUFusion::construct_zero_padded_conv()
this->add_matcher(std::make_shared<ngraph::pattern::Matcher>(conv_label, callback));
}
void ngraph::runtime::cpu::pass::CPUFusion::construct_zero_padded_conv_backprop_filters()
{
    // Pattern to match: a Pad (with a scalar pad value) feeding the data input
    // of a ConvolutionBackpropFilters node.
    auto data_label = std::make_shared<pattern::op::Label>(element::f32, Shape{1, 1, 1, 1});
    auto value_label = std::make_shared<pattern::op::Label>(element::f32, Shape{});
    auto pad_op = std::make_shared<op::Pad>(
        data_label, value_label, Shape{0, 0, 0, 0}, Shape{0, 0, 0, 0}, Shape{0, 0, 0, 0});
    auto pad_node_label =
        std::make_shared<pattern::op::Label>(pad_op, nullptr, NodeVector{pad_op});

    auto delta_label = std::make_shared<pattern::op::Label>(element::f32, Shape{1, 1, 1, 1});
    auto bprop = std::make_shared<op::ConvolutionBackpropFilters>(pad_node_label,
                                                                  Shape{1, 1, 3, 3},
                                                                  delta_label,
                                                                  Strides{1, 1},
                                                                  Strides{1, 1},
                                                                  CoordinateDiff{1, 1},
                                                                  CoordinateDiff{1, 1},
                                                                  Strides{1, 1});
    auto bprop_label = std::make_shared<pattern::op::Label>(bprop, nullptr, NodeVector{bprop});

    ngraph::pattern::gr_callback_fn callback =
        [data_label, value_label, pad_node_label, delta_label, bprop_label](
            pattern::Matcher& m) {
            auto nodes = m.get_pattern_map();

            auto pad_const = std::dynamic_pointer_cast<op::Constant>(nodes[value_label]);
            const auto& conv_bprop = std::dynamic_pointer_cast<op::ConvolutionBackpropFilters>(
                nodes[bprop_label]);
            const auto& pad_node = std::dynamic_pointer_cast<op::Pad>(nodes[pad_node_label]);

            // Bail out unless the pad is a fusable zero-pad and the convolution
            // carries no padding of its own (same check the other zero-pad
            // fusions use; batch dim = 0, channel dim = 1).
            if (!zero_padded_conv_consistency_check(m.match_root(),
                                                    pad_const,
                                                    nodes[data_label],
                                                    pad_node,
                                                    conv_bprop->get_padding_below_forward(),
                                                    conv_bprop->get_padding_above_forward(),
                                                    0,
                                                    1))
            {
                return false;
            }

            // Fold the explicit spatial padding (dims 2 and 3) into the
            // convolution's own padding attributes.
            CoordinateDiff below{
                static_cast<CoordinateDiff::value_type>(pad_node->get_padding_below().at(2)),
                static_cast<CoordinateDiff::value_type>(pad_node->get_padding_below().at(3))};
            CoordinateDiff above{
                static_cast<CoordinateDiff::value_type>(pad_node->get_padding_above().at(2)),
                static_cast<CoordinateDiff::value_type>(pad_node->get_padding_above().at(3))};

            auto fused = std::make_shared<op::ConvolutionBackpropFilters>(
                nodes[data_label],
                conv_bprop->get_filters_shape(),
                nodes[delta_label],
                conv_bprop->get_window_movement_strides_forward(),
                conv_bprop->get_window_dilation_strides_forward(),
                below,
                above,
                conv_bprop->get_data_dilation_strides_forward());

            ngraph::replace_node(m.match_root(), fused);
            return true;
        };

    this->add_matcher(std::make_shared<ngraph::pattern::Matcher>(bprop_label, callback));
}
void ngraph::runtime::cpu::pass::CPUFusion::construct_sigmoid()
{
//construct variance
......
......@@ -43,6 +43,7 @@ public:
construct_fprop_bn();
construct_zero_padded_reshaped_conv();
construct_zero_padded_conv();
construct_zero_padded_conv_backprop_filters();
construct_sigmoid();
construct_sigmoid_bprop();
construct_conv_bias();
......@@ -59,6 +60,7 @@ private:
void construct_sigmoid_bprop();
void construct_zero_padded_reshaped_conv();
void construct_zero_padded_conv();
void construct_zero_padded_conv_backprop_filters();
void construct_batch_norm_relu();
void construct_conv_relu();
};
......@@ -458,6 +458,38 @@ TEST(cpu_fusion, non_zero_padded_conv)
ASSERT_EQ(count_ops_of_type<op::Pad>(func), 1);
}
TEST(cpu_fusion, zero_padded_conv_backprop_filters)
{
    auto data = make_shared<op::Parameter>(element::f32, Shape{1, 1, 2, 2});
    auto delta = make_shared<op::Parameter>(element::f32, Shape{1, 1, 2, 2});
    auto zero = op::Constant::create<float>(element::f32, Shape{}, std::vector<float>{0.0f});

    // Asymmetric zero-padding on the spatial dims of the data input; the
    // convolution itself carries no padding, so the pass can absorb the Pad.
    auto padded = make_shared<op::Pad>(
        data, zero, Shape{0, 0, 0, 1}, Shape{0, 0, 1, 0}, Shape{0, 0, 0, 0});
    auto bprop = make_shared<op::ConvolutionBackpropFilters>(padded,
                                                             Shape{1, 1, 2, 2},
                                                             delta,
                                                             Strides{1, 1},
                                                             Strides{1, 1},
                                                             CoordinateDiff{0, 0},
                                                             CoordinateDiff{0, 0},
                                                             Strides{1, 1});
    auto func = make_shared<Function>(bprop, op::ParameterVector{data, delta});

    // Before compilation the graph still holds the explicit Pad.
    ASSERT_EQ(count_ops_of_type<op::Pad>(func), 1);

    auto manager = runtime::Manager::get("CPU");
    auto external = manager->compile(func);
    auto backend = manager->allocate_backend();
    auto cf = backend->make_call_frame(external);

    // CPU compilation runs CPUFusion; the Pad must have been fused away.
    ASSERT_EQ(count_ops_of_type<op::Pad>(func), 0);
}
TEST(cpu_fusion, fuse_conv_bias)
{
pass::Manager pass_manager;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment