Commit a1d78033 authored by Nishant Patel, committed by Robert Kimball

Fuse conv+bias bprop debug (#1038)

parent d051f5fa
@@ -753,6 +753,61 @@ void ngraph::runtime::cpu::pass::CPUFusion::construct_conv_bias()
    this->add_matcher(m);
}

void ngraph::runtime::cpu::pass::CPUFusion::construct_conv_bias_bprop()
{
    Shape shape{2, 2, 1, 1};
    auto data_batch = std::make_shared<pattern::op::Label>(element::f32, shape);
    auto delta = std::make_shared<pattern::op::Label>(element::f32, shape);
    auto conv_bprop_filter = std::make_shared<op::ConvolutionBackpropFilters>(data_batch,
                                                                              shape,
                                                                              delta,
                                                                              Strides{1, 1},
                                                                              Strides{1, 1},
                                                                              CoordinateDiff{0, 0},
                                                                              CoordinateDiff{0, 0},
                                                                              Strides{1, 1});

    // If the matched delta also feeds an op::Sum (the bias gradient), fuse the filter
    // backprop and that sum into a single ConvolutionBiasBackpropFiltersBias node.
    ngraph::pattern::graph_rewrite_callback callback = [data_batch, delta](pattern::Matcher& m) {
        NGRAPH_DEBUG << "In callback for construct_conv_bias_bprop against node = "
                     << m.get_match_root()->get_name();

        auto pattern_map = m.get_pattern_map();
        auto conv_bprop =
            std::dynamic_pointer_cast<op::ConvolutionBackpropFilters>(m.get_match_root());

        for (auto delta_user : pattern_map[delta]->get_users())
        {
            if (std::dynamic_pointer_cast<op::Sum>(delta_user))
            {
                auto bias_shape = delta_user->get_output_shape(0);
                auto conv_bias_bprop = std::make_shared<op::ConvolutionBiasBackpropFiltersBias>(
                    pattern_map[data_batch],
                    conv_bprop->get_filters_shape(),
                    bias_shape,
                    pattern_map[delta],
                    conv_bprop->get_window_movement_strides_forward(),
                    conv_bprop->get_window_dilation_strides_forward(),
                    conv_bprop->get_padding_below_forward(),
                    conv_bprop->get_padding_above_forward(),
                    conv_bprop->get_data_dilation_strides_forward());

                // The fused op produces d_filters as output 0 and d_bias as output 1.
                auto goe1 = std::make_shared<op::GetOutputElement>(conv_bias_bprop, 0);
                auto goe2 = std::make_shared<op::GetOutputElement>(conv_bias_bprop, 1);
                NGRAPH_DEBUG << "Replacing " << m.get_match_root()->get_name()
                             << " with ConvolutionBiasBackpropFiltersBias";
                ngraph::replace_node(m.get_match_root(), goe1);
                NGRAPH_DEBUG << "Replacing bias and adding it as a second o/p of "
                                "ConvolutionBiasBackpropFiltersBias";
                ngraph::replace_node(delta_user, goe2);
                return true;
            }
        }
        return false;
    };

    auto m = std::make_shared<ngraph::pattern::Matcher>(conv_bprop_filter, callback);
    this->add_matcher(m);
}
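
For reference, a minimal sketch (not part of the commit) of the unfused graph shape this matcher rewrites, built with the same ops used above. The op::Sum reduction axes are an illustrative assumption; the callback accepts any op::Sum consuming delta and takes its output shape as the bias shape.

// Sketch: d_filters via ConvolutionBackpropFilters, d_bias via a Sum over delta.
Shape shape{2, 2, 1, 1};
auto data_batch = std::make_shared<op::Parameter>(element::f32, shape);
auto delta = std::make_shared<op::Parameter>(element::f32, shape);
auto d_filters = std::make_shared<op::ConvolutionBackpropFilters>(data_batch,
                                                                  shape,
                                                                  delta,
                                                                  Strides{1, 1},
                                                                  Strides{1, 1},
                                                                  CoordinateDiff{0, 0},
                                                                  CoordinateDiff{0, 0},
                                                                  Strides{1, 1});
auto d_bias = std::make_shared<op::Sum>(delta, AxisSet{0, 2, 3}); // reduce batch/spatial axes
auto df = std::make_shared<Function>(NodeVector{d_filters, d_bias},
                                     op::ParameterVector{data_batch, delta});

pass::Manager pass_manager;
pass_manager.register_pass<runtime::cpu::pass::CPUFusion>();
pass_manager.run_passes(df);
// After the rewrite, d_filters and d_bias are GetOutputElement outputs 0 and 1 of the fused op.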

void ngraph::runtime::cpu::pass::CPUFusion::construct_batch_norm_relu()
{
    auto input_shape = Shape{1, 2, 2, 2};
...
@@ -59,7 +59,7 @@ public:
        construct_zero_padded_conv_backprop_filters();
        construct_sigmoid();
        construct_sigmoid_bprop();
        construct_conv_bias_bprop();
        construct_batch_norm_relu();
        construct_batch_norm_relu_global_stats();
        construct_conv_relu();
@@ -76,6 +76,7 @@ private:
    void construct_matmul();
    void construct_matmulbias();
    void construct_conv_bias();
    void construct_conv_bias_bprop();
    void construct_fprop_bn();
    void construct_sigmoid();
    void construct_sigmoid_bprop();
...
@@ -650,7 +650,6 @@ TEST(cpu_fusion, conv_bias_bprop_n1c1h3w3)
    auto df = make_shared<Function>(
        NodeVector{d_data, d_weights, d_bias},
        op::ParameterVector{conv_test.data, conv_test.weights, conv_test.bias, conv_test.delta});
    backend->call(
        df,
        {conv_test.d_data_val, conv_test.d_weights_val, conv_test.d_bias_val},
@@ -664,6 +663,36 @@
        test::all_close(conv_test.expected_d_bias_val, read_vector<float>(conv_test.d_bias_val)));
}

TEST(cpu_fusion, conv_bias_bprop)
{
    // Forward graph: convolution plus a bias broadcast to the convolution output shape.
    Shape shape{2, 2, 1, 1};
    auto data_batch = std::make_shared<op::Parameter>(element::f32, shape);
    auto filters = std::make_shared<op::Parameter>(element::f32, shape);
    auto delta = std::make_shared<op::Parameter>(element::f32, shape);
    auto bias = make_shared<op::Parameter>(element::f32, Shape{});
    auto pbroadcast = std::make_shared<op::Broadcast>(bias, shape, AxisSet{0, 1, 2, 3});
    auto conv = std::make_shared<op::Convolution>(data_batch, filters);
    auto conv_bias = std::make_shared<op::Add>(conv, pbroadcast);

    pass::Manager pass_manager;
    pass_manager.register_pass<runtime::cpu::pass::CPUFusion>();
    pass_manager.register_pass<pass::VisualizeTree>("conv_bias_bprop_fusion");
    auto f = make_shared<Function>(conv_bias, op::ParameterVector{data_batch, filters, bias});

    // Differentiate w.r.t. data, filters and bias, then run CPUFusion on the backprop function.
    ngraph::autodiff::Adjoints adjoints(NodeVector{conv_bias}, NodeVector{delta});
    auto d_data = adjoints.backprop_node(data_batch);
    auto d_weights = adjoints.backprop_node(filters);
    auto d_bias = adjoints.backprop_node(bias);
    auto df = make_shared<Function>(NodeVector{d_data, d_weights, d_bias},
                                    op::ParameterVector{data_batch, filters, bias, delta});

    pass_manager.run_passes(df);
    size_t ccg = count_ops_of_type<op::ConvolutionBiasBackpropFiltersBias>(df);
    ASSERT_EQ(ccg, 1);
}
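
As a follow-on (not part of this commit), the fused backprop function could also be executed numerically, following the backend->call(...) usage from conv_bias_bprop_n1c1h3w3 above. Backend::create, create_tensor, and copy_data below are assumptions about this nGraph revision's backend and test-helper APIs, and the input values are arbitrary.

// Sketch only: run df on the CPU backend (API calls assumed, see note above).
auto backend = runtime::Backend::create("CPU");

// Inputs: data, filters, bias, delta (matching df's ParameterVector order).
auto data_t = backend->create_tensor(element::f32, shape);
auto filters_t = backend->create_tensor(element::f32, shape);
auto bias_t = backend->create_tensor(element::f32, Shape{});
auto delta_t = backend->create_tensor(element::f32, shape);
copy_data(data_t, std::vector<float>{1, 2, 3, 4});
copy_data(filters_t, std::vector<float>{1, 1, 1, 1});
copy_data(bias_t, std::vector<float>{0});
copy_data(delta_t, std::vector<float>{1, 1, 1, 1});

// Outputs: d_data, d_weights, d_bias (matching df's NodeVector order).
auto d_data_t = backend->create_tensor(element::f32, shape);
auto d_weights_t = backend->create_tensor(element::f32, shape);
auto d_bias_t = backend->create_tensor(element::f32, Shape{});

backend->call(df, {d_data_t, d_weights_t, d_bias_t}, {data_t, filters_t, bias_t, delta_t});
auto d_bias_out = read_vector<float>(d_bias_t); // bias gradient computed by the fused kernel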

TEST(cpu_fusion, sigmoid_fprop_fusion)
{
    pass::Manager pass_manager;
...