Commit 1ce31a49 authored by Tomasz Socha, committed by Michał Karzyński

[SPEC] Add v1:Convolution operator (#3636)

parent ac4676ff
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
...@@ -22,6 +22,7 @@ ...@@ -22,6 +22,7 @@
#include "ngraph/code_writer.hpp" #include "ngraph/code_writer.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/avg_pool.hpp" #include "ngraph/op/avg_pool.hpp"
#include "ngraph/op/convolution.hpp"
#include "ngraph/op/gather.hpp" #include "ngraph/op/gather.hpp"
#include "ngraph/op/max_pool.hpp" #include "ngraph/op/max_pool.hpp"
#include "ngraph/op/pad.hpp" #include "ngraph/op/pad.hpp"
...@@ -109,10 +110,7 @@ namespace ngraph ...@@ -109,10 +110,7 @@ namespace ngraph
class QuantizedConvolution; class QuantizedConvolution;
class GroupConvolution; class GroupConvolution;
class GroupConvolutionBias; class GroupConvolutionBias;
class Convolution;
class ConvolutionBackpropFilters;
class DeconvolutionBias; class DeconvolutionBias;
class ConvolutionBackpropData;
class QuantizedConvolutionBias; class QuantizedConvolutionBias;
class QuantizedConvolutionBiasAdd; class QuantizedConvolutionBiasAdd;
class QuantizedConvolutionBiasSignedAdd; class QuantizedConvolutionBiasSignedAdd;
......
...@@ -964,6 +964,8 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js) ...@@ -964,6 +964,8 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
break; break;
} }
case OP_TYPEID::Convolution: case OP_TYPEID::Convolution:
{
if (op_version == 0)
{ {
auto window_movement_strides = auto window_movement_strides =
node_js.at("window_movement_strides").get<vector<size_t>>(); node_js.at("window_movement_strides").get<vector<size_t>>();
...@@ -988,7 +990,7 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js) ...@@ -988,7 +990,7 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
if (data_dilation_strides.empty()) if (data_dilation_strides.empty())
{ {
node = make_shared<op::Convolution>(args[0], node = make_shared<op::v0::Convolution>(args[0],
args[1], args[1],
window_movement_strides, window_movement_strides,
window_dilation_strides, window_dilation_strides,
...@@ -997,8 +999,8 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js) ...@@ -997,8 +999,8 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
} }
else else
{ {
node = node = make_shared<op::v0::Convolution>(
make_shared<op::Convolution>(args[0], args[0],
args[1], args[1],
window_movement_strides, window_movement_strides,
window_dilation_strides, window_dilation_strides,
...@@ -1007,9 +1009,24 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js) ...@@ -1007,9 +1009,24 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
data_dilation_strides.get<std::vector<size_t>>(), data_dilation_strides.get<std::vector<size_t>>(),
pad_type); pad_type);
} }
}
if (op_version == 1)
{
auto strides = node_js.at("strides").get<vector<size_t>>();
auto dilations = node_js.at("dilations").get<vector<size_t>>();
auto pads_begin = node_js.at("pads_begin").get<vector<std::ptrdiff_t>>();
auto pads_end = node_js.at("pads_end").get<vector<std::ptrdiff_t>>();
op::PadType auto_pad = read_pad_type(node_js);
node = make_shared<op::v1::Convolution>(
args[0], args[1], strides, pads_begin, pads_end, dilations, auto_pad);
}
break; break;
} }
case OP_TYPEID::ConvolutionBackpropData: case OP_TYPEID::ConvolutionBackpropData:
{
if (op_version == 0)
{ {
auto data_batch_shape = node_js.at("data_batch_shape").get<vector<size_t>>(); auto data_batch_shape = node_js.at("data_batch_shape").get<vector<size_t>>();
auto window_movement_strides_forward = auto window_movement_strides_forward =
...@@ -1022,7 +1039,7 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js) ...@@ -1022,7 +1039,7 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
node_js.at("padding_above_forward").get<vector<std::ptrdiff_t>>(); node_js.at("padding_above_forward").get<vector<std::ptrdiff_t>>();
auto data_dilation_strides_forward = auto data_dilation_strides_forward =
node_js.at("data_dilation_strides_forward").get<vector<size_t>>(); node_js.at("data_dilation_strides_forward").get<vector<size_t>>();
node = make_shared<op::ConvolutionBackpropData>(data_batch_shape, node = make_shared<op::v0::ConvolutionBackpropData>(data_batch_shape,
args[0], args[0],
args[1], args[1],
window_movement_strides_forward, window_movement_strides_forward,
...@@ -1030,9 +1047,22 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js) ...@@ -1030,9 +1047,22 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
padding_below_forward, padding_below_forward,
padding_above_forward, padding_above_forward,
data_dilation_strides_forward); data_dilation_strides_forward);
}
if (op_version == 1)
{
auto data_batch_shape = node_js.at("data_batch_shape").get<vector<size_t>>();
auto strides = node_js.at("strides").get<vector<size_t>>();
auto dilations = node_js.at("dilations").get<vector<size_t>>();
auto pads_begin = node_js.at("pads_begin").get<vector<std::ptrdiff_t>>();
auto pads_end = node_js.at("pads_end").get<vector<std::ptrdiff_t>>();
node = make_shared<op::v1::ConvolutionBackpropData>(
data_batch_shape, args[0], args[1], strides, dilations, pads_begin, pads_end);
}
break; break;
} }
case OP_TYPEID::ConvolutionBackpropFilters: case OP_TYPEID::ConvolutionBackpropFilters:
{
if (op_version == 0)
{ {
auto filters_shape = node_js.at("filters_shape").get<vector<size_t>>(); auto filters_shape = node_js.at("filters_shape").get<vector<size_t>>();
auto window_movement_strides_forward = auto window_movement_strides_forward =
...@@ -1045,7 +1075,8 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js) ...@@ -1045,7 +1075,8 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
node_js.at("padding_above_forward").get<vector<std::ptrdiff_t>>(); node_js.at("padding_above_forward").get<vector<std::ptrdiff_t>>();
auto data_dilation_strides_forward = auto data_dilation_strides_forward =
node_js.at("data_dilation_strides_forward").get<vector<size_t>>(); node_js.at("data_dilation_strides_forward").get<vector<size_t>>();
node = make_shared<op::ConvolutionBackpropFilters>(args[0], node =
make_shared<op::v0::ConvolutionBackpropFilters>(args[0],
filters_shape, filters_shape,
args[1], args[1],
window_movement_strides_forward, window_movement_strides_forward,
...@@ -1053,6 +1084,17 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js) ...@@ -1053,6 +1084,17 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
padding_below_forward, padding_below_forward,
padding_above_forward, padding_above_forward,
data_dilation_strides_forward); data_dilation_strides_forward);
}
if (op_version == 1)
{
auto filters_shape = node_js.at("filters_shape").get<vector<size_t>>();
auto strides = node_js.at("strides").get<vector<size_t>>();
auto dilations = node_js.at("dilations").get<vector<size_t>>();
auto pads_begin = node_js.at("pads_begin").get<vector<std::ptrdiff_t>>();
auto pads_end = node_js.at("pads_end").get<vector<std::ptrdiff_t>>();
node = make_shared<op::v1::ConvolutionBackpropFilters>(
args[0], filters_shape, args[1], strides, dilations, pads_begin, pads_end);
}
break; break;
} }
case OP_TYPEID::ConvolutionBias: case OP_TYPEID::ConvolutionBias:
...@@ -2483,35 +2525,71 @@ json JSONSerializer::serialize_node(const Node& n) ...@@ -2483,35 +2525,71 @@ json JSONSerializer::serialize_node(const Node& n)
} }
case OP_TYPEID::Convolution: case OP_TYPEID::Convolution:
{ {
auto tmp = dynamic_cast<const op::Convolution*>(&n); if (op_version == 0)
{
auto tmp = dynamic_cast<const op::v0::Convolution*>(&n);
node["window_movement_strides"] = tmp->get_window_movement_strides(); node["window_movement_strides"] = tmp->get_window_movement_strides();
node["window_dilation_strides"] = tmp->get_window_dilation_strides(); node["window_dilation_strides"] = tmp->get_window_dilation_strides();
node["padding_below"] = tmp->get_padding_below(); node["padding_below"] = tmp->get_padding_below();
node["padding_above"] = tmp->get_padding_above(); node["padding_above"] = tmp->get_padding_above();
node["data_dilation_strides"] = tmp->get_data_dilation_strides(); node["data_dilation_strides"] = tmp->get_data_dilation_strides();
node["pad_type"] = tmp->get_pad_type(); node["pad_type"] = tmp->get_pad_type();
}
if (op_version == 1)
{
auto tmp = dynamic_cast<const op::v1::Convolution*>(&n);
node["strides"] = tmp->get_strides();
node["dilations"] = tmp->get_dilations();
node["pads_begin"] = tmp->get_pads_begin();
node["pads_end"] = tmp->get_pads_end();
node["auto_pad"] = tmp->get_auto_pad();
}
break; break;
} }
case OP_TYPEID::ConvolutionBackpropData: case OP_TYPEID::ConvolutionBackpropData:
{ {
auto tmp = dynamic_cast<const op::ConvolutionBackpropData*>(&n); if (op_version == 0)
{
auto tmp = dynamic_cast<const op::v0::ConvolutionBackpropData*>(&n);
node["data_batch_shape"] = tmp->get_data_batch_shape(); node["data_batch_shape"] = tmp->get_data_batch_shape();
node["window_movement_strides_forward"] = tmp->get_window_movement_strides_forward(); node["window_movement_strides_forward"] = tmp->get_window_movement_strides_forward();
node["window_dilation_strides_forward"] = tmp->get_window_dilation_strides_forward(); node["window_dilation_strides_forward"] = tmp->get_window_dilation_strides_forward();
node["padding_below_forward"] = tmp->get_padding_below_forward(); node["padding_below_forward"] = tmp->get_padding_below_forward();
node["padding_above_forward"] = tmp->get_padding_above_forward(); node["padding_above_forward"] = tmp->get_padding_above_forward();
node["data_dilation_strides_forward"] = tmp->get_data_dilation_strides_forward(); node["data_dilation_strides_forward"] = tmp->get_data_dilation_strides_forward();
}
if (op_version == 1)
{
auto tmp = dynamic_cast<const op::v1::ConvolutionBackpropData*>(&n);
node["data_batch_shape"] = tmp->get_data_batch_shape();
node["strides"] = tmp->get_strides();
node["dilations"] = tmp->get_dilations();
node["pads_begin"] = tmp->get_pads_begin();
node["pads_end"] = tmp->get_pads_end();
}
break; break;
} }
case OP_TYPEID::ConvolutionBackpropFilters: case OP_TYPEID::ConvolutionBackpropFilters:
{ {
auto tmp = dynamic_cast<const op::ConvolutionBackpropFilters*>(&n); if (op_version == 0)
{
auto tmp = dynamic_cast<const op::v0::ConvolutionBackpropFilters*>(&n);
node["filters_shape"] = tmp->get_filters_shape(); node["filters_shape"] = tmp->get_filters_shape();
node["window_movement_strides_forward"] = tmp->get_window_movement_strides_forward(); node["window_movement_strides_forward"] = tmp->get_window_movement_strides_forward();
node["window_dilation_strides_forward"] = tmp->get_window_dilation_strides_forward(); node["window_dilation_strides_forward"] = tmp->get_window_dilation_strides_forward();
node["padding_below_forward"] = tmp->get_padding_below_forward(); node["padding_below_forward"] = tmp->get_padding_below_forward();
node["padding_above_forward"] = tmp->get_padding_above_forward(); node["padding_above_forward"] = tmp->get_padding_above_forward();
node["data_dilation_strides_forward"] = tmp->get_data_dilation_strides_forward(); node["data_dilation_strides_forward"] = tmp->get_data_dilation_strides_forward();
}
if (op_version == 1)
{
auto tmp = dynamic_cast<const op::v1::ConvolutionBackpropFilters*>(&n);
node["filters_shape"] = tmp->get_filters_shape();
node["strides"] = tmp->get_strides();
node["dilations"] = tmp->get_dilations();
node["pads_begin"] = tmp->get_pads_begin();
node["pads_end"] = tmp->get_pads_end();
}
break; break;
} }
case OP_TYPEID::ConvolutionBias: case OP_TYPEID::ConvolutionBias:
......
...@@ -69,13 +69,14 @@ set(SRC ...@@ -69,13 +69,14 @@ set(SRC
node_input_output.cpp node_input_output.cpp
nop_elimination.cpp nop_elimination.cpp
op.cpp op.cpp
opset_pass/convolution_opset_pass.cpp
opset_pass/gather_opset_pass.cpp opset_pass/gather_opset_pass.cpp
opset_pass/pad_opset_pass.cpp opset_pass/pad_opset_pass.cpp
opset_pass/poolings_opset_pass.cpp
opset_pass/product_opset_pass.cpp opset_pass/product_opset_pass.cpp
opset_pass/reverse_opset_pass.cpp opset_pass/reverse_opset_pass.cpp
opset_pass/softmax_opset_pass.cpp opset_pass/softmax_opset_pass.cpp
opset_pass/sum_opset_pass.cpp opset_pass/sum_opset_pass.cpp
opset_pass/poolings_opset_pass.cpp
partial_shape.cpp partial_shape.cpp
pass.cpp pass.cpp
pass_liveness.cpp pass_liveness.cpp
......
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/opset1_upgrade.hpp"
#include "util/test_control.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
// Verifies that the Opset1Upgrade pass rewrites a v0::Convolution node into an
// equivalent v1::Convolution, carrying strides/dilations/pads/auto_pad across.
TEST(upgrade_pass, opset1_convolution_pass)
{
    // Small f32 NCHW input and a matching 3x3 filter set.
    auto data = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6, 9});
    auto filters = make_shared<op::Parameter>(element::f32, Shape{1, 3, 3, 3});
    CoordinateDiff pads_begin{0, 0};
    CoordinateDiff pads_end{0, 0};
    Strides strides{1, 1};
    Strides dilations{1, 1};
    Strides data_dilations_strides{1, 1};
    op::PadType pad_type = op::PadType::EXPLICIT;

    // v0 ctor order: window_movement_strides, window_dilation_strides,
    // padding_below, padding_above, data_dilation_strides, pad_type —
    // here named with the v1 terms they are expected to map onto.
    auto convolution_v0 = make_shared<op::v0::Convolution>(
        data, filters, strides, dilations, pads_begin, pads_end, data_dilations_strides, pad_type);
    auto result = make_shared<op::Result>(convolution_v0);
    auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data, filters});

    ngraph::pass::Manager pass_manager;
    pass_manager.register_pass<pass::Opset1Upgrade>();
    pass_manager.run_passes(f);

    auto upgraded_result = f->get_results().at(0);
    auto node = upgraded_result->input(0).get_source_output().get_node_shared_ptr();
    // Checked downcast: with static_pointer_cast, a pass that failed to upgrade
    // the node would yield a bogus pointer and make the EXPECTs below undefined
    // behavior; dynamic_pointer_cast + ASSERT_TRUE fails cleanly instead.
    auto convolution_v1_node = dynamic_pointer_cast<op::v1::Convolution>(node);
    ASSERT_TRUE(convolution_v1_node);

    EXPECT_EQ(convolution_v1_node->description(), "Convolution");
    EXPECT_EQ(convolution_v1_node->get_version(), 1);
    EXPECT_EQ(convolution_v1_node->get_pads_begin(), pads_begin);
    EXPECT_EQ(convolution_v1_node->get_pads_end(), pads_end);
    EXPECT_EQ(convolution_v1_node->get_strides(), strides);
    EXPECT_EQ(convolution_v1_node->get_auto_pad(), pad_type);
    EXPECT_EQ(convolution_v1_node->get_dilations(), dilations);
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment