Commit 7a3f6480 authored by Ewa Tusień, committed by Sang Ik Lee

[ONNX] Upgrade Pad op to version 11 (#4082)

* Added GatherElements op to ONNX importer.

* Changed attributes to inputs.

* Undo unnecessary changes.

* Added Pad version 11.

* Added version 11 of the Pad op.

* Changed axis element type.

* Added optional input.

* Code formatting.

* Added test.

* Update onnx_import.in.cpp

* Removed debug code.

* Added function for setting mode.

* Added support for pads argument provided as Constant.

* Code refactoring.

* Code refactoring.
Co-authored-by: Scott Cyphers <diyessi@users.noreply.github.com>
Co-authored-by: Sang Ik Lee <sang.ik.lee@intel.com>
parent 09abff6d
@@ -18,13 +18,42 @@
#include "default_opset.hpp"
#include "exceptions.hpp"
#include "ngraph/builder/split.hpp"
#include "ngraph/coordinate_diff.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/convert.hpp"
#include "ngraph/op/pad.hpp"
#include "ngraph/shape.hpp"
#include "pad.hpp"
#include "utils/convpool.hpp"
namespace
{
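// Shared by the set_1 and set_11 Pad importers below: maps the ONNX "mode"
// attribute string to the corresponding ngraph::op::PadMode.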
ngraph::op::PadMode get_pad_mode(std::string mode)
{
ngraph::op::PadMode pad_mode;
if (mode == "constant")
{
pad_mode = ngraph::op::PadMode::CONSTANT;
}
else if (mode == "reflect")
{
pad_mode = ngraph::op::PadMode::REFLECT;
}
else if (mode == "edge")
{
pad_mode = ngraph::op::PadMode::EDGE;
}
else
{
throw ngraph::onnx_import::error::InvalidArgument("Unsupported padding mode: [" + mode +
"]");
}
return pad_mode;
}
}
namespace ngraph
{
namespace onnx_import
@@ -39,24 +68,10 @@ namespace ngraph
const Shape& data_shape = data->get_shape();
double value = node.get_attribute_value<double>("value", 0);
std::string mode = node.get_attribute_value<std::string>("mode", "constant");
ngraph::op::PadMode pad_mode;
if (mode == "constant")
{
pad_mode = ngraph::op::PadMode::CONSTANT;
}
else if (mode == "reflect")
{
pad_mode = ngraph::op::PadMode::REFLECT;
}
else if (mode == "edge")
{
pad_mode = ngraph::op::PadMode::EDGE;
}
else
{
throw error::InvalidArgument("Unsupported padding mode: [" + mode + "]");
}
const std::string mode =
node.get_attribute_value<std::string>("mode", "constant");
ngraph::op::PadMode pad_mode = get_pad_mode(mode);
auto paddings = convpool::get_pads(node, data_shape);
ngraph::CoordinateDiff padding_below = paddings.first;
ngraph::CoordinateDiff padding_above = paddings.second;
@@ -73,6 +88,64 @@ namespace ngraph
}
} // namespace set_1
namespace set_11
{
NodeVector pad(const Node& node)
{
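// Pad-11 moved the padding amounts from the "pads" attribute to a second
// input, and the fill value to an optional third input ("constant_value").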
auto data = node.get_ng_inputs().at(0);
auto pads = node.get_ng_inputs().at(1);
std::shared_ptr<ngraph::Node> values;
std::shared_ptr<ngraph::Node> padding_begin;
std::shared_ptr<ngraph::Node> padding_end;
if (node.get_ng_inputs().size() == 3)
{
values = node.get_ng_inputs().at(2);
}
else
{
values = default_opset::Constant::create(
data->get_element_type(), ngraph::Shape{}, {0});
}
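// ONNX stores "pads" flat as [x1_begin, x2_begin, ..., x1_end, x2_end, ...].
// A constant input can be split into begin/end halves at import time;
// otherwise the tensor is split in two along axis 0 at runtime.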
if (pads->is_constant())
{
std::vector<std::int64_t> pads_vector =
ngraph::as_type_ptr<default_opset::Constant>(pads)
->get_vector<std::int64_t>();
std::size_t const half_size = pads_vector.size() / 2;
std::vector<std::int64_t> padding_begin_values(
pads_vector.begin(), pads_vector.begin() + half_size);
std::vector<std::int64_t> padding_end_values(
pads_vector.begin() + half_size, pads_vector.end());
padding_begin = default_opset::Constant::create(
element::i64, ngraph::Shape{half_size}, padding_begin_values);
padding_end = default_opset::Constant::create(
element::i64, ngraph::Shape{half_size}, padding_end_values);
}
else
{
auto axis =
default_opset::Constant::create(element::i64, ngraph::Shape{}, {0});
NodeVector padding = builder::split(pads, 2, 0);
padding_begin =
std::make_shared<default_opset::Convert>(padding.at(0), element::i64);
padding_end =
std::make_shared<default_opset::Convert>(padding.at(1), element::i64);
}
const std::string mode =
node.get_attribute_value<std::string>("mode", "constant");
ngraph::op::PadMode pad_mode = get_pad_mode(mode);
return {std::make_shared<default_opset::Pad>(
data, padding_begin, padding_end, values, pad_mode)};
}
} // namespace set_11
} // namespace op
......
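For illustration only (not part of this commit): a standalone C++ sketch of the begin/end split that the set_11 importer performs when "pads" arrives as a Constant. The helper name split_pads is made up for this example, and the values match the pad_constant.prototxt test model further down.

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

// ONNX Pad-11 packs paddings as [x1_begin, x2_begin, ..., x1_end, x2_end, ...];
// cutting the flat vector in half yields the per-axis begin and end paddings.
std::pair<std::vector<std::int64_t>, std::vector<std::int64_t>>
    split_pads(const std::vector<std::int64_t>& pads)
{
    const std::size_t half_size = pads.size() / 2;
    std::vector<std::int64_t> padding_begin(pads.begin(), pads.begin() + half_size);
    std::vector<std::int64_t> padding_end(pads.begin() + half_size, pads.end());
    return {padding_begin, padding_end};
}

int main()
{
    // Same pads as the test model: two zeros are prepended along axis 1.
    const auto halves = split_pads({0, 2, 0, 0});
    for (auto v : halves.first)
        std::cout << v << ' '; // prints "0 2 " (padding_begin)
    std::cout << "| ";
    for (auto v : halves.second)
        std::cout << v << ' '; // prints "0 0 " (padding_end)
    std::cout << '\n';
}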
@@ -31,6 +31,12 @@ namespace ngraph
} // namespace set_1
namespace set_11
{
NodeVector pad(const Node& node);
} // namespace set_11
} // namespace op
} // namespace onnx_import
......
@@ -315,6 +315,7 @@ namespace ngraph
REGISTER_OPERATOR("Or", 1, logical_or);
REGISTER_OPERATOR("OneHot", 1, onehot);
REGISTER_OPERATOR("Pad", 1, pad);
REGISTER_OPERATOR("Pad", 11, pad);
REGISTER_OPERATOR("Pow", 1, pow);
REGISTER_OPERATOR("PRelu", 1, prelu);
REGISTER_OPERATOR("QLinearConv", 1, quant_conv);
......
ir_version: 6
producer_name: "backend-test"
graph {
node {
input: "x"
input: "pads"
output: "y"
op_type: "Pad"
attribute {
name: "mode"
s: "constant"
type: STRING
}
}
name: "test_constant_pad"
initializer {
dims: 4
data_type: 7
int64_data: 0
int64_data: 2
int64_data: 0
int64_data: 0
name: "pads"
}
input {
name: "x"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 3
}
dim {
dim_value: 2
}
}
}
}
}
output {
name: "y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 3
}
dim {
dim_value: 4
}
}
}
}
}
}
opset_import {
version: 11
}
@@ -1832,6 +1832,19 @@ NGRAPH_TEST(onnx_${BACKEND_NAME}, model_gatherND_float)
test_case.run();
}
NGRAPH_TEST(onnx_${BACKEND_NAME}, model_pad_constant)
{
const auto pad_fn = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/pad_constant.prototxt"));
auto test_case = ngraph::test::NgraphTestCase(pad_fn, "${BACKEND_NAME}");
test_case.add_input<float>({1.f, 1.2f, 2.3f, 3.4f, 4.5f, 5.7f});
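// Pads [0, 2, 0, 0] prepend two zero columns on axis 1: the 3x2 input becomes a 3x4 output.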
test_case.add_expected_output<float>(
Shape{3, 4}, {0.f, 0.f, 1.f, 1.2f, 0.f, 0.f, 2.3f, 3.4f, 0.f, 0.f, 4.5f, 5.7f});
test_case.run();
}
NGRAPH_TEST(onnx_${BACKEND_NAME}, model_reciprocal)
{
const auto reciprocal_fn = onnx_import::import_onnx_model(
......