Commit 81b492d4 authored by Mateusz Bencer, committed by Scott Cyphers

[Spec] Implement Pad:v1 (#3574)

* Pad:v1 was introduced

* Fixed problem with forward declaration

* Code review remarks introduced

* Additional asserts and unit tests for Pad:v1 introduced

* Handled optional arg_pad_value input

* Fixed unit-test messages and applied clang style

* Styles applied, rank checking improved

* Updated changes.md

* Revert "Updated changes.md"

This reverts commit 8ee16e580705275ee0719648d5251021354e4d84.

* Clang warning fixed
parent 779a9300
@@ -24,61 +24,134 @@ namespace ngraph
{
    namespace op
    {
        namespace v0
        {
            /// \brief Generic padding operation.
            class Pad : public Op
            {
            public:
                NGRAPH_API
                static constexpr NodeTypeInfo type_info{"Pad", 0};
                const NodeTypeInfo& get_type_info() const override { return type_info; }
                /// \brief Constructs a generic padding operation.
                Pad() = default;
                /// \brief Constructs a generic padding operation.
                ///
                /// \param arg The node producing the input tensor to be padded.
                /// \param arg_pad_value The node producing the scalar value
                ///        to be inserted for padding.
                /// \param padding_below The padding-below widths.
                /// \param padding_above The padding-above widths.
                /// \param pad_mode The padding mode: CONSTANT (default), EDGE, REFLECT or
                ///        SYMMETRIC.
                Pad(const Output<Node>& arg,
                    const Output<Node>& arg_pad_value,
                    const CoordinateDiff& padding_below,
                    const CoordinateDiff& padding_above,
                    PadMode pad_mode = PadMode::CONSTANT);

                virtual std::shared_ptr<Node>
                    copy_with_new_args(const NodeVector& new_args) const override;
                void validate_and_infer_types() override;

                /// \return The padding-below sizes.
                const CoordinateDiff& get_padding_below() const { return m_padding_below; }
                void set_padding_below(const CoordinateDiff& padding_below)
                {
                    m_padding_below = padding_below;
                }
                /// \return The padding-above sizes.
                const CoordinateDiff& get_padding_above() const { return m_padding_above; }
                void set_padding_above(const CoordinateDiff& padding_above)
                {
                    m_padding_above = padding_above;
                }
                /// \brief DEPRECATED. This is just a stub for backends that used to
                ///        implement the interior padding feature, which is no longer
                ///        supported.
                /// \return A shape full of zeros, with the same rank as
                ///         get_padding_below().
                const Shape& get_padding_interior() const { return m_padding_interior_fake; }
                /// \return The padding mode.
                PadMode get_pad_mode() const { return m_pad_mode; }
                void set_pad_mode(PadMode pad_mode) { m_pad_mode = pad_mode; }
                /// \return The default value for Pad.
                virtual std::shared_ptr<Node> get_default_value() const override;

            protected:
                virtual void generate_adjoints(autodiff::Adjoints& adjoints,
                                               const NodeVector& deltas) override;
                CoordinateDiff m_padding_below;
                CoordinateDiff m_padding_above;
                Shape m_padding_interior_fake; // LEGACY: This is all zeros.
                PadMode m_pad_mode;
            };
        }

        namespace v1
        {
            /// \brief Generic padding operation.
            class Pad : public Op
            {
            public:
                NGRAPH_API
                static constexpr NodeTypeInfo type_info{"Pad", 1};
                const NodeTypeInfo& get_type_info() const override { return type_info; }
                /// \brief Constructs a generic padding operation.
                ///
                /// \param arg The node producing the input tensor to be padded.
                /// \param pads_begin The node which specifies the number of padding
                ///        elements added at the beginning of each axis.
                /// \param pads_end The node which specifies the number of padding
                ///        elements added at the end of each axis.
                /// \param arg_pad_value The node producing the value written to the
                ///        added elements when pad_mode is CONSTANT.
                /// \param pad_mode The padding mode: CONSTANT, EDGE, REFLECT or SYMMETRIC.
                Pad(const Output<Node>& arg,
                    const Output<Node>& pads_begin,
                    const Output<Node>& pads_end,
                    const Output<Node>& arg_pad_value,
                    PadMode pad_mode);

                /// \brief Constructs a generic padding operation without an explicit
                ///        pad value.
                ///
                /// \param arg The node producing the input tensor to be padded.
                /// \param pads_begin The node which specifies the number of padding
                ///        elements added at the beginning of each axis.
                /// \param pads_end The node which specifies the number of padding
                ///        elements added at the end of each axis.
                /// \param pad_mode The padding mode: CONSTANT, EDGE, REFLECT or SYMMETRIC.
                Pad(const Output<Node>& arg,
                    const Output<Node>& pads_begin,
                    const Output<Node>& pads_end,
                    PadMode pad_mode);

                /// \brief Constructs a generic padding operation.
                Pad() = default;

                size_t get_version() const override { return 1; }
                void validate_and_infer_types() override;
                virtual std::shared_ptr<Node>
                    copy_with_new_args(const NodeVector& new_args) const override;

                /// \return The number of padding elements added at the beginning of
                ///         each axis.
                CoordinateDiff get_pads_begin() const;
                /// \return The number of padding elements added at the end of each axis.
                CoordinateDiff get_pads_end() const;

                /// \return The padding mode.
                PadMode get_pad_mode() const { return m_pad_mode; }
                void set_pad_mode(PadMode pad_mode) { m_pad_mode = pad_mode; }

            protected:
                virtual void generate_adjoints(autodiff::Adjoints& adjoints,
                                               const NodeVector& deltas) override;

            private:
                PadMode m_pad_mode;
            };
        }

        // latest stable opset version
        using v0::Pad;
    }
}
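The practical difference between the two classes above: v0 carries the padding widths as compile-time attributes (CoordinateDiff), while v1 takes them as graph inputs. A minimal construction sketch for v1 follows (a hedged example assuming the ngraph headers above and using namespace ngraph; the variable names and values are illustrative, not taken from this commit):

    // Pad a 4x5x6 f32 tensor; the widths are supplied as i64 Constant inputs.
    auto arg = std::make_shared<op::Parameter>(element::f32, Shape{4, 5, 6});
    auto pads_begin = op::Constant::create(element::i64, Shape{3}, std::vector<int64_t>{1, 0, 2});
    auto pads_end = op::Constant::create(element::i64, Shape{3}, std::vector<int64_t>{0, 1, 2});
    // Scalar pad value; it is only consulted when pad_mode is CONSTANT.
    auto pad_value = op::Constant::create(element::f32, Shape{}, std::vector<float>{0.0f});
    auto pad = std::make_shared<op::v1::Pad>(arg, pads_begin, pads_end, pad_value, op::PadMode::CONSTANT);
    // Resulting shape: {4+1+0, 5+0+1, 6+2+2} = {5, 6, 10}.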
@@ -15,7 +15,9 @@
//*****************************************************************************
#include "ngraph/pass/opset1_upgrade.hpp"
#include "ngraph/graph_util.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/get_output_element.hpp"
#include "ngraph/op/pad.hpp"
#include "ngraph/op/softmax.hpp"
using namespace std;
@@ -91,6 +93,26 @@ bool pass::Opset1Upgrade::run_on_node(shared_ptr<Node> node)
modified = true;
break;
}
case OP_TYPEID::Pad:
{
    auto tmp = dynamic_cast<const op::v0::Pad*>(node.get());
    // v0 stores the padding widths as attributes; v1 expects them as inputs,
    // so wrap them in i64 Constant nodes.
    auto padding_below = tmp->get_padding_below();
    auto pads_begin_node =
        make_shared<op::Constant>(element::i64, Shape{padding_below.size()}, padding_below);
    auto padding_above = tmp->get_padding_above();
    auto pads_end_node =
        make_shared<op::Constant>(element::i64, Shape{padding_above.size()}, padding_above);
    // v0 inputs are (arg, pad_value); v1 inputs are (arg, pads_begin, pads_end, pad_value).
    auto replacement_node = make_shared<op::v1::Pad>(node->input(0).get_source_output(),
                                                     pads_begin_node,
                                                     pads_end_node,
                                                     node->input(1).get_source_output(),
                                                     tmp->get_pad_mode());
    replace_node(node, replacement_node);
    modified = true;
    break;
}
default: break;
}
#if defined(__clang__)
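Applying the pass mirrors the unit test added later in this commit: register Opset1Upgrade on a pass manager and run it over a function, after which every v0::Pad has been replaced by an equivalent v1::Pad (a sketch; f is assumed to be a shared_ptr<Function> built elsewhere):

    ngraph::pass::Manager pass_manager;
    pass_manager.register_pass<ngraph::pass::Opset1Upgrade>();
    pass_manager.run_passes(f); // v0::Pad nodes now appear as v1::Pad with Constant widths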
@@ -21,6 +21,7 @@
#include "ngraph/code_writer.hpp"
#include "ngraph/node.hpp"
#include "ngraph/op/pad.hpp"
#include "ngraph/runtime/cpu/cpu_external_function.hpp"
#include "ngraph/runtime/cpu/cpu_tensor_view_wrapper.hpp"
@@ -126,7 +127,6 @@ namespace ngraph
class Reverse;
class ReverseSequence;
class AvgPool;
class Pad;
class AvgPoolBackprop;
class MaxPoolBackprop;
class MaxPoolWithIndicesBackprop;
@@ -1550,21 +1550,37 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
}
case OP_TYPEID::Pad:
{
    if (op_version == 0)
    {
        auto padding_below = node_js.at("padding_below").get<vector<ptrdiff_t>>();
        auto padding_above = node_js.at("padding_above").get<vector<ptrdiff_t>>();
        // This is a legacy field whose functionality is no longer supported. The new
        // behavior is equivalent to interior padding of 0, so we will accept it under
        // those conditions.
        auto padding_interior = get_value<vector<size_t>>(node_js, "padding_interior");
        NGRAPH_CHECK(std::all_of(padding_interior.begin(),
                                 padding_interior.end(),
                                 [](size_t s) { return s == 0; }),
                     "Legacy padding_interior field must be zero everywhere.");
        auto pad_mode = read_pad_mode(node_js);
        node = make_shared<op::v0::Pad>(
            args[0], args[1], padding_below, padding_above, pad_mode);
    }
    if (op_version == 1)
    {
        auto pad_mode = read_pad_mode(node_js);
        // The pad value input (args[3]) is optional in v1.
        if (args.size() == 4)
        {
            node = make_shared<op::v1::Pad>(args[0], args[1], args[2], args[3], pad_mode);
        }
        else
        {
            node = make_shared<op::v1::Pad>(args[0], args[1], args[2], pad_mode);
        }
    }
    break;
}
case OP_TYPEID::Parameter:
@@ -2675,10 +2691,18 @@ json JSONSerializer::serialize_node(const Node& n)
}
case OP_TYPEID::Pad:
{
    if (op_version == 0)
    {
        auto tmp = dynamic_cast<const op::v0::Pad*>(&n);
        node["padding_below"] = tmp->get_padding_below();
        node["padding_above"] = tmp->get_padding_above();
        node["pad_mode"] = tmp->get_pad_mode();
    }
    if (op_version == 1)
    {
        auto tmp = dynamic_cast<const op::v1::Pad*>(&n);
        node["pad_mode"] = tmp->get_pad_mode();
    }
    break;
}
case OP_TYPEID::Parameter:
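Note the asymmetry between the two branches: for v0 the padding widths are node attributes and must be written out explicitly, whereas for v1 they travel as graph inputs and are serialized with the node's other inputs, leaving pad_mode as the only attribute to store.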
@@ -70,6 +70,7 @@ set(SRC
nop_elimination.cpp
op.cpp
opset_pass/softmax_opset_pass.cpp
opset_pass/pad_opset_pass.cpp
partial_shape.cpp
pass.cpp
pass_liveness.cpp
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/opset1_upgrade.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(serialize, opset1_pad_pass)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{5, 6});
auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
CoordinateDiff padding_below{1, 2};
CoordinateDiff padding_above{3, 4};
auto pad_mode = op::PadMode::EDGE;
auto pad_v0 =
make_shared<op::v0::Pad>(arg, arg_pad_value, padding_below, padding_above, pad_mode);
auto result = make_shared<op::Result>(pad_v0);
auto f = make_shared<Function>(ResultVector{result}, ParameterVector{arg, arg_pad_value});
ngraph::pass::Manager pass_manager;
pass_manager.register_pass<pass::Opset1Upgrade>();
pass_manager.run_passes(f);
auto pad_s1_result = f->get_results().at(0);
auto node = pad_s1_result->input(0).get_source_output().get_node_shared_ptr();
auto pad_v1_node = static_pointer_cast<op::v1::Pad>(node);
EXPECT_EQ(pad_v1_node->description(), "Pad");
EXPECT_EQ(pad_v1_node->get_version(), 1);
EXPECT_EQ(pad_v1_node->get_pad_mode(), pad_mode);
EXPECT_EQ(pad_v1_node->get_pads_begin(), padding_below);
EXPECT_EQ(pad_v1_node->get_pads_end(), padding_above);
}
@@ -356,3 +356,26 @@ TEST(serialize, opset1_softmax)
EXPECT_EQ(g_softmax->description(), "Softmax");
EXPECT_EQ(g_softmax->get_version(), 1);
}
TEST(serialize, opset1_pad)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{4, 5, 6});
auto pads_begin = make_shared<op::Parameter>(element::i64, Shape{1});
auto pads_end = make_shared<op::Parameter>(element::i64, Shape{2});
auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
auto pad_mode = op::PadMode::EDGE;
auto pad = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, arg_pad_value, pad_mode);
auto result = make_shared<op::Result>(pad);
auto f = make_shared<Function>(ResultVector{result},
ParameterVector{arg, pads_begin, pads_end, arg_pad_value});
string s = serialize(f);
shared_ptr<Function> g = deserialize(s);
auto g_result = g->get_results().at(0);
auto g_pad = g_result->input(0).get_source_output().get_node_shared_ptr();
EXPECT_EQ(g_pad->description(), "Pad");
EXPECT_EQ(g_pad->get_version(), 1);
EXPECT_EQ(dynamic_cast<const op::v1::Pad*>(g_pad.get())->get_pad_mode(), pad_mode);
}