Commit 81b492d4 authored by Mateusz Bencer's avatar Mateusz Bencer Committed by Scott Cyphers

[Spec] Implement Pad:v1 (#3574)

* Pad:v1 was introduced

* Fixed problem with forward declaration

* Code review remarks introduced

* Additional asserts and unit tests for Pad:v1 introduced

* Handled optional arg_pad_value input

* Fixed unit tests messages and clang styles applied

* Styles applied, checking rank improved

* Updated changes.md

* Revert "Updated changes.md"

This reverts commit 8ee16e580705275ee0719648d5251021354e4d84.

* Clang warning fixed
parent 779a9300
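For orientation, a minimal usage sketch of the op::v1::Pad interface this commit introduces (the helper name make_pad_v1_example is illustrative, not part of the diff): unlike op::v0::Pad, the pad widths are i64 Constant inputs rather than constructor attributes, and arg_pad_value is an optional fourth input consulted only when pad_mode is CONSTANT.

#include "ngraph/ngraph.hpp"

using namespace ngraph;

std::shared_ptr<op::v1::Pad> make_pad_v1_example()
{
    auto arg = std::make_shared<op::Parameter>(element::f32, Shape{4, 5, 6});
    // Pad widths are graph inputs now; Constants make the output shape static.
    auto pads_begin =
        std::make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{1, 0, 2});
    auto pads_end =
        std::make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{0, 3, 1});
    // Scalar pad value; only consulted because pad_mode is CONSTANT.
    auto pad_value =
        std::make_shared<op::Constant>(element::f32, Shape{}, std::vector<float>{0.0f});
    // Inferred output shape: {1+4+0, 0+5+3, 2+6+1} = {5, 8, 9}.
    return std::make_shared<op::v1::Pad>(
        arg, pads_begin, pads_end, pad_value, op::PadMode::CONSTANT);
}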
@@ -16,17 +16,18 @@
#include "ngraph/op/pad.hpp"
#include "ngraph/op/broadcast.hpp"
#include "ngraph/op/constant.hpp"
using namespace std;
using namespace ngraph;
constexpr NodeTypeInfo op::Pad::type_info;
constexpr NodeTypeInfo op::v0::Pad::type_info;
op::Pad::Pad(const Output<Node>& arg,
const Output<Node>& arg_pad_value,
const CoordinateDiff& padding_below,
const CoordinateDiff& padding_above,
PadMode pad_mode)
op::v0::Pad::Pad(const Output<Node>& arg,
const Output<Node>& arg_pad_value,
const CoordinateDiff& padding_below,
const CoordinateDiff& padding_above,
PadMode pad_mode)
: Op({arg, arg_pad_value})
, m_padding_below(padding_below)
, m_padding_above(padding_above)
@@ -36,7 +37,7 @@ op::Pad::Pad(const Output<Node>& arg,
constructor_validate_and_infer_types();
}
void op::Pad::validate_and_infer_types()
void op::v0::Pad::validate_and_infer_types()
{
element::Type result_et;
@@ -118,10 +119,10 @@ void op::Pad::validate_and_infer_types()
set_output_type(0, result_et, PartialShape(result_dims));
}
shared_ptr<Node> op::Pad::copy_with_new_args(const NodeVector& new_args) const
shared_ptr<Node> op::v0::Pad::copy_with_new_args(const NodeVector& new_args) const
{
check_new_args_count(this, new_args);
return make_shared<Pad>(
return make_shared<v0::Pad>(
new_args.at(0), new_args.at(1), m_padding_below, m_padding_above, m_pad_mode);
}
@@ -160,7 +161,8 @@ shared_ptr<Node> op::Pad::copy_with_new_args(const NodeVector& new_args) const
and push that back.
*/
void op::Pad::generate_adjoints(autodiff::Adjoints& /* adjoints */, const NodeVector& /* deltas */)
void op::v0::Pad::generate_adjoints(autodiff::Adjoints& /* adjoints */,
const NodeVector& /* deltas */)
{
throw invalid_argument("Autodiff is not yet implemented for Pad");
}
@@ -174,3 +176,207 @@ std::shared_ptr<Node> op::Pad::get_default_value() const
}
return std::make_shared<op::Broadcast>(input_value(1), get_shape(), axes);
}
constexpr NodeTypeInfo op::v1::Pad::type_info;
op::v1::Pad::Pad(const Output<Node>& arg,
const Output<Node>& pads_begin,
const Output<Node>& pads_end,
const Output<Node>& arg_pad_value,
PadMode pad_mode)
: Op({arg, pads_begin, pads_end, arg_pad_value})
, m_pad_mode{pad_mode}
{
constructor_validate_and_infer_types();
}
op::v1::Pad::Pad(const Output<Node>& arg,
const Output<Node>& pads_begin,
const Output<Node>& pads_end,
PadMode pad_mode)
: Op({arg, pads_begin, pads_end})
, m_pad_mode{pad_mode}
{
constructor_validate_and_infer_types();
}
CoordinateDiff op::v1::Pad::get_pads_begin() const
{
auto pads_begin_node = input_value(1).get_node_shared_ptr();
CoordinateDiff pads_begin_coord{};
if (auto pads_begin_const = dynamic_pointer_cast<op::Constant>(pads_begin_node))
{
pads_begin_coord = pads_begin_const->get_vector<ptrdiff_t>();
}
return pads_begin_coord;
}
CoordinateDiff op::v1::Pad::get_pads_end() const
{
auto pads_end_node = input_value(2).get_node_shared_ptr();
CoordinateDiff pads_end_coord{};
if (auto pads_end_const = dynamic_pointer_cast<op::Constant>(pads_end_node))
{
pads_end_coord = pads_end_const->get_vector<ptrdiff_t>();
}
return pads_end_coord;
}
void op::v1::Pad::validate_and_infer_types()
{
element::Type result_et;
const auto& arg_element_type = get_input_element_type(0);
const auto& pads_begin_element_type = get_input_element_type(1);
const auto& pads_end_element_type = get_input_element_type(2);
const auto arg_pad_value_provided = get_input_size() == 4;
if (m_pad_mode == PadMode::CONSTANT && arg_pad_value_provided)
{
const auto& arg_pad_element_type = get_input_element_type(3);
const auto& arg_pad_shape = get_input_partial_shape(3);
NODE_VALIDATION_CHECK(
this,
element::Type::merge(result_et, arg_element_type, arg_pad_element_type),
"Argument element types do not match (input arg element type: ",
arg_element_type,
", arg_pad element type: ",
arg_pad_element_type,
").");
NODE_VALIDATION_CHECK(this,
arg_pad_shape.compatible(PartialShape{}),
"Argument for padding value is not a scalar (shape: ",
arg_pad_shape,
").");
}
NODE_VALIDATION_CHECK(this,
pads_begin_element_type.compatible(element::Type_t::i64),
"pads_begin must be type i64 (axes type: ",
pads_begin_element_type,
").");
NODE_VALIDATION_CHECK(this,
pads_end_element_type.compatible(element::Type_t::i64),
"pads_end must be type i64 (axes type: ",
pads_end_element_type,
").");
const auto& pads_begin_shape = get_input_partial_shape(1);
const auto& pads_begin_rank = pads_begin_shape.rank();
NODE_VALIDATION_CHECK(this,
pads_begin_rank.compatible(1),
"Argument for pads_begin is not 1D (shape: ",
pads_begin_rank,
").");
const auto& pads_end_shape = get_input_partial_shape(2);
const auto& pads_end_rank = pads_end_shape.rank();
NODE_VALIDATION_CHECK(this,
pads_end_rank.compatible(1),
"Argument for pads_end is not 1D (shape: ",
pads_end_rank,
").");
const auto& arg_shape = get_input_partial_shape(0);
const auto& arg_shape_rank = arg_shape.rank();
if (arg_shape_rank.is_static() && pads_begin_shape.is_static())
{
NODE_VALIDATION_CHECK(
this,
static_cast<size_t>(pads_begin_shape[0]) >= 0 &&
static_cast<size_t>(pads_begin_shape[0]) <= static_cast<size_t>(arg_shape_rank),
"Number of elements of pads_begin must be >= 0 and <= arg rank (pads_begin_shape[0]: ",
pads_begin_shape[0],
").");
}
if (arg_shape_rank.is_static() && pads_end_shape.is_static())
{
NODE_VALIDATION_CHECK(
this,
static_cast<size_t>(pads_end_shape[0]) >= 0 &&
static_cast<size_t>(pads_end_shape[0]) <= static_cast<size_t>(arg_shape_rank),
"Number of elements of pads_end must be >= 0 and <= arg rank (pads_end_shape[0]: ",
pads_end_shape[0],
").");
}
const auto& pads_begin_coord = get_pads_begin();
const auto& pads_end_coord = get_pads_end();
for (const auto& pads_begin_dim : pads_begin_coord)
{
NODE_VALIDATION_CHECK(this,
pads_begin_dim >= 0,
"All pads_begin element must be non-negative (pads_begin_coord ",
pads_begin_coord,
")");
}
for (const auto& pads_end_dim : pads_end_coord)
{
NODE_VALIDATION_CHECK(this,
pads_end_dim >= 0,
"All pads_end element must be non-negative (pads_end_coord ",
pads_end_coord,
")");
}
auto pads_begin_node = input_value(1).get_node_shared_ptr();
auto pads_end_node = input_value(2).get_node_shared_ptr();
if (arg_shape_rank.is_static() && pads_begin_node->is_constant() &&
pads_end_node->is_constant())
{
const auto implied_rank = pads_begin_coord.size();
std::vector<Dimension> result_dims(implied_rank, Dimension::dynamic());
for (size_t i = 0; i < implied_rank; i++)
{
if (arg_shape[i].is_static())
{
ptrdiff_t result_dim =
pads_begin_coord[i] + static_cast<int64_t>(arg_shape[i]) + pads_end_coord[i];
result_dims[i] = static_cast<size_t>(result_dim);
if (i > 1)
{
NODE_VALIDATION_CHECK(
this,
m_pad_mode != op::PadMode::EDGE || static_cast<size_t>(arg_shape[i]) >= 1,
"EDGE padding mode requires an input of dimension of at least 1 at each "
"spatial axis.");
NODE_VALIDATION_CHECK(
this,
m_pad_mode != op::PadMode::REFLECT ||
static_cast<size_t>(arg_shape[i]) >= 2,
"REFLECT padding mode requires an input of dimension of at least 2 at each "
"spatial axis.");
}
}
}
set_output_type(0, get_input_element_type(0), result_dims);
}
else
{
set_output_type(0, get_input_element_type(0), PartialShape::dynamic());
}
}
shared_ptr<Node> op::v1::Pad::copy_with_new_args(const NodeVector& new_args) const
{
check_new_args_count(this, new_args);
const auto arg_pad_value_provided = get_input_size() == 4;
if (arg_pad_value_provided)
{
return make_shared<v1::Pad>(
new_args.at(0), new_args.at(1), new_args.at(2), new_args.at(3), m_pad_mode);
}
else
{
return make_shared<v1::Pad>(new_args.at(0), new_args.at(1), new_args.at(2), m_pad_mode);
}
}
void op::v1::Pad::generate_adjoints(autodiff::Adjoints& /* adjoints */,
const NodeVector& /* deltas */)
{
throw invalid_argument("Autodiff is not yet implemented for Pad:v1");
}
@@ -24,61 +24,134 @@ namespace ngraph
{
namespace op
{
/// \brief Generic padding operation.
class Pad : public Op
namespace v0
{
public:
NGRAPH_API
static constexpr NodeTypeInfo type_info{"Pad", 0};
const NodeTypeInfo& get_type_info() const override { return type_info; }
/// \brief Constructs a generic padding operation.
Pad() = default;
/// \brief Constructs a generic padding operation.
///
/// \param arg The node producing input tensor to be padded.
/// \param arg_pad_value The node producing the scalar value to be inserted for padding.
/// \param padding_below The padding-below widths.
/// \param padding_above The padding-above widths.
/// \param pad_mode The padding mode: CONSTANT(default), EDGE, REFLECT or SYMMETRIC.
Pad(const Output<Node>& arg,
const Output<Node>& arg_pad_value,
const CoordinateDiff& padding_below,
const CoordinateDiff& padding_above,
PadMode pad_mode = PadMode::CONSTANT);
virtual std::shared_ptr<Node>
copy_with_new_args(const NodeVector& new_args) const override;
void validate_and_infer_types() override;
/// \return The padding-below sizes.
const CoordinateDiff& get_padding_below() const { return m_padding_below; }
void set_padding_below(const CoordinateDiff& padding_below)
/// \brief Generic padding operation.
class Pad : public Op
{
m_padding_below = padding_below;
}
/// \return The padding-above sizes.
const CoordinateDiff& get_padding_above() const { return m_padding_above; }
void set_padding_above(const CoordinateDiff& padding_above)
public:
NGRAPH_API
static constexpr NodeTypeInfo type_info{"Pad", 0};
const NodeTypeInfo& get_type_info() const override { return type_info; }
/// \brief Constructs a generic padding operation.
Pad() = default;
/// \brief Constructs a generic padding operation.
///
/// \param arg The node producing input tensor to be padded.
/// \param arg_pad_value The node producing the scalar value
/// to be inserted for padding.
/// \param padding_below The padding-below widths.
/// \param padding_above The padding-above widths.
/// \param pad_mode The padding mode: CONSTANT(default), EDGE, REFLECT or SYMMETRIC.
Pad(const Output<Node>& arg,
const Output<Node>& arg_pad_value,
const CoordinateDiff& padding_below,
const CoordinateDiff& padding_above,
PadMode pad_mode = PadMode::CONSTANT);
virtual std::shared_ptr<Node>
copy_with_new_args(const NodeVector& new_args) const override;
void validate_and_infer_types() override;
/// \return The padding-below sizes.
const CoordinateDiff& get_padding_below() const { return m_padding_below; }
void set_padding_below(const CoordinateDiff& padding_below)
{
m_padding_below = padding_below;
}
/// \return The padding-above sizes.
const CoordinateDiff& get_padding_above() const { return m_padding_above; }
void set_padding_above(const CoordinateDiff& padding_above)
{
m_padding_above = padding_above;
}
/// \brief DEPRECATED. This is just a stub for backends that used to implement the
/// interior padding feature, which is no longer supported.
/// \return Returns a shape full of zeros,
/// with the same rank as get_padding_below().
const Shape& get_padding_interior() const { return m_padding_interior_fake; }
/// \return The padding mode.
PadMode get_pad_mode() const { return m_pad_mode; }
void set_pad_mode(PadMode pad_mode) { m_pad_mode = pad_mode; }
/// \return The default value for Pad.
virtual std::shared_ptr<Node> get_default_value() const override;
protected:
virtual void generate_adjoints(autodiff::Adjoints& adjoints,
const NodeVector& deltas) override;
CoordinateDiff m_padding_below;
CoordinateDiff m_padding_above;
Shape m_padding_interior_fake; // LEGACY: This is all zeros.
PadMode m_pad_mode;
};
}
namespace v1
{
/// \brief Generic padding operation.
class Pad : public Op
{
m_padding_below = padding_above;
}
public:
NGRAPH_API
static constexpr NodeTypeInfo type_info{"Pad", 1};
const NodeTypeInfo& get_type_info() const override { return type_info; }
/// \brief Constructs a generic padding operation.
///
/// \param arg The node producing input tensor to be padded.
/// \param pads_begin The node which specifies the number of padding elements at the
/// beginning of each axis
/// \param pads_end The node which specifies the number of padding elements at the
/// end of each axis
/// \param arg_pad_value The node producing the scalar value written to the
/// padded elements when pad_mode is CONSTANT
/// \param pad_mode The padding mode: CONSTANT, EDGE, REFLECT or SYMMETRIC.
Pad(const Output<Node>& arg,
const Output<Node>& pads_begin,
const Output<Node>& pads_end,
const Output<Node>& arg_pad_value,
PadMode pad_mode);
/// \brief Constructs a generic padding operation.
///
/// \param arg The node producing input tensor to be padded.
/// \param pads_begin The node which specifies the number of padding elements
/// at the beginning of each axis
/// \param pads_end The node which specifies the number of padding elements
/// at the end of each axis
/// \param pad_mode The padding mode: CONSTANT, EDGE, REFLECT or SYMMETRIC.
Pad(const Output<Node>& arg,
const Output<Node>& pads_begin,
const Output<Node>& pads_end,
PadMode pad_mode);
/// \brief Constructs a generic padding operation.
Pad() = default;
size_t get_version() const override { return 1; }
void validate_and_infer_types() override;
virtual std::shared_ptr<Node>
copy_with_new_args(const NodeVector& new_args) const override;
/// \return The number of padding elements added at the beginning of each axis
/// (empty if the pads_begin input is not constant)
CoordinateDiff get_pads_begin() const;
/// \return The number of padding elements added at the end of each axis
/// (empty if the pads_end input is not constant)
CoordinateDiff get_pads_end() const;
/// \return The padding mode.
PadMode get_pad_mode() const { return m_pad_mode; }
void set_pad_mode(PadMode pad_mode) { m_pad_mode = pad_mode; }
protected:
virtual void generate_adjoints(autodiff::Adjoints& adjoints,
const NodeVector& deltas) override;
/// \brief DEPRECATED. This is just a stub for backends that used to implement the
/// interior padding feature, which is no longer supported.
/// \return Returns a shape full of zeros, with the same rank as get_padding_below().
const Shape& get_padding_interior() const { return m_padding_interior_fake; }
/// \return The padding mode.
PadMode get_pad_mode() const { return m_pad_mode; }
void set_pad_mode(PadMode pad_mode) { m_pad_mode = pad_mode; }
/// \return The default value for Pad.
virtual std::shared_ptr<Node> get_default_value() const override;
private:
PadMode m_pad_mode;
};
}
protected:
virtual void generate_adjoints(autodiff::Adjoints& adjoints,
const NodeVector& deltas) override;
CoordinateDiff m_padding_below;
CoordinateDiff m_padding_above;
Shape m_padding_interior_fake; // LEGACY: This is all zeros.
PadMode m_pad_mode;
};
// latest stable opset version
using v0::Pad;
}
}
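The using v0::Pad; alias above keeps unqualified op::Pad resolving to the v0 op, so pre-existing callers compile unchanged. A quick illustration (variable names are hypothetical):

auto arg = std::make_shared<op::Parameter>(element::f32, Shape{5, 6});
auto value = std::make_shared<op::Parameter>(element::f32, Shape{});
// Unqualified op::Pad still means op::v0::Pad thanks to the alias.
auto legacy = std::make_shared<op::Pad>(
    arg, value, CoordinateDiff{1, 2}, CoordinateDiff{3, 4});
// The new op must be named explicitly as op::v1::Pad.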
@@ -15,7 +15,9 @@
//*****************************************************************************
#include "ngraph/pass/opset1_upgrade.hpp"
#include "ngraph/graph_util.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/get_output_element.hpp"
#include "ngraph/op/pad.hpp"
#include "ngraph/op/softmax.hpp"
using namespace std;
@@ -91,6 +93,26 @@ bool pass::Opset1Upgrade::run_on_node(shared_ptr<Node> node)
modified = true;
break;
}
case OP_TYPEID::Pad:
{
auto tmp = dynamic_cast<const op::v0::Pad*>(node.get());
auto padding_below = tmp->get_padding_below();
auto pads_begin_node =
make_shared<op::Constant>(element::i64, Shape{padding_below.size()}, padding_below);
auto padding_above = tmp->get_padding_above();
auto pads_end_node =
make_shared<op::Constant>(element::i64, Shape{padding_above.size()}, padding_above);
auto replacement_node = make_shared<op::v1::Pad>(node->input(0).get_source_output(),
pads_begin_node,
pads_end_node,
node->input(1).get_source_output(),
tmp->get_pad_mode());
replace_node(node, replacement_node);
modified = true;
break;
}
default: break;
}
#if defined(__clang__)
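The new OP_TYPEID::Pad case above rewrites a v0 Pad into a v1 Pad by materializing padding_below/padding_above as i64 Constant inputs. Running the pass follows the usual pass-manager pattern, as the pad_opset_pass test added below also shows; a sketch, where f is a shared_ptr<Function> containing an op::v0::Pad:

ngraph::pass::Manager pass_manager;
pass_manager.register_pass<pass::Opset1Upgrade>();
pass_manager.run_passes(f);
// Afterwards the graph holds an op::v1::Pad whose pads_begin/pads_end
// Constants carry the old padding_below/padding_above values.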
@@ -21,6 +21,7 @@
#include "ngraph/code_writer.hpp"
#include "ngraph/node.hpp"
#include "ngraph/op/pad.hpp"
#include "ngraph/runtime/cpu/cpu_external_function.hpp"
#include "ngraph/runtime/cpu/cpu_tensor_view_wrapper.hpp"
@@ -126,7 +127,6 @@ namespace ngraph
class Reverse;
class ReverseSequence;
class AvgPool;
class Pad;
class AvgPoolBackprop;
class MaxPoolBackprop;
class MaxPoolWithIndicesBackprop;
@@ -1550,21 +1550,37 @@ shared_ptr<Node> JSONDeserializer::deserialize_node(json node_js)
}
case OP_TYPEID::Pad:
{
auto padding_below = node_js.at("padding_below").get<vector<ptrdiff_t>>();
auto padding_above = node_js.at("padding_above").get<vector<ptrdiff_t>>();
// This is a legacy field whose functionality is no longer supported. The new
// behavior is equivalent to interior padding of 0, so we will accept it under
// those conditions.
auto padding_interior = get_value<vector<size_t>>(node_js, "padding_interior");
NGRAPH_CHECK(std::all_of(padding_interior.begin(),
padding_interior.end(),
[](size_t s) { return s == 0; }),
"Legacy padding_interior field must be zero everywhere.");
auto pad_mode = read_pad_mode(node_js);
node = make_shared<op::Pad>(args[0], args[1], padding_below, padding_above, pad_mode);
if (op_version == 0)
{
auto padding_below = node_js.at("padding_below").get<vector<ptrdiff_t>>();
auto padding_above = node_js.at("padding_above").get<vector<ptrdiff_t>>();
// This is a legacy field whose functionality is no longer supported. The new
// behavior is equivalent to interior padding of 0, so we will accept it under
// those conditions.
auto padding_interior = get_value<vector<size_t>>(node_js, "padding_interior");
NGRAPH_CHECK(std::all_of(padding_interior.begin(),
padding_interior.end(),
[](size_t s) { return s == 0; }),
"Legacy padding_interior field must be zero everywhere.");
auto pad_mode = read_pad_mode(node_js);
node = make_shared<op::v0::Pad>(
args[0], args[1], padding_below, padding_above, pad_mode);
}
if (op_version == 1)
{
auto pad_mode = read_pad_mode(node_js);
if (args.size() == 4)
{
node = make_shared<op::v1::Pad>(args[0], args[1], args[2], args[3], pad_mode);
}
else
{
node = make_shared<op::v1::Pad>(args[0], args[1], args[2], pad_mode);
}
}
break;
}
case OP_TYPEID::Parameter:
@@ -2675,10 +2691,18 @@ json JSONSerializer::serialize_node(const Node& n)
}
case OP_TYPEID::Pad:
{
auto tmp = dynamic_cast<const op::Pad*>(&n);
node["padding_below"] = tmp->get_padding_below();
node["padding_above"] = tmp->get_padding_above();
node["pad_mode"] = tmp->get_pad_mode();
if (op_version == 0)
{
auto tmp = dynamic_cast<const op::v0::Pad*>(&n);
node["padding_below"] = tmp->get_padding_below();
node["padding_above"] = tmp->get_padding_above();
node["pad_mode"] = tmp->get_pad_mode();
}
if (op_version == 1)
{
auto tmp = dynamic_cast<const op::v1::Pad*>(&n);
node["pad_mode"] = tmp->get_pad_mode();
}
break;
}
case OP_TYPEID::Parameter:
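For op_version == 1 the serializer above records only the pad_mode attribute; the pad widths travel as ordinary graph inputs and the deserializer rebuilds the node from args. A round-trip sketch using the serialize/deserialize entry points exercised in the tests below, where f is a shared_ptr<Function> containing an op::v1::Pad:

string s = serialize(f);
shared_ptr<Function> g = deserialize(s);
// The rebuilt Pad keeps its inputs (arg, pads_begin, pads_end and, when
// present, arg_pad_value) plus its pad_mode attribute.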
@@ -70,6 +70,7 @@ set(SRC
nop_elimination.cpp
op.cpp
opset_pass/softmax_opset_pass.cpp
opset_pass/pad_opset_pass.cpp
partial_shape.cpp
pass.cpp
pass_liveness.cpp
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/opset1_upgrade.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(serialize, opset1_pad_pass)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{5, 6});
auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
CoordinateDiff padding_below{1, 2};
CoordinateDiff padding_above{3, 4};
auto pad_mode = op::PadMode::EDGE;
auto pad_v0 =
make_shared<op::v0::Pad>(arg, arg_pad_value, padding_below, padding_above, pad_mode);
auto result = make_shared<op::Result>(pad_v0);
auto f = make_shared<Function>(ResultVector{result}, ParameterVector{arg, arg_pad_value});
ngraph::pass::Manager pass_manager;
pass_manager.register_pass<pass::Opset1Upgrade>();
pass_manager.run_passes(f);
auto pad_s1_result = f->get_results().at(0);
auto node = pad_s1_result->input(0).get_source_output().get_node_shared_ptr();
auto pad_v1_node = static_pointer_cast<op::v1::Pad>(node);
EXPECT_EQ(pad_v1_node->description(), "Pad");
EXPECT_EQ(pad_v1_node->get_version(), 1);
EXPECT_EQ(pad_v1_node->get_pad_mode(), pad_mode);
EXPECT_EQ(pad_v1_node->get_pads_begin(), padding_below);
EXPECT_EQ(pad_v1_node->get_pads_end(), padding_above);
}
@@ -356,3 +356,26 @@ TEST(serialize, opset1_softmax)
EXPECT_EQ(g_softmax->description(), "Softmax");
EXPECT_EQ(g_softmax->get_version(), 1);
}
TEST(serialize, opset1_pad)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{4, 5, 6});
auto pads_begin = make_shared<op::Parameter>(element::i64, Shape{1});
auto pads_end = make_shared<op::Parameter>(element::i64, Shape{2});
auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
auto pad_mode = op::PadMode::EDGE;
auto pad = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, arg_pad_value, pad_mode);
auto result = make_shared<op::Result>(pad);
auto f = make_shared<Function>(ResultVector{result},
ParameterVector{arg, pads_begin, pads_end, arg_pad_value});
string s = serialize(f);
shared_ptr<Function> g = deserialize(s);
auto g_result = g->get_results().at(0);
auto g_pad = g_result->input(0).get_source_output().get_node_shared_ptr();
EXPECT_EQ(g_pad->description(), "Pad");
EXPECT_EQ(g_pad->get_version(), 1);
EXPECT_EQ(dynamic_cast<const op::v1::Pad*>(g_pad.get())->get_pad_mode(), pad_mode);
}
@@ -402,3 +402,312 @@ TEST(type_prop, pad_partial_data_rank_dynamic_padding_static_attribs_rank_incons
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_arg_pad_value_type_mismatch)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
auto pads_begin = make_shared<op::Parameter>(element::i64, Shape{1});
auto pads_end = make_shared<op::Parameter>(element::i64, Shape{1});
auto arg_pad_value = make_shared<op::Parameter>(element::f16, Shape{1});
try
{
auto pad_v1 = make_shared<op::v1::Pad>(
arg, pads_begin, pads_end, arg_pad_value, op::PadMode::CONSTANT);
// Should have thrown, so fail if it didn't
FAIL() << "Incorrect arg_pad_value type exception not handled";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Argument element types do not match (input arg element type:"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_arg_pad_value_shape_not_compatible)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
auto pads_begin = make_shared<op::Parameter>(element::i64, Shape{1});
auto pads_end = make_shared<op::Parameter>(element::i64, Shape{1});
auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{1});
try
{
auto pad_v1 = make_shared<op::v1::Pad>(
arg, pads_begin, pads_end, arg_pad_value, op::PadMode::CONSTANT);
// Should have thrown, so fail if it didn't
FAIL() << "Incorrect arg_pad_value shape exception not handled";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Argument for padding value is not a scalar (shape:"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_pads_begin_shape_not_1D)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
auto pads_begin = make_shared<op::Parameter>(element::i64, Shape{1, 2});
auto pads_end = make_shared<op::Parameter>(element::i64, Shape{1});
try
{
auto pad_v1 = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, op::PadMode::SYMMETRIC);
// Should have thrown, so fail if it didn't
FAIL() << "Incorrect pads_begin shape exception not handled";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Argument for pads_begin is not 1D (shape:"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_pads_end_shape_not_1D)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
auto pads_begin = make_shared<op::Parameter>(element::i64, Shape{1});
auto pads_end = make_shared<op::Parameter>(element::i64, Shape{1, 2});
try
{
auto pad_v1 = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, op::PadMode::SYMMETRIC);
// Should have thrown, so fail if it didn't
FAIL() << "Incorrect pads_end shape exception not handled";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), std::string("Argument for pads_end is not 1D (shape:"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_pads_begin_size_not_correct)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
auto pads_begin = make_shared<op::Parameter>(element::i64, Shape{4});
auto pads_end = make_shared<op::Parameter>(element::i64, Shape{1});
try
{
auto pad_v1 = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, op::PadMode::SYMMETRIC);
// Should have thrown, so fail if it didn't
FAIL() << "Incorrect pads_begin size exception not handled";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("Number of elements of pads_begin must be >= 0 and <= arg "
"rank (pads_begin_shape[0]:"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_pads_end_size_not_correct)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
auto pads_begin = make_shared<op::Parameter>(element::i64, Shape{1});
auto pads_end = make_shared<op::Parameter>(element::i64, Shape{4});
auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
try
{
auto pad_v1 = make_shared<op::v1::Pad>(
arg, pads_begin, pads_end, arg_pad_value, op::PadMode::CONSTANT);
// Should have thrown, so fail if it didn't
FAIL() << "Incorrect pads_end size exception not handled";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string(
"Number of elements of pads_end must be >= 0 and <= arg rank (pads_end_shape[0]:"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_arg_pads_begin_incompatible_type)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
auto pads_begin = make_shared<op::Parameter>(element::f32, Shape{1});
auto pads_end = make_shared<op::Parameter>(element::i64, Shape{1});
try
{
auto pad_v1 = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, op::PadMode::REFLECT);
// Should have thrown, so fail if it didn't
FAIL() << "Incorrect pad_begin type exception not handled";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), std::string("pads_begin must be type i64 (axes type:"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_arg_pads_end_incompatible_type)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 2, 3});
auto pads_begin = make_shared<op::Parameter>(element::i64, Shape{1});
auto pads_end = make_shared<op::Parameter>(element::f32, Shape{1});
try
{
auto pad = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, op::PadMode::REFLECT);
// Should have thrown, so fail if it didn't
FAIL() << "Incorrect pads_end type exception not thrown";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), std::string("pads_end must be type i64 (axes type:"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_pads_begin_elem_negative_value)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{5, 4, 2});
auto pads_begin =
make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{6, 9, -3});
auto pads_end =
make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{5, 3, 0});
auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
try
{
auto pad_v1 = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, op::PadMode::REFLECT);
// Should have thrown, so fail if it didn't
FAIL() << "Negative pads_begin element exception not thrown";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("All pads_begin elements must be non-negative (pads_begin_coord"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_pads_end_elem_negative_value)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{5, 4, 2});
auto pads_begin =
make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{5, 3, 0});
auto pads_end =
make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{6, 9, -3});
try
{
auto pad_v1 = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, op::PadMode::REFLECT);
// Should have thrown, so fail if it didn't
FAIL() << "Negative pads_end element exception not thrown";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(
error.what(), std::string("All pads_end elements must be non-negative (pads_end_coord"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_deduce_too_small_for_edge)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 5, 0, 2});
auto pads_begin =
make_shared<op::Constant>(element::i64, Shape{4}, std::vector<int64_t>{0, 1, 2, 3});
auto pads_end =
make_shared<op::Constant>(element::i64, Shape{4}, std::vector<int64_t>{0, 1, 2, 3});
auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
try
{
auto pad_v1 =
make_shared<op::v1::Pad>(arg, pads_begin, pads_end, arg_pad_value, op::PadMode::EDGE);
// Should have thrown, so fail if it didn't
FAIL() << "Incorrect input shape exception for EDGE mode not thrown";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("EDGE padding mode requires an input of dimension of at "
"least 1 at each spatial axis"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}
TEST(type_prop, pad_v1_deduce_too_small_for_reflect)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 5, 1, 2});
auto pads_begin =
make_shared<op::Constant>(element::i64, Shape{4}, std::vector<int64_t>{0, 1, 2, 3});
auto pads_end =
make_shared<op::Constant>(element::i64, Shape{4}, std::vector<int64_t>{0, 1, 2, 3});
auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
try
{
auto pad_v1 = make_shared<op::v1::Pad>(
arg, pads_begin, pads_end, arg_pad_value, op::PadMode::REFLECT);
// Should have thrown, so fail if it didn't
FAIL() << "Incorrect input shape exception for REFLECT mode not thrown";
}
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(),
std::string("REFLECT padding mode requires an input of dimension of "
"at least 2 at each spatial axis"));
}
catch (...)
{
FAIL() << "Deduced type check failed for unexpected reason";
}
}