Commit 9ae3f6be authored by Mateusz Bencer, committed by Scott Cyphers

[SPEC] Implement Pad:v1 downgrade pass (#3670)

* Implemented downgrade pass

* Using Pad:v1 in onnx_importer

* Downgrade transformation doc fixed

* Downgrade pass added for all backends

* Apply suggestions from code review

Changed pad_opset_pass to opset_downgrade
Co-Authored-By: Tomasz Socha <tomasz.socha@intel.com>

* Changed order of passes

* Changed downgrade pass order of CPU backend

* Code review remarks introduced
parent e5f54b6d
......@@ -456,6 +456,8 @@ set (SRC
pass/nop_elimination.hpp
pass/pass.cpp
pass/pass.hpp
pass/opset0_downgrade.cpp
pass/opset0_downgrade.hpp
pass/opset1_upgrade.cpp
pass/opset1_upgrade.hpp
pass/pass_config.cpp
......
......@@ -60,12 +60,14 @@ namespace ngraph
ngraph::CoordinateDiff padding_below = paddings.first;
ngraph::CoordinateDiff padding_above = paddings.second;
return {std::make_shared<ngraph::op::Pad>(
return {std::make_shared<ngraph::op::v1::Pad>(
data,
std::make_shared<ngraph::op::Constant>(
element::i64, ngraph::Shape{padding_below.size()}, padding_below),
std::make_shared<ngraph::op::Constant>(
element::i64, ngraph::Shape{padding_above.size()}, padding_above),
std::make_shared<ngraph::op::Constant>(
data->get_element_type(), ngraph::Shape{}, std::vector<double>{value}),
padding_below,
padding_above,
pad_mode)};
}
......
......@@ -305,23 +305,6 @@ void op::v1::Pad::validate_and_infer_types()
const auto& pads_begin_coord = get_pads_begin();
const auto& pads_end_coord = get_pads_end();
for (const auto& pads_begin_dim : pads_begin_coord)
{
NODE_VALIDATION_CHECK(this,
pads_begin_dim >= 0,
"All pads_begin element must be non-negative (pads_begin_coord ",
pads_begin_coord,
")");
}
for (const auto& pads_end_dim : pads_end_coord)
{
NODE_VALIDATION_CHECK(this,
pads_end_dim >= 0,
"All pads_end element must be non-negative (pads_end_coord ",
pads_end_coord,
")");
}
auto pads_begin_node = input_value(1).get_node_shared_ptr();
auto pads_end_node = input_value(2).get_node_shared_ptr();
if (arg_shape_rank.is_static() && pads_begin_node->is_constant() &&
......
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "ngraph/pass/opset0_downgrade.hpp"
#include "ngraph/graph_util.hpp"
#include "ngraph/op/get_output_element.hpp"
#include "ngraph/op/pad.hpp"
using namespace std;
using namespace ngraph;
#define NGRAPH_OP(a, b) a,
enum class OP_TYPEID
{
#include "ngraph/op/op_tbl.hpp"
};
#undef NGRAPH_OP
#define NGRAPH_OP(a, b) {#a, OP_TYPEID::a},
static unordered_map<string, OP_TYPEID> typeid_map{
#include "ngraph/op/op_tbl.hpp"
};
#undef NGRAPH_OP
// Maps a node's string description to its OP_TYPEID enumerator.
// Throws unsupported_op for any description absent from the generated table.
static OP_TYPEID get_typeid(shared_ptr<Node> node)
{
    const auto entry = typeid_map.find(node->description());
    if (entry == typeid_map.end())
    {
        throw unsupported_op("Unsupported op '" + node->description() + "'");
    }
    return entry->second;
}
// END mapping to OP_TYPEID
// Per-node hook of the downgrade pass: rewrites a single opset-1 node into
// its opset-0 equivalent. Returns true when the node was replaced.
bool pass::Opset0Downgrade::run_on_node(shared_ptr<Node> node)
{
    bool modified = false;
    const size_t op_version = node->get_version();
    // Version-0 ops are already in the target opset; leave them untouched.
    if (op_version == 0)
    {
        return modified;
    }
    NGRAPH_CHECK(op_version == 1,
                 "Op version 1 transformation pass failed for ",
                 *node,
                 ", only op version 1 operations expected. Op version ",
                 op_version,
                 " found.");
// Not all enumeration values explicitly handled in switch
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wswitch-enum"
#endif
    switch (get_typeid(node))
    {
    case OP_TYPEID::Pad:
    {
        // Rebuild v1::Pad as the legacy v0::Pad: same data (input 0) and pad
        // value (input 3), with pad amounts taken from the v1 node's getters.
        const auto pad_v1 = dynamic_cast<const op::v1::Pad*>(node.get());
        const auto data = node->input(0).get_source_output();
        const auto value = node->input(3).get_source_output();
        auto pad_v0 = make_shared<op::v0::Pad>(data,
                                               value,
                                               pad_v1->get_pads_begin(),
                                               pad_v1->get_pads_end(),
                                               pad_v1->get_pad_mode());
        replace_node(node, pad_v0);
        modified = true;
        break;
    }
    default: break;
    }
#if defined(__clang__)
#pragma clang diagnostic pop
#endif
    return modified;
}
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include "ngraph/pass/pass.hpp"
namespace ngraph
{
namespace pass
{
/// \brief Node pass that downgrades opset version 1 ops to opset version 0.
class Opset0Downgrade : public NodePass
{
public:
///
/// \brief Constructor for the Opset 1 downgrade transformation pass.
///
/// \details This transformation pass iterates over all nodes in a graph
/// and updates opset version 1 ops to their opset version 0 equivalents.
/// All ops in the final graph have opset version 0.
Opset0Downgrade() = default;
/// \brief Examines one node and replaces it with its opset-0 equivalent.
/// \param node Node to process; version-0 nodes are left unchanged.
/// \return true if the node was replaced, false otherwise.
bool run_on_node(std::shared_ptr<ngraph::Node> node) override;
};
}
}
......@@ -151,6 +151,7 @@
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/memory_layout.hpp"
#include "ngraph/pass/nop_elimination.hpp"
#include "ngraph/pass/opset0_downgrade.hpp"
#include "ngraph/pass/propagate_cacheability.hpp"
#include "ngraph/pass/reshape_elimination.hpp"
#include "ngraph/pass/reshape_sinking.hpp"
......@@ -1218,6 +1219,7 @@ void runtime::cpu::CPU_ExternalFunction::register_common_passes(
REGISTER_KNOBBED_PASS(LikeReplacement, true, ngraph::pass)
REGISTER_KNOBBED_PASS_WITH_ARGS(FusedOpDecomposition, true, ngraph::pass, is_supported)
REGISTER_KNOBBED_PASS(Opset0Downgrade, true, ngraph::pass)
REGISTER_KNOBBED_PASS(ImplicitBroadcastElimination, true, ngraph::pass)
REGISTER_KNOBBED_PASS(NopElimination, true, ngraph::pass)
REGISTER_KNOBBED_PASS(ZeroDimTensorElimination, true, ngraph::pass)
......
......@@ -29,6 +29,7 @@
#include "ngraph/pass/liveness.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/memory_layout.hpp"
#include "ngraph/pass/opset0_downgrade.hpp"
#include "ngraph/runtime/backend_manager.hpp"
#include "ngraph/serializer.hpp"
#include "ngraph/util.hpp"
......@@ -47,6 +48,7 @@ runtime::gcpu::GCPUExecutable::GCPUExecutable(const shared_ptr<Function>& functi
pass::Manager pass_manager;
pass_manager.register_pass<pass::LikeReplacement>();
pass_manager.register_pass<pass::FusedOpDecomposition>();
pass_manager.register_pass<pass::Opset0Downgrade>();
pass_manager.register_pass<pass::ImplicitBroadcastElimination>();
pass_manager.register_pass<pass::AssignLayout<DenseTensorLayout>>();
pass_manager.register_pass<pass::Liveness>();
......
......@@ -28,6 +28,7 @@
#include "ngraph/pass/liveness.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/memory_layout.hpp"
#include "ngraph/pass/opset0_downgrade.hpp"
#include "ngraph/runtime/backend_manager.hpp"
#include "ngraph/runtime/chrome_trace.hpp"
#include "ngraph/serializer.hpp"
......@@ -47,6 +48,7 @@ runtime::interpreter::INTExecutable::INTExecutable(const shared_ptr<Function>& f
pass::Manager pass_manager;
pass_manager.register_pass<pass::LikeReplacement>();
pass_manager.register_pass<pass::FusedOpDecomposition>();
pass_manager.register_pass<pass::Opset0Downgrade>();
pass_manager.register_pass<pass::AssignLayout<DenseTensorLayout>>();
pass_manager.register_pass<pass::Liveness>();
pass_manager.run_passes(m_function);
......
......@@ -28,6 +28,7 @@
#include "ngraph/pass/liveness.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/nop_elimination.hpp"
#include "ngraph/pass/opset0_downgrade.hpp"
#include "ngraph/pass/visualize_tree.hpp"
#include "ngraph/pass/zero_dim_tensor_elimination.hpp"
#include "ngraph/runtime/plaidml/plaidml_impl.hpp"
......@@ -100,6 +101,7 @@ std::shared_ptr<ngraph::runtime::plaidml::PlaidML_Executable>
}
return false;
});
pass_manager.register_pass<ngraph::pass::Opset0Downgrade>();
pass_manager.register_pass<ngraph::pass::LikeReplacement>();
pass_manager.register_pass<ngraph::pass::NopElimination>();
pass_manager.register_pass<ngraph::pass::ZeroDimTensorElimination>();
......
......@@ -3,13 +3,14 @@
#include "ngraph/ngraph.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/opset0_downgrade.hpp"
#include "ngraph/pass/opset1_upgrade.hpp"
#include "util/type_prop.hpp"
using namespace std;
using namespace ngraph;
TEST(serialize, opset1_pad_pass)
TEST(opset_upgrade, opset1_pad_upgrade_pass)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{5, 6});
auto arg_pad_value = make_shared<op::Parameter>(element::f32, Shape{});
......@@ -37,3 +38,32 @@ TEST(serialize, opset1_pad_pass)
EXPECT_EQ(pad_v1_node->get_pads_begin(), padding_below);
EXPECT_EQ(pad_v1_node->get_pads_end(), padding_above);
}
// Verifies that Opset0Downgrade turns a v1::Pad (constant pads inputs) into a
// v0::Pad carrying equivalent padding_below/padding_above attributes.
TEST(opset_downgrade, opset1_pad_downgrade_pass)
{
    const auto data = make_shared<op::Parameter>(element::f32, Shape{5, 6});
    const auto value = make_shared<op::Parameter>(element::f32, Shape{});
    const auto begin = make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{1, 2});
    const auto end = make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{3, 4});
    const auto mode = op::PadMode::EDGE;

    const auto pad_v1 = make_shared<op::v1::Pad>(data, begin, end, value, mode);
    const auto f = make_shared<Function>(ResultVector{make_shared<op::Result>(pad_v1)},
                                         ParameterVector{data, value});

    pass::Manager manager;
    manager.register_pass<pass::Opset0Downgrade>();
    manager.run_passes(f);

    const auto producer =
        f->get_results().at(0)->input(0).get_source_output().get_node_shared_ptr();
    const auto pad_v0 = static_pointer_cast<op::v0::Pad>(producer);

    EXPECT_EQ(pad_v0->description(), "Pad");
    EXPECT_EQ(pad_v0->get_version(), 0);
    EXPECT_EQ(pad_v0->get_pad_mode(), mode);
    EXPECT_EQ(pad_v0->get_padding_below(), CoordinateDiff({1, 2}));
    EXPECT_EQ(pad_v0->get_padding_above(), CoordinateDiff({3, 4}));
}
......@@ -602,58 +602,6 @@ TEST(type_prop, pad_v1_arg_pads_end_incompatible_type)
}
}
// Negative elements in pads_begin must be rejected when the v1::Pad is built.
TEST(type_prop, pad_v1_pads_begin_elem_negative_value)
{
    auto arg = make_shared<op::Parameter>(element::f32, Shape{5, 4, 2});
    auto pads_begin =
        make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{6, 9, -3});
    auto pads_end =
        make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{5, 3, 0});
    // NOTE: the unused arg_pad_value parameter has been removed; the Pad
    // constructor under test takes no pad-value input, matching the sibling
    // pad_v1_pads_end_elem_negative_value test.
    try
    {
        auto pad_v1 = make_shared<op::v1::Pad>(arg, pads_begin, pads_end, op::PadMode::REFLECT);
        // Should have thrown, so fail if it didn't
        FAIL() << "Negative pads_begin element exception not thrown";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            std::string("All pads_begin element must be non-negative (pads_begin_coord"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
// Negative elements in pads_end must be rejected when the v1::Pad is built.
TEST(type_prop, pad_v1_pads_end_elem_negative_value)
{
    const auto data = make_shared<op::Parameter>(element::f32, Shape{5, 4, 2});
    const auto begin =
        make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{5, 3, 0});
    const auto end =
        make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{6, 9, -3});
    try
    {
        const auto pad = make_shared<op::v1::Pad>(data, begin, end, op::PadMode::REFLECT);
        // Construction should have thrown; reaching here is a failure.
        FAIL() << "Negative pads_end element exception not thrown";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(), std::string("All pads_end element must be non-negative (pads_end_coord"));
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}
TEST(type_prop, pad_v1_deduce_too_small_for_edge)
{
auto arg = make_shared<op::Parameter>(element::f32, Shape{1, 5, 0, 2});
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment