Commit d33698fa authored by Katarzyna Mitrus, committed by Michał Karzyński

Update ONNX importer to use nGraph ops from new opset header (#3994)

parent 04ea0671
@@ -37,12 +37,14 @@ add_library(onnx_import STATIC
 core/operator_set.hpp
 core/tensor.hpp
 core/value_info.hpp
+default_opset.hpp
 exceptions.cpp
 exceptions.hpp
 op/acos.hpp
 op/acosh.cpp
 op/acosh.hpp
 op/add.hpp
+op/add.cpp
 op/and.hpp
 op/argmax.cpp
 op/argmax.hpp
...
#include "ngraph/opsets/opset1.hpp"
namespace ngraph
{
namespace onnx_import
{
namespace default_opset = ngraph::opset1;
}
}
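The new default_opset.hpp shown above is only a namespace alias: every importer handler now spells its ops as default_opset::..., and retargeting the whole importer to a newer opset later means touching just that one alias. A minimal standalone sketch of the same indirection pattern (plain C++, not nGraph code; the opset0/opset1 stand-ins here are hypothetical):

    // Handlers written against default_opset::Abs pick up whichever opset the alias names.
    #include <iostream>

    namespace opset0 { struct Abs { static const char* version() { return "opset0"; } }; }
    namespace opset1 { struct Abs { static const char* version() { return "opset1"; } }; }

    // Equivalent of default_opset.hpp: the only line to change when moving to a new opset.
    namespace default_opset = opset1;

    int main()
    {
        std::cout << default_opset::Abs::version() << '\n'; // prints "opset1"
    }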
@@ -19,8 +19,8 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/abs.hpp"
 namespace ngraph
 {
@@ -32,7 +32,7 @@ namespace ngraph
 {
 inline NodeVector abs(const Node& node)
 {
-return {std::make_shared<ngraph::op::Abs>(node.get_ng_inputs().at(0))};
+return {std::make_shared<default_opset::Abs>(node.get_ng_inputs().at(0))};
 }
 } // namespace set_1
...
@@ -19,8 +19,8 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/acos.hpp"
 namespace ngraph
 {
@@ -32,7 +32,7 @@ namespace ngraph
 {
 inline NodeVector acos(const Node& node)
 {
-return {std::make_shared<ngraph::op::Acos>(node.get_ng_inputs().at(0))};
+return {std::make_shared<default_opset::Acos>(node.get_ng_inputs().at(0))};
 }
 } // namespace set_1
...
@@ -17,11 +17,9 @@
 #include <memory>
 #include "acosh.hpp"
+#include "default_opset.hpp"
 #include "ngraph/op/add.hpp"
-#include "ngraph/op/constant.hpp"
-#include "ngraph/op/log.hpp"
 #include "ngraph/op/multiply.hpp"
-#include "ngraph/op/sqrt.hpp"
 #include "ngraph/op/subtract.hpp"
 #include "ngraph/shape.hpp"
@@ -42,15 +40,15 @@ namespace ngraph
 // arccosh(x) = ln(x + sqrt(x^2 - 1))
 //
-std::shared_ptr<ngraph::Node> one_node{ngraph::op::Constant::create(
+std::shared_ptr<ngraph::Node> one_node{default_opset::Constant::create(
 data->get_element_type(),
 data->get_shape(),
 std::vector<float>(ngraph::shape_size(data->get_shape()), 1.f))};
 std::shared_ptr<ngraph::Node> sqrt_node{
-std::make_shared<ngraph::op::Sqrt>(data * data - one_node)};
-return {std::make_shared<ngraph::op::Log>(data + sqrt_node)};
+std::make_shared<default_opset::Sqrt>(data * data - one_node)};
+return {std::make_shared<default_opset::Log>(data + sqrt_node)};
 }
 } // namespace set_1
...
+//*****************************************************************************
+// Copyright 2017-2019 Intel Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//*****************************************************************************
+#include "add.hpp"
+#include "default_opset.hpp"
+#include "ngraph/op/util/broadcasting.hpp"
+#include "ngraph/opsets/opset0.hpp"
+namespace ngraph
+{
+namespace onnx_import
+{
+namespace op
+{
+namespace set_1
+{
+NodeVector add(const Node& node)
+{
+auto left_rank = node.get_ng_inputs().at(0)->get_shape().size();
+auto right_rank = node.get_ng_inputs().at(1)->get_shape().size();
+auto axis =
+node.get_attribute_value<std::int64_t>("axis", left_rank - right_rank);
+NodeVector ng_inputs{ngraph::op::legacy_style_broadcast_for_binary_operation(
+node.get_ng_inputs().at(0), node.get_ng_inputs().at(1), axis)};
+return {
+std::make_shared<ngraph::opset0::Add>(ng_inputs.at(0), ng_inputs.at(1))};
+}
+} // namespace set_1
+namespace set_7
+{
+NodeVector add(const Node& node)
+{
+return {std::make_shared<default_opset::Add>(node.get_ng_inputs().at(0),
+node.get_ng_inputs().at(1))};
+}
+} // namespace set_7
+} // namespace op
+} // namespace onnx_import
+} // namespace ngraph
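The split in the new add.cpp above mirrors the ONNX operator sets: the opset-1 Add carries an axis attribute and uses legacy broadcasting (hence the explicit legacy_style_broadcast_for_binary_operation call), while from opset 7 onward Add broadcasts NumPy-style, which default_opset::Add handles on its own. A standalone sketch of the two shape rules, assuming the axis-aligned semantics used by the set_1 handler (illustrative only, not nGraph code):

    #include <algorithm>
    #include <cassert>
    #include <cstddef>
    #include <vector>

    using Shape = std::vector<std::size_t>;

    // Opset-1 legacy rule: `right` is aligned at `axis` inside `left` and broadcast along
    // the remaining dimensions; the output always takes the full shape of `left`.
    Shape legacy_broadcast_shape(const Shape& left, const Shape& right, std::size_t axis)
    {
        assert(axis + right.size() <= left.size());
        for (std::size_t i = 0; i < right.size(); ++i)
        {
            assert(right[i] == left[axis + i] || right[i] == 1);
        }
        return left;
    }

    // Opset-7+ NumPy rule: shapes are right-aligned, padded with 1s, and each dimension
    // pair is combined by taking the larger value (1 broadcasts against anything).
    Shape numpy_broadcast_shape(Shape a, Shape b)
    {
        if (a.size() < b.size())
        {
            std::swap(a, b);
        }
        b.insert(b.begin(), a.size() - b.size(), 1);
        Shape out(a.size());
        for (std::size_t i = 0; i < a.size(); ++i)
        {
            assert(a[i] == b[i] || a[i] == 1 || b[i] == 1);
            out[i] = std::max(a[i], b[i]);
        }
        return out;
    }

    int main()
    {
        // axis defaults to left_rank - right_rank, as in the set_1 handler above.
        assert((legacy_broadcast_shape({2, 3, 4, 5}, {4, 5}, 2) == Shape{2, 3, 4, 5}));
        assert((numpy_broadcast_shape({2, 3, 4, 5}, {4, 1}) == Shape{2, 3, 4, 5}));
    }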
@@ -20,8 +20,6 @@
 #include "core/node.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/add.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 namespace ngraph
 {
@@ -31,27 +29,13 @@ namespace ngraph
 {
 namespace set_1
 {
-inline NodeVector add(const Node& node)
-{
-auto left_rank = node.get_ng_inputs().at(0)->get_shape().size();
-auto right_rank = node.get_ng_inputs().at(1)->get_shape().size();
-auto axis =
-node.get_attribute_value<std::int64_t>("axis", left_rank - right_rank);
-NodeVector ng_inputs{ngraph::op::legacy_style_broadcast_for_binary_operation(
-node.get_ng_inputs().at(0), node.get_ng_inputs().at(1), axis)};
-return {std::make_shared<ngraph::op::Add>(ng_inputs.at(0), ng_inputs.at(1))};
-}
+NodeVector add(const Node& node);
 } // namespace set_1
 namespace set_7
 {
-inline NodeVector add(const Node& node)
-{
-return {std::make_shared<ngraph::op::v1::Add>(node.get_ng_inputs().at(0),
-node.get_ng_inputs().at(1))};
-}
+NodeVector add(const Node& node);
 } // namespace set_7
...
@@ -19,6 +19,7 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/and.hpp"
@@ -32,7 +33,7 @@ namespace ngraph
 {
 inline NodeVector logical_and(const Node& node)
 {
-return {std::make_shared<ngraph::op::v1::LogicalAnd>(
+return {std::make_shared<default_opset::LogicalAnd>(
 node.get_ng_inputs().at(0), node.get_ng_inputs().at(1))};
 }
...
@@ -17,7 +17,7 @@
 #include "argmax.hpp"
 #include "core/node.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/argmax.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "utils/reduction.hpp"
 namespace ngraph
@@ -30,7 +30,7 @@ namespace ngraph
 {
 NodeVector argmax(const Node& node)
 {
-return {reduction::make_ng_index_reduction_op<ngraph::op::ArgMax>(node)};
+return {reduction::make_ng_index_reduction_op<ngraph::opset0::ArgMax>(node)};
 }
 } // namespace set_1
...
@@ -17,7 +17,7 @@
 #include "argmin.hpp"
 #include "core/node.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/argmin.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "utils/reduction.hpp"
 namespace ngraph
@@ -30,7 +30,7 @@ namespace ngraph
 {
 NodeVector argmin(const Node& node)
 {
-return {reduction::make_ng_index_reduction_op<ngraph::op::ArgMin>(node)};
+return {reduction::make_ng_index_reduction_op<ngraph::opset0::ArgMin>(node)};
 }
 } // namespace set_1
...
@@ -19,6 +19,7 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/asin.hpp"
@@ -32,7 +33,7 @@ namespace ngraph
 {
 inline NodeVector asin(const Node& node)
 {
-return {std::make_shared<ngraph::op::Asin>(node.get_ng_inputs().at(0))};
+return {std::make_shared<default_opset::Asin>(node.get_ng_inputs().at(0))};
 }
 } // namespace set_1
...
@@ -17,11 +17,9 @@
 #include <memory>
 #include "asinh.hpp"
+#include "default_opset.hpp"
 #include "ngraph/op/add.hpp"
-#include "ngraph/op/constant.hpp"
-#include "ngraph/op/log.hpp"
 #include "ngraph/op/multiply.hpp"
-#include "ngraph/op/sqrt.hpp"
 #include "ngraph/shape.hpp"
 namespace ngraph
@@ -41,15 +39,15 @@ namespace ngraph
 // asinh(x) = ln(x + sqrt(x^2 + 1))
 //
-std::shared_ptr<ngraph::Node> one_node{ngraph::op::Constant::create(
+std::shared_ptr<ngraph::Node> one_node{default_opset::Constant::create(
 data->get_element_type(),
 data->get_shape(),
 std::vector<float>(ngraph::shape_size(data->get_shape()), 1.f))};
 std::shared_ptr<ngraph::Node> sqrt_node{
-std::make_shared<ngraph::op::Sqrt>(data * data + one_node)};
-return {std::make_shared<ngraph::op::Log>(data + sqrt_node)};
+std::make_shared<default_opset::Sqrt>(data * data + one_node)};
+return {std::make_shared<default_opset::Log>(data + sqrt_node)};
 }
 } // namespace set_1
...
@@ -19,8 +19,8 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/atan.hpp"
 namespace ngraph
 {
@@ -32,7 +32,7 @@ namespace ngraph
 {
 inline NodeVector atan(const Node& node)
 {
-return {std::make_shared<ngraph::op::Atan>(node.get_ng_inputs().at(0))};
+return {std::make_shared<default_opset::Atan>(node.get_ng_inputs().at(0))};
 }
 } // namespace set_1
...
@@ -17,9 +17,8 @@
 #include <memory>
 #include "atanh.hpp"
+#include "default_opset.hpp"
 #include "ngraph/op/add.hpp"
-#include "ngraph/op/constant.hpp"
-#include "ngraph/op/log.hpp"
 #include "ngraph/op/multiply.hpp"
 #include "ngraph/op/subtract.hpp"
 #include "ngraph/shape.hpp"
@@ -42,18 +41,18 @@ namespace ngraph
 // = 0.5 * (ln(1 + x) - ln(1 - x))
 //
-std::shared_ptr<ngraph::Node> one_node{ngraph::op::Constant::create(
+std::shared_ptr<ngraph::Node> one_node{default_opset::Constant::create(
 data->get_element_type(),
 data->get_shape(),
 std::vector<float>(ngraph::shape_size(data->get_shape()), 1.f))};
-std::shared_ptr<ngraph::Node> half_node{ngraph::op::Constant::create(
+std::shared_ptr<ngraph::Node> half_node{default_opset::Constant::create(
 data->get_element_type(),
 data->get_shape(),
 std::vector<float>(ngraph::shape_size(data->get_shape()), 0.5f))};
-return {half_node * (std::make_shared<ngraph::op::Log>(one_node + data) -
-std::make_shared<ngraph::op::Log>(one_node - data))};
+return {half_node * (std::make_shared<default_opset::Log>(one_node + data) -
+std::make_shared<default_opset::Log>(one_node - data))};
 }
 } // namespace set_1
...
@@ -19,8 +19,9 @@
 #include "batch_norm.hpp"
 #include "core/null_node.hpp"
+#include "default_opset.hpp"
 #include "exceptions.hpp"
-#include "ngraph/op/batch_norm.hpp"
+#include "ngraph/opsets/opset0.hpp"
 namespace ngraph
 {
@@ -56,7 +57,7 @@ namespace ngraph
 {
 mean = inputs.at(3);
 var = inputs.at(4);
-return {std::make_shared<ngraph::op::BatchNormInference>(
+return {std::make_shared<default_opset::BatchNormInference>(
 x, scale, bias, mean, var, epsilon),
 after_bn_mean,
 after_bn_var,
@@ -64,8 +65,8 @@ namespace ngraph
 saved_var};
 }
-return {
-std::make_shared<ngraph::op::BatchNormTraining>(x, scale, bias, epsilon),
+return {std::make_shared<ngraph::opset0::BatchNormTraining>(
+x, scale, bias, epsilon),
 after_bn_mean,
 after_bn_var,
 saved_mean,
...
@@ -16,7 +16,7 @@
 #include <memory>
 #include "cast.hpp"
-#include "ngraph/op/convert.hpp"
+#include "default_opset.hpp"
 #include "ngraph/type/element_type.hpp"
 #include "utils/common.hpp"
@@ -34,7 +34,7 @@ namespace ngraph
 int64_t target_type = node.get_attribute_value<int64_t>("to");
 element::Type elem_type = common::get_ngraph_element_type(target_type);
-return {std::make_shared<ngraph::op::Convert>(data, elem_type)};
+return {std::make_shared<default_opset::Convert>(data, elem_type)};
 }
 } // namespace set_1
...
@@ -19,8 +19,8 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/ceiling.hpp"
 namespace ngraph
 {
@@ -32,7 +32,7 @@ namespace ngraph
 {
 inline NodeVector ceil(const Node& node)
 {
-return {std::make_shared<ngraph::op::Ceiling>(node.get_ng_inputs().at(0))};
+return {std::make_shared<default_opset::Ceiling>(node.get_ng_inputs().at(0))};
 }
 } // namespace set_1
...
@@ -18,12 +18,9 @@
 #include <memory>
 #include "clip.hpp"
+#include "default_opset.hpp"
 #include "ngraph/builder/make_constant.hpp"
-#include "ngraph/op/constant.hpp"
-#include "ngraph/op/fused/clamp.hpp"
-#include "ngraph/op/maximum.hpp"
-#include "ngraph/op/minimum.hpp"
-#include "ngraph/op/reshape.hpp"
+#include "ngraph/opsets/opset0.hpp"
 namespace ngraph
 {
@@ -43,7 +40,7 @@ namespace ngraph
 const double min_value = node.get_attribute_value<double>(
 "min", std::numeric_limits<double>::lowest());
-return {std::make_shared<ngraph::op::Clamp>(data, min_value, max_value)};
+return {std::make_shared<default_opset::Clamp>(data, min_value, max_value)};
 }
 } // namespace set_1
@@ -83,12 +80,12 @@ namespace ngraph
 data_type, data_shape, std::numeric_limits<double>::max());
 }
-auto max_of_min_and_data = std::make_shared<ngraph::op::Maximum>(
+auto max_of_min_and_data = std::make_shared<ngraph::opset0::Maximum>(
 min,
 data,
 ngraph::op::AutoBroadcastSpec(ngraph::op::AutoBroadcastType::NUMPY));
-return {std::make_shared<ngraph::op::Minimum>(
+return {std::make_shared<ngraph::opset0::Minimum>(
 max,
 max_of_min_and_data,
 ngraph::op::AutoBroadcastSpec(ngraph::op::AutoBroadcastType::NUMPY))};
...
@@ -17,6 +17,7 @@
 #include <cstdint>
 #include "concat.hpp"
+#include "default_opset.hpp"
 #include "exceptions.hpp"
 #include "ngraph/op/concat.hpp"
 #include "utils/common.hpp"
@@ -36,7 +37,7 @@ namespace ngraph
 size_t valid_axis =
 common::validate_axis(node, axis, inputs.at(0)->get_shape().size());
-return {std::make_shared<ngraph::op::Concat>(inputs, valid_axis)};
+return {std::make_shared<default_opset::Concat>(inputs, valid_axis)};
 }
 } // namespace set_1
...
@@ -16,6 +16,7 @@
 #include "constant.hpp"
 #include "core/tensor.hpp"
+#include "default_opset.hpp"
 #include "ngraph/op/constant.hpp"
 namespace ngraph
@@ -29,98 +30,99 @@ namespace ngraph
 namespace
 {
 template <typename T>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 __make_ng_constant(const element::Type& type, const Tensor& tensor)
 {
-return std::make_shared<ngraph::op::Constant>(
+return std::make_shared<default_opset::Constant>(
 type, tensor.get_shape(), tensor.get_data<T>());
 }
 template <Tensor::Type>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant(const Tensor& tensor)
 {
 throw error::tensor::unsupported_data_type{tensor};
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::float16>(const Tensor& tensor)
 {
 return __make_ng_constant<ngraph::float16>(element::f16, tensor);
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::float32>(const Tensor& tensor)
 {
 return __make_ng_constant<float>(element::f32, tensor);
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::float64>(const Tensor& tensor)
 {
 return __make_ng_constant<double>(element::f64, tensor);
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::int8>(const Tensor& tensor)
 {
 return __make_ng_constant<int8_t>(element::i8, tensor);
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::int16>(const Tensor& tensor)
 {
 return __make_ng_constant<int16_t>(element::i16, tensor);
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::int32>(const Tensor& tensor)
 {
 return __make_ng_constant<int32_t>(element::i32, tensor);
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::int64>(const Tensor& tensor)
 {
 return __make_ng_constant<int64_t>(element::i64, tensor);
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::uint8>(const Tensor& tensor)
 {
 return __make_ng_constant<uint8_t>(element::u8, tensor);
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::uint16>(const Tensor& tensor)
 {
 return __make_ng_constant<uint16_t>(element::u16, tensor);
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::uint32>(const Tensor& tensor)
 {
 return __make_ng_constant<uint32_t>(element::u32, tensor);
 }
 template <>
-inline std::shared_ptr<ngraph::op::Constant>
+inline std::shared_ptr<default_opset::Constant>
 make_ng_constant<Tensor::Type::uint64>(const Tensor& tensor)
 {
 return __make_ng_constant<uint64_t>(element::u64, tensor);
 }
-inline std::shared_ptr<ngraph::op::Constant> make_constant(const Tensor& tensor)
+inline std::shared_ptr<default_opset::Constant>
+make_constant(const Tensor& tensor)
 {
 #define MAKE_NG_CONSTANT(data_type_) \
 case data_type_: return make_ng_constant<data_type_>(tensor)
...
@@ -19,17 +19,14 @@
 #include <vector>
 #include "conv.hpp"
+#include "default_opset.hpp"
 #include "exceptions.hpp"
 #include "ngraph/builder/reshape.hpp"
-#include "ngraph/op/add.hpp"
-#include "ngraph/op/broadcast.hpp"
-#include "ngraph/op/concat.hpp"
-#include "ngraph/op/constant.hpp"
-#include "ngraph/op/convolution.hpp"
 #include "ngraph/op/fused/group_conv.hpp"
 #include "ngraph/op/slice.hpp"
 #include "ngraph/op/util/attr_types.hpp"
 #include "ngraph/op/util/broadcasting.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "utils/convpool.hpp"
 namespace ngraph
@@ -61,7 +58,7 @@ namespace ngraph
 auto reshaped_filters =
 ngraph::builder::reshape(filters, filters_shape);
-return std::make_shared<ngraph::op::v1::GroupConvolution>(
+return std::make_shared<default_opset::GroupConvolution>(
 data,
 reshaped_filters,
 strides,
@@ -72,7 +69,7 @@ namespace ngraph
 }
 else
 {
-return std::make_shared<ngraph::op::v1::Convolution>(data,
+return std::make_shared<default_opset::Convolution>(data,
 filters,
 strides,
 padding_below,
@@ -134,11 +131,11 @@ namespace ngraph
 auto bias = inputs.at(2);
 const Shape& new_shape = conv_node->get_shape();
-auto broadcasted_bias = std::make_shared<ngraph::op::Broadcast>(
+auto broadcasted_bias = std::make_shared<ngraph::opset0::Broadcast>(
 bias,
 new_shape,
 ngraph::op::calculate_broadcast_axes(new_shape, bias->get_shape(), 1));
-return {std::make_shared<ngraph::op::Add>(conv_node, broadcasted_bias)};
+return {std::make_shared<ngraph::opset0::Add>(conv_node, broadcasted_bias)};
 }
 } // namespace set_1
...
@@ -15,11 +15,11 @@
 //*****************************************************************************
 #include "conv_integer.hpp"
+#include "exceptions.hpp"
 #include "ngraph/builder/make_constant.hpp"
-#include "ngraph/frontend/onnx_import/exceptions.hpp"
-#include "ngraph/frontend/onnx_import/utils/convpool.hpp"
-#include "ngraph/op/quantized_convolution.hpp"
 #include "ngraph/op/util/attr_types.hpp"
+#include "ngraph/opsets/opset0.hpp"
+#include "utils/convpool.hpp"
 using namespace ngraph::builder;
@@ -66,7 +66,7 @@ namespace ngraph
 if (num_inputs == 2)
 {
-return {std::make_shared<ngraph::op::QuantizedConvolution>(
+return {std::make_shared<ngraph::opset0::QuantizedConvolution>(
 input,
 filters,
 window_movement_strides,
@@ -92,7 +92,7 @@ namespace ngraph
 filters_zero_point = inputs.at(3);
 }
-return {std::make_shared<ngraph::op::QuantizedConvolution>(
+return {std::make_shared<ngraph::opset0::QuantizedConvolution>(
 input,
 filters,
 window_movement_strides,
...
@@ -27,6 +27,7 @@
 #include "ngraph/op/fused/group_conv_transpose.hpp"
 #include "ngraph/op/util/attr_types.hpp"
 #include "ngraph/op/util/broadcasting.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "ngraph/shape.hpp"
 #include "utils/convpool.hpp"
@@ -84,7 +85,7 @@ namespace ngraph
 std::shared_ptr<ngraph::Node> conv_node;
 if (!output_shape.empty())
 {
-conv_node = std::make_shared<ngraph::op::GroupConvolutionTranspose>(
+conv_node = std::make_shared<ngraph::opset0::GroupConvolutionTranspose>(
 data,
 filters,
 strides,
@@ -95,7 +96,7 @@ namespace ngraph
 }
 else
 {
-conv_node = std::make_shared<ngraph::op::GroupConvolutionTranspose>(
+conv_node = std::make_shared<ngraph::opset0::GroupConvolutionTranspose>(
 data,
 filters,
 strides,
@@ -115,7 +116,7 @@ namespace ngraph
 auto bias = inputs.at(2);
-return {std::make_shared<ngraph::op::Add>(
+return {std::make_shared<ngraph::opset0::Add>(
 conv_node,
 ngraph::op::make_broadcast_node(bias, conv_node->get_shape(), 1))};
 }
...
@@ -17,8 +17,8 @@
 #include <memory>
 #include "cum_sum.hpp"
-#include "ngraph/op/constant.hpp"
-#include "ngraph/op/cum_sum.hpp"
+#include "default_opset.hpp"
+#include "ngraph/opsets/opset0.hpp"
 namespace ngraph
 {
@@ -42,10 +42,11 @@ namespace ngraph
 }
 else
 {
-axis = ngraph::op::Constant::create(element::i64, Shape{}, {0}); // default
+axis =
+default_opset::Constant::create(element::i64, Shape{}, {0}); // default
 }
 return NodeVector{
-std::make_shared<ngraph::op::CumSum>(data, axis, exclusive, reverse)};
+std::make_shared<ngraph::opset0::CumSum>(data, axis, exclusive, reverse)};
 }
 } // namespace set_1
...
@@ -15,7 +15,7 @@
 //*****************************************************************************
 #include "depth_to_space.hpp"
-#include "ngraph/op/fused/depth_to_space.hpp"
+#include "default_opset.hpp"
 namespace ngraph
 {
@@ -30,11 +30,12 @@ namespace ngraph
 auto data = node.get_ng_inputs().at(0);
 const auto mode = node.get_attribute_value<std::string>("mode", "DCR");
 const auto ngraph_mode =
-(mode == "DCR") ? ngraph::op::DepthToSpace::DepthToSpaceMode::BLOCKS_FIRST
-: ngraph::op::DepthToSpace::DepthToSpaceMode::DEPTH_FIRST;
+(mode == "DCR")
+? default_opset::DepthToSpace::DepthToSpaceMode::BLOCKS_FIRST
+: default_opset::DepthToSpace::DepthToSpaceMode::DEPTH_FIRST;
 const auto block_size = node.get_attribute_value<std::int64_t>("blocksize");
-return NodeVector{
-std::make_shared<ngraph::op::DepthToSpace>(data, ngraph_mode, block_size)};
+return NodeVector{std::make_shared<default_opset::DepthToSpace>(
+data, ngraph_mode, block_size)};
 }
 } // namespace set_1
...
@@ -17,11 +17,13 @@
 #include <cstdint>
 #include <memory>
+#include "default_opset.hpp"
 #include "dequantize_linear.hpp"
 #include "ngraph/axis_set.hpp"
 #include "ngraph/builder/make_constant.hpp"
 #include "ngraph/op/convert.hpp"
 #include "ngraph/op/dequantize.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "ngraph/shape.hpp"
 namespace ngraph
@@ -73,11 +75,11 @@ namespace ngraph
 if (x->get_element_type() != zero_point->get_element_type())
 {
-zero_point = std::make_shared<ngraph::op::Convert>(zero_point,
-x->get_element_type());
+zero_point = std::make_shared<default_opset::Convert>(
+zero_point, x->get_element_type());
 }
-return {std::make_shared<ngraph::op::Dequantize>(
+return {std::make_shared<ngraph::opset0::Dequantize>(
 x, x_scale, zero_point, x_scale->get_element_type(), axes)};
 }
...
@@ -19,9 +19,10 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/divide.hpp"
 #include "ngraph/op/util/broadcasting.hpp"
+#include "ngraph/opsets/opset0.hpp"
 namespace ngraph
 {
@@ -40,7 +41,8 @@ namespace ngraph
 NodeVector ng_inputs{ngraph::op::legacy_style_broadcast_for_binary_operation(
 node.get_ng_inputs().at(0), node.get_ng_inputs().at(1), axis)};
-return {std::make_shared<ngraph::op::Divide>(ng_inputs.at(0), ng_inputs.at(1))};
+return {
+std::make_shared<ngraph::opset0::Divide>(ng_inputs.at(0), ng_inputs.at(1))};
 }
 } // namespace set_1
@@ -49,7 +51,7 @@ namespace ngraph
 {
 inline NodeVector div(const Node& node)
 {
-return {std::make_shared<ngraph::op::v1::Divide>(node.get_ng_inputs().at(0),
+return {std::make_shared<default_opset::Divide>(node.get_ng_inputs().at(0),
 node.get_ng_inputs().at(1))};
 }
...
@@ -17,9 +17,8 @@
 #include <memory>
 #include <vector>
+#include "default_opset.hpp"
 #include "elu.hpp"
-#include "ngraph/op/constant.hpp"
-#include "ngraph/op/fused/elu.hpp"
 namespace ngraph
 {
@@ -34,7 +33,7 @@ namespace ngraph
 auto data = node.get_ng_inputs().at(0);
 double alpha = node.get_attribute_value<double>("alpha", 1);
-return NodeVector{std::make_shared<ngraph::op::Elu>(data, alpha)};
+return NodeVector{std::make_shared<default_opset::Elu>(data, alpha)};
 }
 } // namespace set_1
...
@@ -19,9 +19,8 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/equal.hpp"
-#include "ngraph/op/util/broadcasting.hpp"
 namespace ngraph
 {
@@ -33,7 +32,7 @@ namespace ngraph
 {
 inline NodeVector equal(const Node& node)
 {
-return {std::make_shared<ngraph::op::v1::Equal>(node.get_ng_inputs().at(0),
+return {std::make_shared<default_opset::Equal>(node.get_ng_inputs().at(0),
 node.get_ng_inputs().at(1))};
 }
...
@@ -19,8 +19,8 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/erf.hpp"
 namespace ngraph
 {
@@ -32,7 +32,7 @@ namespace ngraph
 {
 inline NodeVector erf(const Node& node)
 {
-return {std::make_shared<ngraph::op::Erf>(node.get_ng_inputs().at(0))};
+return {std::make_shared<default_opset::Erf>(node.get_ng_inputs().at(0))};
 }
 } // namespace set_1
...
@@ -19,8 +19,8 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/op/exp.hpp"
 namespace ngraph
 {
@@ -32,7 +32,7 @@ namespace ngraph
 {
 inline NodeVector exp(const Node& node)
 {
-return {std::make_shared<ngraph::op::Exp>(node.get_ng_inputs().at(0))};
+return {std::make_shared<default_opset::Exp>(node.get_ng_inputs().at(0))};
 }
 } // namespace set_1
...
@@ -18,10 +18,10 @@
 #include <cstdint>
 #include <memory>
+#include "default_opset.hpp"
 #include "expand.hpp"
 #include "ngraph/descriptor/output.hpp"
 #include "ngraph/op/broadcast.hpp"
-#include "ngraph/op/constant.hpp"
 #include "ngraph/op/experimental/dyn_broadcast.hpp"
 #include "ngraph/op/experimental/dyn_reshape.hpp"
 #include "ngraph/op/experimental/range.hpp"
@@ -46,7 +46,8 @@ namespace ngraph
 "Ngraph does not support dynamic braodcasting for Expand op.");
 std::vector<std::size_t> shape_vector =
-ngraph::as_type_ptr<ngraph::op::Constant>(shape)->get_vector<std::size_t>();
+ngraph::as_type_ptr<default_opset::Constant>(shape)
+->get_vector<std::size_t>();
 const ngraph::Shape shape_shape{shape_vector};
 return {ngraph::op::numpy_style_broadcast(data, shape_shape)};
...
@@ -19,6 +19,7 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/floor.hpp"
@@ -32,7 +33,7 @@ namespace ngraph
 {
 inline NodeVector floor(const Node& node)
 {
-return {std::make_shared<ngraph::op::Floor>(node.get_ng_inputs().at(0))};
+return {std::make_shared<default_opset::Floor>(node.get_ng_inputs().at(0))};
 }
 } // namespace set_1
...
@@ -19,8 +19,8 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
-#include "ngraph/opsets/opset1.hpp"
 #include "utils/common.hpp"
 namespace ngraph
@@ -39,10 +39,10 @@ namespace ngraph
 auto axis = node.get_attribute_value<int64_t>("axis", 0);
 auto valid_axis = common::validate_axis(node, axis, data->get_shape().size());
-return {std::make_shared<opset1::Gather>(
+return {std::make_shared<default_opset::Gather>(
 data,
 indices,
-opset1::Constant::create(element::i64, Shape{}, {valid_axis}))};
+default_opset::Constant::create(element::i64, Shape{}, {valid_axis}))};
 }
 } // namespace set_1
...
@@ -14,7 +14,7 @@
 // limitations under the License.
 //*****************************************************************************
-#include "ngraph/op/gather_nd.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "utils/common.hpp"
 namespace ngraph
@@ -31,7 +31,7 @@ namespace ngraph
 auto data = ng_inputs.at(0);
 auto indices = ng_inputs.at(1);
-return {std::make_shared<ngraph::op::GatherND>(data, indices)};
+return {std::make_shared<ngraph::opset0::GatherND>(data, indices)};
 }
 } // namespace set_1
...
@@ -16,6 +16,7 @@
 #include <memory>
+#include "default_opset.hpp"
 #include "gemm.hpp"
 #include "ngraph/builder/reshape.hpp"
 #include "ngraph/op/add.hpp"
@@ -44,16 +45,16 @@ namespace ngraph
 }
 else
 {
-input_c = ngraph::op::Constant::create(
+input_c = default_opset::Constant::create(
 input_b->get_element_type(), ngraph::Shape{}, {0});
 }
 const auto alpha = node.get_attribute_value<float>("alpha", 1);
 const auto beta = node.get_attribute_value<float>("beta", 1);
-const auto alpha_node = ngraph::op::Constant::create(
+const auto alpha_node = default_opset::Constant::create(
 element::Type_t::f32, Shape{}, std::vector<float>{alpha});
-const auto beta_node = ngraph::op::Constant::create(
+const auto beta_node = default_opset::Constant::create(
 element::Type_t::f32, Shape{}, std::vector<float>{beta});
 const bool trans_a = node.get_attribute_value<int64_t>("transA", 0);
@@ -75,11 +76,11 @@ namespace ngraph
 auto matmul_node = std::make_shared<ngraph::op::MatMul>(input_a, input_b);
 auto alpha_times_product =
-std::make_shared<ngraph::op::v1::Multiply>(alpha_node, matmul_node);
+std::make_shared<default_opset::Multiply>(alpha_node, matmul_node);
 auto beta_times_input_c =
-std::make_shared<ngraph::op::v1::Multiply>(beta_node, input_c);
+std::make_shared<default_opset::Multiply>(beta_node, input_c);
-return NodeVector{std::make_shared<ngraph::op::v1::Add>(alpha_times_product,
+return NodeVector{std::make_shared<default_opset::Add>(alpha_times_product,
 beta_times_input_c)};
 }
@@ -100,30 +101,30 @@ namespace ngraph
 }
 else
 {
-input_c = ngraph::op::Constant::create(
+input_c = default_opset::Constant::create(
 input_b->get_element_type(), ngraph::Shape{}, {0});
 }
 const auto alpha = node.get_attribute_value<float>("alpha", 1);
 const auto beta = node.get_attribute_value<float>("beta", 1);
-const auto alpha_node = ngraph::op::Constant::create(
+const auto alpha_node = default_opset::Constant::create(
 element::Type_t::f32, Shape{}, std::vector<float>{alpha});
-const auto beta_node = ngraph::op::Constant::create(
+const auto beta_node = default_opset::Constant::create(
 element::Type_t::f32, Shape{}, std::vector<float>{beta});
 const bool trans_a = node.get_attribute_value<int64_t>("transA", 0);
 const bool trans_b = node.get_attribute_value<int64_t>("transB", 0);
 auto matmul_node =
-std::make_shared<ngraph::op::MatMul>(input_a, input_b, trans_a, trans_b);
+std::make_shared<default_opset::MatMul>(input_a, input_b, trans_a, trans_b);
 auto alpha_times_product =
-std::make_shared<ngraph::op::v1::Multiply>(alpha_node, matmul_node);
+std::make_shared<default_opset::Multiply>(alpha_node, matmul_node);
 auto beta_times_input_c =
-std::make_shared<ngraph::op::v1::Multiply>(beta_node, input_c);
+std::make_shared<default_opset::Multiply>(beta_node, input_c);
-return NodeVector{std::make_shared<ngraph::op::v1::Add>(alpha_times_product,
+return NodeVector{std::make_shared<default_opset::Add>(alpha_times_product,
 beta_times_input_c)};
 }
...
@@ -19,6 +19,7 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/greater.hpp"
 #include "ngraph/op/util/broadcasting.hpp"
@@ -33,7 +34,7 @@ namespace ngraph
 {
 inline NodeVector greater(const Node& node)
 {
-return {std::make_shared<ngraph::op::v1::Greater>(node.get_ng_inputs().at(0),
+return {std::make_shared<default_opset::Greater>(node.get_ng_inputs().at(0),
 node.get_ng_inputs().at(1))};
 }
...
@@ -16,11 +16,8 @@
 #include <memory>
+#include "default_opset.hpp"
 #include "hard_sigmoid.hpp"
-#include "ngraph/op/constant.hpp"
-#include "ngraph/op/fused/hard_sigmoid.hpp"
-using namespace ngraph::op;
 namespace ngraph
 {
@@ -34,17 +31,17 @@ namespace ngraph
 {
 const auto data = node.get_ng_inputs().at(0);
-const auto alpha = Constant::create<double>(
+const auto alpha = default_opset::Constant::create<double>(
 data->get_element_type(),
 Shape{},
 std::vector<double>{node.get_attribute_value<double>("alpha", 0.2)});
-const auto beta = Constant::create<double>(
+const auto beta = default_opset::Constant::create<double>(
 data->get_element_type(),
 Shape{},
 std::vector<double>{node.get_attribute_value<double>("beta", 0.5)});
-return {std::make_shared<ngraph::op::HardSigmoid>(data, alpha, beta)};
+return {std::make_shared<default_opset::HardSigmoid>(data, alpha, beta)};
 }
 } // namespace set_1
...
@@ -18,8 +18,7 @@
 #include "exceptions.hpp"
 #include "ngraph/builder/reshape.hpp"
 #include "ngraph/frontend/onnx_import/utils/common.hpp"
-#include "ngraph/op/argmax.hpp"
-#include "ngraph/op/embedding_lookup.hpp"
+#include "ngraph/opsets/opset0.hpp"
 namespace ngraph
 {
@@ -42,7 +41,7 @@ namespace ngraph
 const auto& coerced_shape = coerced_tensor->get_shape();
 const std::shared_ptr<ngraph::Node> argmax_2d =
-std::make_shared<ngraph::op::ArgMax>(coerced_tensor, 1, element::i64);
+std::make_shared<ngraph::opset0::ArgMax>(coerced_tensor, 1, element::i64);
 std::shared_ptr<ngraph::Node> eye_matrix =
 common::square_identity(coerced_shape.at(1), input->get_element_type());
@@ -50,7 +49,7 @@ namespace ngraph
 // the results are elements of the eye_matrix indexed by argmax_2d values
 // in other words: eye_matrix[argmax_2d]
 auto results =
-std::make_shared<ngraph::op::EmbeddingLookup>(argmax_2d, eye_matrix);
+std::make_shared<ngraph::opset0::EmbeddingLookup>(argmax_2d, eye_matrix);
 return {ngraph::builder::reshape(results, input_shape)};
 }
...
@@ -17,18 +17,18 @@
 #include <cstddef>
 #include <memory>
+#include "default_opset.hpp"
 #include "exceptions.hpp"
 #include "instance_norm.hpp"
 #include "ngraph/axis_set.hpp"
 #include "ngraph/builder/reduce_ops.hpp"
 #include "ngraph/op/add.hpp"
-#include "ngraph/op/broadcast.hpp"
-#include "ngraph/op/constant.hpp"
 #include "ngraph/op/divide.hpp"
 #include "ngraph/op/multiply.hpp"
 #include "ngraph/op/sqrt.hpp"
 #include "ngraph/op/subtract.hpp"
 #include "ngraph/op/util/broadcasting.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "utils/common.hpp"
 namespace ngraph
@@ -63,7 +63,7 @@ namespace ngraph
 common::get_monotonic_range<std::size_t>(data->get_shape().size(), 2)};
 const std::shared_ptr<ngraph::Node> eps_node =
-std::make_shared<ngraph::op::Constant>(data->get_element_type(),
+std::make_shared<default_opset::Constant>(data->get_element_type(),
 data->get_shape(),
 std::vector<float>{epsilon});
@@ -73,14 +73,14 @@ namespace ngraph
 .at(1);
 std::shared_ptr<ngraph::Node> mean = builder::mean(data, reduction_axes);
-mean = std::make_shared<ngraph::op::Broadcast>(
+mean = std::make_shared<ngraph::opset0::Broadcast>(
 mean, data->get_shape(), reduction_axes);
 std::shared_ptr<ngraph::Node> variance =
 builder::variance(data, reduction_axes);
-variance = std::make_shared<ngraph::op::Broadcast>(
+variance = std::make_shared<ngraph::opset0::Broadcast>(
 variance, data->get_shape(), reduction_axes);
-const auto sqrt = std::make_shared<ngraph::op::Sqrt>(variance + eps_node);
+const auto sqrt = std::make_shared<default_opset::Sqrt>(variance + eps_node);
 return {scale * (data - mean) / sqrt + bias};
 }
...
@@ -16,11 +16,12 @@
 #include <memory>
+#include "default_opset.hpp"
 #include "exceptions.hpp"
 #include "leaky_relu.hpp"
 #include "ngraph/op/constant.hpp"
-#include "ngraph/op/maximum.hpp"
 #include "ngraph/op/multiply.hpp"
+#include "ngraph/opsets/opset0.hpp"
 namespace ngraph
 {
@@ -39,10 +40,10 @@ namespace ngraph
 << " alpha value should be in range (0,1)";
 std::shared_ptr<ngraph::Node> alpha_node =
-std::make_shared<ngraph::op::Constant>(data->get_element_type(),
+std::make_shared<default_opset::Constant>(data->get_element_type(),
 data->get_shape(),
 std::vector<double>{alpha});
-return {std::make_shared<ngraph::op::Maximum>(data * alpha_node, data)};
+return {std::make_shared<ngraph::opset0::Maximum>(data * alpha_node, data)};
 }
 } // namespace set_1
...
@@ -19,6 +19,7 @@
 #include <memory>
 #include "core/node.hpp"
+#include "default_opset.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/less.hpp"
 #include "ngraph/op/util/broadcasting.hpp"
@@ -33,7 +34,7 @@ namespace ngraph
 {
 inline NodeVector less(const Node& node)
 {
-return {std::make_shared<ngraph::op::v1::Less>(node.get_ng_inputs().at(0),
+return {std::make_shared<default_opset::Less>(node.get_ng_inputs().at(0),
 node.get_ng_inputs().at(1))};
 }
...
@@ -17,7 +17,7 @@
 #include <memory>
 #include "log_softmax.hpp"
-#include "ngraph/op/fused/log_softmax.hpp"
+#include "ngraph/opsets/opset0.hpp"
 namespace ngraph
 {
@@ -34,7 +34,7 @@ namespace ngraph
 auto data_shape = data->get_shape();
 int axis = node.get_attribute_value<int64_t>("axis", 1);
-return {std::make_shared<ngraph::op::LogSoftmax>(data, axis)};
+return {std::make_shared<ngraph::opset0::LogSoftmax>(data, axis)};
 }
 } // namespace set_1
...
@@ -22,8 +22,8 @@
 #include "lp_norm.hpp"
 #include "ngraph/axis_set.hpp"
 #include "ngraph/builder/norm.hpp"
-#include "ngraph/op/broadcast.hpp"
 #include "ngraph/op/divide.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "utils/common.hpp"
 namespace ngraph
@@ -49,7 +49,7 @@ namespace ngraph
 const AxisSet reduction_axes{valid_axis};
 std::shared_ptr<ngraph::Node> norm = ngraph::builder::lp_norm(
 data, reduction_axes, static_cast<std::size_t>(p_norm));
-norm = std::make_shared<ngraph::op::Broadcast>(
+norm = std::make_shared<ngraph::opset0::Broadcast>(
 norm, data->get_shape(), reduction_axes);
 return {data / norm};
...
@@ -18,6 +18,7 @@
 #include <cstdint>
 #include <memory>
+#include "default_opset.hpp"
 #include "exceptions.hpp"
 #include "lp_pool.hpp"
 #include "ngraph/axis_set.hpp"
@@ -25,6 +26,7 @@
 #include "ngraph/builder/split.hpp"
 #include "ngraph/op/concat.hpp"
 #include "ngraph/op/reshape.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "ngraph/util.hpp"
 #include "utils/common.hpp"
@@ -61,13 +63,13 @@ namespace ngraph
 // output shape is all ones except N channel
 Shape output_shape(orig_shape.size(), 1);
 output_shape.at(0) = orig_shape.at(0);
-slice = std::make_shared<ngraph::op::Reshape>(
+slice = std::make_shared<ngraph::opset0::Reshape>(
 slice,
 ngraph::get_default_order(slice->get_shape().size()),
 output_shape);
 }
-return {std::make_shared<ngraph::op::Concat>(slices, channel_axis)};
+return {std::make_shared<default_opset::Concat>(slices, channel_axis)};
 }
 } // namespace set_1
...
@@ -16,8 +16,8 @@
 #include <memory>
+#include "default_opset.hpp"
 #include "lrn.hpp"
-#include "ngraph/op/lrn.hpp"
 namespace ngraph
 {
@@ -35,7 +35,7 @@ namespace ngraph
 double bias = node.get_attribute_value<double>("bias", 1);
 size_t size = node.get_attribute_value<size_t>("size");
-return {std::make_shared<ngraph::op::LRN>(data, alpha, beta, bias, size)};
+return {std::make_shared<default_opset::LRN>(data, alpha, beta, bias, size)};
 }
 } // namespace set_1
...
@@ -21,6 +21,7 @@
 #include <string>
 #include <vector>
+#include "default_opset.hpp"
 #include "exceptions.hpp"
 #include "lstm.hpp"
 #include "ngraph/builder/split.hpp"
@@ -29,6 +30,7 @@
 #include "ngraph/op/constant.hpp"
 #include "ngraph/op/fused/lstm_sequence.hpp"
 #include "ngraph/op/get_output_element.hpp"
+#include "ngraph/opsets/opset0.hpp"
 #include "ngraph/shape.hpp"
 #include "ngraph/type/element_type.hpp"
@@ -94,7 +96,7 @@ namespace ngraph
 }
 else
 {
-m_map[LSTMInput::LSTM_INPUT_B] = ngraph::op::Constant::create(
+m_map[LSTMInput::LSTM_INPUT_B] = default_opset::Constant::create(
 element::f32,
 Shape{num_directions, gates_count * hidden_size},
 std::vector<float>(num_directions * gates_count * hidden_size,
@@ -107,11 +109,13 @@
 }
 else
 {
-m_map[LSTMInput::LSTM_INPUT_SEQ_LENGTHS] = ngraph::op::Constant::create(
+m_map[LSTMInput::LSTM_INPUT_SEQ_LENGTHS] =
+default_opset::Constant::create(
 element::i32,
 Shape{batch_size},
 std::vector<std::int32_t>(
-batch_size, m_map[LSTMInput::LSTM_INPUT_X]->get_shape().at(0)));
+batch_size,
+m_map[LSTMInput::LSTM_INPUT_X]->get_shape().at(0)));
 }
 // The initial value of the hidden.
 // Shape [num_directions, batch_size, hidden_size]
@@ -121,7 +125,7 @@
 }
 else
 {
-m_map[LSTMInput::LSTM_INPUT_INIT_H] = ngraph::op::Constant::create(
+m_map[LSTMInput::LSTM_INPUT_INIT_H] = default_opset::Constant::create(
 element::f32,
 Shape{num_directions, batch_size, hidden_size},
 std::vector<float>(num_directions * batch_size * hidden_size, 0.f));
@@ -134,7 +138,7 @@
 }
 else
 {
-m_map[LSTMInput::LSTM_INPUT_INIT_C] = ngraph::op::Constant::create(
+m_map[LSTMInput::LSTM_INPUT_INIT_C] = default_opset::Constant::create(
 element::f32,
 Shape{num_directions, batch_size, hidden_size},
 std::vector<float>(num_directions * batch_size * hidden_size, 0.f));
@@ -146,7 +150,7 @@
 }
 else
 {
-m_map[LSTMInput::LSTM_INPUT_P] = ngraph::op::Constant::create(
+m_map[LSTMInput::LSTM_INPUT_P] = default_opset::Constant::create(
 element::f32,
 Shape{num_directions, peepholes_count * hidden_size},
 std::vector<float>(num_directions * peepholes_count * hidden_size,
@@ -188,15 +192,15 @@
 " is invalid");
 if (direction == "forward")
 {
-m_direction = ngraph::op::LSTMSequence::direction::FORWARD;
+m_direction = default_opset::LSTMSequence::direction::FORWARD;
 }
 else if (direction == "reverse")
{ {
m_direction = ngraph::op::LSTMSequence::direction::REVERSE; m_direction = default_opset::LSTMSequence::direction::REVERSE;
} }
else // (direction == "bidirectional") else // (direction == "bidirectional")
{ {
m_direction = ngraph::op::LSTMSequence::direction::BIDIRECTIONAL; m_direction = default_opset::LSTMSequence::direction::BIDIRECTIONAL;
} }
} }
...@@ -218,7 +222,7 @@ namespace ngraph ...@@ -218,7 +222,7 @@ namespace ngraph
LSTMNgInputMap input_map{node}; LSTMNgInputMap input_map{node};
LSTMAttributes attributes{node}; LSTMAttributes attributes{node};
auto lstmSequence = std::make_shared<ngraph::op::LSTMSequence>( auto lstmSequence = std::make_shared<default_opset::LSTMSequence>(
input_map.at(LSTMInput::LSTM_INPUT_X), input_map.at(LSTMInput::LSTM_INPUT_X),
input_map.at(LSTMInput::LSTM_INPUT_INIT_H), input_map.at(LSTMInput::LSTM_INPUT_INIT_H),
input_map.at(LSTMInput::LSTM_INPUT_INIT_C), input_map.at(LSTMInput::LSTM_INPUT_INIT_C),
...@@ -235,9 +239,9 @@ namespace ngraph ...@@ -235,9 +239,9 @@ namespace ngraph
attributes.m_activations, attributes.m_activations,
attributes.m_clip_threshold, attributes.m_clip_threshold,
attributes.m_input_forget); attributes.m_input_forget);
return {std::make_shared<ngraph::op::GetOutputElement>(lstmSequence, 0), return {std::make_shared<ngraph::opset0::GetOutputElement>(lstmSequence, 0),
std::make_shared<ngraph::op::GetOutputElement>(lstmSequence, 1), std::make_shared<ngraph::opset0::GetOutputElement>(lstmSequence, 1),
std::make_shared<ngraph::op::GetOutputElement>(lstmSequence, 2)}; std::make_shared<ngraph::opset0::GetOutputElement>(lstmSequence, 2)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/fused/matmul.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -32,7 +32,7 @@ namespace ngraph ...@@ -32,7 +32,7 @@ namespace ngraph
{ {
NodeVector matmul(const Node& node) NodeVector matmul(const Node& node)
{ {
return {std::make_shared<ngraph::op::MatMul>(node.get_ng_inputs().at(0), return {std::make_shared<default_opset::MatMul>(node.get_ng_inputs().at(0),
node.get_ng_inputs().at(1))}; node.get_ng_inputs().at(1))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -17,8 +17,9 @@ ...@@ -17,8 +17,9 @@
#pragma once #pragma once
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/maximum.hpp" #include "ngraph/opsets/opset0.hpp"
#include "utils/variadic.hpp" #include "utils/variadic.hpp"
namespace ngraph namespace ngraph
...@@ -31,7 +32,7 @@ namespace ngraph ...@@ -31,7 +32,7 @@ namespace ngraph
{ {
inline NodeVector max(const Node& node) inline NodeVector max(const Node& node)
{ {
return variadic::make_ng_variadic_op<ngraph::op::Maximum>(node); return variadic::make_ng_variadic_op<ngraph::opset0::Maximum>(node);
} }
} // namespace set_1 } // namespace set_1
...@@ -40,7 +41,7 @@ namespace ngraph ...@@ -40,7 +41,7 @@ namespace ngraph
{ {
inline NodeVector max(const Node& node) inline NodeVector max(const Node& node)
{ {
return variadic::make_ng_variadic_op<ngraph::op::v1::Maximum>(node); return variadic::make_ng_variadic_op<default_opset::Maximum>(node);
} }
} // namespace set_8 } // namespace set_8
......
...@@ -15,9 +15,11 @@ ...@@ -15,9 +15,11 @@
//***************************************************************************** //*****************************************************************************
#include "mean.hpp" #include "mean.hpp"
#include "default_opset.hpp"
#include "ngraph/op/add.hpp" #include "ngraph/op/add.hpp"
#include "ngraph/op/constant.hpp" #include "ngraph/op/constant.hpp"
#include "ngraph/op/divide.hpp" #include "ngraph/op/divide.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "utils/variadic.hpp" #include "utils/variadic.hpp"
namespace ngraph namespace ngraph
...@@ -30,12 +32,12 @@ namespace ngraph ...@@ -30,12 +32,12 @@ namespace ngraph
{ {
NodeVector mean(const Node& node) NodeVector mean(const Node& node)
{ {
auto sum = variadic::make_ng_variadic_op<ngraph::op::Add>(node).front(); auto sum = variadic::make_ng_variadic_op<ngraph::opset0::Add>(node).front();
auto shape = sum->get_shape(); auto shape = sum->get_shape();
// Create a Constant representing the number of inputs with the same shape as // Create a Constant representing the number of inputs with the same shape as
// sum // sum
auto count = ngraph::op::Constant::create( auto count = default_opset::Constant::create(
sum->get_element_type(), sum->get_element_type(),
shape, shape,
std::vector<int>(shape_size(shape), node.get_ng_inputs().size())); std::vector<int>(shape_size(shape), node.get_ng_inputs().size()));
...@@ -49,12 +51,12 @@ namespace ngraph ...@@ -49,12 +51,12 @@ namespace ngraph
{ {
NodeVector mean(const Node& node) NodeVector mean(const Node& node)
{ {
auto sum = variadic::make_ng_variadic_op<ngraph::op::v1::Add>(node).front(); auto sum = variadic::make_ng_variadic_op<default_opset::Add>(node).front();
auto shape = sum->get_shape(); auto shape = sum->get_shape();
// Create a Constant representing the number of inputs with the same shape as // Create a Constant representing the number of inputs with the same shape as
// sum // sum
auto count = ngraph::op::Constant::create( auto count = default_opset::Constant::create(
sum->get_element_type(), sum->get_element_type(),
shape, shape,
std::vector<int>(shape_size(shape), node.get_ng_inputs().size())); std::vector<int>(shape_size(shape), node.get_ng_inputs().size()));
......
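A minimal standalone sketch of the Mean lowering above, assuming all inputs share the same shape: an elementwise sum of every input divided by a constant equal to the input count. The function name is illustrative only.

#include <cstddef>
#include <vector>

std::vector<float> mean_of_inputs(const std::vector<std::vector<float>>& inputs)
{
    // Variadic Add over all inputs.
    std::vector<float> sum(inputs.front().size(), 0.f);
    for (const auto& in : inputs)
    {
        for (std::size_t i = 0; i < in.size(); ++i)
        {
            sum[i] += in[i];
        }
    }
    // The Constant 'count' followed by the division.
    const float count = static_cast<float>(inputs.size());
    for (float& v : sum)
    {
        v /= count;
    }
    return sum;
}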
...@@ -19,6 +19,7 @@ ...@@ -19,6 +19,7 @@
#include "mean_variance_normalization.hpp" #include "mean_variance_normalization.hpp"
#include "ngraph/axis_set.hpp" #include "ngraph/axis_set.hpp"
#include "ngraph/op/fused/mvn.hpp" #include "ngraph/op/fused/mvn.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "utils/common.hpp" #include "utils/common.hpp"
namespace ngraph namespace ngraph
...@@ -37,7 +38,7 @@ namespace ngraph ...@@ -37,7 +38,7 @@ namespace ngraph
bool normalize_variance = bool normalize_variance =
node.get_attribute_value<std::int64_t>("normalize_variance", 1); node.get_attribute_value<std::int64_t>("normalize_variance", 1);
return {std::make_shared<ngraph::op::MVN>( return {std::make_shared<ngraph::opset0::MVN>(
data, across_channels, normalize_variance)}; data, across_channels, normalize_variance)};
} }
...@@ -52,7 +53,7 @@ namespace ngraph ...@@ -52,7 +53,7 @@ namespace ngraph
std::vector<std::size_t> valid_axes = std::vector<std::size_t> valid_axes =
common::validate_axes(node, axes, data->get_shape().size()); common::validate_axes(node, axes, data->get_shape().size());
return {std::make_shared<ngraph::op::MVN>(data, AxisSet(valid_axes))}; return {std::make_shared<ngraph::opset0::MVN>(data, AxisSet(valid_axes))};
} }
} // namespace set_9 } // namespace set_9
......
...@@ -17,8 +17,9 @@ ...@@ -17,8 +17,9 @@
#pragma once #pragma once
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/minimum.hpp" #include "ngraph/opsets/opset0.hpp"
#include "utils/variadic.hpp" #include "utils/variadic.hpp"
namespace ngraph namespace ngraph
...@@ -31,7 +32,7 @@ namespace ngraph ...@@ -31,7 +32,7 @@ namespace ngraph
{ {
inline NodeVector min(const Node& node) inline NodeVector min(const Node& node)
{ {
return variadic::make_ng_variadic_op<ngraph::op::Minimum>(node); return variadic::make_ng_variadic_op<ngraph::opset0::Minimum>(node);
} }
} // namespace set_1 } // namespace set_1
...@@ -40,7 +41,7 @@ namespace ngraph ...@@ -40,7 +41,7 @@ namespace ngraph
{ {
inline NodeVector min(const Node& node) inline NodeVector min(const Node& node)
{ {
return variadic::make_ng_variadic_op<ngraph::op::v1::Minimum>(node); return variadic::make_ng_variadic_op<default_opset::Minimum>(node);
} }
} // namespace set_8 } // namespace set_8
......
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
#include <memory> #include <memory>
#include "default_opset.hpp"
#include "exceptions.hpp" #include "exceptions.hpp"
#include "mod.hpp" #include "mod.hpp"
#include "ngraph/op/abs.hpp" #include "ngraph/op/abs.hpp"
...@@ -39,7 +40,7 @@ namespace ngraph ...@@ -39,7 +40,7 @@ namespace ngraph
ASSERT_IS_SUPPORTED(node, fmod == 1) ASSERT_IS_SUPPORTED(node, fmod == 1)
<< "Only 'fmod=1' mode is supported for mod operator."; << "Only 'fmod=1' mode is supported for mod operator.";
return {std::make_shared<ngraph::op::v1::Mod>(dividend, divisor)}; return {std::make_shared<default_opset::Mod>(dividend, divisor)};
} }
} // namespace set_1 } // namespace set_1
......
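Since only fmod=1 is accepted above, the resulting Mod node follows C-style fmod semantics, where the remainder takes the sign of the dividend; a minimal standalone sketch, with an illustrative function name:

#include <cmath>

float mod_fmod(float dividend, float divisor)
{
    // Remainder with the sign of the dividend, e.g. mod_fmod(-5.f, 3.f) == -2.f.
    return std::fmod(dividend, divisor);
}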
...@@ -19,10 +19,12 @@ ...@@ -19,10 +19,12 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/broadcast.hpp" #include "ngraph/op/broadcast.hpp"
#include "ngraph/op/multiply.hpp" #include "ngraph/op/multiply.hpp"
#include "ngraph/op/util/broadcasting.hpp" #include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/opsets/opset0.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -41,8 +43,8 @@ namespace ngraph ...@@ -41,8 +43,8 @@ namespace ngraph
NodeVector ng_inputs{ngraph::op::legacy_style_broadcast_for_binary_operation( NodeVector ng_inputs{ngraph::op::legacy_style_broadcast_for_binary_operation(
node.get_ng_inputs().at(0), node.get_ng_inputs().at(1), axis)}; node.get_ng_inputs().at(0), node.get_ng_inputs().at(1), axis)};
return { return {std::make_shared<ngraph::opset0::Multiply>(ng_inputs.at(0),
std::make_shared<ngraph::op::Multiply>(ng_inputs.at(0), ng_inputs.at(1))}; ng_inputs.at(1))};
} }
} // namespace set_1 } // namespace set_1
...@@ -51,7 +53,7 @@ namespace ngraph ...@@ -51,7 +53,7 @@ namespace ngraph
{ {
inline NodeVector mul(const Node& node) inline NodeVector mul(const Node& node)
{ {
return {std::make_shared<ngraph::op::v1::Multiply>(node.get_ng_inputs().at(0), return {std::make_shared<default_opset::Multiply>(node.get_ng_inputs().at(0),
node.get_ng_inputs().at(1))}; node.get_ng_inputs().at(1))};
} }
......
...@@ -16,8 +16,8 @@ ...@@ -16,8 +16,8 @@
#include <memory> #include <memory>
#include "ngraph/frontend/onnx_import/exceptions.hpp" #include "default_opset.hpp"
#include "ngraph/op/constant.hpp" #include "exceptions.hpp"
#include "ngraph/op/non_max_suppression.hpp" #include "ngraph/op/non_max_suppression.hpp"
#include "ngraph/op/util/attr_types.hpp" #include "ngraph/op/util/attr_types.hpp"
#include "non_max_suppression.hpp" #include "non_max_suppression.hpp"
...@@ -47,7 +47,7 @@ namespace ngraph ...@@ -47,7 +47,7 @@ namespace ngraph
else else
{ {
max_output_boxes_per_class = max_output_boxes_per_class =
ngraph::op::Constant::create(element::i64, Shape{}, {0}); default_opset::Constant::create(element::i64, Shape{}, {0});
} }
std::shared_ptr<ngraph::Node> iou_threshold; std::shared_ptr<ngraph::Node> iou_threshold;
...@@ -57,7 +57,8 @@ namespace ngraph ...@@ -57,7 +57,8 @@ namespace ngraph
} }
else else
{ {
iou_threshold = ngraph::op::Constant::create(element::f32, Shape{}, {.0f}); iou_threshold =
default_opset::Constant::create(element::f32, Shape{}, {.0f});
} }
std::shared_ptr<ngraph::Node> score_threshold; std::shared_ptr<ngraph::Node> score_threshold;
...@@ -68,7 +69,7 @@ namespace ngraph ...@@ -68,7 +69,7 @@ namespace ngraph
else else
{ {
score_threshold = score_threshold =
ngraph::op::Constant::create(element::f32, Shape{}, {.0f}); default_opset::Constant::create(element::f32, Shape{}, {.0f});
} }
const auto center_point_box = const auto center_point_box =
...@@ -79,10 +80,10 @@ namespace ngraph ...@@ -79,10 +80,10 @@ namespace ngraph
const auto box_encoding = const auto box_encoding =
center_point_box == 0 center_point_box == 0
? ngraph::op::v1::NonMaxSuppression::BoxEncodingType::CORNER ? default_opset::NonMaxSuppression::BoxEncodingType::CORNER
: ngraph::op::v1::NonMaxSuppression::BoxEncodingType::CENTER; : default_opset::NonMaxSuppression::BoxEncodingType::CENTER;
return {std::make_shared<ngraph::op::v1::NonMaxSuppression>( return {std::make_shared<default_opset::NonMaxSuppression>(
boxes, boxes,
scores, scores,
max_output_boxes_per_class, max_output_boxes_per_class,
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/not.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -33,7 +33,7 @@ namespace ngraph ...@@ -33,7 +33,7 @@ namespace ngraph
inline NodeVector logical_not(const Node& node) inline NodeVector logical_not(const Node& node)
{ {
return { return {
std::make_shared<ngraph::op::v1::LogicalNot>(node.get_ng_inputs().at(0))}; std::make_shared<default_opset::LogicalNot>(node.get_ng_inputs().at(0))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -17,9 +17,8 @@ ...@@ -17,9 +17,8 @@
#include <cstdint> #include <cstdint>
#include <memory> #include <memory>
#include "ngraph/op/convert.hpp" #include "default_opset.hpp"
#include "ngraph/op/one_hot.hpp" #include "ngraph/opsets/opset0.hpp"
#include "ngraph/op/slice.hpp"
#include "onehot.hpp" #include "onehot.hpp"
#include "utils/common.hpp" #include "utils/common.hpp"
#include "utils/reshape.hpp" #include "utils/reshape.hpp"
...@@ -36,18 +35,20 @@ namespace ngraph ...@@ -36,18 +35,20 @@ namespace ngraph
{ {
NodeVector inputs{node.get_ng_inputs()}; NodeVector inputs{node.get_ng_inputs()};
auto indices = auto indices =
std::make_shared<ngraph::op::Convert>(inputs.at(0), element::i64); std::make_shared<default_opset::Convert>(inputs.at(0), element::i64);
auto depth = reshape::interpret_as_scalar(inputs.at(1)); auto depth = reshape::interpret_as_scalar(inputs.at(1));
auto values = inputs.at(2); auto values = inputs.at(2);
std::shared_ptr<ngraph::Node> off_value = reshape::interpret_as_scalar( std::shared_ptr<ngraph::Node> off_value =
std::make_shared<ngraph::op::Slice>(values, Coordinate{0}, Coordinate{1})); reshape::interpret_as_scalar(std::make_shared<ngraph::opset0::Slice>(
std::shared_ptr<ngraph::Node> on_value = reshape::interpret_as_scalar( values, Coordinate{0}, Coordinate{1}));
std::make_shared<ngraph::op::Slice>(values, Coordinate{1}, Coordinate{2})); std::shared_ptr<ngraph::Node> on_value =
reshape::interpret_as_scalar(std::make_shared<ngraph::opset0::Slice>(
values, Coordinate{1}, Coordinate{2}));
auto axis = node.get_attribute_value<std::int64_t>("axis", -1); auto axis = node.get_attribute_value<std::int64_t>("axis", -1);
return {std::make_shared<ngraph::op::v1::OneHot>( return {std::make_shared<default_opset::OneHot>(
indices, depth, on_value, off_value, axis)}; indices, depth, on_value, off_value, axis)};
} }
......
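A minimal standalone sketch of the OneHot lowering above for the simplest case of 1-D indices with the default axis of -1, assuming in-range, non-negative indices; 'values' carries {off_value, on_value} just like the two Slice nodes in the hunk. Names are illustrative only.

#include <cstddef>
#include <cstdint>
#include <vector>

std::vector<float> one_hot_1d(const std::vector<std::int64_t>& indices,
                              std::int64_t depth,
                              const std::vector<float>& values) // {off, on}
{
    const float off_value = values.at(0);
    const float on_value = values.at(1);
    std::vector<float> out(indices.size() * depth, off_value);
    for (std::size_t i = 0; i < indices.size(); ++i)
    {
        const std::int64_t idx = indices[i];
        if (idx >= 0 && idx < depth) // out-of-range entries stay at off_value
        {
            out[i * depth + idx] = on_value;
        }
    }
    return out;
}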
...@@ -19,9 +19,8 @@ ...@@ -19,9 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/or.hpp"
#include "ngraph/op/util/broadcasting.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -33,8 +32,8 @@ namespace ngraph ...@@ -33,8 +32,8 @@ namespace ngraph
{ {
inline NodeVector logical_or(const Node& node) inline NodeVector logical_or(const Node& node)
{ {
return {std::make_shared<ngraph::op::v1::LogicalOr>( return {std::make_shared<default_opset::LogicalOr>(node.get_ng_inputs().at(0),
node.get_ng_inputs().at(0), node.get_ng_inputs().at(1))}; node.get_ng_inputs().at(1))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
#include <memory> #include <memory>
#include "default_opset.hpp"
#include "exceptions.hpp" #include "exceptions.hpp"
#include "ngraph/coordinate_diff.hpp" #include "ngraph/coordinate_diff.hpp"
#include "ngraph/op/constant.hpp" #include "ngraph/op/constant.hpp"
...@@ -60,13 +61,13 @@ namespace ngraph ...@@ -60,13 +61,13 @@ namespace ngraph
ngraph::CoordinateDiff padding_below = paddings.first; ngraph::CoordinateDiff padding_below = paddings.first;
ngraph::CoordinateDiff padding_above = paddings.second; ngraph::CoordinateDiff padding_above = paddings.second;
return {std::make_shared<ngraph::op::v1::Pad>( return {std::make_shared<default_opset::Pad>(
data, data,
std::make_shared<ngraph::op::Constant>( std::make_shared<default_opset::Constant>(
element::i64, ngraph::Shape{padding_below.size()}, padding_below), element::i64, ngraph::Shape{padding_below.size()}, padding_below),
std::make_shared<ngraph::op::Constant>( std::make_shared<default_opset::Constant>(
element::i64, ngraph::Shape{padding_above.size()}, padding_above), element::i64, ngraph::Shape{padding_above.size()}, padding_above),
std::make_shared<ngraph::op::Constant>( std::make_shared<default_opset::Constant>(
data->get_element_type(), ngraph::Shape{}, std::vector<double>{value}), data->get_element_type(), ngraph::Shape{}, std::vector<double>{value}),
pad_mode)}; pad_mode)};
} }
......
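A minimal standalone sketch of the constant-mode padding wired above (pads_begin, pads_end and a scalar fill value), reduced to a 1-D tensor for brevity; other pad modes and ranks are handled by the Pad node itself. The function name is illustrative only.

#include <cstddef>
#include <vector>

std::vector<float> pad_1d_constant(const std::vector<float>& data,
                                   std::size_t pads_begin,
                                   std::size_t pads_end,
                                   float value)
{
    // Allocate the padded buffer pre-filled with the pad value...
    std::vector<float> out(pads_begin + data.size() + pads_end, value);
    // ...and copy the original data between the two pad regions.
    for (std::size_t i = 0; i < data.size(); ++i)
    {
        out[pads_begin + i] = data[i];
    }
    return out;
}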
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/power.hpp"
#include "ngraph/op/util/broadcasting.hpp" #include "ngraph/op/util/broadcasting.hpp"
namespace ngraph namespace ngraph
...@@ -33,7 +33,7 @@ namespace ngraph ...@@ -33,7 +33,7 @@ namespace ngraph
{ {
inline NodeVector pow(const Node& node) inline NodeVector pow(const Node& node)
{ {
return {std::make_shared<ngraph::op::v1::Power>(node.get_ng_inputs().at(0), return {std::make_shared<default_opset::Power>(node.get_ng_inputs().at(0),
node.get_ng_inputs().at(1))}; node.get_ng_inputs().at(1))};
} }
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
#include <memory> #include <memory>
#include "ngraph/op/fused/prelu.hpp" #include "default_opset.hpp"
#include "prelu.hpp" #include "prelu.hpp"
namespace ngraph namespace ngraph
...@@ -32,7 +32,7 @@ namespace ngraph ...@@ -32,7 +32,7 @@ namespace ngraph
NodeVector ng_inputs{node.get_ng_inputs()}; NodeVector ng_inputs{node.get_ng_inputs()};
const auto& data = ng_inputs.at(0); const auto& data = ng_inputs.at(0);
const auto& slope = ng_inputs.at(1); const auto& slope = ng_inputs.at(1);
return {std::make_shared<ngraph::op::PRelu>(data, slope)}; return {std::make_shared<default_opset::PRelu>(data, slope)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -18,14 +18,13 @@ ...@@ -18,14 +18,13 @@
#include <memory> #include <memory>
#include <vector> #include <vector>
#include "default_opset.hpp"
#include "exceptions.hpp" #include "exceptions.hpp"
#include "ngraph/builder/quantization/quantized_linear_convolution.hpp" #include "ngraph/builder/quantization/quantized_linear_convolution.hpp"
#include "ngraph/coordinate_diff.hpp" #include "ngraph/coordinate_diff.hpp"
#include "ngraph/frontend/onnx_import/utils/convpool.hpp" #include "ngraph/frontend/onnx_import/utils/convpool.hpp"
#include "ngraph/op/concat.hpp"
#include "ngraph/op/quantized_convolution.hpp"
#include "ngraph/op/slice.hpp"
#include "ngraph/op/util/attr_types.hpp" #include "ngraph/op/util/attr_types.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "ngraph/strides.hpp" #include "ngraph/strides.hpp"
#include "quant_conv.hpp" #include "quant_conv.hpp"
...@@ -102,12 +101,12 @@ namespace ngraph ...@@ -102,12 +101,12 @@ namespace ngraph
// slice data // slice data
data_lower_bounds[1] = group * data_group_size; data_lower_bounds[1] = group * data_group_size;
data_upper_bounds[1] = (group + 1) * data_group_size; data_upper_bounds[1] = (group + 1) * data_group_size;
auto sliced_data = std::make_shared<ngraph::op::Slice>( auto sliced_data = std::make_shared<ngraph::opset0::Slice>(
data, data_lower_bounds, data_upper_bounds); data, data_lower_bounds, data_upper_bounds);
// slice filters // slice filters
filters_lower_bounds[0] = group * filters_group_size; filters_lower_bounds[0] = group * filters_group_size;
filters_upper_bounds[0] = (group + 1) * filters_group_size; filters_upper_bounds[0] = (group + 1) * filters_group_size;
auto sliced_filters = std::make_shared<ngraph::op::Slice>( auto sliced_filters = std::make_shared<ngraph::opset0::Slice>(
filters, filters_lower_bounds, filters_upper_bounds); filters, filters_lower_bounds, filters_upper_bounds);
if (bias) if (bias)
...@@ -119,7 +118,7 @@ namespace ngraph ...@@ -119,7 +118,7 @@ namespace ngraph
else else
{ {
convolution_nodes.push_back( convolution_nodes.push_back(
std::make_shared<ngraph::op::QuantizedConvolution>( std::make_shared<ngraph::opset0::QuantizedConvolution>(
sliced_data, sliced_data,
sliced_filters, sliced_filters,
strides, strides,
...@@ -140,7 +139,7 @@ namespace ngraph ...@@ -140,7 +139,7 @@ namespace ngraph
} }
} }
std::size_t concatenation_axis = 1; std::size_t concatenation_axis = 1;
return std::make_shared<ngraph::op::Concat>(convolution_nodes, return std::make_shared<default_opset::Concat>(convolution_nodes,
concatenation_axis); concatenation_axis);
} }
else else
...@@ -162,7 +161,7 @@ namespace ngraph ...@@ -162,7 +161,7 @@ namespace ngraph
} }
else else
{ {
return std::make_shared<ngraph::op::QuantizedConvolution>( return std::make_shared<ngraph::opset0::QuantizedConvolution>(
data, data,
filters, filters,
strides, strides,
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
#include <memory> #include <memory>
#include "ngraph/axis_set.hpp" #include "ngraph/axis_set.hpp"
#include "ngraph/op/quantize.hpp" #include "ngraph/opsets/opset0.hpp"
#include "ngraph/shape.hpp" #include "ngraph/shape.hpp"
#include "quantize_linear.hpp" #include "quantize_linear.hpp"
...@@ -61,13 +61,13 @@ namespace ngraph ...@@ -61,13 +61,13 @@ namespace ngraph
Shape y_scale_shape = y_scale->get_shape(); Shape y_scale_shape = y_scale->get_shape();
Shape y_zero_point_shape = y_zero_point->get_shape(); Shape y_zero_point_shape = y_zero_point->get_shape();
return {std::make_shared<ngraph::op::Quantize>( return {std::make_shared<ngraph::opset0::Quantize>(
x, x,
y_scale, y_scale,
y_zero_point, y_zero_point,
y_zero_point->get_element_type(), y_zero_point->get_element_type(),
axes, axes,
ngraph::op::Quantize::RoundMode::ROUND_NEAREST_TOWARD_EVEN)}; ngraph::opset0::Quantize::RoundMode::ROUND_NEAREST_TOWARD_EVEN)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -17,8 +17,8 @@ ...@@ -17,8 +17,8 @@
#include <memory> #include <memory>
#include <vector> #include <vector>
#include "ngraph/op/fused/reciprocal.hpp"
#include "ngraph/op/util/broadcasting.hpp" #include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "ngraph/shape.hpp" #include "ngraph/shape.hpp"
#include "reciprocal.hpp" #include "reciprocal.hpp"
...@@ -34,7 +34,7 @@ namespace ngraph ...@@ -34,7 +34,7 @@ namespace ngraph
{ {
auto data = node.get_ng_inputs().at(0); auto data = node.get_ng_inputs().at(0);
return {std::make_shared<ngraph::op::Reciprocal>(data)}; return {std::make_shared<ngraph::opset0::Reciprocal>(data)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -18,8 +18,7 @@ ...@@ -18,8 +18,7 @@
#include <iterator> // std::begin, std::end #include <iterator> // std::begin, std::end
#include <numeric> // std::accumulate #include <numeric> // std::accumulate
#include "ngraph/op/constant.hpp" #include "default_opset.hpp"
#include "ngraph/op/divide.hpp"
#include "ngraph/shape.hpp" #include "ngraph/shape.hpp"
#include "reduce.hpp" #include "reduce.hpp"
...@@ -46,17 +45,17 @@ namespace ngraph ...@@ -46,17 +45,17 @@ namespace ngraph
auto sum_node = std::shared_ptr<ngraph::Node>{reduction::make_ng_reduction_op( auto sum_node = std::shared_ptr<ngraph::Node>{reduction::make_ng_reduction_op(
node, node,
node.get_ng_inputs().at(0), node.get_ng_inputs().at(0),
std::make_shared<ngraph::op::Sum, std::make_shared<ngraph::opset0::Sum,
const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&,
const ngraph::AxisSet&>)}; const ngraph::AxisSet&>)};
auto const_node = ngraph::op::Constant::create( auto const_node = default_opset::Constant::create(
sum_node->get_element_type(), sum_node->get_element_type(),
sum_node->get_shape(), sum_node->get_shape(),
std::vector<std::size_t>(shape_size(sum_node->get_shape()), std::vector<std::size_t>(shape_size(sum_node->get_shape()),
elem_count_product)); elem_count_product));
return {std::make_shared<ngraph::op::Divide>(sum_node, const_node)}; return {std::make_shared<ngraph::opset0::Divide>(sum_node, const_node)};
} }
} // namespace set_1 } // namespace set_1
......
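A minimal standalone sketch of the ReduceMean lowering above: a Sum reduction followed by division by the number of reduced elements, shown here only for reducing the last axis of a row-major [rows x cols] matrix. The function name is illustrative only.

#include <cstddef>
#include <vector>

std::vector<float> reduce_mean_last_axis(const std::vector<float>& data,
                                         std::size_t rows,
                                         std::size_t cols)
{
    std::vector<float> out(rows, 0.f);
    for (std::size_t r = 0; r < rows; ++r)
    {
        for (std::size_t c = 0; c < cols; ++c)
        {
            out[r] += data[r * cols + c]; // the Sum reduction
        }
        out[r] /= static_cast<float>(cols); // divide by elem_count_product
    }
    return out;
}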
...@@ -20,6 +20,7 @@ ...@@ -20,6 +20,7 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/builder/norm.hpp" #include "ngraph/builder/norm.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/abs.hpp" #include "ngraph/op/abs.hpp"
...@@ -32,6 +33,7 @@ ...@@ -32,6 +33,7 @@
#include "ngraph/op/reduce_sum.hpp" #include "ngraph/op/reduce_sum.hpp"
#include "ngraph/op/sum.hpp" #include "ngraph/op/sum.hpp"
#include "ngraph/op/util/broadcasting.hpp" #include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "utils/reduction.hpp" #include "utils/reduction.hpp"
namespace ngraph namespace ngraph
...@@ -59,10 +61,10 @@ namespace ngraph ...@@ -59,10 +61,10 @@ namespace ngraph
std::shared_ptr<ngraph::Node> sum_node{reduction::make_ng_reduction_op( std::shared_ptr<ngraph::Node> sum_node{reduction::make_ng_reduction_op(
node, node,
node.get_ng_inputs().at(0), node.get_ng_inputs().at(0),
std::make_shared<ngraph::op::Sum, std::make_shared<ngraph::opset0::Sum,
const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&,
const ngraph::AxisSet&>)}; const ngraph::AxisSet&>)};
return {std::make_shared<ngraph::op::Log>(sum_node)}; return {std::make_shared<default_opset::Log>(sum_node)};
} }
/// \brief Compute the log sum exponent of the input tensor's elements along /// \brief Compute the log sum exponent of the input tensor's elements along
...@@ -79,14 +81,15 @@ namespace ngraph ...@@ -79,14 +81,15 @@ namespace ngraph
/// ///
inline NodeVector reduce_log_sum_exp(const Node& node) inline NodeVector reduce_log_sum_exp(const Node& node)
{ {
auto exp_node = std::make_shared<ngraph::op::Exp>(node.get_ng_inputs().at(0)); auto exp_node =
std::make_shared<default_opset::Exp>(node.get_ng_inputs().at(0));
std::shared_ptr<ngraph::Node> sum_node{reduction::make_ng_reduction_op( std::shared_ptr<ngraph::Node> sum_node{reduction::make_ng_reduction_op(
node, node,
exp_node, exp_node,
std::make_shared<ngraph::op::Sum, std::make_shared<ngraph::opset0::Sum,
const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&,
const ngraph::AxisSet&>)}; const ngraph::AxisSet&>)};
return {std::make_shared<ngraph::op::Log>(sum_node)}; return {std::make_shared<default_opset::Log>(sum_node)};
} }
/// \brief Compute the L1 norm of the input tensor's element along the provided /// \brief Compute the L1 norm of the input tensor's element along the provided
...@@ -152,7 +155,7 @@ namespace ngraph ...@@ -152,7 +155,7 @@ namespace ngraph
return {reduction::make_ng_reduction_op( return {reduction::make_ng_reduction_op(
node, node,
node.get_ng_inputs().at(0), node.get_ng_inputs().at(0),
std::make_shared<ngraph::op::Max, std::make_shared<ngraph::opset0::Max,
const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&,
const ngraph::AxisSet&>)}; const ngraph::AxisSet&>)};
} }
...@@ -188,7 +191,7 @@ namespace ngraph ...@@ -188,7 +191,7 @@ namespace ngraph
return {reduction::make_ng_reduction_op( return {reduction::make_ng_reduction_op(
node, node,
node.get_ng_inputs().at(0), node.get_ng_inputs().at(0),
std::make_shared<ngraph::op::Min, std::make_shared<ngraph::opset0::Min,
const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&,
const ngraph::AxisSet&>)}; const ngraph::AxisSet&>)};
} }
...@@ -210,7 +213,7 @@ namespace ngraph ...@@ -210,7 +213,7 @@ namespace ngraph
return {reduction::make_ng_reduction_op( return {reduction::make_ng_reduction_op(
node, node,
node.get_ng_inputs().at(0), node.get_ng_inputs().at(0),
std::make_shared<ngraph::op::v1::ReduceProd, std::make_shared<default_opset::ReduceProd,
const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&,
const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&,
bool>)}; bool>)};
...@@ -233,7 +236,7 @@ namespace ngraph ...@@ -233,7 +236,7 @@ namespace ngraph
return {reduction::make_ng_reduction_op( return {reduction::make_ng_reduction_op(
node, node,
node.get_ng_inputs().at(0), node.get_ng_inputs().at(0),
std::make_shared<ngraph::op::v1::ReduceSum, std::make_shared<default_opset::ReduceSum,
const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&,
const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&,
bool>)}; bool>)};
...@@ -258,7 +261,7 @@ namespace ngraph ...@@ -258,7 +261,7 @@ namespace ngraph
return {reduction::make_ng_reduction_op( return {reduction::make_ng_reduction_op(
node, node,
square_node, square_node,
std::make_shared<ngraph::op::Sum, std::make_shared<ngraph::opset0::Sum,
const std::shared_ptr<ngraph::Node>&, const std::shared_ptr<ngraph::Node>&,
const ngraph::AxisSet&>)}; const ngraph::AxisSet&>)};
} }
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/relu.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -33,7 +33,7 @@ namespace ngraph ...@@ -33,7 +33,7 @@ namespace ngraph
inline NodeVector relu(const Node& node) inline NodeVector relu(const Node& node)
{ {
NodeVector ng_inputs{node.get_ng_inputs()}; NodeVector ng_inputs{node.get_ng_inputs()};
return {std::make_shared<ngraph::op::Relu>(ng_inputs.at(0))}; return {std::make_shared<default_opset::Relu>(ng_inputs.at(0))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -18,10 +18,9 @@ ...@@ -18,10 +18,9 @@
#include <memory> #include <memory>
#include <vector> #include <vector>
#include "default_opset.hpp"
#include "exceptions.hpp" #include "exceptions.hpp"
#include "ngraph/axis_vector.hpp" #include "ngraph/axis_vector.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/reshape.hpp"
#include "ngraph/shape.hpp" #include "ngraph/shape.hpp"
#include "reshape.hpp" #include "reshape.hpp"
#include "utils/reshape.hpp" #include "utils/reshape.hpp"
...@@ -54,11 +53,11 @@ namespace ngraph ...@@ -54,11 +53,11 @@ namespace ngraph
const auto output_shape = const auto output_shape =
node.get_attribute_value<std::vector<int64_t>>("shape", {}); node.get_attribute_value<std::vector<int64_t>>("shape", {});
pattern = ngraph::op::Constant::create( pattern = default_opset::Constant::create(
element::i64, Shape{output_shape.size()}, output_shape); element::i64, Shape{output_shape.size()}, output_shape);
} }
return {std::make_shared<ngraph::op::v1::Reshape>(data, pattern, true)}; return {std::make_shared<default_opset::Reshape>(data, pattern, true)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -17,9 +17,8 @@ ...@@ -17,9 +17,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/convert.hpp"
#include "ngraph/op/reverse_sequence.hpp"
#include "ngraph/type/element_type.hpp" #include "ngraph/type/element_type.hpp"
#include "reverse_sequence.hpp" #include "reverse_sequence.hpp"
#include "utils/common.hpp" #include "utils/common.hpp"
...@@ -38,7 +37,7 @@ namespace ngraph ...@@ -38,7 +37,7 @@ namespace ngraph
const auto sequence_lengths = node.get_ng_inputs().at(1); const auto sequence_lengths = node.get_ng_inputs().at(1);
// nGraph supports only int32 type of sequence_lengths // nGraph supports only int32 type of sequence_lengths
const auto sequence_lengths_i32 = std::make_shared<ngraph::op::Convert>( const auto sequence_lengths_i32 = std::make_shared<default_opset::Convert>(
node.get_ng_inputs().at(1), element::i32); node.get_ng_inputs().at(1), element::i32);
const auto batch_axis = node.get_attribute_value<int64_t>("batch_axis", 1); const auto batch_axis = node.get_attribute_value<int64_t>("batch_axis", 1);
...@@ -60,7 +59,7 @@ namespace ngraph ...@@ -60,7 +59,7 @@ namespace ngraph
"'batch_axis' and 'time_axis' attributes of the ReverseSequence " "'batch_axis' and 'time_axis' attributes of the ReverseSequence "
"operator can't point to the same dimension"); "operator can't point to the same dimension");
return {std::make_shared<ngraph::op::ReverseSequence>( return {std::make_shared<default_opset::ReverseSequence>(
data, sequence_lengths_i32, valid_batch_axis, valid_time_axis)}; data, sequence_lengths_i32, valid_batch_axis, valid_time_axis)};
} }
......
...@@ -17,12 +17,11 @@ ...@@ -17,12 +17,11 @@
#include <memory> #include <memory>
#include <vector> #include <vector>
#include "default_opset.hpp"
#include "ngraph/op/constant.hpp" #include "ngraph/op/constant.hpp"
#include "ngraph/op/fused/selu.hpp" #include "ngraph/op/fused/selu.hpp"
#include "selu.hpp" #include "selu.hpp"
using namespace ngraph::op;
namespace ngraph namespace ngraph
{ {
namespace onnx_import namespace onnx_import
...@@ -39,13 +38,13 @@ namespace ngraph ...@@ -39,13 +38,13 @@ namespace ngraph
auto gamma = auto gamma =
node.get_attribute_value<double>("gamma", 1.05070102214813232421875); node.get_attribute_value<double>("gamma", 1.05070102214813232421875);
auto alpha_node = std::make_shared<ngraph::op::Constant>( auto alpha_node = std::make_shared<default_opset::Constant>(
data->get_element_type(), data->get_shape(), std::vector<double>{alpha}); data->get_element_type(), data->get_shape(), std::vector<double>{alpha});
auto gamma_node = std::make_shared<ngraph::op::Constant>( auto gamma_node = std::make_shared<default_opset::Constant>(
data->get_element_type(), data->get_shape(), std::vector<double>{gamma}); data->get_element_type(), data->get_shape(), std::vector<double>{gamma});
return {std::make_shared<ngraph::op::v0::Selu>(data, alpha_node, gamma_node)}; return {std::make_shared<default_opset::Selu>(data, alpha_node, gamma_node)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -16,8 +16,8 @@ ...@@ -16,8 +16,8 @@
#include <memory> #include <memory>
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/shape.hpp" #include "ngraph/shape.hpp"
#include "ngraph/type/element_type.hpp" #include "ngraph/type/element_type.hpp"
#include "shape.hpp" #include "shape.hpp"
...@@ -35,7 +35,7 @@ namespace ngraph ...@@ -35,7 +35,7 @@ namespace ngraph
auto data = node.get_ng_inputs().at(0); auto data = node.get_ng_inputs().at(0);
auto data_shape = data->get_shape(); auto data_shape = data->get_shape();
return {std::make_shared<ngraph::op::Constant>( return {std::make_shared<default_opset::Constant>(
ngraph::element::i64, Shape{data_shape.size()}, data_shape)}; ngraph::element::i64, Shape{data_shape.size()}, data_shape)};
} }
......
...@@ -16,14 +16,12 @@ ...@@ -16,14 +16,12 @@
#include <memory> #include <memory>
#include "default_opset.hpp"
#include "exceptions.hpp" #include "exceptions.hpp"
#include "ngraph/op/add.hpp" #include "ngraph/op/add.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/convert.hpp"
#include "ngraph/op/greater.hpp"
#include "ngraph/op/less.hpp"
#include "ngraph/op/multiply.hpp" #include "ngraph/op/multiply.hpp"
#include "ngraph/op/subtract.hpp" #include "ngraph/op/subtract.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "shrink.hpp" #include "shrink.hpp"
namespace ngraph namespace ngraph
...@@ -43,40 +41,40 @@ namespace ngraph ...@@ -43,40 +41,40 @@ namespace ngraph
ASSERT_VALID_ARGUMENT(node, !(lambd < 0.0f)) ASSERT_VALID_ARGUMENT(node, !(lambd < 0.0f))
<< " The provided 'lambd' value:" << lambd << " must not be negative."; << " The provided 'lambd' value:" << lambd << " must not be negative.";
std::shared_ptr<ngraph::op::Constant> negative_lambd; std::shared_ptr<default_opset::Constant> negative_lambd;
const auto input_element_type = input->get_element_type(); const auto input_element_type = input->get_element_type();
if (input_element_type.is_signed()) if (input_element_type.is_signed())
{ {
negative_lambd = ngraph::op::Constant::create( negative_lambd = default_opset::Constant::create(
input_element_type, input->get_shape(), {-lambd}); input_element_type, input->get_shape(), {-lambd});
} }
else else
{ {
// Passing -lambd to unsigned type constant will cause an overflow. // Passing -lambd to unsigned type constant will cause an overflow.
// For unsigned types the lowest possible value is 0. // For unsigned types the lowest possible value is 0.
negative_lambd = ngraph::op::Constant::create( negative_lambd = default_opset::Constant::create(
input_element_type, input->get_shape(), {0}); input_element_type, input->get_shape(), {0});
} }
const auto positive_lambd = ngraph::op::Constant::create( const auto positive_lambd = default_opset::Constant::create(
input_element_type, input->get_shape(), {lambd}); input_element_type, input->get_shape(), {lambd});
const auto bias_tensor = ngraph::op::Constant::create( const auto bias_tensor = default_opset::Constant::create(
input_element_type, input->get_shape(), {bias}); input_element_type, input->get_shape(), {bias});
// Create a mask indicating locations of values that need to be adjusted // Create a mask indicating locations of values that need to be adjusted
// by adding and subtracting bias // by adding and subtracting bias
// All other values indicated by 'false' in the masks need to be zeroed out // All other values indicated by 'false' in the masks need to be zeroed out
std::shared_ptr<ngraph::Node> values_below_neg_lambd = std::shared_ptr<ngraph::Node> values_below_neg_lambd =
std::make_shared<ngraph::op::Less>(input, negative_lambd); std::make_shared<ngraph::opset0::Less>(input, negative_lambd);
std::shared_ptr<ngraph::Node> values_above_pos_lambd = std::shared_ptr<ngraph::Node> values_above_pos_lambd =
std::make_shared<ngraph::op::v1::Greater>(input, positive_lambd); std::make_shared<default_opset::Greater>(input, positive_lambd);
// Convert from bool to the input type to be able to multiply adjusted inputs // Convert from bool to the input type to be able to multiply adjusted inputs
// by the created masks // by the created masks
values_below_neg_lambd = std::make_shared<ngraph::op::Convert>( values_below_neg_lambd = std::make_shared<default_opset::Convert>(
values_below_neg_lambd, input_element_type); values_below_neg_lambd, input_element_type);
values_above_pos_lambd = std::make_shared<ngraph::op::Convert>( values_above_pos_lambd = std::make_shared<default_opset::Convert>(
values_above_pos_lambd, input_element_type); values_above_pos_lambd, input_element_type);
std::shared_ptr<ngraph::Node> input_minus_bias = input - bias_tensor; std::shared_ptr<ngraph::Node> input_minus_bias = input - bias_tensor;
......
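A minimal standalone sketch of the Shrink behaviour built above: the two comparison masks decide where (x + bias) and (x - bias) survive, and everything else is zeroed out. The function name is illustrative only.

#include <cstddef>
#include <vector>

std::vector<float> shrink(const std::vector<float>& input, float lambd, float bias)
{
    std::vector<float> out(input.size(), 0.f);
    for (std::size_t i = 0; i < input.size(); ++i)
    {
        const float x = input[i];
        if (x < -lambd) // the values_below_neg_lambd mask
        {
            out[i] = x + bias;
        }
        else if (x > lambd) // the values_above_pos_lambd mask
        {
            out[i] = x - bias;
        }
        // otherwise both masks are false and the output stays zero
    }
    return out;
}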
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/sigmoid.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -32,7 +32,7 @@ namespace ngraph ...@@ -32,7 +32,7 @@ namespace ngraph
{ {
inline NodeVector sigmoid(const Node& node) inline NodeVector sigmoid(const Node& node)
{ {
return {std::make_shared<ngraph::op::Sigmoid>(node.get_ng_inputs().at(0))}; return {std::make_shared<default_opset::Sigmoid>(node.get_ng_inputs().at(0))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/sign.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -32,7 +32,7 @@ namespace ngraph ...@@ -32,7 +32,7 @@ namespace ngraph
{ {
inline NodeVector sign(const Node& node) inline NodeVector sign(const Node& node)
{ {
return {std::make_shared<ngraph::op::Sign>(node.get_ng_inputs().at(0))}; return {std::make_shared<default_opset::Sign>(node.get_ng_inputs().at(0))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/sin.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -32,7 +32,7 @@ namespace ngraph ...@@ -32,7 +32,7 @@ namespace ngraph
{ {
inline NodeVector sin(const Node& node) inline NodeVector sin(const Node& node)
{ {
return {std::make_shared<ngraph::op::Sin>(node.get_ng_inputs().at(0))}; return {std::make_shared<default_opset::Sin>(node.get_ng_inputs().at(0))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/sinh.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -32,7 +32,7 @@ namespace ngraph ...@@ -32,7 +32,7 @@ namespace ngraph
{ {
inline NodeVector sinh(const Node& node) inline NodeVector sinh(const Node& node)
{ {
return {std::make_shared<ngraph::op::Sinh>(node.get_ng_inputs().at(0))}; return {std::make_shared<default_opset::Sinh>(node.get_ng_inputs().at(0))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
#include <memory> #include <memory>
#include <vector> #include <vector>
#include "ngraph/op/constant.hpp" #include "default_opset.hpp"
#include "ngraph/shape.hpp" #include "ngraph/shape.hpp"
#include "ngraph/type/element_type.hpp" #include "ngraph/type/element_type.hpp"
#include "size.hpp" #include "size.hpp"
...@@ -37,7 +37,7 @@ namespace ngraph ...@@ -37,7 +37,7 @@ namespace ngraph
std::int64_t tensor_elements_count{ std::int64_t tensor_elements_count{
static_cast<std::int64_t>(shape_size(data->get_shape()))}; static_cast<std::int64_t>(shape_size(data->get_shape()))};
return {std::make_shared<ngraph::op::Constant>( return {std::make_shared<default_opset::Constant>(
ngraph::element::i64, ngraph::element::i64,
Shape{}, Shape{},
std::vector<std::int64_t>{tensor_elements_count})}; std::vector<std::int64_t>{tensor_elements_count})};
......
...@@ -19,7 +19,7 @@ ...@@ -19,7 +19,7 @@
#include <vector> #include <vector>
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/slice.hpp" #include "ngraph/opsets/opset0.hpp"
#include "slice.hpp" #include "slice.hpp"
#include "utils/common.hpp" #include "utils/common.hpp"
...@@ -72,7 +72,8 @@ namespace ngraph ...@@ -72,7 +72,8 @@ namespace ngraph
} }
} }
return {std::make_shared<ngraph::op::Slice>(data, lower_bounds, upper_bounds)}; return {
std::make_shared<ngraph::opset0::Slice>(data, lower_bounds, upper_bounds)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
#include <memory> #include <memory>
#include "ngraph/op/softmax.hpp" #include "default_opset.hpp"
#include "softmax.hpp" #include "softmax.hpp"
#include "utils/common.hpp" #include "utils/common.hpp"
...@@ -37,7 +37,7 @@ namespace ngraph ...@@ -37,7 +37,7 @@ namespace ngraph
int axis = node.get_attribute_value<int64_t>("axis", 1); int axis = node.get_attribute_value<int64_t>("axis", 1);
std::size_t valid_axis = common::validate_axis(node, axis, data_shape.size()); std::size_t valid_axis = common::validate_axis(node, axis, data_shape.size());
return {std::make_shared<ngraph::op::v1::Softmax>(data, valid_axis)}; return {std::make_shared<default_opset::Softmax>(data, valid_axis)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -16,14 +16,12 @@ ...@@ -16,14 +16,12 @@
#include <memory> #include <memory>
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/add.hpp" #include "ngraph/op/add.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/exp.hpp"
#include "ngraph/op/greater.hpp" #include "ngraph/op/greater.hpp"
#include "ngraph/op/log.hpp"
#include "ngraph/op/negative.hpp"
#include "ngraph/op/select.hpp" #include "ngraph/op/select.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "softplus.hpp" #include "softplus.hpp"
namespace ngraph namespace ngraph
...@@ -39,30 +37,31 @@ namespace ngraph ...@@ -39,30 +37,31 @@ namespace ngraph
auto data = node.get_ng_inputs().at(0); auto data = node.get_ng_inputs().at(0);
std::shared_ptr<ngraph::Node> zero_node = std::shared_ptr<ngraph::Node> zero_node =
std::make_shared<ngraph::op::Constant>( std::make_shared<default_opset::Constant>(
data->get_element_type(), data->get_shape(), std::vector<float>{0.f}); data->get_element_type(), data->get_shape(), std::vector<float>{0.f});
std::shared_ptr<ngraph::Node> one_node = std::make_shared<ngraph::op::Constant>( std::shared_ptr<ngraph::Node> one_node =
std::make_shared<default_opset::Constant>(
data->get_element_type(), data->get_shape(), std::vector<float>{1.f}); data->get_element_type(), data->get_shape(), std::vector<float>{1.f});
std::shared_ptr<ngraph::Node> positive_val_node = std::shared_ptr<ngraph::Node> positive_val_node =
data + std::make_shared<ngraph::op::Log>( data + std::make_shared<default_opset::Log>(
std::make_shared<ngraph::op::Exp>( std::make_shared<default_opset::Exp>(
std::make_shared<ngraph::op::Negative>(data)) + std::make_shared<default_opset::Negative>(data)) +
one_node); one_node);
std::shared_ptr<ngraph::Node> negative_val_node = std::shared_ptr<ngraph::Node> negative_val_node =
std::make_shared<ngraph::op::Log>(std::make_shared<ngraph::op::Exp>(data) + std::make_shared<default_opset::Log>(
one_node); std::make_shared<default_opset::Exp>(data) + one_node);
std::shared_ptr<ngraph::Node> condition_node = std::shared_ptr<ngraph::Node> condition_node =
std::make_shared<ngraph::op::Greater>(data, zero_node); std::make_shared<ngraph::opset0::Greater>(data, zero_node);
// //
// This equation represents: // This equation represents:
// x + log(exp(-x) + 1) - for x > 0; to manage exponent overflow, // x + log(exp(-x) + 1) - for x > 0; to manage exponent overflow,
// log(exp(x) + 1) - elsewhere. // log(exp(x) + 1) - elsewhere.
// //
return {std::make_shared<ngraph::op::Select>( return {std::make_shared<ngraph::opset0::Select>(
condition_node, positive_val_node, negative_val_node)}; condition_node, positive_val_node, negative_val_node)};
} }
......
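A minimal standalone sketch of the Softplus lowering above, using the same overflow-safe split that the Select node encodes; the function name is illustrative only.

#include <cmath>

float softplus(float x)
{
    // x + log(exp(-x) + 1) for x > 0 keeps the exponent argument non-positive;
    // log(exp(x) + 1) is used elsewhere.
    return x > 0.f ? x + std::log(std::exp(-x) + 1.f)
                   : std::log(std::exp(x) + 1.f);
}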
...@@ -17,9 +17,9 @@ ...@@ -17,9 +17,9 @@
#include <memory> #include <memory>
#include <vector> #include <vector>
#include "default_opset.hpp"
#include "ngraph/op/abs.hpp" #include "ngraph/op/abs.hpp"
#include "ngraph/op/add.hpp" #include "ngraph/op/add.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/divide.hpp" #include "ngraph/op/divide.hpp"
#include "ngraph/op/util/broadcasting.hpp" #include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/shape.hpp" #include "ngraph/shape.hpp"
...@@ -37,11 +37,12 @@ namespace ngraph ...@@ -37,11 +37,12 @@ namespace ngraph
{ {
auto data = node.get_ng_inputs().at(0); auto data = node.get_ng_inputs().at(0);
std::shared_ptr<ngraph::Node> one_node = std::make_shared<ngraph::op::Constant>( std::shared_ptr<ngraph::Node> one_node =
std::make_shared<default_opset::Constant>(
data->get_element_type(), Shape{}, std::vector<double>{1}); data->get_element_type(), Shape{}, std::vector<double>{1});
one_node = ngraph::op::make_broadcast_node(one_node, data->get_shape()); one_node = ngraph::op::make_broadcast_node(one_node, data->get_shape());
return {data / (std::make_shared<ngraph::op::Abs>(data) + one_node)}; return {data / (std::make_shared<default_opset::Abs>(data) + one_node)};
} }
} // namespace set_1 } // namespace set_1
......
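A minimal standalone sketch of the Softsign lowering above, where the broadcast constant 1 is added to |x| before the division; the function name is illustrative only.

#include <cmath>

float softsign(float x)
{
    return x / (std::abs(x) + 1.f);
}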
...@@ -14,8 +14,8 @@ ...@@ -14,8 +14,8 @@
// limitations under the License. // limitations under the License.
//***************************************************************************** //*****************************************************************************
#include "ngraph/op/fused/space_to_depth.hpp"
#include "space_to_depth.hpp" #include "space_to_depth.hpp"
#include "default_opset.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -29,9 +29,9 @@ namespace ngraph ...@@ -29,9 +29,9 @@ namespace ngraph
{ {
auto data = node.get_ng_inputs().at(0); auto data = node.get_ng_inputs().at(0);
std::size_t block_size = node.get_attribute_value<std::int64_t>("blocksize"); std::size_t block_size = node.get_attribute_value<std::int64_t>("blocksize");
const auto mode = ngraph::op::SpaceToDepth::SpaceToDepthMode::BLOCKS_FIRST; const auto mode = default_opset::SpaceToDepth::SpaceToDepthMode::BLOCKS_FIRST;
return NodeVector{ return NodeVector{
std::make_shared<ngraph::op::SpaceToDepth>(data, mode, block_size)}; std::make_shared<default_opset::SpaceToDepth>(data, mode, block_size)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -17,9 +17,7 @@ ...@@ -17,9 +17,7 @@
#include <cstdint> #include <cstdint>
#include <vector> #include <vector>
#include "ngraph/op/constant.hpp" #include "default_opset.hpp"
#include "ngraph/op/fused/split.hpp"
#include "ngraph/op/variadic_split.hpp"
#include "split.hpp" #include "split.hpp"
#include "utils/common.hpp" #include "utils/common.hpp"
...@@ -36,7 +34,7 @@ namespace ngraph ...@@ -36,7 +34,7 @@ namespace ngraph
const auto input = node.get_ng_inputs().at(0); const auto input = node.get_ng_inputs().at(0);
const auto axis = node.get_attribute_value<int64_t>("axis", 0); const auto axis = node.get_attribute_value<int64_t>("axis", 0);
const auto axis_node = const auto axis_node =
ngraph::op::Constant::create(element::i64, Shape{}, {axis}); default_opset::Constant::create(element::i64, Shape{}, {axis});
std::shared_ptr<ngraph::Node> split; std::shared_ptr<ngraph::Node> split;
if (node.has_attribute("split")) if (node.has_attribute("split"))
...@@ -44,16 +42,16 @@ namespace ngraph ...@@ -44,16 +42,16 @@ namespace ngraph
const auto splits = const auto splits =
node.get_attribute_value<std::vector<std::size_t>>("split"); node.get_attribute_value<std::vector<std::size_t>>("split");
const auto split_lengths = ngraph::op::Constant::create( const auto split_lengths = default_opset::Constant::create(
element::u64, Shape{splits.size()}, splits); element::u64, Shape{splits.size()}, splits);
split = std::make_shared<ngraph::op::v1::VariadicSplit>( split = std::make_shared<default_opset::VariadicSplit>(
input, axis_node, split_lengths); input, axis_node, split_lengths);
} }
else else
{ {
const auto outputs_number = node.get_output_names().size(); const auto outputs_number = node.get_output_names().size();
split = std::make_shared<ngraph::op::v1::Split>( split = std::make_shared<default_opset::Split>(
input, axis_node, outputs_number); input, axis_node, outputs_number);
} }
return common::get_outputs(split); return common::get_outputs(split);
......
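A minimal standalone sketch of the Split lowering above for a 1-D input: an explicit 'split' attribute maps to VariadicSplit with the given lengths, otherwise the input is split evenly into 'outputs_number' pieces. It assumes the lengths sum to the input size (and even divisibility in the default case); the function name is illustrative only.

#include <cstddef>
#include <vector>

std::vector<std::vector<float>> split_1d(const std::vector<float>& input,
                                         std::vector<std::size_t> split_lengths,
                                         std::size_t outputs_number)
{
    if (split_lengths.empty()) // no 'split' attribute: even split
    {
        split_lengths.assign(outputs_number, input.size() / outputs_number);
    }
    std::vector<std::vector<float>> outputs;
    std::size_t offset = 0;
    for (const std::size_t len : split_lengths)
    {
        outputs.emplace_back(input.begin() + offset, input.begin() + offset + len);
        offset += len;
    }
    return outputs;
}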
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/sqrt.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -32,7 +32,7 @@ namespace ngraph ...@@ -32,7 +32,7 @@ namespace ngraph
{ {
inline NodeVector sqrt(const Node& node) inline NodeVector sqrt(const Node& node)
{ {
return {std::make_shared<ngraph::op::Sqrt>(node.get_ng_inputs().at(0))}; return {std::make_shared<default_opset::Sqrt>(node.get_ng_inputs().at(0))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -37,9 +37,9 @@ namespace ngraph ...@@ -37,9 +37,9 @@ namespace ngraph
node.get_attribute_value<std::vector<std::int64_t>>("axes", {}); node.get_attribute_value<std::vector<std::int64_t>>("axes", {});
std::vector<std::size_t> valid_axes = std::vector<std::size_t> valid_axes =
common::validate_axes(node, axes, data->get_shape().size()); common::validate_axes(node, axes, data->get_shape().size());
auto axes_node = std::make_shared<ngraph::op::Constant>( auto axes_node = std::make_shared<default_opset::Constant>(
element::u64, Shape{valid_axes.size()}, valid_axes); element::u64, Shape{valid_axes.size()}, valid_axes);
return {std::make_shared<ngraph::op::Squeeze>(data, axes_node)}; return {std::make_shared<default_opset::Squeeze>(data, axes_node)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -17,9 +17,10 @@ ...@@ -17,9 +17,10 @@
#pragma once #pragma once
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/subtract.hpp"
#include "ngraph/op/util/broadcasting.hpp" #include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/opsets/opset0.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -38,8 +39,8 @@ namespace ngraph ...@@ -38,8 +39,8 @@ namespace ngraph
NodeVector ng_inputs{ngraph::op::legacy_style_broadcast_for_binary_operation( NodeVector ng_inputs{ngraph::op::legacy_style_broadcast_for_binary_operation(
node.get_ng_inputs().at(0), node.get_ng_inputs().at(1), axis)}; node.get_ng_inputs().at(0), node.get_ng_inputs().at(1), axis)};
return { return {std::make_shared<ngraph::opset0::Subtract>(ng_inputs.at(0),
std::make_shared<ngraph::op::Subtract>(ng_inputs.at(0), ng_inputs.at(1))}; ng_inputs.at(1))};
} }
} // namespace set_1 } // namespace set_1
...@@ -48,7 +49,7 @@ namespace ngraph ...@@ -48,7 +49,7 @@ namespace ngraph
{ {
inline NodeVector sub(const Node& node) inline NodeVector sub(const Node& node)
{ {
return {std::make_shared<ngraph::op::v1::Subtract>(node.get_ng_inputs().at(0), return {std::make_shared<default_opset::Subtract>(node.get_ng_inputs().at(0),
node.get_ng_inputs().at(1))}; node.get_ng_inputs().at(1))};
} }
......
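In the Sub hunk, set_1 still pre-broadcasts its inputs with legacy_style_broadcast_for_binary_operation and builds an opset0 Subtract, while the later opset constructs default_opset::Subtract directly; this relies on the v1 arithmetic ops defaulting to NumPy-style auto-broadcast (an assumption consistent with how the other v1 binary ops are used in this change). A sketch with hypothetical shapes:

// Sketch only: v1 Subtract with implicit NumPy broadcasting, so no explicit
// broadcast helper is needed (opset1 spelled out for the default_opset alias).
#include <memory>
#include "ngraph/opsets/opset1.hpp"

using namespace ngraph;

std::shared_ptr<Node> sub_opset1_example()
{
    auto a = std::make_shared<opset1::Parameter>(element::f32, Shape{2, 3, 4});
    auto b = std::make_shared<opset1::Parameter>(element::f32, Shape{4});
    // {2, 3, 4} - {4}: the second input is broadcast along the trailing axis.
    return std::make_shared<opset1::Subtract>(a, b);
}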
...@@ -17,8 +17,9 @@ ...@@ -17,8 +17,9 @@
#pragma once #pragma once
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/add.hpp" #include "ngraph/opsets/opset0.hpp"
#include "utils/variadic.hpp" #include "utils/variadic.hpp"
namespace ngraph namespace ngraph
...@@ -31,7 +32,7 @@ namespace ngraph ...@@ -31,7 +32,7 @@ namespace ngraph
{ {
inline NodeVector sum(const Node& node) inline NodeVector sum(const Node& node)
{ {
return variadic::make_ng_variadic_op<ngraph::op::Add>(node); return variadic::make_ng_variadic_op<ngraph::opset0::Add>(node);
} }
} // namespace set_1 } // namespace set_1
...@@ -40,7 +41,7 @@ namespace ngraph ...@@ -40,7 +41,7 @@ namespace ngraph
{ {
inline NodeVector sum(const Node& node) inline NodeVector sum(const Node& node)
{ {
return variadic::make_ng_variadic_op<ngraph::op::v1::Add>(node); return variadic::make_ng_variadic_op<default_opset::Add>(node);
} }
} // namespace set_8 } // namespace set_8
......
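Sum maps the ONNX variadic input list onto a binary Add through variadic::make_ng_variadic_op. Its actual implementation is not part of this diff; the sketch below only approximates the effect as a left fold over the inputs (opset1 spelled out).

// Sketch only: folding N inputs with v1 Add, approximating (not reproducing)
// what make_ng_variadic_op<default_opset::Add> produces.
#include <cstddef>
#include <memory>
#include <vector>
#include "ngraph/opsets/opset1.hpp"

using namespace ngraph;

std::shared_ptr<Node> sum_inputs(const std::vector<std::shared_ptr<Node>>& inputs)
{
    std::shared_ptr<Node> result = inputs.at(0);
    for (std::size_t i = 1; i < inputs.size(); ++i)
    {
        result = std::make_shared<opset1::Add>(result, inputs.at(i));
    }
    return result;
}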
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/tan.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -32,7 +32,7 @@ namespace ngraph ...@@ -32,7 +32,7 @@ namespace ngraph
{ {
inline NodeVector tan(const Node& node) inline NodeVector tan(const Node& node)
{ {
return {std::make_shared<ngraph::op::Tan>(node.get_ng_inputs().at(0))}; return {std::make_shared<default_opset::Tan>(node.get_ng_inputs().at(0))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
#include <memory> #include <memory>
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/tanh.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -32,7 +32,7 @@ namespace ngraph ...@@ -32,7 +32,7 @@ namespace ngraph
{ {
inline NodeVector tanh(const Node& node) inline NodeVector tanh(const Node& node)
{ {
return {std::make_shared<ngraph::op::Tanh>(node.get_ng_inputs().at(0))}; return {std::make_shared<default_opset::Tanh>(node.get_ng_inputs().at(0))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -17,11 +17,10 @@ ...@@ -17,11 +17,10 @@
#include <memory> #include <memory>
#include <vector> #include <vector>
#include "ngraph/op/constant.hpp" #include "default_opset.hpp"
#include "ngraph/op/convert.hpp"
#include "ngraph/op/greater.hpp"
#include "ngraph/op/multiply.hpp" #include "ngraph/op/multiply.hpp"
#include "ngraph/op/util/broadcasting.hpp" #include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "thresholded_relu.hpp" #include "thresholded_relu.hpp"
namespace ngraph namespace ngraph
...@@ -38,17 +37,17 @@ namespace ngraph ...@@ -38,17 +37,17 @@ namespace ngraph
double alpha = node.get_attribute_value<double>("alpha", 1.0); double alpha = node.get_attribute_value<double>("alpha", 1.0);
std::shared_ptr<ngraph::Node> alpha_node = std::shared_ptr<ngraph::Node> alpha_node =
std::make_shared<ngraph::op::Constant>(data->get_element_type(), std::make_shared<default_opset::Constant>(data->get_element_type(),
data->get_shape(), data->get_shape(),
std::vector<double>{alpha}); std::vector<double>{alpha});
auto data_map = std::make_shared<ngraph::op::Convert>( auto data_map = std::make_shared<default_opset::Convert>(
std::make_shared<ngraph::op::Greater>(data, alpha_node), std::make_shared<ngraph::opset0::Greater>(data, alpha_node),
data->get_element_type()); data->get_element_type());
return {data * data_map}; return {data * data_map};
} }
} // namespace set_1 } // namespace set_1
} // namespace op } // namespace op
......
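The ThresholdedRelu hunk decomposes the op as y = x * (x > alpha): Greater produces a boolean mask, Convert casts the mask back to the input type, and the multiply zeroes out everything at or below alpha. The sketch below writes the same decomposition uniformly against opset1, whereas the hunk keeps Greater and the final multiply on opset0; the single-value Constant mirrors the hunk.

// Sketch only: ThresholdedRelu as x * (x > alpha).
#include <memory>
#include <vector>
#include "ngraph/opsets/opset1.hpp"

using namespace ngraph;

std::shared_ptr<Node> thresholded_relu(const std::shared_ptr<Node>& data, double alpha)
{
    auto alpha_node = std::make_shared<opset1::Constant>(
        data->get_element_type(), data->get_shape(), std::vector<double>{alpha});
    // Boolean mask of elements strictly greater than alpha, cast back to the input type.
    auto mask = std::make_shared<opset1::Convert>(
        std::make_shared<opset1::Greater>(data, alpha_node), data->get_element_type());
    return std::make_shared<opset1::Multiply>(data, mask);
}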
...@@ -17,10 +17,11 @@ ...@@ -17,10 +17,11 @@
#include <cstdint> #include <cstdint>
#include <memory> #include <memory>
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/constant.hpp" #include "ngraph/op/constant.hpp"
#include "ngraph/op/get_output_element.hpp"
#include "ngraph/op/topk.hpp" #include "ngraph/op/topk.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "ngraph/shape.hpp" #include "ngraph/shape.hpp"
#include "ngraph/type/element_type.hpp" #include "ngraph/type/element_type.hpp"
#include "topk.hpp" #include "topk.hpp"
...@@ -54,9 +55,9 @@ namespace ...@@ -54,9 +55,9 @@ namespace
ngraph::NodeVector get_outputs(const std::shared_ptr<ngraph::Node>& node) ngraph::NodeVector get_outputs(const std::shared_ptr<ngraph::Node>& node)
{ {
std::shared_ptr<ngraph::Node> values = std::shared_ptr<ngraph::Node> values =
std::make_shared<ngraph::op::GetOutputElement>(node, 0); std::make_shared<ngraph::opset0::GetOutputElement>(node, 0);
std::shared_ptr<ngraph::Node> indices = std::shared_ptr<ngraph::Node> indices =
std::make_shared<ngraph::op::GetOutputElement>(node, 1); std::make_shared<ngraph::opset0::GetOutputElement>(node, 1);
return {values, indices}; return {values, indices};
} }
...@@ -74,15 +75,15 @@ namespace ngraph ...@@ -74,15 +75,15 @@ namespace ngraph
{ {
auto data = node.get_ng_inputs().at(0); auto data = node.get_ng_inputs().at(0);
std::int64_t k{node.get_attribute_value<std::int64_t>("k")}; std::int64_t k{node.get_attribute_value<std::int64_t>("k")};
auto k_node = ngraph::op::Constant::create(element::i64, Shape{}, {k}); auto k_node = default_opset::Constant::create(element::i64, Shape{}, {k});
auto axis = get_axis(node); auto axis = get_axis(node);
std::shared_ptr<ngraph::Node> top_k = std::make_shared<ngraph::op::v1::TopK>( std::shared_ptr<ngraph::Node> top_k = std::make_shared<default_opset::TopK>(
data, data,
k_node, k_node,
axis, axis,
ngraph::op::v1::TopK::Mode::MAX, default_opset::TopK::Mode::MAX,
ngraph::op::v1::TopK::SortType::SORT_VALUES, default_opset::TopK::SortType::SORT_VALUES,
element::i64); element::i64);
return get_outputs(top_k); return get_outputs(top_k);
...@@ -97,12 +98,12 @@ namespace ngraph ...@@ -97,12 +98,12 @@ namespace ngraph
auto k = get_k(node); auto k = get_k(node);
auto axis = get_axis(node); auto axis = get_axis(node);
std::shared_ptr<ngraph::Node> top_k = std::make_shared<ngraph::op::v1::TopK>( std::shared_ptr<ngraph::Node> top_k = std::make_shared<default_opset::TopK>(
data, data,
k, k,
axis, axis,
ngraph::op::v1::TopK::Mode::MAX, default_opset::TopK::Mode::MAX,
ngraph::op::v1::TopK::SortType::SORT_VALUES, default_opset::TopK::SortType::SORT_VALUES,
element::i64); element::i64);
return get_outputs(top_k); return get_outputs(top_k);
...@@ -123,14 +124,14 @@ namespace ngraph ...@@ -123,14 +124,14 @@ namespace ngraph
const auto sorted = node.get_attribute_value<std::int64_t>("sorted", 1); const auto sorted = node.get_attribute_value<std::int64_t>("sorted", 1);
// Map attribute values to nGraph enums // Map attribute values to nGraph enums
const auto sort_type = sorted ? ngraph::op::v1::TopK::SortType::SORT_VALUES const auto sort_type = sorted ? default_opset::TopK::SortType::SORT_VALUES
: ngraph::op::v1::TopK::SortType::NONE; : default_opset::TopK::SortType::NONE;
const auto compute_max = static_cast<bool>(largest); const auto compute_max = static_cast<bool>(largest);
const auto mode = compute_max ? ngraph::op::v1::TopK::Mode::MAX const auto mode = compute_max ? default_opset::TopK::Mode::MAX
: ngraph::op::v1::TopK::Mode::MIN; : default_opset::TopK::Mode::MIN;
std::shared_ptr<ngraph::Node> top_k = std::make_shared<ngraph::op::v1::TopK>( std::shared_ptr<ngraph::Node> top_k = std::make_shared<default_opset::TopK>(
data, k, axis, mode, sort_type, element::i64); data, k, axis, mode, sort_type, element::i64);
return get_outputs(top_k); return get_outputs(top_k);
......
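All three TopK opset versions above end up in the same v1 constructor (data, k, axis, mode, sort type, index element type), and because the op has two outputs, get_outputs unpacks them with GetOutputElement. A sketch with a hypothetical k and axis, mirroring those calls:

// Sketch only: v1 TopK plus output unpacking, as in the hunks above.
#include <cstdint>
#include <memory>
#include "ngraph/opsets/opset0.hpp"
#include "ngraph/opsets/opset1.hpp"

using namespace ngraph;

NodeVector topk_example(const std::shared_ptr<Node>& data)
{
    auto k_node = opset1::Constant::create(element::i64, Shape{}, {3});
    const std::int64_t axis = 1;
    std::shared_ptr<Node> top_k =
        std::make_shared<opset1::TopK>(data,
                                       k_node,
                                       axis,
                                       opset1::TopK::Mode::MAX,
                                       opset1::TopK::SortType::SORT_VALUES,
                                       element::i64);
    // v1 TopK has two outputs: values at index 0, indices at index 1.
    auto values = std::make_shared<opset0::GetOutputElement>(top_k, 0);
    auto indices = std::make_shared<opset0::GetOutputElement>(top_k, 1);
    return {values, indices};
}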
...@@ -16,8 +16,7 @@ ...@@ -16,8 +16,7 @@
#include <memory> #include <memory>
#include "ngraph/op/constant.hpp" #include "default_opset.hpp"
#include "ngraph/op/fused/unsqueeze.hpp"
#include "ngraph/shape.hpp" #include "ngraph/shape.hpp"
#include "unsqueeze.hpp" #include "unsqueeze.hpp"
#include "utils/common.hpp" #include "utils/common.hpp"
...@@ -37,9 +36,9 @@ namespace ngraph ...@@ -37,9 +36,9 @@ namespace ngraph
const auto expanded_rank = data->get_shape().size() + axes.size(); const auto expanded_rank = data->get_shape().size() + axes.size();
std::vector<std::size_t> valid_axes = std::vector<std::size_t> valid_axes =
common::validate_axes(node, axes, expanded_rank); common::validate_axes(node, axes, expanded_rank);
auto axes_node = std::make_shared<ngraph::op::Constant>( auto axes_node = std::make_shared<default_opset::Constant>(
element::i64, Shape{valid_axes.size()}, valid_axes); element::i64, Shape{valid_axes.size()}, valid_axes);
return {std::make_shared<ngraph::op::Unsqueeze>(data, axes_node)}; return {std::make_shared<default_opset::Unsqueeze>(data, axes_node)};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -20,8 +20,8 @@ ...@@ -20,8 +20,8 @@
#include "core/node.hpp" #include "core/node.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/select.hpp"
#include "ngraph/op/util/broadcasting.hpp" #include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/opsets/opset0.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -35,7 +35,7 @@ namespace ngraph ...@@ -35,7 +35,7 @@ namespace ngraph
{ {
NodeVector ng_inputs{ngraph::op::numpy_style_broadcast(node.get_ng_inputs())}; NodeVector ng_inputs{ngraph::op::numpy_style_broadcast(node.get_ng_inputs())};
return {std::make_shared<ngraph::op::Select>( return {std::make_shared<ngraph::opset0::Select>(
ng_inputs.at(0), ng_inputs.at(1), ng_inputs.at(2))}; ng_inputs.at(0), ng_inputs.at(1), ng_inputs.at(2))};
} }
} // namespace set_1 } // namespace set_1
......
...@@ -17,9 +17,9 @@ ...@@ -17,9 +17,9 @@
#pragma once #pragma once
#include "core/node.hpp" #include "core/node.hpp"
#include "default_opset.hpp"
#include "ngraph/node.hpp" #include "ngraph/node.hpp"
#include "ngraph/op/util/broadcasting.hpp" #include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/op/xor.hpp"
namespace ngraph namespace ngraph
{ {
...@@ -31,7 +31,7 @@ namespace ngraph ...@@ -31,7 +31,7 @@ namespace ngraph
{ {
inline NodeVector logical_xor(const Node& node) inline NodeVector logical_xor(const Node& node)
{ {
return {std::make_shared<ngraph::op::v1::LogicalXor>( return {std::make_shared<default_opset::LogicalXor>(
node.get_ng_inputs().at(0), node.get_ng_inputs().at(0),
node.get_ng_inputs().at(1), node.get_ng_inputs().at(1),
ngraph::op::AutoBroadcastSpec(ngraph::op::AutoBroadcastType::NUMPY))}; ngraph::op::AutoBroadcastSpec(ngraph::op::AutoBroadcastType::NUMPY))};
......
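The Xor hunk passes an explicit NumPy AutoBroadcastSpec to v1 LogicalXor instead of pre-broadcasting the inputs. A sketch with hypothetical boolean inputs (opset1 spelled out):

// Sketch only: v1 LogicalXor with an explicit NumPy auto-broadcast spec.
#include <memory>
#include "ngraph/opsets/opset1.hpp"

using namespace ngraph;

std::shared_ptr<Node> xor_example()
{
    auto a = std::make_shared<opset1::Parameter>(element::boolean, Shape{2, 3});
    auto b = std::make_shared<opset1::Parameter>(element::boolean, Shape{3});
    return std::make_shared<opset1::LogicalXor>(
        a, b, op::AutoBroadcastSpec(op::AutoBroadcastType::NUMPY));
}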
...@@ -16,7 +16,9 @@ ...@@ -16,7 +16,9 @@
#include <onnx/onnx_pb.h> // onnx types #include <onnx/onnx_pb.h> // onnx types
#include "common.hpp" #include "common.hpp"
#include "default_opset.hpp"
#include "ngraph/op/get_output_element.hpp" #include "ngraph/op/get_output_element.hpp"
#include "ngraph/opsets/opset0.hpp"
#include "validation_util.hpp" #include "validation_util.hpp"
namespace ngraph namespace ngraph
...@@ -92,7 +94,7 @@ namespace ngraph ...@@ -92,7 +94,7 @@ namespace ngraph
} }
else else
{ {
outputs[i] = std::make_shared<ngraph::op::GetOutputElement>(node, i); outputs[i] = std::make_shared<ngraph::opset0::GetOutputElement>(node, i);
} }
} }
return outputs; return outputs;
......
...@@ -26,7 +26,7 @@ ...@@ -26,7 +26,7 @@
#include <vector> #include <vector>
#include "core/node.hpp" #include "core/node.hpp"
#include "ngraph/op/constant.hpp" #include "default_opset.hpp"
#include "ngraph/op/util/broadcasting.hpp" #include "ngraph/op/util/broadcasting.hpp"
#include "ngraph/shape.hpp" #include "ngraph/shape.hpp"
#include "ngraph/type/element_type.hpp" #include "ngraph/type/element_type.hpp"
...@@ -155,7 +155,7 @@ namespace ngraph ...@@ -155,7 +155,7 @@ namespace ngraph
identity_matrix.at(diagonal_element_idx) = T{1}; identity_matrix.at(diagonal_element_idx) = T{1};
} }
return std::make_shared<ngraph::op::Constant>( return std::make_shared<default_opset::Constant>(
output_type, output_shape, identity_matrix); output_type, output_shape, identity_matrix);
} }
...@@ -165,7 +165,7 @@ namespace ngraph ...@@ -165,7 +165,7 @@ namespace ngraph
/// ///
/// \return A Constant node representing identity matrix with shape (n, n). /// \return A Constant node representing identity matrix with shape (n, n).
template <typename T = double> template <typename T = double>
std::shared_ptr<ngraph::op::Constant> square_identity(const size_t n, std::shared_ptr<default_opset::Constant> square_identity(const size_t n,
const element::Type& type) const element::Type& type)
{ {
return shifted_square_identity(Shape{n, n}, type, 0); return shifted_square_identity(Shape{n, n}, type, 0);
......
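The identity helpers in utils/common now return a default_opset Constant built from a flat row-major buffer. A sketch of the unshifted square case with a hypothetical 3x3 size (opset1 spelled out):

// Sketch only: a small identity matrix as a single Constant.
#include <cstddef>
#include <memory>
#include <vector>
#include "ngraph/opsets/opset1.hpp"

using namespace ngraph;

std::shared_ptr<opset1::Constant> identity_3x3()
{
    const std::size_t n = 3;
    std::vector<double> values(n * n, 0.0);
    for (std::size_t i = 0; i < n; ++i)
    {
        values.at(i * n + i) = 1.0; // ones on the main diagonal
    }
    return std::make_shared<opset1::Constant>(element::f64, Shape{n, n}, values);
}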
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
#include <iterator> #include <iterator>
#include "default_opset.hpp"
#include "ngraph/coordinate_diff.hpp" #include "ngraph/coordinate_diff.hpp"
#include "utils/convpool.hpp" #include "utils/convpool.hpp"
#include "utils/pooling_factory.hpp" #include "utils/pooling_factory.hpp"
...@@ -45,7 +46,7 @@ namespace ngraph ...@@ -45,7 +46,7 @@ namespace ngraph
{ {
bool count_include_pad = bool count_include_pad =
m_onnx_node.get_attribute_value<std::int64_t>("count_include_pad", 0); m_onnx_node.get_attribute_value<std::int64_t>("count_include_pad", 0);
return {std::make_shared<ngraph::op::v1::AvgPool>(m_inputs.at(0), return {std::make_shared<default_opset::AvgPool>(m_inputs.at(0),
m_strides, m_strides,
m_padding_below, m_padding_below,
m_padding_above, m_padding_above,
...@@ -57,7 +58,7 @@ namespace ngraph ...@@ -57,7 +58,7 @@ namespace ngraph
NodeVector PoolingFactory::make_max_pool() const NodeVector PoolingFactory::make_max_pool() const
{ {
return {std::make_shared<ngraph::op::v1::MaxPool>(m_inputs.at(0), return {std::make_shared<default_opset::MaxPool>(m_inputs.at(0),
m_strides, m_strides,
m_padding_below, m_padding_below,
m_padding_above, m_padding_above,
......
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include "ngraph/ops.hpp"
namespace ngraph
{
namespace opset0
{
#define NGRAPH_OP(a, b) using b::a;
#include "ngraph/opsets/opset0_tbl.hpp"
#undef NGRAPH_OP
}
}
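The new opset0.hpp header expands each entry of opset0_tbl.hpp into a using-declaration inside ngraph::opset0 (the macro turns NGRAPH_OP(a, b) into "using b::a;"), so the v0 ops the importer still needs stay reachable under one namespace. A minimal usage sketch with hypothetical shapes; Greater is one of the ops the hunks above already spell as ngraph::opset0::Greater:

// Sketch only: referring to a v0 op through the new opset0 namespace.
#include <memory>
#include "ngraph/opsets/opset0.hpp"
#include "ngraph/opsets/opset1.hpp"

using namespace ngraph;

std::shared_ptr<Node> greater_via_opset0()
{
    auto a = std::make_shared<opset1::Parameter>(element::f32, Shape{2, 2});
    auto b = std::make_shared<opset1::Parameter>(element::f32, Shape{2, 2});
    // Same v0 Greater op as before, now reached via the opset0 using-declarations.
    return std::make_shared<opset0::Greater>(a, b);
}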