Unverified Commit afd8e51a authored by Robert Kimball's avatar Robert Kimball Committed by GitHub

Merge branch 'master' into bob/backend_api3

parents 94d93423 917efb94
...@@ -84,16 +84,13 @@ set(NGRAPH_FORWARD_CMAKE_ARGS ...@@ -84,16 +84,13 @@ set(NGRAPH_FORWARD_CMAKE_ARGS
if (NOT MSVS) if (NOT MSVS)
if(NOT CMAKE_BUILD_TYPE) if(NOT CMAKE_BUILD_TYPE)
set(NGRAPH_FORWARD_CMAKE_ARGS set(CMAKE_BUILD_TYPE "Release" CACHE STRING "Build type" FORCE)
${NGRAPH_FORWARD_CMAKE_ARGS}
-DCMAKE_BUILD_TYPE=Release
)
else()
set(NGRAPH_FORWARD_CMAKE_ARGS
${NGRAPH_FORWARD_CMAKE_ARGS}
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
)
endif() endif()
set(NGRAPH_FORWARD_CMAKE_ARGS
${NGRAPH_FORWARD_CMAKE_ARGS}
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
)
endif() endif()
message(STATUS "NGRAPH_FORWARD_CMAKE_ARGS ${NGRAPH_FORWARD_CMAKE_ARGS}") message(STATUS "NGRAPH_FORWARD_CMAKE_ARGS ${NGRAPH_FORWARD_CMAKE_ARGS}")
...@@ -339,7 +336,7 @@ else() ...@@ -339,7 +336,7 @@ else()
include(cmake/external_llvm.cmake) include(cmake/external_llvm.cmake)
endif() endif()
if (WIN32) if (WIN32 OR APPLE)
include(cmake/external_tbb_prebuilt.cmake) include(cmake/external_tbb_prebuilt.cmake)
else() else()
include(cmake/external_tbb.cmake) include(cmake/external_tbb.cmake)
......
...@@ -49,7 +49,7 @@ endif() ...@@ -49,7 +49,7 @@ endif()
# This section sets up MKL as an external project to be used later by MKLDNN # This section sets up MKL as an external project to be used later by MKLDNN
set(MKLURLROOT "https://github.com/intel/mkl-dnn/releases/download/v0.17/") set(MKLURLROOT "https://github.com/intel/mkl-dnn/releases/download/v0.17.2/")
set(MKLVERSION "2019.0.1.20180928") set(MKLVERSION "2019.0.1.20180928")
if (${CMAKE_SYSTEM_NAME} STREQUAL "Linux") if (${CMAKE_SYSTEM_NAME} STREQUAL "Linux")
set(MKLPACKAGE "mklml_lnx_${MKLVERSION}.tgz") set(MKLPACKAGE "mklml_lnx_${MKLVERSION}.tgz")
...@@ -90,7 +90,7 @@ set(MKL_LIBS ${TMP_PATHS}) ...@@ -90,7 +90,7 @@ set(MKL_LIBS ${TMP_PATHS})
target_link_libraries(libmkl INTERFACE ${MKL_LIBS}) target_link_libraries(libmkl INTERFACE ${MKL_LIBS})
set(MKLDNN_GIT_REPO_URL https://github.com/intel/mkl-dnn) set(MKLDNN_GIT_REPO_URL https://github.com/intel/mkl-dnn)
set(MKLDNN_GIT_TAG "830a100") set(MKLDNN_GIT_TAG "b9ce57a")
if(NGRAPH_LIB_VERSIONING_ENABLE) if(NGRAPH_LIB_VERSIONING_ENABLE)
set(MKLDNN_PATCH_FILE mkldnn.patch) set(MKLDNN_PATCH_FILE mkldnn.patch)
else() else()
......
...@@ -16,10 +16,13 @@ ...@@ -16,10 +16,13 @@
include(ExternalProject) include(ExternalProject)
set(ARCHIVE_FILE_BASE tbb2019_20181203oss)
if (WIN32) if (WIN32)
set(ARCHIVE_FILE_BASE tbb2019_20181203oss)
set(TBB_FILE https://github.com/01org/tbb/releases/download/2019_U3/${ARCHIVE_FILE_BASE}_win.zip) set(TBB_FILE https://github.com/01org/tbb/releases/download/2019_U3/${ARCHIVE_FILE_BASE}_win.zip)
set(TBB_SHA1_HASH 1989458a49e780d76248edac13b963f80c9a460c) set(TBB_SHA1_HASH 1989458a49e780d76248edac13b963f80c9a460c)
elseif(APPLE)
set(TBB_FILE https://github.com/01org/tbb/releases/download/2019_U3/${ARCHIVE_FILE_BASE}_mac.tgz)
set(TBB_SHA1_HASH 36926fb46add578b88a5c7e19652b94bb612e4be)
endif() endif()
ExternalProject_Add( ExternalProject_Add(
...@@ -37,11 +40,26 @@ ExternalProject_Add( ...@@ -37,11 +40,26 @@ ExternalProject_Add(
ExternalProject_Get_Property(ext_tbb SOURCE_DIR) ExternalProject_Get_Property(ext_tbb SOURCE_DIR)
set(SOURCE_DIR ${SOURCE_DIR}/${ARCHIVE_FILE_BASE}) set(SOURCE_DIR ${SOURCE_DIR}/${ARCHIVE_FILE_BASE})
set(TBB_LINK_LIBS if (WIN32)
${SOURCE_DIR}/lib/${CMAKE_SHARED_LIBRARY_PREFIX}clangTooling${CMAKE_SHARED_LIBRARY_SUFFIX} set(TBB_LINK_LIBS
${SOURCE_DIR}/lib/${CMAKE_SHARED_LIBRARY_PREFIX}clangTooling${CMAKE_SHARED_LIBRARY_SUFFIX} ${SOURCE_DIR}/lib/${CMAKE_SHARED_LIBRARY_PREFIX}clangTooling${CMAKE_SHARED_LIBRARY_SUFFIX}
${SOURCE_DIR}/lib/${CMAKE_SHARED_LIBRARY_PREFIX}clangTooling${CMAKE_SHARED_LIBRARY_SUFFIX} )
) elseif(APPLE)
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
set(TBB_LIB_NAME tbb_debug)
else()
set(TBB_LIB_NAME tbb)
endif()
set(TBB_LINK_LIBS
${NGRAPH_BUILD_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${TBB_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}
)
add_custom_command(TARGET ext_tbb POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${SOURCE_DIR}/lib/${CMAKE_SHARED_LIBRARY_PREFIX}${TBB_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX} ${NGRAPH_BUILD_DIR}
COMMENT "Move tbb libraries to ngraph build directory"
)
endif()
add_library(libtbb INTERFACE) add_library(libtbb INTERFACE)
add_dependencies(libtbb ext_tbb) add_dependencies(libtbb ext_tbb)
......
...@@ -16,7 +16,7 @@ framework-based complexity and [import it] to test or run on targeted and ...@@ -16,7 +16,7 @@ framework-based complexity and [import it] to test or run on targeted and
efficient backends with our user-friendly Python-based API. efficient backends with our user-friendly Python-based API.
nGraph is also integrated as an execution provider for [ONNX Runtime], nGraph is also integrated as an execution provider for [ONNX Runtime],
which is the first publicably available inference engine for ONNX. which is the first publicly available inference engine for ONNX.
The table below summarizes our current progress on supported frameworks. The table below summarizes our current progress on supported frameworks.
If you are an architect of a framework wishing to take advantage of speed If you are an architect of a framework wishing to take advantage of speed
......
...@@ -145,6 +145,8 @@ add_library(onnx_import STATIC ...@@ -145,6 +145,8 @@ add_library(onnx_import STATIC
op/tanh.hpp op/tanh.hpp
op/thresholded_relu.cpp op/thresholded_relu.cpp
op/thresholded_relu.hpp op/thresholded_relu.hpp
op/topk.cpp
op/topk.hpp
op/transpose.cpp op/transpose.cpp
op/transpose.hpp op/transpose.hpp
op/unsqueeze.cpp op/unsqueeze.cpp
......
...@@ -22,7 +22,7 @@ namespace ngraph ...@@ -22,7 +22,7 @@ namespace ngraph
{ {
namespace onnx_import namespace onnx_import
{ {
std::vector<Graph> Attribute::get_graph_array(const Model& model) const std::vector<Graph> Attribute::get_graph_array(Model& model) const
{ {
std::vector<Graph> result; std::vector<Graph> result;
for (const auto& graph : m_attribute_proto->graphs()) for (const auto& graph : m_attribute_proto->graphs())
...@@ -32,7 +32,7 @@ namespace ngraph ...@@ -32,7 +32,7 @@ namespace ngraph
return result; return result;
} }
Graph Attribute::get_graph(const Model& model) const Graph Attribute::get_graph(Model& model) const
{ {
return Graph{m_attribute_proto->g(), model}; return Graph{m_attribute_proto->g(), model};
} }
......
...@@ -278,7 +278,7 @@ namespace ngraph ...@@ -278,7 +278,7 @@ namespace ngraph
float get_float() const { return m_attribute_proto->f(); } float get_float() const { return m_attribute_proto->f(); }
int64_t get_integer() const { return m_attribute_proto->i(); } int64_t get_integer() const { return m_attribute_proto->i(); }
const std::string& get_string() const { return m_attribute_proto->s(); } const std::string& get_string() const { return m_attribute_proto->s(); }
Graph get_graph(const Model&) const; Graph get_graph(Model&) const;
std::vector<Tensor> get_tensor_array() const std::vector<Tensor> get_tensor_array() const
{ {
...@@ -303,7 +303,7 @@ namespace ngraph ...@@ -303,7 +303,7 @@ namespace ngraph
std::end(m_attribute_proto->strings())}; std::end(m_attribute_proto->strings())};
} }
std::vector<Graph> get_graph_array(const Model&) const; std::vector<Graph> get_graph_array(Model&) const;
/* explicit */ operator onnx::AttributeProto_AttributeType() const /* explicit */ operator onnx::AttributeProto_AttributeType() const
{ {
......
...@@ -14,6 +14,7 @@ ...@@ -14,6 +14,7 @@
// limitations under the License. // limitations under the License.
//***************************************************************************** //*****************************************************************************
#include <functional>
#include <set> #include <set>
#include "graph.hpp" #include "graph.hpp"
...@@ -25,26 +26,40 @@ namespace ngraph ...@@ -25,26 +26,40 @@ namespace ngraph
{ {
namespace detail namespace detail
{ {
std::string to_string(const std::set<std::string>& set) static std::string to_string(
const std::map<std::string, std::reference_wrapper<const onnx::NodeProto>>& map)
{ {
std::string result; std::string result;
for (auto it = std::begin(set); it != std::end(set); ++it) for (auto it = std::begin(map); it != std::end(map); ++it)
{ {
result += (it != std::begin(set) ? ", " : "") + *it; result += (it != std::begin(map) ? ", " : "") + it->first;
} }
return result; return result;
} }
inline std::string to_string(const onnx::NodeProto& node_proto) static std::string get_node_domain(const onnx::NodeProto& node_proto)
{ {
return (node_proto.domain().empty() ? "" : node_proto.domain() + ".") + return (node_proto.domain().empty() ? "" : node_proto.domain());
node_proto.op_type();
} }
}
Graph::Graph(const onnx::GraphProto& graph_proto, /// \brief Gets the operator represented by provided node unique identificator.
const Model& model, ///
const Weights& weights) /// \param[in] node_proto The node protobuf representation object.
///
/// \note The operator is uniquely identified by the tuple (domain, op_type,
/// since_version). The first two elements are stored in NodeProto object,
/// thus we use only them.
///
/// \return The unique identificator.
///
static std::string get_op_domain_and_name(const onnx::NodeProto& node_proto)
{
std::string domain = get_node_domain(node_proto);
return (domain.empty() ? "" : domain + ".") + node_proto.op_type();
}
} // namespace detail
Graph::Graph(const onnx::GraphProto& graph_proto, Model& model, const Weights& weights)
: m_graph_proto{&graph_proto} : m_graph_proto{&graph_proto}
, m_model{&model} , m_model{&model}
{ {
...@@ -70,17 +85,34 @@ namespace ngraph ...@@ -70,17 +85,34 @@ namespace ngraph
} }
// Verify that ONNX graph contains only nodes of available operator types // Verify that ONNX graph contains only nodes of available operator types
std::set<std::string> unknown_operator_types; std::map<std::string, std::reference_wrapper<const onnx::NodeProto>> unknown_operators;
for (const auto& node_proto : m_graph_proto->node()) for (const auto& node_proto : m_graph_proto->node())
{ {
if (!m_model->is_operator_available(node_proto)) if (!m_model->is_operator_available(node_proto))
{ {
unknown_operator_types.emplace(detail::to_string(node_proto)); unknown_operators.emplace(detail::get_op_domain_and_name(node_proto),
node_proto);
// Try adding missing domain
m_model->enable_opset_domain(detail::get_node_domain(node_proto));
}
}
// Reverify wheter we still have any unavailable operators.
auto it = std::begin(unknown_operators);
while (it != std::end(unknown_operators))
{
if (m_model->is_operator_available(it->second))
{
it = unknown_operators.erase(it);
}
else
{
it++;
} }
} }
NGRAPH_ASSERT(unknown_operator_types.empty()) NGRAPH_ASSERT(unknown_operators.empty()) << "unknown operations: "
<< "unknown operations: " << detail::to_string(unknown_operator_types); << detail::to_string(unknown_operators);
// Process ONNX graph nodes, convert to nGraph nodes // Process ONNX graph nodes, convert to nGraph nodes
for (const auto& node_proto : m_graph_proto->node()) for (const auto& node_proto : m_graph_proto->node())
......
...@@ -33,7 +33,7 @@ namespace ngraph ...@@ -33,7 +33,7 @@ namespace ngraph
class Graph class Graph
{ {
public: public:
Graph(const onnx::GraphProto& proto, const Model& model, const Weights& weights = {}); Graph(const onnx::GraphProto& proto, Model& model, const Weights& weights = {});
const std::vector<Node>& get_nodes() const { return m_nodes; } const std::vector<Node>& get_nodes() const { return m_nodes; }
const std::vector<ValueInfo>& get_inputs() const { return m_inputs; } const std::vector<ValueInfo>& get_inputs() const { return m_inputs; }
...@@ -59,7 +59,7 @@ namespace ngraph ...@@ -59,7 +59,7 @@ namespace ngraph
ParameterVector m_parameters; ParameterVector m_parameters;
std::map<std::string, std::shared_ptr<ngraph::Node>> m_ng_node_cache; std::map<std::string, std::shared_ptr<ngraph::Node>> m_ng_node_cache;
std::map<std::string, Tensor> m_initializers; std::map<std::string, Tensor> m_initializers;
const Model* m_model; Model* m_model;
}; };
inline std::ostream& operator<<(std::ostream& outs, const Graph& graph) inline std::ostream& operator<<(std::ostream& outs, const Graph& graph)
......
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
#include <onnx-ml.pb.h> #include <onnx-ml.pb.h>
#include "model.hpp" #include "model.hpp"
#include "ngraph/log.hpp"
#include "ops_bridge.hpp" #include "ops_bridge.hpp"
namespace ngraph namespace ngraph
...@@ -33,14 +34,14 @@ namespace ngraph ...@@ -33,14 +34,14 @@ namespace ngraph
{ {
m_opset.emplace(id.domain(), m_opset.emplace(id.domain(),
OperatorsBridge::get_operator_set( OperatorsBridge::get_operator_set(
id.version(), (id.domain() == "ai.onnx" ? "" : id.domain()))); (id.domain() == "ai.onnx" ? "" : id.domain()), id.version()));
} }
// onnx.proto(.3): the empty string ("") for domain or absence of opset_import field // onnx.proto(.3): the empty string ("") for domain or absence of opset_import field
// implies the operator set that is defined as part of the ONNX specification. // implies the operator set that is defined as part of the ONNX specification.
const auto dm = m_opset.find(""); const auto dm = m_opset.find("");
if (dm == std::end(m_opset)) if (dm == std::end(m_opset))
{ {
m_opset.emplace("", OperatorsBridge::get_operator_set(ONNX_OPSET_VERSION, "")); m_opset.emplace("", OperatorsBridge::get_operator_set("", ONNX_OPSET_VERSION));
} }
} }
...@@ -71,6 +72,26 @@ namespace ngraph ...@@ -71,6 +72,26 @@ namespace ngraph
return (op != std::end(dm->second)); return (op != std::end(dm->second));
} }
void Model::enable_opset_domain(const std::string& domain)
{
// There is no need to 'update' already enabled domain.
// Since this function may be called only during model import,
// (maybe multiple times) the registered domain opset won't differ
// between subsequent calls.
if (m_opset.find(domain) == std::end(m_opset))
{
OperatorSet opset{OperatorsBridge::get_operator_set(domain)};
if (opset.empty())
{
NGRAPH_WARN << "Couldn't enable domain: " << domain
<< " since it hasn't any registered operators.";
return;
}
m_opset.emplace(domain, opset);
}
}
} // namespace onnx_import } // namespace onnx_import
} // namespace ngraph } // namespace ngraph
...@@ -61,6 +61,15 @@ namespace ngraph ...@@ -61,6 +61,15 @@ namespace ngraph
/// \return `true` if the operator is available, otherwise it returns `false`. /// \return `true` if the operator is available, otherwise it returns `false`.
bool is_operator_available(const onnx::NodeProto& node_proto) const; bool is_operator_available(const onnx::NodeProto& node_proto) const;
/// \brief Enable operators from provided domain to use by this model.
///
/// \note This function makes visible all currently registered in provided domain
/// operators for use in this model.
///
/// \param[in] domain The domain name.
///
void enable_opset_domain(const std::string& domain);
private: private:
const onnx::ModelProto* m_model_proto; const onnx::ModelProto* m_model_proto;
std::unordered_map<std::string, OperatorSet> m_opset; std::unordered_map<std::string, OperatorSet> m_opset;
......
...@@ -181,6 +181,34 @@ namespace ngraph ...@@ -181,6 +181,34 @@ namespace ngraph
throw error::tensor::invalid_data_type{tensor.data_type()}; throw error::tensor::invalid_data_type{tensor.data_type()};
} }
template <>
inline std::vector<int8_t> get_data(const onnx::TensorProto& tensor)
{
if (tensor.has_raw_data())
{
return detail::__get_raw_data<int8_t>(tensor.raw_data());
}
if (tensor.data_type() == onnx::TensorProto_DataType_INT8)
{
return detail::__get_data<int8_t>(tensor.int32_data());
}
throw error::tensor::invalid_data_type{tensor.data_type()};
}
template <>
inline std::vector<int16_t> get_data(const onnx::TensorProto& tensor)
{
if (tensor.has_raw_data())
{
return detail::__get_raw_data<int16_t>(tensor.raw_data());
}
if (tensor.data_type() == onnx::TensorProto_DataType_INT16)
{
return detail::__get_data<int16_t>(tensor.int32_data());
}
throw error::tensor::invalid_data_type{tensor.data_type()};
}
template <> template <>
inline std::vector<int32_t> get_data(const onnx::TensorProto& tensor) inline std::vector<int32_t> get_data(const onnx::TensorProto& tensor)
{ {
...@@ -209,6 +237,48 @@ namespace ngraph ...@@ -209,6 +237,48 @@ namespace ngraph
return detail::__get_data<int64_t>(tensor.int64_data()); return detail::__get_data<int64_t>(tensor.int64_data());
} }
template <>
inline std::vector<uint8_t> get_data(const onnx::TensorProto& tensor)
{
if (tensor.has_raw_data())
{
return detail::__get_raw_data<uint8_t>(tensor.raw_data());
}
if (tensor.data_type() == onnx::TensorProto_DataType_UINT8)
{
return detail::__get_data<uint8_t>(tensor.int32_data());
}
throw error::tensor::invalid_data_type{tensor.data_type()};
}
template <>
inline std::vector<uint16_t> get_data(const onnx::TensorProto& tensor)
{
if (tensor.has_raw_data())
{
return detail::__get_raw_data<uint16_t>(tensor.raw_data());
}
if (tensor.data_type() == onnx::TensorProto_DataType_UINT16)
{
return detail::__get_data<uint16_t>(tensor.int32_data());
}
throw error::tensor::invalid_data_type{tensor.data_type()};
}
template <>
inline std::vector<uint32_t> get_data(const onnx::TensorProto& tensor)
{
if (tensor.has_raw_data())
{
return detail::__get_raw_data<uint32_t>(tensor.raw_data());
}
if (tensor.data_type() == onnx::TensorProto_DataType_UINT32)
{
return detail::__get_data<uint32_t>(tensor.uint64_data());
}
throw error::tensor::invalid_data_type{tensor.data_type()};
}
template <> template <>
inline std::vector<uint64_t> get_data(const onnx::TensorProto& tensor) inline std::vector<uint64_t> get_data(const onnx::TensorProto& tensor)
{ {
......
...@@ -90,7 +90,8 @@ namespace ngraph ...@@ -90,7 +90,8 @@ namespace ngraph
std::set<std::string> get_supported_operators(std::int64_t version, std::set<std::string> get_supported_operators(std::int64_t version,
const std::string& domain) const std::string& domain)
{ {
OperatorSet op_set{OperatorsBridge::get_operator_set(version, domain)}; OperatorSet op_set{
OperatorsBridge::get_operator_set(domain == "ai.onnx" ? "" : domain, version)};
std::set<std::string> op_list{}; std::set<std::string> op_list{};
for (const auto& op : op_set) for (const auto& op : op_set)
{ {
......
...@@ -65,6 +65,20 @@ namespace ngraph ...@@ -65,6 +65,20 @@ namespace ngraph
return __make_ng_constant<double>(element::f64, tensor); return __make_ng_constant<double>(element::f64, tensor);
} }
template <>
inline std::shared_ptr<ngraph::op::Constant>
make_ng_constant<Tensor::Type::int8>(const Tensor& tensor)
{
return __make_ng_constant<int8_t>(element::i8, tensor);
}
template <>
inline std::shared_ptr<ngraph::op::Constant>
make_ng_constant<Tensor::Type::int16>(const Tensor& tensor)
{
return __make_ng_constant<int16_t>(element::i16, tensor);
}
template <> template <>
inline std::shared_ptr<ngraph::op::Constant> inline std::shared_ptr<ngraph::op::Constant>
make_ng_constant<Tensor::Type::int32>(const Tensor& tensor) make_ng_constant<Tensor::Type::int32>(const Tensor& tensor)
...@@ -79,6 +93,20 @@ namespace ngraph ...@@ -79,6 +93,20 @@ namespace ngraph
return __make_ng_constant<int64_t>(element::i64, tensor); return __make_ng_constant<int64_t>(element::i64, tensor);
} }
template <>
inline std::shared_ptr<ngraph::op::Constant>
make_ng_constant<Tensor::Type::uint8>(const Tensor& tensor)
{
return __make_ng_constant<uint8_t>(element::u8, tensor);
}
template <>
inline std::shared_ptr<ngraph::op::Constant>
make_ng_constant<Tensor::Type::uint16>(const Tensor& tensor)
{
return __make_ng_constant<uint16_t>(element::u16, tensor);
}
template <> template <>
inline std::shared_ptr<ngraph::op::Constant> inline std::shared_ptr<ngraph::op::Constant>
make_ng_constant<Tensor::Type::uint32>(const Tensor& tensor) make_ng_constant<Tensor::Type::uint32>(const Tensor& tensor)
...@@ -103,8 +131,12 @@ namespace ngraph ...@@ -103,8 +131,12 @@ namespace ngraph
MAKE_NG_CONSTANT(Tensor::Type::float16); MAKE_NG_CONSTANT(Tensor::Type::float16);
MAKE_NG_CONSTANT(Tensor::Type::float32); MAKE_NG_CONSTANT(Tensor::Type::float32);
MAKE_NG_CONSTANT(Tensor::Type::float64); MAKE_NG_CONSTANT(Tensor::Type::float64);
MAKE_NG_CONSTANT(Tensor::Type::int8);
MAKE_NG_CONSTANT(Tensor::Type::int16);
MAKE_NG_CONSTANT(Tensor::Type::int32); MAKE_NG_CONSTANT(Tensor::Type::int32);
MAKE_NG_CONSTANT(Tensor::Type::int64); MAKE_NG_CONSTANT(Tensor::Type::int64);
MAKE_NG_CONSTANT(Tensor::Type::uint8);
MAKE_NG_CONSTANT(Tensor::Type::uint16);
MAKE_NG_CONSTANT(Tensor::Type::uint32); MAKE_NG_CONSTANT(Tensor::Type::uint32);
MAKE_NG_CONSTANT(Tensor::Type::uint64); MAKE_NG_CONSTANT(Tensor::Type::uint64);
default: throw error::tensor::invalid_data_type{tensor}; default: throw error::tensor::invalid_data_type{tensor};
......
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include <cstdint>
#include <memory>
#include <vector>
#include "exceptions.hpp"
#include "ngraph/node.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/get_output_element.hpp"
#include "ngraph/op/topk.hpp"
#include "ngraph/type/element_type.hpp"
#include "topk.hpp"
namespace ngraph
{
namespace onnx_import
{
namespace op
{
namespace set_1
{
NodeVector topk(const Node& node)
{
auto data = node.get_ng_inputs().at(0);
std::int64_t axis{node.get_attribute_value<std::int64_t>("axis", -1)};
std::int64_t k{node.get_attribute_value<std::int64_t>("k")};
auto num_dimensions = data->get_shape().size();
if (axis < 0)
{
axis += num_dimensions;
}
ASSERT_VALID_ARGUMENT(node, axis < num_dimensions)
<< "`axis` parameter is out of range: " << axis;
std::shared_ptr<ngraph::Node> top_k =
std::make_shared<ngraph::op::TopK>(data, axis, element::i64, k);
std::shared_ptr<ngraph::Node> indices =
std::make_shared<ngraph::op::GetOutputElement>(top_k, 0);
std::shared_ptr<ngraph::Node> values =
std::make_shared<ngraph::op::GetOutputElement>(top_k, 1);
return {values, indices};
}
} // namespace set_1
} //namespace op
} // namespace onnx_import
} // namespace ngraph
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include "core/node.hpp"
#include "ngraph/node_vector.hpp"
namespace ngraph
{
namespace onnx_import
{
namespace op
{
namespace set_1
{
/// \brief Performs ONNX TopK operation.
///
/// \param node The ONNX node object representing this operation.
/// \return The vector containing Ngraph nodes producing output of ONNX TopK
/// operation(both values and indices).
NodeVector topk(const Node& node);
} // namespace set_1
} //namespace op
} // namespace onnx_import
} // namespace ngraph
...@@ -94,6 +94,7 @@ ...@@ -94,6 +94,7 @@
#include "op/tan.hpp" #include "op/tan.hpp"
#include "op/tanh.hpp" #include "op/tanh.hpp"
#include "op/thresholded_relu.hpp" #include "op/thresholded_relu.hpp"
#include "op/topk.hpp"
#include "op/transpose.hpp" #include "op/transpose.hpp"
#include "op/unsqueeze.hpp" #include "op/unsqueeze.hpp"
#include "op/xor.hpp" #include "op/xor.hpp"
...@@ -109,6 +110,11 @@ namespace ngraph ...@@ -109,6 +110,11 @@ namespace ngraph
find(std::int64_t version, const std::map<std::int64_t, Operator>& map) find(std::int64_t version, const std::map<std::int64_t, Operator>& map)
{ {
std::map<std::int64_t, Operator>::const_iterator it{}; std::map<std::int64_t, Operator>::const_iterator it{};
// Get the latest version.
if (version == -1)
{
return map.empty() ? std::end(map) : --std::end(map);
}
while (version > 0) while (version > 0)
{ {
it = map.find(version--); it = map.find(version--);
...@@ -126,23 +132,29 @@ namespace ngraph ...@@ -126,23 +132,29 @@ namespace ngraph
const std::string& domain, const std::string& domain,
Operator fn) Operator fn)
{ {
m_map[domain][name].emplace(version, std::move(fn)); auto result = m_map[domain][name].emplace(version, std::move(fn));
if (result.second)
{
NGRAPH_WARN << "Overwriting existing operator: "
<< domain + "." + name + ":" + std::to_string(version);
}
} }
OperatorSet OperatorsBridge::_get_operator_set(std::int64_t version, OperatorSet OperatorsBridge::_get_operator_set(const std::string& domain,
const std::string& domain) std::int64_t version)
{ {
OperatorSet result; OperatorSet result;
auto dm = m_map.find(domain); auto dm = m_map.find(domain);
if (dm == std::end(m_map)) if (dm == std::end(m_map))
{ {
throw error::UnknownDomain{domain}; throw error::UnknownDomain{domain};
} }
if (version > OperatorsBridge::LATEST_SUPPORTED_OPSET_VERSION) if (domain == "" && version > OperatorsBridge::LATEST_SUPPORTED_ONNX_OPSET_VERSION)
{ {
NGRAPH_WARN << "Currently operator set version: " << version << " is unsupported." NGRAPH_WARN << "Currently ONNX operator set version: " << version
<< " Falling back to: " << " is unsupported. Falling back to: "
<< OperatorsBridge::LATEST_SUPPORTED_OPSET_VERSION; << OperatorsBridge::LATEST_SUPPORTED_ONNX_OPSET_VERSION;
} }
for (const auto& op : dm->second) for (const auto& op : dm->second)
{ {
...@@ -277,6 +289,7 @@ namespace ngraph ...@@ -277,6 +289,7 @@ namespace ngraph
REGISTER_OPERATOR("Tan", 1, tan); REGISTER_OPERATOR("Tan", 1, tan);
REGISTER_OPERATOR("Tanh", 1, tanh); REGISTER_OPERATOR("Tanh", 1, tanh);
REGISTER_OPERATOR("ThresholdedRelu", 1, thresholded_relu); REGISTER_OPERATOR("ThresholdedRelu", 1, thresholded_relu);
REGISTER_OPERATOR("TopK", 1, topk);
REGISTER_OPERATOR("Transpose", 1, transpose); REGISTER_OPERATOR("Transpose", 1, transpose);
REGISTER_OPERATOR("Unsqueeze", 1, unsqueeze); REGISTER_OPERATOR("Unsqueeze", 1, unsqueeze);
REGISTER_OPERATOR("Xor", 1, logical_xor); REGISTER_OPERATOR("Xor", 1, logical_xor);
......
...@@ -62,16 +62,17 @@ namespace ngraph ...@@ -62,16 +62,17 @@ namespace ngraph
class OperatorsBridge class OperatorsBridge
{ {
public: public:
static constexpr const int LATEST_SUPPORTED_OPSET_VERSION = ONNX_OPSET_VERSION; static constexpr const int LATEST_SUPPORTED_ONNX_OPSET_VERSION = ONNX_OPSET_VERSION;
OperatorsBridge(const OperatorsBridge&) = delete; OperatorsBridge(const OperatorsBridge&) = delete;
OperatorsBridge& operator=(const OperatorsBridge&) = delete; OperatorsBridge& operator=(const OperatorsBridge&) = delete;
OperatorsBridge(OperatorsBridge&&) = delete; OperatorsBridge(OperatorsBridge&&) = delete;
OperatorsBridge& operator=(OperatorsBridge&&) = delete; OperatorsBridge& operator=(OperatorsBridge&&) = delete;
static OperatorSet get_operator_set(std::int64_t version, const std::string& domain) static OperatorSet get_operator_set(const std::string& domain,
std::int64_t version = -1)
{ {
return instance()._get_operator_set(version, domain); return instance()._get_operator_set(domain, version);
} }
static void register_operator(const std::string& name, static void register_operator(const std::string& name,
...@@ -90,6 +91,20 @@ namespace ngraph ...@@ -90,6 +91,20 @@ namespace ngraph
} }
private: private:
// Registered operators structure
// {
// domain_1: {
// op_type_1: {
// version_1: {func_handle},
// version_2: {func_handle},
// ...
// },
// op_type_2: { ... }
// ...
// },
// domain_2: { ... },
// ...
// }
std::unordered_map<std::string, std::unordered_map<std::string,
std::unordered_map<std::string, std::map<std::int64_t, Operator>>> std::unordered_map<std::string, std::map<std::int64_t, Operator>>>
m_map; m_map;
...@@ -106,7 +121,8 @@ namespace ngraph ...@@ -106,7 +121,8 @@ namespace ngraph
std::int64_t version, std::int64_t version,
const std::string& domain, const std::string& domain,
Operator fn); Operator fn);
OperatorSet _get_operator_set(std::int64_t version, const std::string& domain); OperatorSet _get_operator_set(const std::string& domain, std::int64_t version);
bool _is_operator_registered(const std::string& name, bool _is_operator_registered(const std::string& name,
std::int64_t version, std::int64_t version,
const std::string& domain); const std::string& domain);
......
...@@ -191,3 +191,26 @@ void Function::replace_node(std::shared_ptr<Node> old, std::shared_ptr<Node> rep ...@@ -191,3 +191,26 @@ void Function::replace_node(std::shared_ptr<Node> old, std::shared_ptr<Node> rep
{ {
ngraph::replace_node(old, repl); ngraph::replace_node(old, repl);
} }
size_t Function::get_graph_size() const
{
size_t total_size = 0;
for (auto node : get_ops())
{
total_size += sizeof(*node);
if (node->description() == "Constant")
{
const Shape& shape = node->get_outputs()[0].get_shape();
size_t const_size = node->get_outputs()[0].get_element_type().size();
if (shape.size() == 0)
{
total_size += const_size;
}
else
{
total_size += (const_size * shape_size(node->get_outputs()[0].get_shape()));
}
}
}
return total_size;
}
...@@ -85,6 +85,11 @@ namespace ngraph ...@@ -85,6 +85,11 @@ namespace ngraph
void validate_nodes_and_infer_types(); void validate_nodes_and_infer_types();
/// \brief Returns the sum of the size of all nodes in the graph plus the size of
/// all constant data. This has little value beyond comparing the relative size of
/// graphs and should not be considered the actual memory consumption of a graph.
size_t get_graph_size() const;
protected: protected:
ResultVector m_results; ResultVector m_results;
ParameterVector m_parameters; ParameterVector m_parameters;
......
...@@ -491,27 +491,3 @@ void Node::validate_and_infer_elementwise_logical() ...@@ -491,27 +491,3 @@ void Node::validate_and_infer_elementwise_logical()
set_output_type(0, element::boolean, args_pshape); set_output_type(0, element::boolean, args_pshape);
} }
bool Node::validate_punt_if_dynamic()
{
bool any_dynamic = false;
for (auto& input : m_inputs)
{
any_dynamic |= input.get_partial_shape().is_dynamic();
any_dynamic |= input.get_element_type().is_dynamic();
}
if (any_dynamic)
{
for (size_t i = 0; i < get_output_size(); i++)
{
set_output_type(i, element::dynamic, PartialShape::dynamic());
}
return true;
}
else
{
return false;
}
}
...@@ -99,17 +99,6 @@ namespace ngraph ...@@ -99,17 +99,6 @@ namespace ngraph
void validate_and_infer_elementwise_arithmetic(); void validate_and_infer_elementwise_arithmetic();
void validate_and_infer_elementwise_logical(); void validate_and_infer_elementwise_logical();
// Temporary hack while partial shape propagation is being implemented. If any input has
// dynamic shape or dynamic element type, sets all outputs to have a shape of dynamic
// rank and dynamic element type. Ops where we haven't yet implemented partial shape
// propagation can add this boilerplate at the top of their validate_and_infer_types():
//
// if (validate_punt_if_dynamic())
// {
// return;
// }
bool validate_punt_if_dynamic();
Node(const std::string& node_type, const NodeVector& arguments, size_t output_size = 1); Node(const std::string& node_type, const NodeVector& arguments, size_t output_size = 1);
virtual void generate_adjoints(autodiff::Adjoints& adjoints, const NodeVector& deltas) {} virtual void generate_adjoints(autodiff::Adjoints& adjoints, const NodeVector& deltas) {}
......
...@@ -45,6 +45,11 @@ ...@@ -45,6 +45,11 @@
// //
// It's that easy. You can use this for fun and profit. // It's that easy. You can use this for fun and profit.
// Guard: this header must be included with NGRAPH_OP(name, namespace) already
// defined by the includer. Emit a hard error when it is missing, then define a
// no-op fallback — presumably so tooling that keeps parsing past #error still
// sees a valid op list (TODO confirm intent).
#ifndef NGRAPH_OP
#error "NGRAPH_OP not defined"
#define NGRAPH_OP(x, y)
#endif
NGRAPH_OP(Abs, ngraph::op) NGRAPH_OP(Abs, ngraph::op)
NGRAPH_OP(Acos, ngraph::op) NGRAPH_OP(Acos, ngraph::op)
NGRAPH_OP(Add, ngraph::op) NGRAPH_OP(Add, ngraph::op)
...@@ -103,6 +108,13 @@ NGRAPH_OP(Parameter, ngraph::op) ...@@ -103,6 +108,13 @@ NGRAPH_OP(Parameter, ngraph::op)
NGRAPH_OP(Power, ngraph::op) NGRAPH_OP(Power, ngraph::op)
NGRAPH_OP(Product, ngraph::op) NGRAPH_OP(Product, ngraph::op)
NGRAPH_OP(Quantize, ngraph::op) NGRAPH_OP(Quantize, ngraph::op)
NGRAPH_OP(QuantizedAvgPool, ngraph::op)
NGRAPH_OP(QuantizedConvolutionBias, ngraph::op)
NGRAPH_OP(QuantizedConvolutionBiasAdd, ngraph::op)
NGRAPH_OP(QuantizedConvolutionBiasSignedAdd, ngraph::op)
NGRAPH_OP(QuantizedConvolutionRelu, ngraph::op)
NGRAPH_OP(QuantizedConvolution, ngraph::op)
NGRAPH_OP(QuantizedMaxPool, ngraph::op)
NGRAPH_OP(Relu, ngraph::op) NGRAPH_OP(Relu, ngraph::op)
NGRAPH_OP(ReluBackprop, ngraph::op) NGRAPH_OP(ReluBackprop, ngraph::op)
NGRAPH_OP(ReplaceSlice, ngraph::op) NGRAPH_OP(ReplaceSlice, ngraph::op)
......
...@@ -529,8 +529,11 @@ bool ngraph::pass::ReshapeSinking::run_on_function(std::shared_ptr<ngraph::Funct ...@@ -529,8 +529,11 @@ bool ngraph::pass::ReshapeSinking::run_on_function(std::shared_ptr<ngraph::Funct
// scenarios and marks some reshapes as too "toxic" to sink // scenarios and marks some reshapes as too "toxic" to sink
// For now, this heuristic works really well. // For now, this heuristic works really well.
// Note, get_users(*true*) which means we only care about // Note, get_users(*true*) which means we only care about
// live users of Reshape // live users of Reshape. However get_users(*true*) cause
if (slice->get_argument(0)->get_users(true).size() == 1) // significant time increase on graphs with many slice ops,
// so for now we are removing the "true" check and letting the backend
// handle reshape sinking for the slice operation.
if (slice->get_argument(0)->get_users().size() == 1)
{ {
sink_slice(slice, reorders, reshapes_to_delete); sink_slice(slice, reorders, reshapes_to_delete);
} }
......
...@@ -133,7 +133,7 @@ if (NGRAPH_HALIDE) ...@@ -133,7 +133,7 @@ if (NGRAPH_HALIDE)
) )
endif() endif()
if (NGRAPH_TBB_ENABLE AND NOT WIN32) if (NGRAPH_TBB_ENABLE AND NOT (WIN32 OR APPLE))
include(${TBB_ROOT}/cmake/TBBBuild.cmake) include(${TBB_ROOT}/cmake/TBBBuild.cmake)
tbb_build(TBB_ROOT ${TBB_ROOT} MAKE_ARGS tbb_build_dir=${CMAKE_CURRENT_BINARY_DIR}/tbb_build tbb_build(TBB_ROOT ${TBB_ROOT} MAKE_ARGS tbb_build_dir=${CMAKE_CURRENT_BINARY_DIR}/tbb_build
tbb_build_prefix=tbb CONFIG_DIR TBB_DIR) tbb_build_prefix=tbb CONFIG_DIR TBB_DIR)
......
...@@ -306,15 +306,41 @@ bool runtime::cpu::pass::CPURnnMatFusion::run_on_function(std::shared_ptr<Functi ...@@ -306,15 +306,41 @@ bool runtime::cpu::pass::CPURnnMatFusion::run_on_function(std::shared_ptr<Functi
NodeVector params = p.first; NodeVector params = p.first;
NodeVector& op_nodes = p.second; NodeVector& op_nodes = p.second;
auto data_node = params.at(Type::DATA); // we will sort the captured Add(Dot(X, W) + B) as per the slice ordering of X
// this will simplify the replace_node logic
auto compare_slices = [&](const std::shared_ptr<Node> node1,
const std::shared_ptr<Node> node2) {
const auto node1_slice =
std::static_pointer_cast<op::Slice>(op_seg_map[node1].at(Type::DATA));
const auto node2_slice =
std::static_pointer_cast<op::Slice>(op_seg_map[node2].at(Type::DATA));
return (node1_slice->get_lower_bounds() < node2_slice->get_lower_bounds() &&
node1_slice->get_upper_bounds() < node2_slice->get_upper_bounds());
};
std::sort(op_nodes.begin(), op_nodes.end(), compare_slices);
// we fuse all the data slices captured in the pattern to make bigger GEMM call
auto fuse_data_slices = [&]() {
NodeVector data_slices;
for (auto& op : op_nodes)
{
auto data_node = op_seg_map.at(op).at(Type::DATA);
data_slices.push_back(data_node);
}
return std::make_shared<op::Concat>(data_slices, 0);
};
auto data_node = op_nodes.size() > 1 ? fuse_data_slices() : params.at(Type::DATA);
auto weights_node = params.at(Type::WEIGHTS); auto weights_node = params.at(Type::WEIGHTS);
auto bias_node = params.at(Type::BIAS); auto bias_node = params.at(Type::BIAS);
auto& data_shape = data_node->get_shape();
const auto& data_shape = data_node->get_shape();
// construct new op nodes // construct new op nodes
auto data_order = ngraph::get_default_order(data_node->get_shape()); auto data_reshape_node =
auto data_reshape_node = std::make_shared<op::Reshape>( std::make_shared<op::Reshape>(data_node,
data_node, data_order, Shape{data_shape[0] * data_shape[1], data_shape[2]}); AxisVector{0, 1, 2},
Shape{data_shape[0] * data_shape[1], data_shape[2]});
auto old_weights_reshape_node = op_seg_map.at(op_nodes.at(0)).at(Type::WEIGHTS); auto old_weights_reshape_node = op_seg_map.at(op_nodes.at(0)).at(Type::WEIGHTS);
auto weights_reshape_node = auto weights_reshape_node =
...@@ -327,30 +353,16 @@ bool runtime::cpu::pass::CPURnnMatFusion::run_on_function(std::shared_ptr<Functi ...@@ -327,30 +353,16 @@ bool runtime::cpu::pass::CPURnnMatFusion::run_on_function(std::shared_ptr<Functi
auto add_node = std::make_shared<op::Add>(dot_node, bias_broadcast_node); auto add_node = std::make_shared<op::Add>(dot_node, bias_broadcast_node);
const auto& add_shape = add_node->get_shape(); const auto& add_shape = add_node->get_shape();
// we will sort the captured Add(Dot(X, W) + B) as per the slice ordering of X
// this will simplify the replace_node logic
auto compare_slices = [&](const std::shared_ptr<Node> node1,
const std::shared_ptr<Node> node2) {
const auto node1_slice =
std::static_pointer_cast<op::Slice>(op_seg_map[node1].at(Type::DATA));
const auto node2_slice =
std::static_pointer_cast<op::Slice>(op_seg_map[node2].at(Type::DATA));
return (node1_slice->get_lower_bounds() < node2_slice->get_lower_bounds() &&
node1_slice->get_upper_bounds() < node2_slice->get_upper_bounds());
};
std::sort(op_nodes.begin(), op_nodes.end(), compare_slices);
size_t num_timesteps = op_nodes.size(); size_t num_timesteps = op_nodes.size();
size_t batch_size = add_shape[0] / num_timesteps; size_t batch_size = add_shape[0] / num_timesteps;
size_t feature_size = add_shape[1];
// create a slice for each user of the dot op matching the original dot op's output // create a slice for each user of the dot op matching the original dot op's output
for (size_t i = 0, start_index = 0; i < op_nodes.size(); i++, start_index += batch_size) for (size_t i = 0, start_index = 0; i < op_nodes.size(); i++, start_index += batch_size)
{ {
// calculate the lower and upper bounds for the slice of the new fused node // calculate the lower and upper bounds for the slice of the new fused node
// ((<x0 | x1..|xt>*W)+b), which will used to replace the nodes matched in the pattern // ((<x0 | x1..|xt>*W)+b), which will used to replace the nodes matched in the pattern
const Coordinate lower_bounds{start_index, 0}; const Coordinate lower_bounds{start_index, 0};
const Coordinate upper_bounds{start_index + batch_size, add_shape[1]}; const Coordinate upper_bounds{start_index + batch_size, feature_size};
auto slice_node = std::make_shared<op::Slice>(add_node, lower_bounds, upper_bounds); auto slice_node = std::make_shared<op::Slice>(add_node, lower_bounds, upper_bounds);
......
...@@ -58,6 +58,11 @@ ...@@ -58,6 +58,11 @@
#include "ngraph/op/equal.hpp" #include "ngraph/op/equal.hpp"
#include "ngraph/op/exp.hpp" #include "ngraph/op/exp.hpp"
#include "ngraph/op/experimental/generate_mask.hpp" #include "ngraph/op/experimental/generate_mask.hpp"
#include "ngraph/op/experimental/quantized_avg_pool.hpp"
#include "ngraph/op/experimental/quantized_conv.hpp"
#include "ngraph/op/experimental/quantized_conv_bias.hpp"
#include "ngraph/op/experimental/quantized_conv_relu.hpp"
#include "ngraph/op/experimental/quantized_max_pool.hpp"
#include "ngraph/op/experimental/shape_of.hpp" #include "ngraph/op/experimental/shape_of.hpp"
#include "ngraph/op/floor.hpp" #include "ngraph/op/floor.hpp"
#include "ngraph/op/get_output_element.hpp" #include "ngraph/op/get_output_element.hpp"
...@@ -880,6 +885,41 @@ std::string runtime::gpu::GPU_Emitter::emit_Quantize(EMIT_ARGS) ...@@ -880,6 +885,41 @@ std::string runtime::gpu::GPU_Emitter::emit_Quantize(EMIT_ARGS)
throw unsupported_op("Unsupported op '" + node->description() + "'"); throw unsupported_op("Unsupported op '" + node->description() + "'");
} }
// The GPU backend implements none of the quantized ops; each emitter below
// rejects its node with unsupported_op so compilation fails explicitly rather
// than generating an incorrect kernel.
std::string runtime::gpu::GPU_Emitter::emit_QuantizedAvgPool(EMIT_ARGS)
{
    throw unsupported_op("Unsupported op '" + node->description() + "'");
}

std::string runtime::gpu::GPU_Emitter::emit_QuantizedConvolution(EMIT_ARGS)
{
    throw unsupported_op("Unsupported op '" + node->description() + "'");
}

std::string runtime::gpu::GPU_Emitter::emit_QuantizedConvolutionBias(EMIT_ARGS)
{
    throw unsupported_op("Unsupported op '" + node->description() + "'");
}

std::string runtime::gpu::GPU_Emitter::emit_QuantizedConvolutionBiasAdd(EMIT_ARGS)
{
    throw unsupported_op("Unsupported op '" + node->description() + "'");
}

std::string runtime::gpu::GPU_Emitter::emit_QuantizedConvolutionBiasSignedAdd(EMIT_ARGS)
{
    throw unsupported_op("Unsupported op '" + node->description() + "'");
}

std::string runtime::gpu::GPU_Emitter::emit_QuantizedConvolutionRelu(EMIT_ARGS)
{
    throw unsupported_op("Unsupported op '" + node->description() + "'");
}

std::string runtime::gpu::GPU_Emitter::emit_QuantizedMaxPool(EMIT_ARGS)
{
    throw unsupported_op("Unsupported op '" + node->description() + "'");
}
std::string runtime::gpu::GPU_Emitter::emit_Relu(EMIT_ARGS) std::string runtime::gpu::GPU_Emitter::emit_Relu(EMIT_ARGS)
{ {
return emit_elementwise<ngraph::op::Relu>(compiled_function, function_name, node, args, out); return emit_elementwise<ngraph::op::Relu>(compiled_function, function_name, node, args, out);
......
...@@ -1781,6 +1781,13 @@ runtime::intelgpu::IntelGPUExecutable::IntelGPUExecutable(shared_ptr<Function> f ...@@ -1781,6 +1781,13 @@ runtime::intelgpu::IntelGPUExecutable::IntelGPUExecutable(shared_ptr<Function> f
case OP_TYPEID::BroadcastLike: case OP_TYPEID::BroadcastLike:
case OP_TYPEID::Dequantize: case OP_TYPEID::Dequantize:
case OP_TYPEID::Quantize: case OP_TYPEID::Quantize:
case OP_TYPEID::QuantizedAvgPool:
case OP_TYPEID::QuantizedConvolutionBias:
case OP_TYPEID::QuantizedConvolutionBiasAdd:
case OP_TYPEID::QuantizedConvolutionBiasSignedAdd:
case OP_TYPEID::QuantizedConvolutionRelu:
case OP_TYPEID::QuantizedConvolution:
case OP_TYPEID::QuantizedMaxPool:
case OP_TYPEID::ReplaceSlice: case OP_TYPEID::ReplaceSlice:
case OP_TYPEID::GenerateMask: case OP_TYPEID::GenerateMask:
case OP_TYPEID::ReverseSequence: case OP_TYPEID::ReverseSequence:
......
...@@ -145,8 +145,8 @@ namespace ngraph ...@@ -145,8 +145,8 @@ namespace ngraph
class INTBackend; class INTBackend;
class INTExecutable; class INTExecutable;
} }
} } // namespace runtime
} } // namespace ngraph
class ngraph::runtime::interpreter::INTBackend : public Backend class ngraph::runtime::interpreter::INTBackend : public Backend
{ {
...@@ -1024,6 +1024,17 @@ private: ...@@ -1024,6 +1024,17 @@ private:
break; break;
} }
// The reference interpreter has no kernels for the quantized ops; reject them
// loudly with unsupported_op instead of silently mis-executing the graph.
case OP_TYPEID::QuantizedAvgPool:
case OP_TYPEID::QuantizedConvolutionBias:
case OP_TYPEID::QuantizedConvolutionBiasAdd:
case OP_TYPEID::QuantizedConvolutionBiasSignedAdd:
case OP_TYPEID::QuantizedConvolutionRelu:
case OP_TYPEID::QuantizedConvolution:
case OP_TYPEID::QuantizedMaxPool:
{
    throw unsupported_op("Unsupported op '" + node.description() +
                         "' in Interpreter back end.");
}
case OP_TYPEID::Relu: case OP_TYPEID::Relu:
{ {
size_t element_count = shape_size(node.get_output_shape(0)); size_t element_count = shape_size(node.get_output_shape(0));
......
...@@ -48,6 +48,11 @@ ...@@ -48,6 +48,11 @@
#include "ngraph/op/equal.hpp" #include "ngraph/op/equal.hpp"
#include "ngraph/op/exp.hpp" #include "ngraph/op/exp.hpp"
#include "ngraph/op/experimental/generate_mask.hpp" #include "ngraph/op/experimental/generate_mask.hpp"
#include "ngraph/op/experimental/quantized_avg_pool.hpp"
#include "ngraph/op/experimental/quantized_conv.hpp"
#include "ngraph/op/experimental/quantized_conv_bias.hpp"
#include "ngraph/op/experimental/quantized_conv_relu.hpp"
#include "ngraph/op/experimental/quantized_max_pool.hpp"
#include "ngraph/op/experimental/shape_of.hpp" #include "ngraph/op/experimental/shape_of.hpp"
#include "ngraph/op/floor.hpp" #include "ngraph/op/floor.hpp"
#include "ngraph/op/get_output_element.hpp" #include "ngraph/op/get_output_element.hpp"
...@@ -957,6 +962,66 @@ static shared_ptr<ngraph::Function> ...@@ -957,6 +962,66 @@ static shared_ptr<ngraph::Function>
node = make_shared<op::Quantize>(args[0], args[1], args[2], type, axes, round_mode); node = make_shared<op::Quantize>(args[0], args[1], args[2], type, axes, round_mode);
break; break;
} }
case OP_TYPEID::QuantizedAvgPool:
{
    // Rebuild the pooling attributes written out by the serializer; .at()
    // throws on a missing key, so all of these fields are mandatory.
    auto window_shape = node_js.at("window_shape").get<vector<size_t>>();
    auto window_movement_strides =
        node_js.at("window_movement_strides").get<vector<size_t>>();
    auto padding_below = node_js.at("padding_below").get<vector<size_t>>();
    auto padding_above = node_js.at("padding_above").get<vector<size_t>>();
    auto include_padding_in_avg_computation =
        node_js.at("include_padding_in_avg_computation").get<bool>();
    node = make_shared<op::QuantizedAvgPool>(args[0],
                                             window_shape,
                                             window_movement_strides,
                                             padding_below,
                                             padding_above,
                                             include_padding_in_avg_computation);
    break;
}
// NOTE(review): the four quantized-convolution fusion variants below are
// placeholder cases — they break out with 'node' left unassigned. Confirm
// that round-tripping these ops is not yet expected to work.
case OP_TYPEID::QuantizedConvolutionBias: { break;
}
case OP_TYPEID::QuantizedConvolutionBiasAdd: { break;
}
case OP_TYPEID::QuantizedConvolutionBiasSignedAdd: { break;
}
case OP_TYPEID::QuantizedConvolutionRelu: { break;
}
case OP_TYPEID::QuantizedConvolution:
{
    auto window_movement_strides =
        node_js.at("window_movement_strides").get<vector<size_t>>();
    auto window_dilation_strides =
        node_js.at("window_dilation_strides").get<vector<size_t>>();
    auto padding_below = node_js.at("padding_below").get<vector<std::ptrdiff_t>>();
    auto padding_above = node_js.at("padding_above").get<vector<std::ptrdiff_t>>();
    // operator[] (unlike .at()) does not throw on a missing key.
    auto data_dilation_strides = node_js["data_dilation_strides"];
    // NOTE(review): this constructs an op::Convolution, not an
    // op::QuantizedConvolution — a deserialized graph would silently contain
    // the un-quantized op. Looks like a copy/paste placeholder; verify.
    node =
        make_shared<op::Convolution>(args[0],
                                     args[1],
                                     window_movement_strides,
                                     window_dilation_strides,
                                     padding_below,
                                     padding_above,
                                     data_dilation_strides.get<std::vector<size_t>>());
    break;
}
case OP_TYPEID::QuantizedMaxPool:
{
    auto window_shape = node_js.at("window_shape").get<vector<size_t>>();
    auto window_movement_strides =
        node_js.at("window_movement_strides").get<vector<size_t>>();
    // For backwards compatibility the two padding fields may be absent from
    // older serializations (always both or neither); operator[] is used so a
    // missing key does not throw at lookup time.
    auto padding_below_maybe = node_js["padding_below"];
    auto padding_above_maybe = node_js["padding_above"];
    auto padding_below = padding_below_maybe.get<vector<size_t>>();
    auto padding_above = padding_above_maybe.get<vector<size_t>>();
    node = make_shared<op::QuantizedMaxPool>(
        args[0], window_shape, window_movement_strides, padding_below, padding_above);
    break;
}
case OP_TYPEID::Relu: case OP_TYPEID::Relu:
{ {
node = make_shared<op::Relu>(args[0]); node = make_shared<op::Relu>(args[0]);
...@@ -1507,6 +1572,43 @@ static json write(const Node& n, bool binary_constant_data) ...@@ -1507,6 +1572,43 @@ static json write(const Node& n, bool binary_constant_data)
node["round_mode"] = tmp->get_round_mode(); node["round_mode"] = tmp->get_round_mode();
break; break;
} }
case OP_TYPEID::QuantizedAvgPool:
{
    // Persist every constructor attribute so deserialization can rebuild the op.
    auto tmp = dynamic_cast<const op::QuantizedAvgPool*>(&n);
    node["window_shape"] = tmp->get_window_shape();
    node["window_movement_strides"] = tmp->get_window_movement_strides();
    node["padding_below"] = tmp->get_padding_below();
    node["padding_above"] = tmp->get_padding_above();
    node["include_padding_in_avg_computation"] = tmp->get_include_padding_in_avg_computation();
    break;
}
// NOTE(review): the four quantized-convolution fusion variants below
// serialize no attributes at all (placeholder cases) — confirm this is
// intentional, since their deserialization counterparts are placeholders too.
case OP_TYPEID::QuantizedConvolutionBias: { break;
}
case OP_TYPEID::QuantizedConvolutionBiasAdd: { break;
}
case OP_TYPEID::QuantizedConvolutionBiasSignedAdd: { break;
}
case OP_TYPEID::QuantizedConvolutionRelu: { break;
}
case OP_TYPEID::QuantizedConvolution:
{
    // Spatial attributes only; quantization parameters travel as op inputs.
    // TODO confirm — inferred from the attribute list, not visible here.
    auto tmp = dynamic_cast<const op::QuantizedConvolution*>(&n);
    node["window_movement_strides"] = tmp->get_window_movement_strides();
    node["window_dilation_strides"] = tmp->get_window_dilation_strides();
    node["padding_below"] = tmp->get_padding_below();
    node["padding_above"] = tmp->get_padding_above();
    node["data_dilation_strides"] = tmp->get_data_dilation_strides();
    break;
}
case OP_TYPEID::QuantizedMaxPool:
{
    auto tmp = dynamic_cast<const op::QuantizedMaxPool*>(&n);
    node["window_shape"] = tmp->get_window_shape();
    node["window_movement_strides"] = tmp->get_window_movement_strides();
    node["padding_below"] = tmp->get_padding_below();
    node["padding_above"] = tmp->get_padding_above();
    break;
}
case OP_TYPEID::Relu: { break; case OP_TYPEID::Relu: { break;
} }
case OP_TYPEID::ReluBackprop: { break; case OP_TYPEID::ReluBackprop: { break;
......
...@@ -32,6 +32,7 @@ set(SRC ...@@ -32,6 +32,7 @@ set(SRC
control_dependencies.cpp control_dependencies.cpp
coordinate.cpp coordinate.cpp
copy.cpp copy.cpp
core.cpp
cpio.cpp cpio.cpp
cse.cpp cse.cpp
element_type.cpp element_type.cpp
......
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "gtest/gtest.h"
#include "ngraph/file_util.hpp"
#include "ngraph/ngraph.hpp"
#include "ngraph/serializer.hpp"
using namespace ngraph;
using namespace std;
// Sanity check for Function::get_graph_size(): a large LSTM model must report
// a strictly greater graph size than a small MLP model. Absolute values are
// not meaningful; only the relative ordering is asserted.
TEST(core, function_size)
{
    const string small_model = file_util::path_join(SERIALIZED_ZOO, "mxnet/mnist_mlp_forward.json");
    const string large_model = file_util::path_join(SERIALIZED_ZOO, "mxnet/10_bucket_LSTM.json");
    const auto small_size = deserialize(small_model)->get_graph_size();
    const auto large_size = deserialize(large_model)->get_graph_size();
    EXPECT_GT(large_size, small_size);
}
...@@ -3450,3 +3450,26 @@ TEST(cpu_fusion, rnn_input_fusion_inter_vs_cpu) ...@@ -3450,3 +3450,26 @@ TEST(cpu_fusion, rnn_input_fusion_inter_vs_cpu)
EXPECT_TRUE(test::all_close(cpu_results.at(i), int_results.at(i), 1.0e-4f, 1.0e-4f)); EXPECT_TRUE(test::all_close(cpu_results.at(i), int_results.at(i), 1.0e-4f, 1.0e-4f));
} }
} }
// Cross-backend check for GRU input fusion: run the same serialized GRU graph
// on INTERPRETER (reference) and CPU (which applies the fusion passes) with
// identical random inputs and require all outputs to agree within tolerance.
TEST(cpu_fusion, validate_fuse_gru_inputs)
{
    const std::string file_name("mxnet/gru_debug.json");
    auto cpu_func = make_function_from_file(file_name);
    auto int_func = make_function_from_file(file_name);

    // One uniformly-random buffer per graph parameter.
    test::Uniform<float> rng(-10.0f, 10.0f);
    vector<vector<float>> args;
    for (const shared_ptr<op::Parameter>& param : int_func->get_parameters())
    {
        args.emplace_back(shape_size(param->get_shape()));
        rng.initialize(args.back());
    }

    auto int_results = execute(int_func, args, "INTERPRETER");
    auto cpu_results = execute(cpu_func, args, "CPU");
    for (size_t out_idx = 0; out_idx < cpu_results.size(); out_idx++)
    {
        EXPECT_TRUE(
            test::all_close(cpu_results.at(out_idx), int_results.at(out_idx), 1.0e-4f, 1.0e-4f));
    }
}
[{"name":"Function_12","ops":[{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_1114","op":"Parameter","outputs":["Parameter_1114_0"],"shape":[30]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_1113","op":"Parameter","outputs":["Parameter_1113_0"],"shape":[30,10]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_1105","op":"Parameter","outputs":["Parameter_1105_0"],"shape":[30]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_1104","op":"Parameter","outputs":["Parameter_1104_0"],"shape":[30,20]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_993","op":"Parameter","outputs":["Parameter_993_0"],"shape":[30]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_992","op":"Parameter","outputs":["Parameter_992_0"],"shape":[30,10]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_984","op":"Parameter","outputs":["Parameter_984_0"],"shape":[30]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_983","op":"Parameter","outputs":["Parameter_983_0"],"shape":[30,20]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_917","op":"Parameter","outputs":["Parameter_917_0"],"shape":[30]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_916","op":"Parameter","outputs":["Parameter_916_0"],"shape":[30,10]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_901","op":"Parameter","outputs":["Parameter_901_0"],"shape":[30]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_900","op":"Parameter","outputs":["Parameter_900_0"],"shape":[30,10]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Param
eter_867","op":"Parameter","outputs":["Parameter_867_0"],"shape":[30]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_866","op":"Parameter","outputs":["Parameter_866_0"],"shape":[30,10]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_858","op":"Parameter","outputs":["Parameter_858_0"],"shape":[30]},{"cacheable":true,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_857","op":"Parameter","outputs":["Parameter_857_0"],"shape":[30,10]},{"cacheable":false,"control_deps":[],"element_type":"float","inputs":[],"name":"Parameter_854","op":"Parameter","outputs":["Parameter_854_0"],"shape":[2,2,10]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1111","op":"Constant","outputs":["Constant_1111_0"],"shape":[],"value":["0"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_914","op":"Constant","outputs":["Constant_914_0"],"shape":[],"value":["0"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_924","op":"Constant","outputs":["Constant_924_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_937","op":"Constant","outputs":["Constant_937_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_930","op":"Constant","outputs":["Constant_930_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_864","op":"Constant","outputs":["Constant_864_0"],"shape":[],"value":["0"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_874","op":"Constant","outputs":["Constant_874_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_887","op":"Constant","outputs":["Constant_887_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_880","op":"Constant","outputs":["Constant_880_0"]
,"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1038","op":"Constant","outputs":["Constant_1038_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1051","op":"Constant","outputs":["Constant_1051_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1044","op":"Constant","outputs":["Constant_1044_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1121","op":"Constant","outputs":["Constant_1121_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1134","op":"Constant","outputs":["Constant_1134_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1127","op":"Constant","outputs":["Constant_1127_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_956","op":"Constant","outputs":["Constant_956_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_969","op":"Constant","outputs":["Constant_969_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_962","op":"Constant","outputs":["Constant_962_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1159","op":"Constant","outputs":["Constant_1159_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1172","op":"Constant","outputs":["Constant_1172_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1165","op":"Constant","outputs":["Constant_1165_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_990","op":"Constant","outputs":["Constant_990_0"],"shape":[],"value":["0"]},{"control_deps":[],"element_type":"float","i
nputs":[],"name":"Constant_1000","op":"Constant","outputs":["Constant_1000_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1013","op":"Constant","outputs":["Constant_1013_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1006","op":"Constant","outputs":["Constant_1006_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1076","op":"Constant","outputs":["Constant_1076_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1089","op":"Constant","outputs":["Constant_1089_0"],"shape":[],"value":["1"]},{"control_deps":[],"element_type":"float","inputs":[],"name":"Constant_1082","op":"Constant","outputs":["Constant_1082_0"],"shape":[],"value":["1"]},{"axes":[0],"control_deps":[],"inputs":["Parameter_1114"],"name":"Broadcast_1117","op":"Broadcast","outputs":["Broadcast_1117_0"],"shape":[2,30]},{"axes":[0],"control_deps":[],"inputs":["Parameter_1114"],"name":"Broadcast_1155","op":"Broadcast","outputs":["Broadcast_1155_0"],"shape":[2,30]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_1113"],"name":"Reshape_1115","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_1115_0"]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_1113"],"name":"Reshape_1153","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_1153_0"]},{"axes":[0],"control_deps":[],"inputs":["Parameter_1105"],"name":"Broadcast_1108","op":"Broadcast","outputs":["Broadcast_1108_0"],"shape":[2,30]},{"axes":[0],"control_deps":[],"inputs":["Parameter_1105"],"name":"Broadcast_1150","op":"Broadcast","outputs":["Broadcast_1150_0"],"shape":[2,30]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_1104"],"name":"Reshape_1106","op":"Reshape","output_shape":[20,30],"outputs":["Reshape_1106_0"]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_1104"],"name":"Reshape_1148","op":"
Reshape","output_shape":[20,30],"outputs":["Reshape_1148_0"]},{"axes":[0],"control_deps":[],"inputs":["Parameter_993"],"name":"Broadcast_996","op":"Broadcast","outputs":["Broadcast_996_0"],"shape":[2,30]},{"axes":[0],"control_deps":[],"inputs":["Parameter_993"],"name":"Broadcast_1072","op":"Broadcast","outputs":["Broadcast_1072_0"],"shape":[2,30]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_992"],"name":"Reshape_994","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_994_0"]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_992"],"name":"Reshape_1070","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_1070_0"]},{"axes":[0],"control_deps":[],"inputs":["Parameter_984"],"name":"Broadcast_987","op":"Broadcast","outputs":["Broadcast_987_0"],"shape":[2,30]},{"axes":[0],"control_deps":[],"inputs":["Parameter_984"],"name":"Broadcast_1067","op":"Broadcast","outputs":["Broadcast_1067_0"],"shape":[2,30]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_983"],"name":"Reshape_985","op":"Reshape","output_shape":[20,30],"outputs":["Reshape_985_0"]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_983"],"name":"Reshape_1065","op":"Reshape","output_shape":[20,30],"outputs":["Reshape_1065_0"]},{"axes":[0],"control_deps":[],"inputs":["Parameter_917"],"name":"Broadcast_920","op":"Broadcast","outputs":["Broadcast_920_0"],"shape":[2,30]},{"axes":[0],"control_deps":[],"inputs":["Parameter_917"],"name":"Broadcast_952","op":"Broadcast","outputs":["Broadcast_952_0"],"shape":[2,30]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_916"],"name":"Reshape_918","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_918_0"]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_916"],"name":"Reshape_950","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_950_0"]},{"axes":[0],"control_deps":[],"inputs":["Parameter_901"],"name":"Broadcast_904","op":"Broadcast","outputs":["Broadcast_904_0"],"shape":[2,30]},{"axe
s":[0],"control_deps":[],"inputs":["Parameter_901"],"name":"Broadcast_911","op":"Broadcast","outputs":["Broadcast_911_0"],"shape":[2,30]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_900"],"name":"Reshape_902","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_902_0"]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_900"],"name":"Reshape_909","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_909_0"]},{"axes":[0],"control_deps":[],"inputs":["Parameter_867"],"name":"Broadcast_870","op":"Broadcast","outputs":["Broadcast_870_0"],"shape":[2,30]},{"axes":[0],"control_deps":[],"inputs":["Parameter_867"],"name":"Broadcast_1034","op":"Broadcast","outputs":["Broadcast_1034_0"],"shape":[2,30]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_866"],"name":"Reshape_868","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_868_0"]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_866"],"name":"Reshape_1032","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_1032_0"]},{"axes":[0],"control_deps":[],"inputs":["Parameter_858"],"name":"Broadcast_861","op":"Broadcast","outputs":["Broadcast_861_0"],"shape":[2,30]},{"axes":[0],"control_deps":[],"inputs":["Parameter_858"],"name":"Broadcast_1029","op":"Broadcast","outputs":["Broadcast_1029_0"],"shape":[2,30]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_857"],"name":"Reshape_859","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_859_0"]},{"control_deps":[],"input_order":[1,0],"inputs":["Parameter_857"],"name":"Reshape_1027","op":"Reshape","output_shape":[10,30],"outputs":["Reshape_1027_0"]},{"control_deps":[],"inputs":["Parameter_854"],"lower_bounds":[0,0,0],"name":"Slice_855","op":"Slice","outputs":["Slice_855_0"],"strides":[1,1,1],"upper_bounds":[2,1,10]},{"control_deps":[],"inputs":["Parameter_854"],"lower_bounds":[0,1,0],"name":"Slice_907","op":"Slice","outputs":["Slice_907_0"],"strides":[1,1,1],"upper_bounds":[2,2,10]},{"axes":[0,1],"con
trol_deps":[],"inputs":["Constant_1111"],"name":"Broadcast_1112","op":"Broadcast","outputs":["Broadcast_1112_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_914"],"name":"Broadcast_915","op":"Broadcast","outputs":["Broadcast_915_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_924"],"name":"Broadcast_925","op":"Broadcast","outputs":["Broadcast_925_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_937"],"name":"Broadcast_938","op":"Broadcast","outputs":["Broadcast_938_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_930"],"name":"Broadcast_931","op":"Broadcast","outputs":["Broadcast_931_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_864"],"name":"Broadcast_865","op":"Broadcast","outputs":["Broadcast_865_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_874"],"name":"Broadcast_875","op":"Broadcast","outputs":["Broadcast_875_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_887"],"name":"Broadcast_888","op":"Broadcast","outputs":["Broadcast_888_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_880"],"name":"Broadcast_881","op":"Broadcast","outputs":["Broadcast_881_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1038"],"name":"Broadcast_1039","op":"Broadcast","outputs":["Broadcast_1039_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1051"],"name":"Broadcast_1052","op":"Broadcast","outputs":["Broadcast_1052_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1044"],"name":"Broadcast_1045","op":"Broadcast","outputs":["Broadcast_1045_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1121"],"name":"Broadcast_1122","op":"Broadcast","outputs":["Broadcast_1122_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1134"],"name":"Broadcast_1135","op":"Broadcast","outputs":["Broadcast_1135_0"],
"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1127"],"name":"Broadcast_1128","op":"Broadcast","outputs":["Broadcast_1128_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_956"],"name":"Broadcast_957","op":"Broadcast","outputs":["Broadcast_957_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_969"],"name":"Broadcast_970","op":"Broadcast","outputs":["Broadcast_970_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_962"],"name":"Broadcast_963","op":"Broadcast","outputs":["Broadcast_963_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1159"],"name":"Broadcast_1160","op":"Broadcast","outputs":["Broadcast_1160_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1172"],"name":"Broadcast_1173","op":"Broadcast","outputs":["Broadcast_1173_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1165"],"name":"Broadcast_1166","op":"Broadcast","outputs":["Broadcast_1166_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_990"],"name":"Broadcast_991","op":"Broadcast","outputs":["Broadcast_991_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1000"],"name":"Broadcast_1001","op":"Broadcast","outputs":["Broadcast_1001_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1013"],"name":"Broadcast_1014","op":"Broadcast","outputs":["Broadcast_1014_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1006"],"name":"Broadcast_1007","op":"Broadcast","outputs":["Broadcast_1007_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1076"],"name":"Broadcast_1077","op":"Broadcast","outputs":["Broadcast_1077_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1089"],"name":"Broadcast_1090","op":"Broadcast","outputs":["Broadcast_1090_0"],"shape":[2,10]},{"axes":[0,1],"control_deps":[],"inputs":["Constant_1082"],"name":"Broadcast_1083","o
p":"Broadcast","outputs":["Broadcast_1083_0"],"shape":[2,10]},{"control_deps":[],"input_order":[0,1,2],"inputs":["Slice_855"],"name":"Reshape_856","op":"Reshape","output_shape":[2,10],"outputs":["Reshape_856_0"]},{"control_deps":[],"input_order":[0,1,2],"inputs":["Slice_907"],"name":"Reshape_908","op":"Reshape","output_shape":[2,10],"outputs":["Reshape_908_0"]},{"control_deps":[],"inputs":["Broadcast_1112","Reshape_1115"],"name":"Dot_1116","op":"Dot","outputs":["Dot_1116_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Broadcast_915","Reshape_918"],"name":"Dot_919","op":"Dot","outputs":["Dot_919_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Broadcast_865","Reshape_868"],"name":"Dot_869","op":"Dot","outputs":["Dot_869_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Broadcast_991","Reshape_994"],"name":"Dot_995","op":"Dot","outputs":["Dot_995_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Reshape_856","Reshape_859"],"name":"Dot_860","op":"Dot","outputs":["Dot_860_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Reshape_856","Reshape_902"],"name":"Dot_903","op":"Dot","outputs":["Dot_903_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Reshape_908","Reshape_909"],"name":"Dot_910","op":"Dot","outputs":["Dot_910_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Reshape_908","Reshape_1027"],"name":"Dot_1028","op":"Dot","outputs":["Dot_1028_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Dot_1116","Broadcast_1117"],"name":"Add_1118","op":"Add","outputs":["Add_1118_0"]},{"control_deps":[],"inputs":["Dot_919","Broadcast_920"],"name":"Add_921","op":"Add","outputs":["Add_921_0"]},{"control_deps":[],"inputs":["Dot_869","Broadcast_870"],"name":"Add_871","op":"Add","outputs":["Add_871_0"]},{"control_deps":[],"inputs":["Dot_995","Broadcast_996"],"name":"Add_997","op":"Add","outputs":["Add_997_0"]},{"control_deps":[],"inputs":["Dot_860","Broadcast_861"],"name":"Add_862","op":"Add","outputs"
:["Add_862_0"]},{"control_deps":[],"inputs":["Dot_903","Broadcast_904"],"name":"Add_905","op":"Add","outputs":["Add_905_0"]},{"control_deps":[],"inputs":["Dot_910","Broadcast_911"],"name":"Add_912","op":"Add","outputs":["Add_912_0"]},{"control_deps":[],"inputs":["Dot_1028","Broadcast_1029"],"name":"Add_1030","op":"Add","outputs":["Add_1030_0"]},{"control_deps":[],"inputs":["Add_1118"],"lower_bounds":[0,10],"name":"Slice_1119","op":"Slice","outputs":["Slice_1119_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_1118"],"lower_bounds":[0,0],"name":"Slice_1132","op":"Slice","outputs":["Slice_1132_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_1118"],"lower_bounds":[0,20],"name":"Slice_1140","op":"Slice","outputs":["Slice_1140_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_921"],"lower_bounds":[0,10],"name":"Slice_922","op":"Slice","outputs":["Slice_922_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_921"],"lower_bounds":[0,0],"name":"Slice_935","op":"Slice","outputs":["Slice_935_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_921"],"lower_bounds":[0,20],"name":"Slice_943","op":"Slice","outputs":["Slice_943_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_871"],"lower_bounds":[0,10],"name":"Slice_872","op":"Slice","outputs":["Slice_872_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_871"],"lower_bounds":[0,0],"name":"Slice_885","op":"Slice","outputs":["Slice_885_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_871"],"lower_bounds":[0,20],"name":"Slice_893","op":"Slice","outputs":["Slice_893_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_997"],"lower_bounds":[0,10],"name":"Slice_998","op":"Slice","outputs":["Slice_998_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_997"],"lower_bounds":[0
,0],"name":"Slice_1011","op":"Slice","outputs":["Slice_1011_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_997"],"lower_bounds":[0,20],"name":"Slice_1019","op":"Slice","outputs":["Slice_1019_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_862"],"lower_bounds":[0,10],"name":"Slice_863","op":"Slice","outputs":["Slice_863_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_862"],"lower_bounds":[0,20],"name":"Slice_883","op":"Slice","outputs":["Slice_883_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_862"],"lower_bounds":[0,0],"name":"Slice_884","op":"Slice","outputs":["Slice_884_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_905"],"lower_bounds":[0,10],"name":"Slice_906","op":"Slice","outputs":["Slice_906_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_905"],"lower_bounds":[0,20],"name":"Slice_965","op":"Slice","outputs":["Slice_965_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_905"],"lower_bounds":[0,0],"name":"Slice_966","op":"Slice","outputs":["Slice_966_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_912"],"lower_bounds":[0,10],"name":"Slice_913","op":"Slice","outputs":["Slice_913_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_912"],"lower_bounds":[0,20],"name":"Slice_933","op":"Slice","outputs":["Slice_933_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_912"],"lower_bounds":[0,0],"name":"Slice_934","op":"Slice","outputs":["Slice_934_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_1030"],"lower_bounds":[0,10],"name":"Slice_1031","op":"Slice","outputs":["Slice_1031_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_1030"],"lower_bounds":[0,20],"name":"Slice_1047","op":"Slice","outputs":["Slice_1047_0"],"strides":[1,1],"upper_
bounds":[2,30]},{"control_deps":[],"inputs":["Add_1030"],"lower_bounds":[0,0],"name":"Slice_1048","op":"Slice","outputs":["Slice_1048_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Slice_863","Slice_872"],"name":"Add_873","op":"Add","outputs":["Add_873_0"]},{"control_deps":[],"inputs":["Slice_884","Slice_885"],"name":"Add_886","op":"Add","outputs":["Add_886_0"]},{"control_deps":[],"inputs":["Slice_913","Slice_922"],"name":"Add_923","op":"Add","outputs":["Add_923_0"]},{"control_deps":[],"inputs":["Slice_934","Slice_935"],"name":"Add_936","op":"Add","outputs":["Add_936_0"]},{"control_deps":[],"inputs":["Add_873"],"name":"Negative_876","op":"Negative","outputs":["Negative_876_0"]},{"control_deps":[],"inputs":["Add_886"],"name":"Negative_889","op":"Negative","outputs":["Negative_889_0"]},{"control_deps":[],"inputs":["Add_923"],"name":"Negative_926","op":"Negative","outputs":["Negative_926_0"]},{"control_deps":[],"inputs":["Add_936"],"name":"Negative_939","op":"Negative","outputs":["Negative_939_0"]},{"control_deps":[],"inputs":["Negative_876"],"name":"Exp_877","op":"Exp","outputs":["Exp_877_0"]},{"control_deps":[],"inputs":["Negative_889"],"name":"Exp_890","op":"Exp","outputs":["Exp_890_0"]},{"control_deps":[],"inputs":["Negative_926"],"name":"Exp_927","op":"Exp","outputs":["Exp_927_0"]},{"control_deps":[],"inputs":["Negative_939"],"name":"Exp_940","op":"Exp","outputs":["Exp_940_0"]},{"control_deps":[],"inputs":["Broadcast_875","Exp_877"],"name":"Add_878","op":"Add","outputs":["Add_878_0"]},{"control_deps":[],"inputs":["Broadcast_888","Exp_890"],"name":"Add_891","op":"Add","outputs":["Add_891_0"]},{"control_deps":[],"inputs":["Broadcast_925","Exp_927"],"name":"Add_928","op":"Add","outputs":["Add_928_0"]},{"control_deps":[],"inputs":["Broadcast_938","Exp_940"],"name":"Add_941","op":"Add","outputs":["Add_941_0"]},{"control_deps":[],"inputs":["Broadcast_875","Add_878"],"name":"Divide_879","op":"Divide","outputs":["Divide_879_0"]},{"control_deps":[
],"inputs":["Broadcast_888","Add_891"],"name":"Divide_892","op":"Divide","outputs":["Divide_892_0"]},{"control_deps":[],"inputs":["Broadcast_925","Add_928"],"name":"Divide_929","op":"Divide","outputs":["Divide_929_0"]},{"control_deps":[],"inputs":["Broadcast_938","Add_941"],"name":"Divide_942","op":"Divide","outputs":["Divide_942_0"]},{"control_deps":[],"inputs":["Broadcast_881","Divide_879"],"name":"Subtract_882","op":"Subtract","outputs":["Subtract_882_0"]},{"control_deps":[],"inputs":["Divide_879","Broadcast_865"],"name":"Multiply_898","op":"Multiply","outputs":["Multiply_898_0"]},{"control_deps":[],"inputs":["Divide_892","Slice_893"],"name":"Multiply_894","op":"Multiply","outputs":["Multiply_894_0"]},{"control_deps":[],"inputs":["Broadcast_931","Divide_929"],"name":"Subtract_932","op":"Subtract","outputs":["Subtract_932_0"]},{"control_deps":[],"inputs":["Divide_929","Broadcast_915"],"name":"Multiply_948","op":"Multiply","outputs":["Multiply_948_0"]},{"control_deps":[],"inputs":["Divide_942","Slice_943"],"name":"Multiply_944","op":"Multiply","outputs":["Multiply_944_0"]},{"control_deps":[],"inputs":["Slice_883","Multiply_894"],"name":"Add_895","op":"Add","outputs":["Add_895_0"]},{"control_deps":[],"inputs":["Slice_933","Multiply_944"],"name":"Add_945","op":"Add","outputs":["Add_945_0"]},{"control_deps":[],"inputs":["Add_895"],"name":"Tanh_896","op":"Tanh","outputs":["Tanh_896_0"]},{"control_deps":[],"inputs":["Add_945"],"name":"Tanh_946","op":"Tanh","outputs":["Tanh_946_0"]},{"control_deps":[],"inputs":["Subtract_882","Tanh_896"],"name":"Multiply_897","op":"Multiply","outputs":["Multiply_897_0"]},{"control_deps":[],"inputs":["Subtract_932","Tanh_946"],"name":"Multiply_947","op":"Multiply","outputs":["Multiply_947_0"]},{"control_deps":[],"inputs":["Multiply_897","Multiply_898"],"name":"Add_899","op":"Add","outputs":["Add_899_0"]},{"control_deps":[],"inputs":["Multiply_947","Multiply_948"],"name":"Add_949","op":"Add","outputs":["Add_949_0"]},{"control_deps":[],"inp
uts":["Add_899","Reshape_1032"],"name":"Dot_1033","op":"Dot","outputs":["Dot_1033_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Add_949","Reshape_950"],"name":"Dot_951","op":"Dot","outputs":["Dot_951_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Dot_1033","Broadcast_1034"],"name":"Add_1035","op":"Add","outputs":["Add_1035_0"]},{"control_deps":[],"inputs":["Dot_951","Broadcast_952"],"name":"Add_953","op":"Add","outputs":["Add_953_0"]},{"control_deps":[],"inputs":["Add_1035"],"lower_bounds":[0,10],"name":"Slice_1036","op":"Slice","outputs":["Slice_1036_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_1035"],"lower_bounds":[0,0],"name":"Slice_1049","op":"Slice","outputs":["Slice_1049_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_1035"],"lower_bounds":[0,20],"name":"Slice_1057","op":"Slice","outputs":["Slice_1057_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_953"],"lower_bounds":[0,10],"name":"Slice_954","op":"Slice","outputs":["Slice_954_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_953"],"lower_bounds":[0,0],"name":"Slice_967","op":"Slice","outputs":["Slice_967_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_953"],"lower_bounds":[0,20],"name":"Slice_975","op":"Slice","outputs":["Slice_975_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Slice_1031","Slice_1036"],"name":"Add_1037","op":"Add","outputs":["Add_1037_0"]},{"control_deps":[],"inputs":["Slice_1048","Slice_1049"],"name":"Add_1050","op":"Add","outputs":["Add_1050_0"]},{"control_deps":[],"inputs":["Slice_906","Slice_954"],"name":"Add_955","op":"Add","outputs":["Add_955_0"]},{"control_deps":[],"inputs":["Slice_966","Slice_967"],"name":"Add_968","op":"Add","outputs":["Add_968_0"]},{"control_deps":[],"inputs":["Add_1037"],"name":"Negative_1040","op":"Negative","outputs":["Negative_1040_0"]},{"control_deps":[],"inputs
":["Add_1050"],"name":"Negative_1053","op":"Negative","outputs":["Negative_1053_0"]},{"control_deps":[],"inputs":["Add_955"],"name":"Negative_958","op":"Negative","outputs":["Negative_958_0"]},{"control_deps":[],"inputs":["Add_968"],"name":"Negative_971","op":"Negative","outputs":["Negative_971_0"]},{"control_deps":[],"inputs":["Negative_1040"],"name":"Exp_1041","op":"Exp","outputs":["Exp_1041_0"]},{"control_deps":[],"inputs":["Negative_1053"],"name":"Exp_1054","op":"Exp","outputs":["Exp_1054_0"]},{"control_deps":[],"inputs":["Negative_958"],"name":"Exp_959","op":"Exp","outputs":["Exp_959_0"]},{"control_deps":[],"inputs":["Negative_971"],"name":"Exp_972","op":"Exp","outputs":["Exp_972_0"]},{"control_deps":[],"inputs":["Broadcast_1039","Exp_1041"],"name":"Add_1042","op":"Add","outputs":["Add_1042_0"]},{"control_deps":[],"inputs":["Broadcast_1052","Exp_1054"],"name":"Add_1055","op":"Add","outputs":["Add_1055_0"]},{"control_deps":[],"inputs":["Broadcast_957","Exp_959"],"name":"Add_960","op":"Add","outputs":["Add_960_0"]},{"control_deps":[],"inputs":["Broadcast_970","Exp_972"],"name":"Add_973","op":"Add","outputs":["Add_973_0"]},{"control_deps":[],"inputs":["Broadcast_1039","Add_1042"],"name":"Divide_1043","op":"Divide","outputs":["Divide_1043_0"]},{"control_deps":[],"inputs":["Broadcast_1052","Add_1055"],"name":"Divide_1056","op":"Divide","outputs":["Divide_1056_0"]},{"control_deps":[],"inputs":["Broadcast_957","Add_960"],"name":"Divide_961","op":"Divide","outputs":["Divide_961_0"]},{"control_deps":[],"inputs":["Broadcast_970","Add_973"],"name":"Divide_974","op":"Divide","outputs":["Divide_974_0"]},{"control_deps":[],"inputs":["Broadcast_1045","Divide_1043"],"name":"Subtract_1046","op":"Subtract","outputs":["Subtract_1046_0"]},{"control_deps":[],"inputs":["Divide_1043","Add_899"],"name":"Multiply_1062","op":"Multiply","outputs":["Multiply_1062_0"]},{"control_deps":[],"inputs":["Divide_1056","Slice_1057"],"name":"Multiply_1058","op":"Multiply","outputs":["Multiply_1058_
0"]},{"control_deps":[],"inputs":["Broadcast_963","Divide_961"],"name":"Subtract_964","op":"Subtract","outputs":["Subtract_964_0"]},{"control_deps":[],"inputs":["Divide_961","Add_949"],"name":"Multiply_980","op":"Multiply","outputs":["Multiply_980_0"]},{"control_deps":[],"inputs":["Divide_974","Slice_975"],"name":"Multiply_976","op":"Multiply","outputs":["Multiply_976_0"]},{"control_deps":[],"inputs":["Slice_1047","Multiply_1058"],"name":"Add_1059","op":"Add","outputs":["Add_1059_0"]},{"control_deps":[],"inputs":["Slice_965","Multiply_976"],"name":"Add_977","op":"Add","outputs":["Add_977_0"]},{"control_deps":[],"inputs":["Add_1059"],"name":"Tanh_1060","op":"Tanh","outputs":["Tanh_1060_0"]},{"control_deps":[],"inputs":["Add_977"],"name":"Tanh_978","op":"Tanh","outputs":["Tanh_978_0"]},{"control_deps":[],"inputs":["Subtract_1046","Tanh_1060"],"name":"Multiply_1061","op":"Multiply","outputs":["Multiply_1061_0"]},{"control_deps":[],"inputs":["Subtract_964","Tanh_978"],"name":"Multiply_979","op":"Multiply","outputs":["Multiply_979_0"]},{"control_deps":[],"inputs":["Multiply_1061","Multiply_1062"],"name":"Add_1063","op":"Add","outputs":["Add_1063_0"]},{"control_deps":[],"inputs":["Multiply_979","Multiply_980"],"name":"Add_981","op":"Add","outputs":["Add_981_0"]},{"axis":1,"control_deps":[],"inputs":["Add_1063","Add_949"],"name":"Concat_1064","op":"Concat","outputs":["Concat_1064_0"]},{"axis":1,"control_deps":[],"inputs":["Add_899","Add_981"],"name":"Concat_982","op":"Concat","outputs":["Concat_982_0"]},{"control_deps":[],"inputs":["Concat_1064","Reshape_1065"],"name":"Dot_1066","op":"Dot","outputs":["Dot_1066_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Concat_1064","Reshape_1106"],"name":"Dot_1107","op":"Dot","outputs":["Dot_1107_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Concat_982","Reshape_985"],"name":"Dot_986","op":"Dot","outputs":["Dot_986_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Concat_982","Reshape_1148"],"name":
"Dot_1149","op":"Dot","outputs":["Dot_1149_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Dot_1066","Broadcast_1067"],"name":"Add_1068","op":"Add","outputs":["Add_1068_0"]},{"control_deps":[],"inputs":["Dot_1107","Broadcast_1108"],"name":"Add_1109","op":"Add","outputs":["Add_1109_0"]},{"control_deps":[],"inputs":["Dot_986","Broadcast_987"],"name":"Add_988","op":"Add","outputs":["Add_988_0"]},{"control_deps":[],"inputs":["Dot_1149","Broadcast_1150"],"name":"Add_1151","op":"Add","outputs":["Add_1151_0"]},{"control_deps":[],"inputs":["Add_1068"],"lower_bounds":[0,10],"name":"Slice_1069","op":"Slice","outputs":["Slice_1069_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_1068"],"lower_bounds":[0,20],"name":"Slice_1085","op":"Slice","outputs":["Slice_1085_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_1068"],"lower_bounds":[0,0],"name":"Slice_1086","op":"Slice","outputs":["Slice_1086_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_1109"],"lower_bounds":[0,10],"name":"Slice_1110","op":"Slice","outputs":["Slice_1110_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_1109"],"lower_bounds":[0,20],"name":"Slice_1130","op":"Slice","outputs":["Slice_1130_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_1109"],"lower_bounds":[0,0],"name":"Slice_1131","op":"Slice","outputs":["Slice_1131_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_988"],"lower_bounds":[0,10],"name":"Slice_989","op":"Slice","outputs":["Slice_989_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_988"],"lower_bounds":[0,20],"name":"Slice_1009","op":"Slice","outputs":["Slice_1009_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_988"],"lower_bounds":[0,0],"name":"Slice_1010","op":"Slice","outputs":["Slice_1010_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":[
"Add_1151"],"lower_bounds":[0,10],"name":"Slice_1152","op":"Slice","outputs":["Slice_1152_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_1151"],"lower_bounds":[0,20],"name":"Slice_1168","op":"Slice","outputs":["Slice_1168_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_1151"],"lower_bounds":[0,0],"name":"Slice_1169","op":"Slice","outputs":["Slice_1169_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Slice_1110","Slice_1119"],"name":"Add_1120","op":"Add","outputs":["Add_1120_0"]},{"control_deps":[],"inputs":["Slice_1131","Slice_1132"],"name":"Add_1133","op":"Add","outputs":["Add_1133_0"]},{"control_deps":[],"inputs":["Slice_989","Slice_998"],"name":"Add_999","op":"Add","outputs":["Add_999_0"]},{"control_deps":[],"inputs":["Slice_1010","Slice_1011"],"name":"Add_1012","op":"Add","outputs":["Add_1012_0"]},{"control_deps":[],"inputs":["Add_1120"],"name":"Negative_1123","op":"Negative","outputs":["Negative_1123_0"]},{"control_deps":[],"inputs":["Add_1133"],"name":"Negative_1136","op":"Negative","outputs":["Negative_1136_0"]},{"control_deps":[],"inputs":["Add_999"],"name":"Negative_1002","op":"Negative","outputs":["Negative_1002_0"]},{"control_deps":[],"inputs":["Add_1012"],"name":"Negative_1015","op":"Negative","outputs":["Negative_1015_0"]},{"control_deps":[],"inputs":["Negative_1123"],"name":"Exp_1124","op":"Exp","outputs":["Exp_1124_0"]},{"control_deps":[],"inputs":["Negative_1136"],"name":"Exp_1137","op":"Exp","outputs":["Exp_1137_0"]},{"control_deps":[],"inputs":["Negative_1002"],"name":"Exp_1003","op":"Exp","outputs":["Exp_1003_0"]},{"control_deps":[],"inputs":["Negative_1015"],"name":"Exp_1016","op":"Exp","outputs":["Exp_1016_0"]},{"control_deps":[],"inputs":["Broadcast_1122","Exp_1124"],"name":"Add_1125","op":"Add","outputs":["Add_1125_0"]},{"control_deps":[],"inputs":["Broadcast_1135","Exp_1137"],"name":"Add_1138","op":"Add","outputs":["Add_1138_0"]},{"control_deps":[],"inputs":["B
roadcast_1001","Exp_1003"],"name":"Add_1004","op":"Add","outputs":["Add_1004_0"]},{"control_deps":[],"inputs":["Broadcast_1014","Exp_1016"],"name":"Add_1017","op":"Add","outputs":["Add_1017_0"]},{"control_deps":[],"inputs":["Broadcast_1122","Add_1125"],"name":"Divide_1126","op":"Divide","outputs":["Divide_1126_0"]},{"control_deps":[],"inputs":["Broadcast_1135","Add_1138"],"name":"Divide_1139","op":"Divide","outputs":["Divide_1139_0"]},{"control_deps":[],"inputs":["Broadcast_1001","Add_1004"],"name":"Divide_1005","op":"Divide","outputs":["Divide_1005_0"]},{"control_deps":[],"inputs":["Broadcast_1014","Add_1017"],"name":"Divide_1018","op":"Divide","outputs":["Divide_1018_0"]},{"control_deps":[],"inputs":["Broadcast_1128","Divide_1126"],"name":"Subtract_1129","op":"Subtract","outputs":["Subtract_1129_0"]},{"control_deps":[],"inputs":["Divide_1126","Broadcast_1112"],"name":"Multiply_1145","op":"Multiply","outputs":["Multiply_1145_0"]},{"control_deps":[],"inputs":["Divide_1139","Slice_1140"],"name":"Multiply_1141","op":"Multiply","outputs":["Multiply_1141_0"]},{"control_deps":[],"inputs":["Broadcast_1007","Divide_1005"],"name":"Subtract_1008","op":"Subtract","outputs":["Subtract_1008_0"]},{"control_deps":[],"inputs":["Divide_1005","Broadcast_991"],"name":"Multiply_1024","op":"Multiply","outputs":["Multiply_1024_0"]},{"control_deps":[],"inputs":["Divide_1018","Slice_1019"],"name":"Multiply_1020","op":"Multiply","outputs":["Multiply_1020_0"]},{"control_deps":[],"inputs":["Slice_1130","Multiply_1141"],"name":"Add_1142","op":"Add","outputs":["Add_1142_0"]},{"control_deps":[],"inputs":["Slice_1009","Multiply_1020"],"name":"Add_1021","op":"Add","outputs":["Add_1021_0"]},{"control_deps":[],"inputs":["Add_1142"],"name":"Tanh_1143","op":"Tanh","outputs":["Tanh_1143_0"]},{"control_deps":[],"inputs":["Add_1021"],"name":"Tanh_1022","op":"Tanh","outputs":["Tanh_1022_0"]},{"control_deps":[],"inputs":["Subtract_1129","Tanh_1143"],"name":"Multiply_1144","op":"Multiply","outputs":["Multi
ply_1144_0"]},{"control_deps":[],"inputs":["Subtract_1008","Tanh_1022"],"name":"Multiply_1023","op":"Multiply","outputs":["Multiply_1023_0"]},{"control_deps":[],"inputs":["Multiply_1144","Multiply_1145"],"name":"Add_1146","op":"Add","outputs":["Add_1146_0"]},{"control_deps":[],"inputs":["Multiply_1023","Multiply_1024"],"name":"Add_1025","op":"Add","outputs":["Add_1025_0"]},{"control_deps":[],"input_order":[0,1],"inputs":["Add_1146"],"name":"Reshape_1147","op":"Reshape","output_shape":[2,1,10],"outputs":["Reshape_1147_0"]},{"control_deps":[],"inputs":["Add_1146","Reshape_1153"],"name":"Dot_1154","op":"Dot","outputs":["Dot_1154_0"],"reduction_axes_count":1},{"control_deps":[],"input_order":[0,1],"inputs":["Add_1025"],"name":"Reshape_1026","op":"Reshape","output_shape":[2,1,10],"outputs":["Reshape_1026_0"]},{"control_deps":[],"inputs":["Add_1025","Reshape_1070"],"name":"Dot_1071","op":"Dot","outputs":["Dot_1071_0"],"reduction_axes_count":1},{"control_deps":[],"inputs":["Dot_1154","Broadcast_1155"],"name":"Add_1156","op":"Add","outputs":["Add_1156_0"]},{"control_deps":[],"inputs":["Dot_1071","Broadcast_1072"],"name":"Add_1073","op":"Add","outputs":["Add_1073_0"]},{"control_deps":[],"inputs":["Add_1156"],"lower_bounds":[0,10],"name":"Slice_1157","op":"Slice","outputs":["Slice_1157_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_1156"],"lower_bounds":[0,0],"name":"Slice_1170","op":"Slice","outputs":["Slice_1170_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps":[],"inputs":["Add_1156"],"lower_bounds":[0,20],"name":"Slice_1178","op":"Slice","outputs":["Slice_1178_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Add_1073"],"lower_bounds":[0,10],"name":"Slice_1074","op":"Slice","outputs":["Slice_1074_0"],"strides":[1,1],"upper_bounds":[2,20]},{"control_deps":[],"inputs":["Add_1073"],"lower_bounds":[0,0],"name":"Slice_1087","op":"Slice","outputs":["Slice_1087_0"],"strides":[1,1],"upper_bounds":[2,10]},{"control_deps
":[],"inputs":["Add_1073"],"lower_bounds":[0,20],"name":"Slice_1095","op":"Slice","outputs":["Slice_1095_0"],"strides":[1,1],"upper_bounds":[2,30]},{"control_deps":[],"inputs":["Slice_1152","Slice_1157"],"name":"Add_1158","op":"Add","outputs":["Add_1158_0"]},{"control_deps":[],"inputs":["Slice_1169","Slice_1170"],"name":"Add_1171","op":"Add","outputs":["Add_1171_0"]},{"control_deps":[],"inputs":["Slice_1069","Slice_1074"],"name":"Add_1075","op":"Add","outputs":["Add_1075_0"]},{"control_deps":[],"inputs":["Slice_1086","Slice_1087"],"name":"Add_1088","op":"Add","outputs":["Add_1088_0"]},{"control_deps":[],"inputs":["Add_1158"],"name":"Negative_1161","op":"Negative","outputs":["Negative_1161_0"]},{"control_deps":[],"inputs":["Add_1171"],"name":"Negative_1174","op":"Negative","outputs":["Negative_1174_0"]},{"control_deps":[],"inputs":["Add_1075"],"name":"Negative_1078","op":"Negative","outputs":["Negative_1078_0"]},{"control_deps":[],"inputs":["Add_1088"],"name":"Negative_1091","op":"Negative","outputs":["Negative_1091_0"]},{"control_deps":[],"inputs":["Negative_1161"],"name":"Exp_1162","op":"Exp","outputs":["Exp_1162_0"]},{"control_deps":[],"inputs":["Negative_1174"],"name":"Exp_1175","op":"Exp","outputs":["Exp_1175_0"]},{"control_deps":[],"inputs":["Negative_1078"],"name":"Exp_1079","op":"Exp","outputs":["Exp_1079_0"]},{"control_deps":[],"inputs":["Negative_1091"],"name":"Exp_1092","op":"Exp","outputs":["Exp_1092_0"]},{"control_deps":[],"inputs":["Broadcast_1160","Exp_1162"],"name":"Add_1163","op":"Add","outputs":["Add_1163_0"]},{"control_deps":[],"inputs":["Broadcast_1173","Exp_1175"],"name":"Add_1176","op":"Add","outputs":["Add_1176_0"]},{"control_deps":[],"inputs":["Broadcast_1077","Exp_1079"],"name":"Add_1080","op":"Add","outputs":["Add_1080_0"]},{"control_deps":[],"inputs":["Broadcast_1090","Exp_1092"],"name":"Add_1093","op":"Add","outputs":["Add_1093_0"]},{"control_deps":[],"inputs":["Broadcast_1160","Add_1163"],"name":"Divide_1164","op":"Divide","outputs":["Div
ide_1164_0"]},{"control_deps":[],"inputs":["Broadcast_1173","Add_1176"],"name":"Divide_1177","op":"Divide","outputs":["Divide_1177_0"]},{"control_deps":[],"inputs":["Broadcast_1077","Add_1080"],"name":"Divide_1081","op":"Divide","outputs":["Divide_1081_0"]},{"control_deps":[],"inputs":["Broadcast_1090","Add_1093"],"name":"Divide_1094","op":"Divide","outputs":["Divide_1094_0"]},{"control_deps":[],"inputs":["Broadcast_1166","Divide_1164"],"name":"Subtract_1167","op":"Subtract","outputs":["Subtract_1167_0"]},{"control_deps":[],"inputs":["Divide_1164","Add_1146"],"name":"Multiply_1183","op":"Multiply","outputs":["Multiply_1183_0"]},{"control_deps":[],"inputs":["Divide_1177","Slice_1178"],"name":"Multiply_1179","op":"Multiply","outputs":["Multiply_1179_0"]},{"control_deps":[],"inputs":["Broadcast_1083","Divide_1081"],"name":"Subtract_1084","op":"Subtract","outputs":["Subtract_1084_0"]},{"control_deps":[],"inputs":["Divide_1081","Add_1025"],"name":"Multiply_1100","op":"Multiply","outputs":["Multiply_1100_0"]},{"control_deps":[],"inputs":["Divide_1094","Slice_1095"],"name":"Multiply_1096","op":"Multiply","outputs":["Multiply_1096_0"]},{"control_deps":[],"inputs":["Slice_1168","Multiply_1179"],"name":"Add_1180","op":"Add","outputs":["Add_1180_0"]},{"control_deps":[],"inputs":["Slice_1085","Multiply_1096"],"name":"Add_1097","op":"Add","outputs":["Add_1097_0"]},{"control_deps":[],"inputs":["Add_1180"],"name":"Tanh_1181","op":"Tanh","outputs":["Tanh_1181_0"]},{"control_deps":[],"inputs":["Add_1097"],"name":"Tanh_1098","op":"Tanh","outputs":["Tanh_1098_0"]},{"control_deps":[],"inputs":["Subtract_1167","Tanh_1181"],"name":"Multiply_1182","op":"Multiply","outputs":["Multiply_1182_0"]},{"control_deps":[],"inputs":["Subtract_1084","Tanh_1098"],"name":"Multiply_1099","op":"Multiply","outputs":["Multiply_1099_0"]},{"control_deps":[],"inputs":["Multiply_1182","Multiply_1183"],"name":"Add_1184","op":"Add","outputs":["Add_1184_0"]},{"control_deps":[],"inputs":["Multiply_1099","Multiply_
1100"],"name":"Add_1101","op":"Add","outputs":["Add_1101_0"]},{"control_deps":[],"input_order":[0,1],"inputs":["Add_1184"],"name":"Reshape_1185","op":"Reshape","output_shape":[2,1,10],"outputs":["Reshape_1185_0"]},{"control_deps":[],"input_order":[0,1],"inputs":["Add_1101"],"name":"Reshape_1102","op":"Reshape","output_shape":[2,1,10],"outputs":["Reshape_1102_0"]},{"axis":1,"control_deps":[],"inputs":["Reshape_1147","Reshape_1185"],"name":"Concat_1186","op":"Concat","outputs":["Concat_1186_0"]},{"axis":1,"control_deps":[],"inputs":["Reshape_1026","Reshape_1102"],"name":"Concat_1103","op":"Concat","outputs":["Concat_1103_0"]},{"control_deps":[],"inputs":["Concat_1186"],"name":"Reverse_1187","op":"Reverse","outputs":["Reverse_1187_0"],"reversed_axes":[1]},{"axis":2,"control_deps":[],"inputs":["Concat_1103","Reverse_1187"],"name":"Concat_1188","op":"Concat","outputs":["Concat_1188_0"]},{"control_deps":[],"inputs":["Concat_1188"],"name":"Result_1189","op":"Result","outputs":["Result_1189_0"]}],"parameters":["Parameter_854","Parameter_857","Parameter_858","Parameter_866","Parameter_867","Parameter_900","Parameter_901","Parameter_916","Parameter_917","Parameter_983","Parameter_984","Parameter_992","Parameter_993","Parameter_1104","Parameter_1105","Parameter_1113","Parameter_1114"],"result":["Result_1189"]}]
ONNXnGraphImporter:o

A
BC" CustomAdd: custom.op compute_graphZ
A


Z
B


b
C


B
\ No newline at end of file
 backend-test:‰
1
xvaluesindices"TopK*
k *
axis 
test_top_kZ
x


b
values


b
indices


B
\ No newline at end of file
...@@ -1819,3 +1819,47 @@ TEST(onnx_${BACKEND_NAME}, model_space_to_depth_no_blocksize) ...@@ -1819,3 +1819,47 @@ TEST(onnx_${BACKEND_NAME}, model_space_to_depth_no_blocksize)
file_util::path_join(SERIALIZED_ZOO, "onnx/space_to_depth_no_blocksize.onnx")), file_util::path_join(SERIALIZED_ZOO, "onnx/space_to_depth_no_blocksize.onnx")),
std::runtime_error); std::runtime_error);
} }
TEST(onnx_${BACKEND_NAME}, model_missing_op_domain)
{
onnx_import::register_operator(
"CustomAdd", 1, "custom.op", [](const onnx_import::Node& node) -> NodeVector {
NodeVector ng_inputs{node.get_ng_inputs()};
return {std::make_shared<ngraph::op::Add>(ng_inputs.at(0), ng_inputs.at(1))};
});
EXPECT_TRUE(onnx_import::is_operator_supported("CustomAdd", 1, "custom.op"));
auto function = onnx_import::import_onnx_model(
file_util::path_join(SERIALIZED_ZOO, "onnx/missing_op_domain.onnx"));
Inputs inputs;
inputs.emplace_back(std::vector<float>{0.f, 1.f, 2.f, 3.f});
inputs.emplace_back(std::vector<float>{0.f, 1.f, 2.f, 3.f});
Outputs expected_output{std::vector<float>{0.f, 2.f, 4.f, 6.f}};
Outputs outputs{execute(function, inputs, "${BACKEND_NAME}")};
EXPECT_TRUE(test::all_close_f(expected_output.front(), outputs.front()));
}
TEST(onnx_${BACKEND_NAME}, model_top_k)
{
auto function =
onnx_import::import_onnx_model(file_util::path_join(SERIALIZED_ZOO, "onnx/top_k.onnx"));
Inputs inputs;
inputs.emplace_back(std::vector<float>{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11});
std::vector<float> expected_values_output{3, 2, 1, 7, 6, 5, 11, 10, 9};
std::vector<std::int64_t> expected_indices_output{3, 2, 1, 3, 2, 1, 3, 2, 1};
std::vector<std::shared_ptr<ngraph::runtime::Tensor>> result_tensors =
prepare_and_run(function, inputs, "${BACKEND_NAME}");
std::vector<float> values_output = read_vector<float>(result_tensors.at(0));
std::vector<std::int64_t> indices_output = read_vector<std::int64_t>(result_tensors.at(1));
EXPECT_TRUE(test::all_close_f(expected_values_output, values_output));
EXPECT_TRUE(test::all_close(expected_indices_output, indices_output));
}
...@@ -127,10 +127,11 @@ void init_real_tv(ngraph::runtime::Tensor* tv, std::default_random_engine& engin ...@@ -127,10 +127,11 @@ void init_real_tv(ngraph::runtime::Tensor* tv, std::default_random_engine& engin
void random_init(ngraph::runtime::Tensor* tv, std::default_random_engine& engine); void random_init(ngraph::runtime::Tensor* tv, std::default_random_engine& engine);
template <typename T, typename T1 = T> template <typename T>
std::vector<std::vector<T1>> execute(const std::shared_ptr<ngraph::Function>& function, std::vector<std::shared_ptr<ngraph::runtime::Tensor>>
std::vector<std::vector<T>> args, prepare_and_run(const std::shared_ptr<ngraph::Function>& function,
const std::string& backend_id) std::vector<std::vector<T>> args,
const std::string& backend_id)
{ {
auto backend = ngraph::runtime::Backend::create(backend_id); auto backend = ngraph::runtime::Backend::create(backend_id);
...@@ -160,6 +161,16 @@ std::vector<std::vector<T1>> execute(const std::shared_ptr<ngraph::Function>& fu ...@@ -160,6 +161,16 @@ std::vector<std::vector<T1>> execute(const std::shared_ptr<ngraph::Function>& fu
auto handle = backend->compile(function); auto handle = backend->compile(function);
backend->call_with_validate(handle, result_tensors, arg_tensors); backend->call_with_validate(handle, result_tensors, arg_tensors);
return result_tensors;
}
template <typename T, typename T1 = T>
std::vector<std::vector<T1>> execute(const std::shared_ptr<ngraph::Function>& function,
std::vector<std::vector<T>> args,
const std::string& backend_id)
{
std::vector<std::shared_ptr<ngraph::runtime::Tensor>> result_tensors =
prepare_and_run(function, args, backend_id);
std::vector<std::vector<T1>> result_vectors; std::vector<std::vector<T1>> result_vectors;
for (auto rt : result_tensors) for (auto rt : result_tensors)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment