Commit 5a7d60a1 authored by Louis Feng's avatar Louis Feng Committed by Adam Procter

NGRAPH-1605 Sigmoid multiply fusion (#964)

parent 83206a0a
......@@ -219,6 +219,7 @@ if (NGRAPH_CPU_ENABLE AND LLVM_INCLUDE_DIR AND MKLDNN_INCLUDE_DIR)
runtime/cpu/op/conv_relu.cpp
runtime/cpu/op/convert_layout.cpp
runtime/cpu/op/sigmoid.cpp
runtime/cpu/op/sigmoid_mul.cpp
runtime/cpu/op/rnn.cpp
runtime/cpu/op/lstm.cpp
runtime/cpu/op/matmul_bias.cpp
......
......@@ -102,6 +102,7 @@
#include "ngraph/runtime/cpu/op/max_pool_with_indices.hpp"
#include "ngraph/runtime/cpu/op/rnn.hpp"
#include "ngraph/runtime/cpu/op/sigmoid.hpp"
#include "ngraph/runtime/cpu/op/sigmoid_mul.hpp"
#include "ngraph/type/element_type.hpp"
#include "ngraph/util.hpp"
......@@ -3895,6 +3896,158 @@ namespace ngraph
<< to_string(sigmoid_index) << ");\n";
}
std::string
generate_sigmoid_mul_func(const ngraph::op::SigmoidMultiply::FunctionType type,
const std::string& input,
const std::string& out_numer,
const std::string& out_denom,
bool derivative)
{
// Emits C source computing the requested function of `input` as a
// numerator/denominator pair. When `derivative` is set, it also fills the
// matching "d_"-prefixed pair with the derivative's numerator/denominator.
const std::string d_numer_lhs = "d_" + out_numer;
// Both Logistic and Tanh derivatives share the same squared denominator.
const std::string squared_denom =
"d_" + out_denom + " = " + out_denom + " * " + out_denom + ";\n";
std::string code;
switch (type)
{
case ngraph::op::SigmoidMultiply::FunctionType::Logistic:
// sigmoid(x) = e^x / (e^x + 1)
code = "auto e_x = exp(" + input + ");\n" + out_numer + " = e_x;\n" + out_denom +
" = e_x+1;\n";
if (derivative)
{
// sigmoid'(x) = e^x / (e^x + 1)^2; numerator is unchanged.
code += d_numer_lhs + " = " + out_numer + ";\n" + squared_denom;
}
break;
case ngraph::op::SigmoidMultiply::FunctionType::Tanh:
// tanh(x) = (e^2x - 1) / (e^2x + 1)
code = "auto e_2x = exp(2.0*" + input + ");\n" + out_numer + " = e_2x-1;\n" +
out_denom + " = e_2x+1;\n";
if (derivative)
{
// tanh'(x) = 4 e^2x / (e^2x + 1)^2
code += d_numer_lhs + " = 4.0*e_2x;\n" + squared_denom;
}
break;
case ngraph::op::SigmoidMultiply::FunctionType::Identity:
// Pass-through: f(x) = x / 1, f'(x) = 1 / 1.
code = out_numer + " = " + input + ";\n" + out_denom + " = 1;\n";
if (derivative)
{
code += d_numer_lhs + " = 1;\n" + "d_" + out_denom + " = 1;\n";
}
break;
}
// An unhandled FunctionType leaves `code` empty; reject it explicitly.
if (code.empty())
{
throw ngraph_error(
"generate_sigmoid_mul_func input function type not supported");
}
return code;
}
template <>
void CPU_Emitter::EMITTER_DECL(ngraph::op::SigmoidMultiply)
{
// Emits one fused elementwise loop computing
//   out[i] = f(arg0[i]) * g(arg1[i])
// where f and g are the per-input sigmoid-family functions. Each function is
// expressed as a numerator/denominator pair so the two multiplications and
// the division happen once per element.
auto sigmoid_mul = static_cast<const ngraph::op::SigmoidMultiply*>(node);
std::string numer_0 = "numer_0";
std::string denom_0 = "denom_0";
std::string numer_1 = "numer_1";
std::string denom_1 = "denom_1";
// Generate the function bodies (no derivatives for forward prop).
std::string input_0_func_string =
generate_sigmoid_mul_func(sigmoid_mul->get_input_func_type(0),
args[0].get_name() + "[i]",
numer_0,
denom_0,
false);
std::string input_1_func_string =
generate_sigmoid_mul_func(sigmoid_mul->get_input_func_type(1),
args[1].get_name() + "[i]",
numer_1,
denom_1,
false);
writer.block_begin();
writer << "#pragma omp parallel for simd\n";
writer << "for (size_t i=0; i<" << out[0].get_size() << "; i++)\n";
writer.block_begin();
// Declare the pair outside the nested scope so the generated snippet can
// assign into it; the inner braces isolate each snippet's locals (e_x etc.).
writer << "float " << numer_0 << ";\n";
writer << "float " << denom_0 << ";\n";
writer.block_begin();
writer << input_0_func_string;
writer.block_end();
writer << "float " << numer_1 << ";\n";
writer << "float " << denom_1 << ";\n";
writer.block_begin();
writer << input_1_func_string;
writer.block_end();
// out = (n0 * n1) / (d0 * d1)
writer << out[0].get_name()
<< "[i] = (" + numer_0 + " * " + numer_1 + ") / (" + denom_0 + " * " +
denom_1 + ");\n";
writer.block_end();
writer.block_end();
}
template <>
void CPU_Emitter::EMITTER_DECL(ngraph::op::SigmoidMultiplyBackprop)
{
// math: we have sigmoid functions f(x) and g(y) multiplied, z = f(x) * g(y)
// dz/dx = dz/df * df/dx = g(y) * f'(x)
// dz/dy = dz/dg * dg/dy = f(x) * g'(y)
// Both f/g and f'/g' are emitted as numerator/denominator pairs by
// generate_sigmoid_mul_func (derivative=true also fills the d_* pair).
auto sigmoid_mul_backprop =
static_cast<const ngraph::op::SigmoidMultiplyBackprop*>(node);
const TensorViewWrapper& data_0 = args[0];
const TensorViewWrapper& data_1 = args[1];
const TensorViewWrapper& delta = args[2];
const TensorViewWrapper& input_0_delta = out[0];
const TensorViewWrapper& input_1_delta = out[1];
std::string numer_0 = "numer_0";
std::string denom_0 = "denom_0";
std::string numer_1 = "numer_1";
std::string denom_1 = "denom_1";
std::string d_numer_0 = "d_numer_0";
std::string d_denom_0 = "d_denom_0";
std::string d_numer_1 = "d_numer_1";
std::string d_denom_1 = "d_denom_1";
std::string input_0_func_string =
generate_sigmoid_mul_func(sigmoid_mul_backprop->get_input_func_type(0),
data_0.get_name() + "[i]",
numer_0,
denom_0,
true);
std::string input_1_func_string =
generate_sigmoid_mul_func(sigmoid_mul_backprop->get_input_func_type(1),
data_1.get_name() + "[i]",
numer_1,
denom_1,
true);
writer.block_begin();
writer << "#pragma omp parallel for simd\n";
writer << "for (size_t i=0; i<" << input_0_delta.get_size() << "; i++)\n";
writer.block_begin();
// Declare value and derivative pairs; inner braces scope each generated
// snippet's temporaries (e_x / e_2x) so the two snippets do not collide.
writer << "float " << numer_0 << ";\n";
writer << "float " << denom_0 << ";\n";
writer << "float " << d_numer_0 << ";\n";
writer << "float " << d_denom_0 << ";\n";
writer.block_begin();
writer << input_0_func_string;
writer.block_end();
writer << "float " << numer_1 << ";\n";
writer << "float " << denom_1 << ";\n";
writer << "float " << d_numer_1 << ";\n";
writer << "float " << d_denom_1 << ";\n";
writer.block_begin();
writer << input_1_func_string;
writer.block_end();
// d_input_0 = delta * g(y) * f'(x); d_input_1 = delta * f(x) * g'(y)
writer << input_0_delta.get_name()
<< "[i] = " + delta.get_name() + "[i]*(" + numer_1 + "*" + d_numer_0 +
")/(" + denom_1 + "*" + d_denom_0 + ");\n";
writer << input_1_delta.get_name()
<< "[i] = " + delta.get_name() + "[i]*(" + numer_0 + "*" + d_numer_1 +
")/(" + denom_0 + "*" + d_denom_1 + ");\n";
writer.block_end();
writer.block_end();
}
template <>
void CPU_Emitter::EMITTER_DECL(ngraph::op::Softmax)
{
......
......@@ -125,6 +125,7 @@
#include "ngraph/runtime/cpu/op/max_pool_with_indices.hpp"
#include "ngraph/runtime/cpu/op/rnn.hpp"
#include "ngraph/runtime/cpu/op/sigmoid.hpp"
#include "ngraph/runtime/cpu/op/sigmoid_mul.hpp"
#include "ngraph/runtime/cpu/pass/cpu_assignment.hpp"
#include "ngraph/runtime/cpu/pass/cpu_concat_inputs.hpp"
#include "ngraph/runtime/cpu/pass/cpu_fusion.hpp"
......@@ -289,6 +290,9 @@ static const runtime::cpu::OpMap dispatcher{
{TI(ngraph::op::ReluBackprop), &runtime::cpu::CPU_Emitter::emit<op::ReluBackprop>},
{TI(ngraph::op::Rnn), &runtime::cpu::CPU_Emitter::emit<op::Rnn>},
{TI(ngraph::op::Sigmoid), &runtime::cpu::CPU_Emitter::emit<op::Sigmoid>},
{TI(ngraph::op::SigmoidMultiply), &runtime::cpu::CPU_Emitter::emit<op::SigmoidMultiply>},
{TI(ngraph::op::SigmoidMultiplyBackprop),
&runtime::cpu::CPU_Emitter::emit<op::SigmoidMultiplyBackprop>},
{TI(ngraph::op::Softmax), &runtime::cpu::CPU_Emitter::emit<op::Softmax>},
{TI(ngraph::op::SigmoidBackprop), &runtime::cpu::CPU_Emitter::emit<op::SigmoidBackprop>},
{TI(ngraph::op::And), &runtime::cpu::CPU_Emitter::emit<op::And>},
......
/*******************************************************************************
* Copyright 2018 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
#include "sigmoid_mul.hpp"
#include "ngraph/log.hpp"
#include "ngraph/op/add.hpp"
#include "ngraph/op/broadcast.hpp"
#include "ngraph/op/get_output_element.hpp"
#include "ngraph/op/tanh.hpp"
#include "ngraph/runtime/cpu/op/sigmoid.hpp"
#include "ngraph/util.hpp"
using namespace std;
using namespace ngraph;
ngraph::op::SigmoidMultiply::FunctionType
op::SigmoidMultiply::identify_node_type(const std::shared_ptr<ngraph::Node>& node)
{
// Maps a graph node onto the function the fused op applies to that input:
// Tanh/Sigmoid get fused in, Broadcast/Add pass their value through.
using FunctionType = ngraph::op::SigmoidMultiply::FunctionType;
if (std::dynamic_pointer_cast<ngraph::op::Tanh>(node))
{
return FunctionType::Tanh;
}
if (std::dynamic_pointer_cast<ngraph::op::Sigmoid>(node))
{
return FunctionType::Logistic;
}
if (std::dynamic_pointer_cast<ngraph::op::Broadcast>(node) ||
std::dynamic_pointer_cast<ngraph::op::Add>(node))
{
return FunctionType::Identity;
}
throw ngraph::ngraph_error("SigmoidMultiply input function type not supported: " +
node->get_name());
}
// Constructs the fused op; both inputs must agree in element type and shape
// (the fused loop is strictly elementwise), and the output mirrors input_0.
op::SigmoidMultiply::SigmoidMultiply(shared_ptr<Node> input_0,
shared_ptr<Node> input_1,
const FunctionType input_0_type,
const FunctionType input_1_type)
: RequiresTensorViewArgs("SigmoidMultiply", {input_0, input_1})
{
if (input_0->get_element_type() != input_1->get_element_type())
{
throw ngraph_error("SigmoidMultiply input element type mismatch");
}
if (input_0->get_shape() != input_1->get_shape())
{
throw ngraph_error("SigmoidMultiply input shape mismatch: " +
vector_to_string(input_0->get_shape()) + " != " +
vector_to_string(input_1->get_shape()));
}
// Remember which function (Logistic/Tanh/Identity) applies to each input.
m_input_type[0] = input_0_type;
m_input_type[1] = input_1_type;
add_output(input_0->get_element_type(), input_0->get_shape());
}
shared_ptr<Node> op::SigmoidMultiply::copy_with_new_args(const NodeVector& new_args) const
{
// Clone with replacement inputs, keeping the recorded function types.
constexpr size_t expected_arg_count = 2;
if (new_args.size() != expected_arg_count)
{
throw ngraph_error("SigmoidMultiply incorrect number of new arguments");
}
// WARNING: implicitly expecting new args must match the original input function types.
return make_shared<SigmoidMultiply>(
new_args.at(0), new_args.at(1), m_input_type[0], m_input_type[1]);
}
// Wires autodiff through a single fused backprop node whose two outputs are
// the deltas for input_0 and input_1 respectively.
void op::SigmoidMultiply::generate_adjoints(autodiff::Adjoints& adjoints, const NodeVector& deltas)
{
auto delta = deltas.at(0);
auto input_0 = get_argument(0);
auto input_1 = get_argument(1);
auto sigmoid_mul_backprop =
make_shared<op::SigmoidMultiplyBackprop>(input_0, input_1, delta, m_input_type);
// Split the multi-output backprop node into one delta per input.
auto input_0_delta = make_shared<op::GetOutputElement>(sigmoid_mul_backprop, 0);
auto input_1_delta = make_shared<op::GetOutputElement>(sigmoid_mul_backprop, 1);
adjoints.add_delta(input_0, input_0_delta);
adjoints.add_delta(input_1, input_1_delta);
}
// Constructs the fused backprop op; inputs and delta must all share element
// type and shape, and the op produces one delta output per forward input.
op::SigmoidMultiplyBackprop::SigmoidMultiplyBackprop(std::shared_ptr<Node> input_0,
std::shared_ptr<Node> input_1,
shared_ptr<Node> delta,
const std::array<FunctionType, 2>& input_type)
: RequiresTensorViewArgs("SigmoidMultiplyBackprop", {input_0, input_1, delta})
, m_input_type(input_type)
{
if (input_0->get_element_type() != input_1->get_element_type())
{
throw ngraph_error("Argument element types for SigmoidMultiply backprop do not match");
}
if (input_0->get_shape() != input_1->get_shape())
{
throw ngraph_error("Argument shapes for SigmoidMultiply backprop do not match");
}
if (input_0->get_element_type() != delta->get_element_type())
{
throw ngraph_error(
"Argument and delta element types for SigmoidMultiply backprop do not match");
}
if (input_0->get_shape() != delta->get_shape())
{
throw ngraph_error("Argument and delta shape for SigmoidMultiply backprop do not match");
}
// Output 0: delta w.r.t. input_0; output 1: delta w.r.t. input_1.
add_output(get_input_element_type(0), get_input_shape(0));
add_output(get_input_element_type(1), get_input_shape(1));
}
shared_ptr<Node> op::SigmoidMultiplyBackprop::copy_with_new_args(const NodeVector& new_args) const
{
// Clone with replacement inputs/delta, keeping the recorded function types.
constexpr size_t expected_arg_count = 3;
if (new_args.size() != expected_arg_count)
{
throw ngraph_error("Incorrect number of new arguments");
}
return make_shared<SigmoidMultiplyBackprop>(
new_args.at(0), new_args.at(1), new_args.at(2), m_input_type);
}
/*******************************************************************************
* Copyright 2018 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
#pragma once
#include "ngraph/op/util/requires_tensor_view_args.hpp"
#include "ngraph/util.hpp"
#include <array>
namespace ngraph
{
namespace op
{
/// \brief Fused Sigmoid functions (logistic and tanh) with multiplication forward prop.
///
/// Computes f(input_0) * g(input_1) elementwise, where f and g are selected
/// per input via FunctionType (Identity passes the input through unchanged).
class SigmoidMultiply : public util::RequiresTensorViewArgs
{
public:
/// Defines valid function types
enum class FunctionType
{
Logistic,
Tanh,
Identity
};
/// Input nodes are expected to be actual inputs where the corresponding input
/// FunctionType will be applied to those inputs in the fused operation.
SigmoidMultiply(std::shared_ptr<Node> input_0,
std::shared_ptr<Node> input_1,
const FunctionType input_0_type,
const FunctionType input_1_type);
/// WARNING: copy_with_new_args() implicitly expects new args must match the original input function types.
virtual std::shared_ptr<Node>
copy_with_new_args(const NodeVector& new_args) const override;
/// Builds a SigmoidMultiplyBackprop node and registers both input deltas.
virtual void generate_adjoints(autodiff::Adjoints& adjoints,
const NodeVector& deltas) override;
/// Returns the function type applied to input `index` (0 or 1; unchecked).
FunctionType get_input_func_type(const unsigned int index) const
{
return m_input_type[index];
}
/// Identifies the corresponding FunctionType for the input node.
static FunctionType identify_node_type(const std::shared_ptr<ngraph::Node>& node);
private:
// Function applied to each of the two inputs, in argument order.
std::array<FunctionType, 2> m_input_type;
};
/// \brief Elementwise SigmoidMultiplyBackprop operation.
///
/// Produces two outputs: the delta w.r.t. input_0 and the delta w.r.t.
/// input_1 of the corresponding SigmoidMultiply forward op.
class SigmoidMultiplyBackprop : public util::RequiresTensorViewArgs
{
public:
typedef SigmoidMultiply::FunctionType FunctionType;
/// \brief Constructs a SigmoidMultiplyBackprop operation.
///
/// \param input_0 Forward input node 0.
/// \param input_1 Forward input node 1.
/// \param delta Backprop delta node.
/// \param input_type Function type for the input nodes.
SigmoidMultiplyBackprop(std::shared_ptr<Node> input_0,
std::shared_ptr<Node> input_1,
std::shared_ptr<ngraph::Node> delta,
const std::array<FunctionType, 2>& input_type);
virtual std::shared_ptr<Node>
copy_with_new_args(const NodeVector& new_args) const override;
/// Returns the function type applied to input `index` (0 or 1; unchecked).
FunctionType get_input_func_type(const unsigned int index) const
{
return m_input_type[index];
}
private:
// Function applied to each of the two forward inputs, in argument order.
std::array<FunctionType, 2> m_input_type;
};
}
}
......@@ -41,6 +41,7 @@
#include "ngraph/op/sqrt.hpp"
#include "ngraph/op/subtract.hpp"
#include "ngraph/op/sum.hpp"
#include "ngraph/op/tanh.hpp"
#include "ngraph/pattern/matcher.hpp"
#include "ngraph/pattern/op/label.hpp"
#include "ngraph/pattern/op/skip.hpp"
......@@ -49,6 +50,7 @@
#include "ngraph/runtime/cpu/op/conv_relu.hpp"
#include "ngraph/runtime/cpu/op/matmul_bias.hpp"
#include "ngraph/runtime/cpu/op/sigmoid.hpp"
#include "ngraph/runtime/cpu/op/sigmoid_mul.hpp"
static bool init_cblas_arg(std::shared_ptr<ngraph::Node> reshape,
std::shared_ptr<ngraph::Node> arg,
......@@ -1071,3 +1073,66 @@ void ngraph::runtime::cpu::pass::CPUFusion::construct_conv_bias_relu()
auto m = std::make_shared<pattern::Matcher>(prelu, callback);
this->add_matcher(m);
}
// Registers a matcher that fuses Multiply(sigmoid-like, sigmoid-like/other)
// subgraphs into a single SigmoidMultiply op.
void ngraph::runtime::cpu::pass::CPUFusion::construct_sigmoid_multiply()
{
// Construct predicate to match sigmoid and tanh
auto sigmoid_pred = [](std::shared_ptr<Node> n) {
return (std::dynamic_pointer_cast<op::Sigmoid>(n) != nullptr) ||
(std::dynamic_pointer_cast<op::Tanh>(n) != nullptr);
};
// Construct predicate to match other valid nodes
auto other_pred = [](std::shared_ptr<Node> n) {
return (std::dynamic_pointer_cast<op::Sigmoid>(n) != nullptr) ||
(std::dynamic_pointer_cast<op::Tanh>(n) != nullptr) ||
(std::dynamic_pointer_cast<op::Add>(n) != nullptr) ||
(std::dynamic_pointer_cast<op::Broadcast>(n) != nullptr);
};
auto sigmoid_0 = std::make_shared<pattern::op::Label>(element::f32, Shape{1, 1}, sigmoid_pred);
auto sigmoid_1 = std::make_shared<pattern::op::Label>(element::f32, Shape{1, 1}, other_pred);
auto elem_mul = std::make_shared<op::Multiply>(sigmoid_0, sigmoid_1);
ngraph::pattern::graph_rewrite_callback callback = [sigmoid_0, sigmoid_1](pattern::Matcher& m) {
NGRAPH_DEBUG << "In a callback for construct_sigmoid_multiply pattern against "
<< m.get_match_root()->get_name();
auto pattern_map = m.get_pattern_map();
// The fused emitter only generates float code.
if (m.get_match_root()->get_element_type() != element::f32)
{
NGRAPH_DEBUG << "mpattern = " << m.get_match_root()->get_name()
<< " type is not float!";
return false;
}
using FunctionType = op::SigmoidMultiply::FunctionType;
const int max_inputs{2};
std::array<std::shared_ptr<ngraph::Node>, max_inputs> match_nodes{
{pattern_map[sigmoid_0], pattern_map[sigmoid_1]}};
std::array<std::shared_ptr<ngraph::Node>, max_inputs> input_nodes;
std::array<FunctionType, max_inputs> input_type;
for (int i = 0; i < max_inputs; ++i)
{
input_type[i] = op::SigmoidMultiply::identify_node_type(match_nodes[i]);
if (input_type[i] != FunctionType::Identity)
{
// The fused op consumes the matched node's *input* and re-applies
// the function itself; if the matched sigmoid/tanh has other users
// it would still have to be computed, so fusion gains nothing.
if (match_nodes[i]->get_users().size() > 1)
{
NGRAPH_DEBUG << "input node has multiple users, skipping fusion.";
return false;
}
input_nodes[i] = match_nodes[i]->get_argument(0);
}
else
{
// Identity: the matched node's value is used as-is.
input_nodes[i] = match_nodes[i];
}
}
auto sigmoid_mul_node = std::make_shared<op::SigmoidMultiply>(
input_nodes[0], input_nodes[1], input_type[0], input_type[1]);
ngraph::replace_node(m.get_match_root(), sigmoid_mul_node);
return true;
};
auto m = std::make_shared<ngraph::pattern::Matcher>(elem_mul, callback);
this->add_matcher(m);
}
......@@ -69,6 +69,7 @@ public:
if (fusions & DIFFERENTIABLE_FUSIONS)
{
construct_conv_bias();
construct_sigmoid_multiply();
}
}
......@@ -80,6 +81,7 @@ private:
void construct_fprop_bn();
void construct_sigmoid();
void construct_sigmoid_bprop();
void construct_sigmoid_multiply();
void construct_zero_padded_reshaped_conv();
void construct_zero_padded_conv();
void construct_zero_padded_conv_backprop_filters();
......
......@@ -33,6 +33,7 @@
#include "ngraph/op/parameter.hpp"
#include "ngraph/op/relu.hpp"
#include "ngraph/op/sum.hpp"
#include "ngraph/op/tanh.hpp"
#include "ngraph/pass/graph_rewrite.hpp"
#include "ngraph/pass/manager.hpp"
#include "ngraph/pass/reshape_elimination.hpp"
......@@ -49,6 +50,7 @@
#include "ngraph/runtime/cpu/op/matmul_bias.hpp"
#include "ngraph/runtime/cpu/op/rnn.hpp"
#include "ngraph/runtime/cpu/op/sigmoid.hpp"
#include "ngraph/runtime/cpu/op/sigmoid_mul.hpp"
#include "ngraph/runtime/cpu/pass/cpu_concat_inputs.hpp"
#include "ngraph/runtime/cpu/pass/cpu_fusion.hpp"
#include "ngraph/runtime/cpu/pass/cpu_post_layout_optimizations.hpp"
......@@ -1650,3 +1652,427 @@ TEST(cpu_fusion, rnn_fusion_inter_vs_cpu_2rnn_layer_3lstm_cell)
EXPECT_TRUE(test::all_close(cpu_results.at(i), int_results.at(i), 1.0e-4f, 1.0e-4f));
}
}
// Runs CPUFusion over a serialized 3-cell LSTM model and checks that the
// expected number of SigmoidMultiply ops (18) was created by the pass.
TEST(cpu_fusion, sigmoid_multiply_fusion)
{
pass::Manager pass_manager;
pass_manager.register_pass<runtime::cpu::pass::CPUFusion>();
const string json_path = file_util::path_join(SERIALIZED_ZOO, "mxnet/3_lstm_cell_forward.json");
const string json_string = file_util::read_file_to_string(json_path);
stringstream ss(json_string);
shared_ptr<Function> func = ngraph::deserialize(ss);
pass_manager.run_passes(func);
size_t ccg = count_ops_of_type<op::SigmoidMultiply>(func);
ASSERT_EQ(ccg, 18);
}
// Builds a Function computing input_0_node * input_1_node, executes it on the
// given backend with the supplied input tensors, and checks the result.
//
// \param backend       Backend used to allocate tensors and run the function.
// \param input_params  Graph parameters, one per entry of input_data/input_shapes.
// \param input_data    Host-side values copied into each input tensor.
// \param input_shapes  Shape of each input tensor (parallel to input_params).
// \param result_shape  Shape of the multiply result tensor.
// \param input_0_node  Left operand of the elementwise multiply.
// \param input_1_node  Right operand of the elementwise multiply.
// \param expected      Expected output values (compared with all_close).
void sigmoid_multiply_fusion_forward_compute(shared_ptr<runtime::Backend>& backend,
const op::ParameterVector& input_params,
const vector<vector<float>>& input_data,
const vector<Shape>& input_shapes,
const Shape& result_shape,
shared_ptr<Node> input_0_node,
shared_ptr<Node> input_1_node,
const vector<float>& expected)
{
shared_ptr<runtime::TensorView> result_tensor =
backend->create_tensor(element::f32, result_shape);
vector<shared_ptr<runtime::TensorView>> input_tensors;
// size_t index: vector::size() is unsigned; `int` caused a signed/unsigned
// comparison in the loop condition.
for (size_t i = 0; i < input_params.size(); ++i)
{
input_tensors.push_back(backend->create_tensor(element::f32, input_shapes[i]));
copy_data(input_tensors[i], input_data[i]);
}
auto mul_node = input_0_node * input_1_node;
auto func = make_shared<Function>(mul_node, input_params);
backend->call(func, {result_tensor}, input_tensors);
EXPECT_TRUE(test::all_close(read_vector<float>(result_tensor), expected));
}
// Forward-value check for every supported input-function combination of the
// fused SigmoidMultiply op. Each scope builds f(x) * g(y), runs it on the
// CPU backend (where CPUFusion applies), and compares against reference
// values computed for the unfused graph.
TEST(cpu_fusion, sigmoid_multiply_fusion_forward)
{
auto backend = runtime::Backend::create("CPU");
Shape data_shape{1, 1, 2, 2};
Shape const_shape{1};
vector<float> input_0_data{1.f, 2.f, 3.f, 4.f};
vector<float> input_1_data{1.2f, 2.3f, 3.5f, 4.7f};
vector<float> const_data{1.2f};
// Sigmoid * Add (Add input is treated as Identity).
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_2_param = make_shared<op::Parameter>(element::f32, data_shape);
auto sigmoid_0 = make_shared<op::Sigmoid>(input_0_param);
auto sigmoid_1 = make_shared<op::Add>(input_1_param, input_2_param);
vector<float> expected{1.60833f, 3.78743f, 6.19173f, 8.54352f};
op::ParameterVector input_params{input_0_param, input_1_param, input_2_param};
vector<vector<float>> input_data{input_0_data, input_0_data, input_1_data};
vector<Shape> input_shapes{data_shape, data_shape, data_shape};
sigmoid_multiply_fusion_forward_compute(backend,
input_params,
input_data,
input_shapes,
data_shape,
sigmoid_0,
sigmoid_1,
expected);
}
// Broadcast (Identity) * Sigmoid.
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, const_shape);
auto sigmoid_0 = make_shared<op::Broadcast>(input_1_param, data_shape, AxisSet{1, 2, 3});
auto sigmoid_1 = make_shared<op::Sigmoid>(input_0_param);
vector<float> expected{0.87727f, 1.05696f, 1.14309f, 1.17842f};
op::ParameterVector input_params{input_0_param, input_1_param};
vector<vector<float>> input_data{input_0_data, const_data};
vector<Shape> input_shapes{data_shape, const_shape};
sigmoid_multiply_fusion_forward_compute(backend,
input_params,
input_data,
input_shapes,
data_shape,
sigmoid_0,
sigmoid_1,
expected);
}
// Sigmoid * Broadcast (Identity) — mirrored operand order.
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, const_shape);
auto sigmoid_0 = make_shared<op::Sigmoid>(input_0_param);
auto sigmoid_1 = make_shared<op::Broadcast>(input_1_param, data_shape, AxisSet{1, 2, 3});
vector<float> expected{0.87727f, 1.05696f, 1.14309f, 1.17842f};
op::ParameterVector input_params{input_0_param, input_1_param};
vector<vector<float>> input_data{input_0_data, const_data};
vector<Shape> input_shapes{data_shape, const_shape};
sigmoid_multiply_fusion_forward_compute(backend,
input_params,
input_data,
input_shapes,
data_shape,
sigmoid_0,
sigmoid_1,
expected);
}
// Sigmoid * Sigmoid.
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, data_shape);
auto sigmoid_0 = make_shared<op::Sigmoid>(input_0_param);
auto sigmoid_1 = make_shared<op::Sigmoid>(input_1_param);
vector<float> expected{0.561837f, 0.800536f, 0.924652f, 0.973163f};
op::ParameterVector input_params{input_0_param, input_1_param};
vector<vector<float>> input_data{input_0_data, input_1_data};
vector<Shape> input_shapes{data_shape, data_shape};
sigmoid_multiply_fusion_forward_compute(backend,
input_params,
input_data,
input_shapes,
data_shape,
sigmoid_0,
sigmoid_1,
expected);
}
// Sigmoid * Tanh.
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, data_shape);
auto sigmoid_0 = make_shared<op::Sigmoid>(input_0_param);
auto sigmoid_1 = make_shared<op::Tanh>(input_1_param);
vector<float> expected{0.60945f, 0.863266f, 0.950838f, 0.981851f};
op::ParameterVector input_params{input_0_param, input_1_param};
vector<vector<float>> input_data{input_0_data, input_1_data};
vector<Shape> input_shapes{data_shape, data_shape};
sigmoid_multiply_fusion_forward_compute(backend,
input_params,
input_data,
input_shapes,
data_shape,
sigmoid_0,
sigmoid_1,
expected);
}
// Tanh * Sigmoid.
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, data_shape);
auto sigmoid_0 = make_shared<op::Tanh>(input_0_param);
auto sigmoid_1 = make_shared<op::Sigmoid>(input_1_param);
vector<float> expected{0.585304f, 0.876182f, 0.965887f, 0.990322f};
op::ParameterVector input_params{input_0_param, input_1_param};
vector<vector<float>> input_data{input_0_data, input_1_data};
vector<Shape> input_shapes{data_shape, data_shape};
sigmoid_multiply_fusion_forward_compute(backend,
input_params,
input_data,
input_shapes,
data_shape,
sigmoid_0,
sigmoid_1,
expected);
}
// Tanh * Tanh.
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, data_shape);
auto sigmoid_0 = make_shared<op::Tanh>(input_0_param);
auto sigmoid_1 = make_shared<op::Tanh>(input_1_param);
vector<float> expected{0.634907f, 0.94484f, 0.993242f, 0.999164f};
op::ParameterVector input_params{input_0_param, input_1_param};
vector<vector<float>> input_data{input_0_data, input_1_data};
vector<Shape> input_shapes{data_shape, data_shape};
sigmoid_multiply_fusion_forward_compute(backend,
input_params,
input_data,
input_shapes,
data_shape,
sigmoid_0,
sigmoid_1,
expected);
}
}
// Builds a SigmoidMultiply node directly (bypassing the fusion pass), runs
// autodiff against a supplied delta, and checks both input deltas.
//
// \param backend          Backend used to allocate tensors and run the function.
// \param input_params     Forward graph parameters (delta param appended here).
// \param input_data       Host-side values for each forward input tensor.
// \param input_shapes     Shape of each forward input tensor.
// \param delta_data       Values for the backprop delta tensor.
// \param delta_shape      Shape of the delta tensor.
// \param d_input_0_shape  Shape of the delta-w.r.t.-input-0 output.
// \param d_input_1_shape  Shape of the delta-w.r.t.-input-1 output.
// \param input_0_node     Matched node for input 0 (sigmoid/tanh/identity-like).
// \param input_1_node     Matched node for input 1.
// \param input_0_adjoint  Node whose backprop delta is checked against expected_0.
// \param input_1_adjoint  Node whose backprop delta is checked against expected_1.
// \param expected_0       Expected delta values for input 0.
// \param expected_1       Expected delta values for input 1.
void sigmoid_multiply_fusion_backward_compute(shared_ptr<runtime::Backend>& backend,
const op::ParameterVector& input_params,
const vector<vector<float>>& input_data,
const vector<Shape>& input_shapes,
const vector<float>& delta_data,
const Shape& delta_shape,
const Shape& d_input_0_shape,
const Shape& d_input_1_shape,
shared_ptr<Node> input_0_node,
shared_ptr<Node> input_1_node,
shared_ptr<Node> input_0_adjoint,
shared_ptr<Node> input_1_adjoint,
const vector<float>& expected_0,
const vector<float>& expected_1)
{
vector<shared_ptr<runtime::TensorView>> input_tensors;
// size_t index: vector::size() is unsigned; `int` caused a signed/unsigned
// comparison in the loop condition.
for (size_t i = 0; i < input_params.size(); ++i)
{
input_tensors.push_back(backend->create_tensor(element::f32, input_shapes[i]));
copy_data(input_tensors[i], input_data[i]);
}
auto delta_param = make_shared<op::Parameter>(element::f32, delta_shape);
shared_ptr<runtime::TensorView> delta_tensor =
backend->create_tensor(element::f32, delta_shape);
copy_data(delta_tensor, delta_data);
op::ParameterVector back_params(input_params);
back_params.push_back(delta_param);
input_tensors.push_back(delta_tensor);
shared_ptr<runtime::TensorView> d_input_0_tensor =
backend->create_tensor(element::f32, d_input_0_shape);
shared_ptr<runtime::TensorView> d_input_1_tensor =
backend->create_tensor(element::f32, d_input_1_shape);
using FunctionType = op::SigmoidMultiply::FunctionType;
auto input_0_type = op::SigmoidMultiply::identify_node_type(input_0_node);
auto input_1_type = op::SigmoidMultiply::identify_node_type(input_1_node);
// for Identity functions, we use the node itself, otherwise use its input
// where we will apply the function of input node
auto input_0_alt =
(input_0_type == FunctionType::Identity) ? input_0_node : input_0_node->get_argument(0);
auto input_1_alt =
(input_1_type == FunctionType::Identity) ? input_1_node : input_1_node->get_argument(0);
auto sigmoid_mul =
make_shared<op::SigmoidMultiply>(input_0_alt, input_1_alt, input_0_type, input_1_type);
ngraph::autodiff::Adjoints adjoints(NodeVector{sigmoid_mul}, NodeVector{delta_param});
auto d_input_0 = adjoints.backprop_node(input_0_adjoint);
auto d_input_1 = adjoints.backprop_node(input_1_adjoint);
auto df = make_shared<Function>(NodeVector{d_input_0, d_input_1}, back_params);
backend->call(df, {d_input_0_tensor, d_input_1_tensor}, input_tensors);
EXPECT_TRUE(test::all_close(read_vector<float>(d_input_0_tensor), expected_0));
EXPECT_TRUE(test::all_close(read_vector<float>(d_input_1_tensor), expected_1));
}
TEST(cpu_fusion, sigmoid_multiply_fusion_backward)
{
auto backend = runtime::Backend::create("CPU");
Shape data_shape{1, 1, 2, 2};
Shape const_shape{1};
vector<float> input_0_data{1.f, 2.f, 3.f, 4.f};
vector<float> input_1_data{1.2f, 2.2f, 3.2f, 4.2f};
vector<float> const_data{1.2f};
vector<float> delta_data(shape_size(data_shape), 20.0f);
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_2_param = make_shared<op::Parameter>(element::f32, data_shape);
auto sigmoid_0 = make_shared<op::Sigmoid>(input_0_param);
auto sigmoid_1 = make_shared<op::Add>(input_1_param, input_2_param);
vector<float> expected_0{8.65093f, 8.81946f, 5.60191f, 2.89668f};
vector<float> expected_1{14.6212f, 17.6159f, 19.0515f, 19.6403f};
op::ParameterVector input_params{input_0_param, input_1_param, input_2_param};
vector<vector<float>> input_data{input_0_data, input_0_data, input_1_data};
vector<Shape> input_shapes{data_shape, data_shape, data_shape};
sigmoid_multiply_fusion_backward_compute(backend,
input_params,
input_data,
input_shapes,
delta_data,
data_shape,
data_shape,
data_shape,
sigmoid_0,
sigmoid_1,
input_0_param,
sigmoid_1,
expected_0,
expected_1);
}
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, const_shape);
auto sigmoid_0 = make_shared<op::Broadcast>(input_1_param, data_shape, AxisSet{1, 2, 3});
auto sigmoid_1 = make_shared<op::Tanh>(input_0_param);
vector<float> expected_0{15.2319f, 19.2806f, 19.9011f, 19.9866f};
vector<float> expected_1{10.0794f, 1.69562f, 0.236785f, 0.0321828f};
op::ParameterVector input_params{input_0_param, input_1_param};
vector<vector<float>> input_data{input_0_data, const_data};
vector<Shape> input_shapes{data_shape, const_shape};
sigmoid_multiply_fusion_backward_compute(backend,
input_params,
input_data,
input_shapes,
delta_data,
data_shape,
data_shape,
data_shape,
sigmoid_0,
sigmoid_1,
sigmoid_0,
input_0_param,
expected_0,
expected_1);
}
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, const_shape);
auto sigmoid_0 = make_shared<op::Tanh>(input_0_param);
auto sigmoid_1 = make_shared<op::Broadcast>(input_1_param, data_shape, AxisSet{1, 2, 3});
vector<float> expected_0{10.0794f, 1.69562f, 0.236785f, 0.0321828f};
vector<float> expected_1{15.2319f, 19.2806f, 19.9011f, 19.9866f};
op::ParameterVector input_params{input_0_param, input_1_param};
vector<vector<float>> input_data{input_0_data, const_data};
vector<Shape> input_shapes{data_shape, const_shape};
sigmoid_multiply_fusion_backward_compute(backend,
input_params,
input_data,
input_shapes,
delta_data,
data_shape,
data_shape,
data_shape,
sigmoid_0,
sigmoid_1,
input_0_param,
sigmoid_1,
expected_0,
expected_1);
}
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, data_shape);
auto sigmoid_0 = make_shared<op::Sigmoid>(input_0_param);
auto sigmoid_1 = make_shared<op::Sigmoid>(input_1_param);
vector<float> expected_0{3.02202f, 1.89041f, 0.868146f, 0.348035f};
vector<float> expected_1{2.60102f, 1.58192f, 0.716941f, 0.285879f};
op::ParameterVector input_params{input_0_param, input_1_param};
vector<vector<float>> input_data{input_0_data, input_1_data};
vector<Shape> input_shapes{data_shape, data_shape};
sigmoid_multiply_fusion_backward_compute(backend,
input_params,
input_data,
input_shapes,
delta_data,
data_shape,
data_shape,
data_shape,
sigmoid_0,
sigmoid_1,
input_0_param,
input_1_param,
expected_0,
expected_1);
}
{
auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
auto input_1_param = make_shared<op::Parameter>(element::f32, data_shape);
auto sigmoid_0 = make_shared<op::Sigmoid>(input_0_param);
auto sigmoid_1 = make_shared<op::Tanh>(input_1_param);
vector<float> expected_0{3.27813f, 2.04894f, 0.900536f, 0.353095f};
vector<float> expected_1{4.45975f, 0.84425f, 0.126201f, 0.0176579f};
op::ParameterVector input_params{input_0_param, input_1_param};
vector<vector<float>> input_data{input_0_data, input_1_data};
vector<Shape> input_shapes{data_shape, data_shape};
sigmoid_multiply_fusion_backward_compute(backend,
input_params,
input_data,
input_shapes,
delta_data,
data_shape,
data_shape,
data_shape,
sigmoid_0,
sigmoid_1,
input_0_param,
input_1_param,
expected_0,
expected_1);
}
    {
        // Case: Tanh(x0) * Sigmoid(x1) — verify the fused backward pass
        // produces the expected input gradients for both operands.
        auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
        auto input_1_param = make_shared<op::Parameter>(element::f32, data_shape);
        auto sigmoid_0 = make_shared<op::Tanh>(input_0_param);
        auto sigmoid_1 = make_shared<op::Sigmoid>(input_1_param);
        // Precomputed reference gradients w.r.t. input_0 and input_1.
        vector<float> expected_0{6.45521f, 1.27207f, 0.189593f, 0.0264228f};
        vector<float> expected_1{2.70967f, 1.7314f, 0.748913f, 0.29092f};
        op::ParameterVector input_params{input_0_param, input_1_param};
        vector<vector<float>> input_data{input_0_data, input_1_data};
        vector<Shape> input_shapes{data_shape, data_shape};
        sigmoid_multiply_fusion_backward_compute(backend,
                                                 input_params,
                                                 input_data,
                                                 input_shapes,
                                                 delta_data,
                                                 data_shape,
                                                 data_shape,
                                                 data_shape,
                                                 sigmoid_0,
                                                 sigmoid_1,
                                                 input_0_param,
                                                 input_1_param,
                                                 expected_0,
                                                 expected_1);
    }
    {
        // Case: Tanh(x0) * Tanh(x1) — verify the fused backward pass
        // produces the expected input gradients for both operands.
        auto input_0_param = make_shared<op::Parameter>(element::f32, data_shape);
        auto input_1_param = make_shared<op::Parameter>(element::f32, data_shape);
        auto sigmoid_0 = make_shared<op::Tanh>(input_0_param);
        auto sigmoid_1 = make_shared<op::Tanh>(input_1_param);
        // Precomputed reference gradients w.r.t. input_0 and input_1.
        vector<float> expected_0{7.00227f, 1.37874f, 0.196666f, 0.026807f};
        vector<float> expected_1{4.64603f, 0.924027f, 0.131829f, 0.0179692f};
        op::ParameterVector input_params{input_0_param, input_1_param};
        vector<vector<float>> input_data{input_0_data, input_1_data};
        vector<Shape> input_shapes{data_shape, data_shape};
        sigmoid_multiply_fusion_backward_compute(backend,
                                                 input_params,
                                                 input_data,
                                                 input_shapes,
                                                 delta_data,
                                                 data_shape,
                                                 data_shape,
                                                 data_shape,
                                                 sigmoid_0,
                                                 sigmoid_1,
                                                 input_0_param,
                                                 input_1_param,
                                                 expected_0,
                                                 expected_1);
    }
}
[{
"name" : "Function_0",
"ops" : [
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_170",
"op" : "Parameter",
"outputs" : ["Parameter_170_0"],
"shape" : [ 32, 1, 200 ]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_93",
"op" : "Parameter",
"outputs" : ["Parameter_93_0"],
"shape" : [ 32, 1, 200 ]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_56",
"op" : "Parameter",
"outputs" : ["Parameter_56_0"],
"shape" : [400]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_55",
"op" : "Parameter",
"outputs" : ["Parameter_55_0"],
"shape" : [ 400, 100 ]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_48",
"op" : "Parameter",
"outputs" : ["Parameter_48_0"],
"shape" : [400]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_47",
"op" : "Parameter",
"outputs" : ["Parameter_47_0"],
"shape" : [ 400, 100 ]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_11",
"op" : "Parameter",
"outputs" : ["Parameter_11_0"],
"shape" : [400]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_10",
"op" : "Parameter",
"outputs" : ["Parameter_10_0"],
"shape" : [ 400, 100 ]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_2",
"op" : "Parameter",
"outputs" : ["Parameter_2_0"],
"shape" : [400]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_1",
"op" : "Parameter",
"outputs" : ["Parameter_1_0"],
"shape" : [ 400, 200 ]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Parameter_0",
"op" : "Parameter",
"outputs" : ["Parameter_0_0"],
"shape" : [ 32, 1, 200 ]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_53",
"op" : "Constant",
"outputs" : ["Constant_53_0"],
"shape" : [],
"value" : ["0"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_8",
"op" : "Constant",
"outputs" : ["Constant_8_0"],
"shape" : [],
"value" : ["0"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_35",
"op" : "Constant",
"outputs" : ["Constant_35_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_31",
"op" : "Constant",
"outputs" : ["Constant_31_0"],
"shape" : [],
"value" : ["0"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_25",
"op" : "Constant",
"outputs" : ["Constant_25_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_18",
"op" : "Constant",
"outputs" : ["Constant_18_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_80",
"op" : "Constant",
"outputs" : ["Constant_80_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_76",
"op" : "Constant",
"outputs" : ["Constant_76_0"],
"shape" : [],
"value" : ["0"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_70",
"op" : "Constant",
"outputs" : ["Constant_70_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_63",
"op" : "Constant",
"outputs" : ["Constant_63_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_120",
"op" : "Constant",
"outputs" : ["Constant_120_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_112",
"op" : "Constant",
"outputs" : ["Constant_112_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_105",
"op" : "Constant",
"outputs" : ["Constant_105_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_157",
"op" : "Constant",
"outputs" : ["Constant_157_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_149",
"op" : "Constant",
"outputs" : ["Constant_149_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_142",
"op" : "Constant",
"outputs" : ["Constant_142_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_197",
"op" : "Constant",
"outputs" : ["Constant_197_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_189",
"op" : "Constant",
"outputs" : ["Constant_189_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_182",
"op" : "Constant",
"outputs" : ["Constant_182_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_234",
"op" : "Constant",
"outputs" : ["Constant_234_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_226",
"op" : "Constant",
"outputs" : ["Constant_226_0"],
"shape" : [],
"value" : ["1"]
},
{
"element_type" : "float",
"inputs" : [],
"name" : "Constant_219",
"op" : "Constant",
"outputs" : ["Constant_219_0"],
"shape" : [],
"value" : ["1"]
},
{
"input_order" : [ 0, 1, 2 ],
"inputs" : ["Parameter_170"],
"name" : "Reshape_171",
"op" : "Reshape",
"output_shape" : [ 32, 200 ],
"outputs" : ["Reshape_171_0"]
},
{
"input_order" : [ 0, 1, 2 ],
"inputs" : ["Parameter_93"],
"name" : "Reshape_94",
"op" : "Reshape",
"output_shape" : [ 32, 200 ],
"outputs" : ["Reshape_94_0"]
},
{
"axes" : [0],
"inputs" : ["Parameter_56"],
"name" : "Broadcast_59",
"op" : "Broadcast",
"outputs" : ["Broadcast_59_0"],
"shape" : [ 32, 400 ]
},
{
"axes" : [0],
"inputs" : ["Parameter_56"],
"name" : "Broadcast_138",
"op" : "Broadcast",
"outputs" : ["Broadcast_138_0"],
"shape" : [ 32, 400 ]
},
{
"axes" : [0],
"inputs" : ["Parameter_56"],
"name" : "Broadcast_215",
"op" : "Broadcast",
"outputs" : ["Broadcast_215_0"],
"shape" : [ 32, 400 ]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_55"],
"name" : "Reshape_57",
"op" : "Reshape",
"output_shape" : [ 100, 400 ],
"outputs" : ["Reshape_57_0"]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_55"],
"name" : "Reshape_136",
"op" : "Reshape",
"output_shape" : [ 100, 400 ],
"outputs" : ["Reshape_136_0"]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_55"],
"name" : "Reshape_213",
"op" : "Reshape",
"output_shape" : [ 100, 400 ],
"outputs" : ["Reshape_213_0"]
},
{
"axes" : [0],
"inputs" : ["Parameter_48"],
"name" : "Broadcast_51",
"op" : "Broadcast",
"outputs" : ["Broadcast_51_0"],
"shape" : [ 32, 400 ]
},
{
"axes" : [0],
"inputs" : ["Parameter_48"],
"name" : "Broadcast_134",
"op" : "Broadcast",
"outputs" : ["Broadcast_134_0"],
"shape" : [ 32, 400 ]
},
{
"axes" : [0],
"inputs" : ["Parameter_48"],
"name" : "Broadcast_211",
"op" : "Broadcast",
"outputs" : ["Broadcast_211_0"],
"shape" : [ 32, 400 ]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_47"],
"name" : "Reshape_49",
"op" : "Reshape",
"output_shape" : [ 100, 400 ],
"outputs" : ["Reshape_49_0"]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_47"],
"name" : "Reshape_132",
"op" : "Reshape",
"output_shape" : [ 100, 400 ],
"outputs" : ["Reshape_132_0"]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_47"],
"name" : "Reshape_209",
"op" : "Reshape",
"output_shape" : [ 100, 400 ],
"outputs" : ["Reshape_209_0"]
},
{
"axes" : [0],
"inputs" : ["Parameter_11"],
"name" : "Broadcast_14",
"op" : "Broadcast",
"outputs" : ["Broadcast_14_0"],
"shape" : [ 32, 400 ]
},
{
"axes" : [0],
"inputs" : ["Parameter_11"],
"name" : "Broadcast_101",
"op" : "Broadcast",
"outputs" : ["Broadcast_101_0"],
"shape" : [ 32, 400 ]
},
{
"axes" : [0],
"inputs" : ["Parameter_11"],
"name" : "Broadcast_178",
"op" : "Broadcast",
"outputs" : ["Broadcast_178_0"],
"shape" : [ 32, 400 ]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_10"],
"name" : "Reshape_12",
"op" : "Reshape",
"output_shape" : [ 100, 400 ],
"outputs" : ["Reshape_12_0"]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_10"],
"name" : "Reshape_99",
"op" : "Reshape",
"output_shape" : [ 100, 400 ],
"outputs" : ["Reshape_99_0"]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_10"],
"name" : "Reshape_176",
"op" : "Reshape",
"output_shape" : [ 100, 400 ],
"outputs" : ["Reshape_176_0"]
},
{
"axes" : [0],
"inputs" : ["Parameter_2"],
"name" : "Broadcast_6",
"op" : "Broadcast",
"outputs" : ["Broadcast_6_0"],
"shape" : [ 32, 400 ]
},
{
"axes" : [0],
"inputs" : ["Parameter_2"],
"name" : "Broadcast_97",
"op" : "Broadcast",
"outputs" : ["Broadcast_97_0"],
"shape" : [ 32, 400 ]
},
{
"axes" : [0],
"inputs" : ["Parameter_2"],
"name" : "Broadcast_174",
"op" : "Broadcast",
"outputs" : ["Broadcast_174_0"],
"shape" : [ 32, 400 ]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_1"],
"name" : "Reshape_4",
"op" : "Reshape",
"output_shape" : [ 200, 400 ],
"outputs" : ["Reshape_4_0"]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_1"],
"name" : "Reshape_95",
"op" : "Reshape",
"output_shape" : [ 200, 400 ],
"outputs" : ["Reshape_95_0"]
},
{
"input_order" : [ 1, 0 ],
"inputs" : ["Parameter_1"],
"name" : "Reshape_172",
"op" : "Reshape",
"output_shape" : [ 200, 400 ],
"outputs" : ["Reshape_172_0"]
},
{
"input_order" : [ 0, 1, 2 ],
"inputs" : ["Parameter_0"],
"name" : "Reshape_3",
"op" : "Reshape",
"output_shape" : [ 32, 200 ],
"outputs" : ["Reshape_3_0"]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_53"],
"name" : "Broadcast_54",
"op" : "Broadcast",
"outputs" : ["Broadcast_54_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_8"],
"name" : "Broadcast_9",
"op" : "Broadcast",
"outputs" : ["Broadcast_9_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_35"],
"name" : "Broadcast_36",
"op" : "Broadcast",
"outputs" : ["Broadcast_36_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_31"],
"name" : "Broadcast_32",
"op" : "Broadcast",
"outputs" : ["Broadcast_32_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_25"],
"name" : "Broadcast_26",
"op" : "Broadcast",
"outputs" : ["Broadcast_26_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_18"],
"name" : "Broadcast_19",
"op" : "Broadcast",
"outputs" : ["Broadcast_19_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_80"],
"name" : "Broadcast_81",
"op" : "Broadcast",
"outputs" : ["Broadcast_81_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_76"],
"name" : "Broadcast_77",
"op" : "Broadcast",
"outputs" : ["Broadcast_77_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_70"],
"name" : "Broadcast_71",
"op" : "Broadcast",
"outputs" : ["Broadcast_71_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_63"],
"name" : "Broadcast_64",
"op" : "Broadcast",
"outputs" : ["Broadcast_64_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_120"],
"name" : "Broadcast_121",
"op" : "Broadcast",
"outputs" : ["Broadcast_121_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_112"],
"name" : "Broadcast_113",
"op" : "Broadcast",
"outputs" : ["Broadcast_113_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_105"],
"name" : "Broadcast_106",
"op" : "Broadcast",
"outputs" : ["Broadcast_106_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_157"],
"name" : "Broadcast_158",
"op" : "Broadcast",
"outputs" : ["Broadcast_158_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_149"],
"name" : "Broadcast_150",
"op" : "Broadcast",
"outputs" : ["Broadcast_150_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_142"],
"name" : "Broadcast_143",
"op" : "Broadcast",
"outputs" : ["Broadcast_143_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_197"],
"name" : "Broadcast_198",
"op" : "Broadcast",
"outputs" : ["Broadcast_198_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_189"],
"name" : "Broadcast_190",
"op" : "Broadcast",
"outputs" : ["Broadcast_190_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_182"],
"name" : "Broadcast_183",
"op" : "Broadcast",
"outputs" : ["Broadcast_183_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_234"],
"name" : "Broadcast_235",
"op" : "Broadcast",
"outputs" : ["Broadcast_235_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_226"],
"name" : "Broadcast_227",
"op" : "Broadcast",
"outputs" : ["Broadcast_227_0"],
"shape" : [ 32, 100 ]
},
{
"axes" : [ 0, 1 ],
"inputs" : ["Constant_219"],
"name" : "Broadcast_220",
"op" : "Broadcast",
"outputs" : ["Broadcast_220_0"],
"shape" : [ 32, 100 ]
},
{
"inputs" : [ "Reshape_94", "Reshape_95" ],
"name" : "Dot_96",
"op" : "Dot",
"outputs" : ["Dot_96_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Reshape_171", "Reshape_172" ],
"name" : "Dot_173",
"op" : "Dot",
"outputs" : ["Dot_173_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Reshape_3", "Reshape_4" ],
"name" : "Dot_5",
"op" : "Dot",
"outputs" : ["Dot_5_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Broadcast_54", "Reshape_57" ],
"name" : "Dot_58",
"op" : "Dot",
"outputs" : ["Dot_58_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Broadcast_9", "Reshape_12" ],
"name" : "Dot_13",
"op" : "Dot",
"outputs" : ["Dot_13_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Dot_96", "Broadcast_97" ],
"name" : "Add_98",
"op" : "Add",
"outputs" : ["Add_98_0"]
},
{
"inputs" : [ "Dot_173", "Broadcast_174" ],
"name" : "Add_175",
"op" : "Add",
"outputs" : ["Add_175_0"]
},
{
"inputs" : [ "Dot_5", "Broadcast_6" ],
"name" : "Add_7",
"op" : "Add",
"outputs" : ["Add_7_0"]
},
{
"inputs" : [ "Dot_58", "Broadcast_59" ],
"name" : "Add_60",
"op" : "Add",
"outputs" : ["Add_60_0"]
},
{
"inputs" : [ "Dot_13", "Broadcast_14" ],
"name" : "Add_15",
"op" : "Add",
"outputs" : ["Add_15_0"]
},
{
"inputs" : [ "Add_7", "Add_15" ],
"name" : "Add_16",
"op" : "Add",
"outputs" : ["Add_16_0"]
},
{
"inputs" : ["Add_16"],
"lower_bounds" : [ 0, 300 ],
"name" : "Slice_17",
"op" : "Slice",
"outputs" : ["Slice_17_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 400 ]
},
{
"inputs" : ["Add_16"],
"lower_bounds" : [ 0, 100 ],
"name" : "Slice_24",
"op" : "Slice",
"outputs" : ["Slice_24_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 200 ]
},
{
"inputs" : ["Add_16"],
"lower_bounds" : [ 0, 0 ],
"name" : "Slice_34",
"op" : "Slice",
"outputs" : ["Slice_34_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 100 ]
},
{
"inputs" : ["Add_16"],
"lower_bounds" : [ 0, 200 ],
"name" : "Slice_41",
"op" : "Slice",
"outputs" : ["Slice_41_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 300 ]
},
{
"inputs" : ["Slice_17"],
"name" : "Negative_20",
"op" : "Negative",
"outputs" : ["Negative_20_0"]
},
{
"inputs" : ["Slice_24"],
"name" : "Negative_27",
"op" : "Negative",
"outputs" : ["Negative_27_0"]
},
{
"inputs" : ["Slice_34"],
"name" : "Negative_37",
"op" : "Negative",
"outputs" : ["Negative_37_0"]
},
{
"inputs" : ["Slice_41"],
"name" : "Tanh_42",
"op" : "Tanh",
"outputs" : ["Tanh_42_0"]
},
{
"inputs" : ["Negative_20"],
"name" : "Exp_21",
"op" : "Exp",
"outputs" : ["Exp_21_0"]
},
{
"inputs" : ["Negative_27"],
"name" : "Exp_28",
"op" : "Exp",
"outputs" : ["Exp_28_0"]
},
{
"inputs" : ["Negative_37"],
"name" : "Exp_38",
"op" : "Exp",
"outputs" : ["Exp_38_0"]
},
{
"inputs" : [ "Broadcast_19", "Exp_21" ],
"name" : "Add_22",
"op" : "Add",
"outputs" : ["Add_22_0"]
},
{
"inputs" : [ "Broadcast_26", "Exp_28" ],
"name" : "Add_29",
"op" : "Add",
"outputs" : ["Add_29_0"]
},
{
"inputs" : [ "Broadcast_36", "Exp_38" ],
"name" : "Add_39",
"op" : "Add",
"outputs" : ["Add_39_0"]
},
{
"inputs" : [ "Broadcast_19", "Add_22" ],
"name" : "Divide_23",
"op" : "Divide",
"outputs" : ["Divide_23_0"]
},
{
"inputs" : [ "Broadcast_26", "Add_29" ],
"name" : "Divide_30",
"op" : "Divide",
"outputs" : ["Divide_30_0"]
},
{
"inputs" : [ "Broadcast_36", "Add_39" ],
"name" : "Divide_40",
"op" : "Divide",
"outputs" : ["Divide_40_0"]
},
{
"inputs" : [ "Divide_30", "Broadcast_32" ],
"name" : "Multiply_33",
"op" : "Multiply",
"outputs" : ["Multiply_33_0"]
},
{
"inputs" : [ "Divide_40", "Tanh_42" ],
"name" : "Multiply_43",
"op" : "Multiply",
"outputs" : ["Multiply_43_0"]
},
{
"inputs" : [ "Multiply_33", "Multiply_43" ],
"name" : "Add_44",
"op" : "Add",
"outputs" : ["Add_44_0"]
},
{
"inputs" : ["Add_44"],
"name" : "Tanh_45",
"op" : "Tanh",
"outputs" : ["Tanh_45_0"]
},
{
"inputs" : [ "Divide_23", "Tanh_45" ],
"name" : "Multiply_46",
"op" : "Multiply",
"outputs" : ["Multiply_46_0"]
},
{
"inputs" : [ "Multiply_46", "Reshape_49" ],
"name" : "Dot_50",
"op" : "Dot",
"outputs" : ["Dot_50_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Multiply_46", "Reshape_99" ],
"name" : "Dot_100",
"op" : "Dot",
"outputs" : ["Dot_100_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Dot_50", "Broadcast_51" ],
"name" : "Add_52",
"op" : "Add",
"outputs" : ["Add_52_0"]
},
{
"inputs" : [ "Dot_100", "Broadcast_101" ],
"name" : "Add_102",
"op" : "Add",
"outputs" : ["Add_102_0"]
},
{
"inputs" : [ "Add_52", "Add_60" ],
"name" : "Add_61",
"op" : "Add",
"outputs" : ["Add_61_0"]
},
{
"inputs" : [ "Add_98", "Add_102" ],
"name" : "Add_103",
"op" : "Add",
"outputs" : ["Add_103_0"]
},
{
"inputs" : ["Add_61"],
"lower_bounds" : [ 0, 300 ],
"name" : "Slice_62",
"op" : "Slice",
"outputs" : ["Slice_62_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 400 ]
},
{
"inputs" : ["Add_61"],
"lower_bounds" : [ 0, 100 ],
"name" : "Slice_69",
"op" : "Slice",
"outputs" : ["Slice_69_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 200 ]
},
{
"inputs" : ["Add_61"],
"lower_bounds" : [ 0, 0 ],
"name" : "Slice_79",
"op" : "Slice",
"outputs" : ["Slice_79_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 100 ]
},
{
"inputs" : ["Add_61"],
"lower_bounds" : [ 0, 200 ],
"name" : "Slice_86",
"op" : "Slice",
"outputs" : ["Slice_86_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 300 ]
},
{
"inputs" : ["Add_103"],
"lower_bounds" : [ 0, 300 ],
"name" : "Slice_104",
"op" : "Slice",
"outputs" : ["Slice_104_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 400 ]
},
{
"inputs" : ["Add_103"],
"lower_bounds" : [ 0, 100 ],
"name" : "Slice_111",
"op" : "Slice",
"outputs" : ["Slice_111_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 200 ]
},
{
"inputs" : ["Add_103"],
"lower_bounds" : [ 0, 0 ],
"name" : "Slice_119",
"op" : "Slice",
"outputs" : ["Slice_119_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 100 ]
},
{
"inputs" : ["Add_103"],
"lower_bounds" : [ 0, 200 ],
"name" : "Slice_126",
"op" : "Slice",
"outputs" : ["Slice_126_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 300 ]
},
{
"inputs" : ["Slice_62"],
"name" : "Negative_65",
"op" : "Negative",
"outputs" : ["Negative_65_0"]
},
{
"inputs" : ["Slice_69"],
"name" : "Negative_72",
"op" : "Negative",
"outputs" : ["Negative_72_0"]
},
{
"inputs" : ["Slice_79"],
"name" : "Negative_82",
"op" : "Negative",
"outputs" : ["Negative_82_0"]
},
{
"inputs" : ["Slice_86"],
"name" : "Tanh_87",
"op" : "Tanh",
"outputs" : ["Tanh_87_0"]
},
{
"inputs" : ["Slice_104"],
"name" : "Negative_107",
"op" : "Negative",
"outputs" : ["Negative_107_0"]
},
{
"inputs" : ["Slice_111"],
"name" : "Negative_114",
"op" : "Negative",
"outputs" : ["Negative_114_0"]
},
{
"inputs" : ["Slice_119"],
"name" : "Negative_122",
"op" : "Negative",
"outputs" : ["Negative_122_0"]
},
{
"inputs" : ["Slice_126"],
"name" : "Tanh_127",
"op" : "Tanh",
"outputs" : ["Tanh_127_0"]
},
{
"inputs" : ["Negative_65"],
"name" : "Exp_66",
"op" : "Exp",
"outputs" : ["Exp_66_0"]
},
{
"inputs" : ["Negative_72"],
"name" : "Exp_73",
"op" : "Exp",
"outputs" : ["Exp_73_0"]
},
{
"inputs" : ["Negative_82"],
"name" : "Exp_83",
"op" : "Exp",
"outputs" : ["Exp_83_0"]
},
{
"inputs" : ["Negative_107"],
"name" : "Exp_108",
"op" : "Exp",
"outputs" : ["Exp_108_0"]
},
{
"inputs" : ["Negative_114"],
"name" : "Exp_115",
"op" : "Exp",
"outputs" : ["Exp_115_0"]
},
{
"inputs" : ["Negative_122"],
"name" : "Exp_123",
"op" : "Exp",
"outputs" : ["Exp_123_0"]
},
{
"inputs" : [ "Broadcast_64", "Exp_66" ],
"name" : "Add_67",
"op" : "Add",
"outputs" : ["Add_67_0"]
},
{
"inputs" : [ "Broadcast_71", "Exp_73" ],
"name" : "Add_74",
"op" : "Add",
"outputs" : ["Add_74_0"]
},
{
"inputs" : [ "Broadcast_81", "Exp_83" ],
"name" : "Add_84",
"op" : "Add",
"outputs" : ["Add_84_0"]
},
{
"inputs" : [ "Broadcast_106", "Exp_108" ],
"name" : "Add_109",
"op" : "Add",
"outputs" : ["Add_109_0"]
},
{
"inputs" : [ "Broadcast_113", "Exp_115" ],
"name" : "Add_116",
"op" : "Add",
"outputs" : ["Add_116_0"]
},
{
"inputs" : [ "Broadcast_121", "Exp_123" ],
"name" : "Add_124",
"op" : "Add",
"outputs" : ["Add_124_0"]
},
{
"inputs" : [ "Broadcast_64", "Add_67" ],
"name" : "Divide_68",
"op" : "Divide",
"outputs" : ["Divide_68_0"]
},
{
"inputs" : [ "Broadcast_71", "Add_74" ],
"name" : "Divide_75",
"op" : "Divide",
"outputs" : ["Divide_75_0"]
},
{
"inputs" : [ "Broadcast_81", "Add_84" ],
"name" : "Divide_85",
"op" : "Divide",
"outputs" : ["Divide_85_0"]
},
{
"inputs" : [ "Broadcast_106", "Add_109" ],
"name" : "Divide_110",
"op" : "Divide",
"outputs" : ["Divide_110_0"]
},
{
"inputs" : [ "Broadcast_113", "Add_116" ],
"name" : "Divide_117",
"op" : "Divide",
"outputs" : ["Divide_117_0"]
},
{
"inputs" : [ "Broadcast_121", "Add_124" ],
"name" : "Divide_125",
"op" : "Divide",
"outputs" : ["Divide_125_0"]
},
{
"inputs" : [ "Divide_75", "Broadcast_77" ],
"name" : "Multiply_78",
"op" : "Multiply",
"outputs" : ["Multiply_78_0"]
},
{
"inputs" : [ "Divide_85", "Tanh_87" ],
"name" : "Multiply_88",
"op" : "Multiply",
"outputs" : ["Multiply_88_0"]
},
{
"inputs" : [ "Divide_117", "Add_44" ],
"name" : "Multiply_118",
"op" : "Multiply",
"outputs" : ["Multiply_118_0"]
},
{
"inputs" : [ "Divide_125", "Tanh_127" ],
"name" : "Multiply_128",
"op" : "Multiply",
"outputs" : ["Multiply_128_0"]
},
{
"inputs" : [ "Multiply_78", "Multiply_88" ],
"name" : "Add_89",
"op" : "Add",
"outputs" : ["Add_89_0"]
},
{
"inputs" : [ "Multiply_118", "Multiply_128" ],
"name" : "Add_129",
"op" : "Add",
"outputs" : ["Add_129_0"]
},
{
"inputs" : ["Add_89"],
"name" : "Tanh_90",
"op" : "Tanh",
"outputs" : ["Tanh_90_0"]
},
{
"inputs" : ["Add_129"],
"name" : "Tanh_130",
"op" : "Tanh",
"outputs" : ["Tanh_130_0"]
},
{
"inputs" : [ "Divide_68", "Tanh_90" ],
"name" : "Multiply_91",
"op" : "Multiply",
"outputs" : ["Multiply_91_0"]
},
{
"inputs" : [ "Divide_110", "Tanh_130" ],
"name" : "Multiply_131",
"op" : "Multiply",
"outputs" : ["Multiply_131_0"]
},
{
"input_order" : [ 0, 1 ],
"inputs" : ["Multiply_91"],
"name" : "Reshape_92",
"op" : "Reshape",
"output_shape" : [ 32, 1, 100 ],
"outputs" : ["Reshape_92_0"]
},
{
"inputs" : [ "Multiply_91", "Reshape_136" ],
"name" : "Dot_137",
"op" : "Dot",
"outputs" : ["Dot_137_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Multiply_131", "Reshape_132" ],
"name" : "Dot_133",
"op" : "Dot",
"outputs" : ["Dot_133_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Multiply_131", "Reshape_176" ],
"name" : "Dot_177",
"op" : "Dot",
"outputs" : ["Dot_177_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Dot_137", "Broadcast_138" ],
"name" : "Add_139",
"op" : "Add",
"outputs" : ["Add_139_0"]
},
{
"inputs" : [ "Dot_133", "Broadcast_134" ],
"name" : "Add_135",
"op" : "Add",
"outputs" : ["Add_135_0"]
},
{
"inputs" : [ "Dot_177", "Broadcast_178" ],
"name" : "Add_179",
"op" : "Add",
"outputs" : ["Add_179_0"]
},
{
"inputs" : [ "Add_135", "Add_139" ],
"name" : "Add_140",
"op" : "Add",
"outputs" : ["Add_140_0"]
},
{
"inputs" : [ "Add_175", "Add_179" ],
"name" : "Add_180",
"op" : "Add",
"outputs" : ["Add_180_0"]
},
{
"inputs" : ["Add_140"],
"lower_bounds" : [ 0, 300 ],
"name" : "Slice_141",
"op" : "Slice",
"outputs" : ["Slice_141_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 400 ]
},
{
"inputs" : ["Add_140"],
"lower_bounds" : [ 0, 100 ],
"name" : "Slice_148",
"op" : "Slice",
"outputs" : ["Slice_148_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 200 ]
},
{
"inputs" : ["Add_140"],
"lower_bounds" : [ 0, 0 ],
"name" : "Slice_156",
"op" : "Slice",
"outputs" : ["Slice_156_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 100 ]
},
{
"inputs" : ["Add_140"],
"lower_bounds" : [ 0, 200 ],
"name" : "Slice_163",
"op" : "Slice",
"outputs" : ["Slice_163_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 300 ]
},
{
"inputs" : ["Add_180"],
"lower_bounds" : [ 0, 300 ],
"name" : "Slice_181",
"op" : "Slice",
"outputs" : ["Slice_181_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 400 ]
},
{
"inputs" : ["Add_180"],
"lower_bounds" : [ 0, 100 ],
"name" : "Slice_188",
"op" : "Slice",
"outputs" : ["Slice_188_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 200 ]
},
{
"inputs" : ["Add_180"],
"lower_bounds" : [ 0, 0 ],
"name" : "Slice_196",
"op" : "Slice",
"outputs" : ["Slice_196_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 100 ]
},
{
"inputs" : ["Add_180"],
"lower_bounds" : [ 0, 200 ],
"name" : "Slice_203",
"op" : "Slice",
"outputs" : ["Slice_203_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 300 ]
},
{
"inputs" : ["Slice_141"],
"name" : "Negative_144",
"op" : "Negative",
"outputs" : ["Negative_144_0"]
},
{
"inputs" : ["Slice_148"],
"name" : "Negative_151",
"op" : "Negative",
"outputs" : ["Negative_151_0"]
},
{
"inputs" : ["Slice_156"],
"name" : "Negative_159",
"op" : "Negative",
"outputs" : ["Negative_159_0"]
},
{
"inputs" : ["Slice_163"],
"name" : "Tanh_164",
"op" : "Tanh",
"outputs" : ["Tanh_164_0"]
},
{
"inputs" : ["Slice_181"],
"name" : "Negative_184",
"op" : "Negative",
"outputs" : ["Negative_184_0"]
},
{
"inputs" : ["Slice_188"],
"name" : "Negative_191",
"op" : "Negative",
"outputs" : ["Negative_191_0"]
},
{
"inputs" : ["Slice_196"],
"name" : "Negative_199",
"op" : "Negative",
"outputs" : ["Negative_199_0"]
},
{
"inputs" : ["Slice_203"],
"name" : "Tanh_204",
"op" : "Tanh",
"outputs" : ["Tanh_204_0"]
},
{
"inputs" : ["Negative_144"],
"name" : "Exp_145",
"op" : "Exp",
"outputs" : ["Exp_145_0"]
},
{
"inputs" : ["Negative_151"],
"name" : "Exp_152",
"op" : "Exp",
"outputs" : ["Exp_152_0"]
},
{
"inputs" : ["Negative_159"],
"name" : "Exp_160",
"op" : "Exp",
"outputs" : ["Exp_160_0"]
},
{
"inputs" : ["Negative_184"],
"name" : "Exp_185",
"op" : "Exp",
"outputs" : ["Exp_185_0"]
},
{
"inputs" : ["Negative_191"],
"name" : "Exp_192",
"op" : "Exp",
"outputs" : ["Exp_192_0"]
},
{
"inputs" : ["Negative_199"],
"name" : "Exp_200",
"op" : "Exp",
"outputs" : ["Exp_200_0"]
},
{
"inputs" : [ "Broadcast_143", "Exp_145" ],
"name" : "Add_146",
"op" : "Add",
"outputs" : ["Add_146_0"]
},
{
"inputs" : [ "Broadcast_150", "Exp_152" ],
"name" : "Add_153",
"op" : "Add",
"outputs" : ["Add_153_0"]
},
{
"inputs" : [ "Broadcast_158", "Exp_160" ],
"name" : "Add_161",
"op" : "Add",
"outputs" : ["Add_161_0"]
},
{
"inputs" : [ "Broadcast_183", "Exp_185" ],
"name" : "Add_186",
"op" : "Add",
"outputs" : ["Add_186_0"]
},
{
"inputs" : [ "Broadcast_190", "Exp_192" ],
"name" : "Add_193",
"op" : "Add",
"outputs" : ["Add_193_0"]
},
{
"inputs" : [ "Broadcast_198", "Exp_200" ],
"name" : "Add_201",
"op" : "Add",
"outputs" : ["Add_201_0"]
},
{
"inputs" : [ "Broadcast_143", "Add_146" ],
"name" : "Divide_147",
"op" : "Divide",
"outputs" : ["Divide_147_0"]
},
{
"inputs" : [ "Broadcast_150", "Add_153" ],
"name" : "Divide_154",
"op" : "Divide",
"outputs" : ["Divide_154_0"]
},
{
"inputs" : [ "Broadcast_158", "Add_161" ],
"name" : "Divide_162",
"op" : "Divide",
"outputs" : ["Divide_162_0"]
},
{
"inputs" : [ "Broadcast_183", "Add_186" ],
"name" : "Divide_187",
"op" : "Divide",
"outputs" : ["Divide_187_0"]
},
{
"inputs" : [ "Broadcast_190", "Add_193" ],
"name" : "Divide_194",
"op" : "Divide",
"outputs" : ["Divide_194_0"]
},
{
"inputs" : [ "Broadcast_198", "Add_201" ],
"name" : "Divide_202",
"op" : "Divide",
"outputs" : ["Divide_202_0"]
},
{
"inputs" : [ "Divide_154", "Add_89" ],
"name" : "Multiply_155",
"op" : "Multiply",
"outputs" : ["Multiply_155_0"]
},
{
"inputs" : [ "Divide_162", "Tanh_164" ],
"name" : "Multiply_165",
"op" : "Multiply",
"outputs" : ["Multiply_165_0"]
},
{
"inputs" : [ "Divide_194", "Add_129" ],
"name" : "Multiply_195",
"op" : "Multiply",
"outputs" : ["Multiply_195_0"]
},
{
"inputs" : [ "Divide_202", "Tanh_204" ],
"name" : "Multiply_205",
"op" : "Multiply",
"outputs" : ["Multiply_205_0"]
},
{
"inputs" : [ "Multiply_155", "Multiply_165" ],
"name" : "Add_166",
"op" : "Add",
"outputs" : ["Add_166_0"]
},
{
"inputs" : [ "Multiply_195", "Multiply_205" ],
"name" : "Add_206",
"op" : "Add",
"outputs" : ["Add_206_0"]
},
{
"inputs" : ["Add_166"],
"name" : "Tanh_167",
"op" : "Tanh",
"outputs" : ["Tanh_167_0"]
},
{
"inputs" : ["Add_206"],
"name" : "Tanh_207",
"op" : "Tanh",
"outputs" : ["Tanh_207_0"]
},
{
"inputs" : [ "Divide_147", "Tanh_167" ],
"name" : "Multiply_168",
"op" : "Multiply",
"outputs" : ["Multiply_168_0"]
},
{
"inputs" : [ "Divide_187", "Tanh_207" ],
"name" : "Multiply_208",
"op" : "Multiply",
"outputs" : ["Multiply_208_0"]
},
{
"input_order" : [ 0, 1 ],
"inputs" : ["Multiply_168"],
"name" : "Reshape_169",
"op" : "Reshape",
"output_shape" : [ 32, 1, 100 ],
"outputs" : ["Reshape_169_0"]
},
{
"inputs" : [ "Multiply_168", "Reshape_213" ],
"name" : "Dot_214",
"op" : "Dot",
"outputs" : ["Dot_214_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Multiply_208", "Reshape_209" ],
"name" : "Dot_210",
"op" : "Dot",
"outputs" : ["Dot_210_0"],
"reduction_axes_count" : 1
},
{
"inputs" : [ "Dot_214", "Broadcast_215" ],
"name" : "Add_216",
"op" : "Add",
"outputs" : ["Add_216_0"]
},
{
"inputs" : [ "Dot_210", "Broadcast_211" ],
"name" : "Add_212",
"op" : "Add",
"outputs" : ["Add_212_0"]
},
{
"inputs" : [ "Add_212", "Add_216" ],
"name" : "Add_217",
"op" : "Add",
"outputs" : ["Add_217_0"]
},
{
"inputs" : ["Add_217"],
"lower_bounds" : [ 0, 300 ],
"name" : "Slice_218",
"op" : "Slice",
"outputs" : ["Slice_218_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 400 ]
},
{
"inputs" : ["Add_217"],
"lower_bounds" : [ 0, 100 ],
"name" : "Slice_225",
"op" : "Slice",
"outputs" : ["Slice_225_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 200 ]
},
{
"inputs" : ["Add_217"],
"lower_bounds" : [ 0, 0 ],
"name" : "Slice_233",
"op" : "Slice",
"outputs" : ["Slice_233_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 100 ]
},
{
"inputs" : ["Add_217"],
"lower_bounds" : [ 0, 200 ],
"name" : "Slice_240",
"op" : "Slice",
"outputs" : ["Slice_240_0"],
"strides" : [ 1, 1 ],
"upper_bounds" : [ 32, 300 ]
},
{
"inputs" : ["Slice_218"],
"name" : "Negative_221",
"op" : "Negative",
"outputs" : ["Negative_221_0"]
},
{
"inputs" : ["Slice_225"],
"name" : "Negative_228",
"op" : "Negative",
"outputs" : ["Negative_228_0"]
},
{
"inputs" : ["Slice_233"],
"name" : "Negative_236",
"op" : "Negative",
"outputs" : ["Negative_236_0"]
},
{
"inputs" : ["Slice_240"],
"name" : "Tanh_241",
"op" : "Tanh",
"outputs" : ["Tanh_241_0"]
},
{
"inputs" : ["Negative_221"],
"name" : "Exp_222",
"op" : "Exp",
"outputs" : ["Exp_222_0"]
},
{
"inputs" : ["Negative_228"],
"name" : "Exp_229",
"op" : "Exp",
"outputs" : ["Exp_229_0"]
},
{
"inputs" : ["Negative_236"],
"name" : "Exp_237",
"op" : "Exp",
"outputs" : ["Exp_237_0"]
},
{
"inputs" : [ "Broadcast_220", "Exp_222" ],
"name" : "Add_223",
"op" : "Add",
"outputs" : ["Add_223_0"]
},
{
"inputs" : [ "Broadcast_227", "Exp_229" ],
"name" : "Add_230",
"op" : "Add",
"outputs" : ["Add_230_0"]
},
{
"inputs" : [ "Broadcast_235", "Exp_237" ],
"name" : "Add_238",
"op" : "Add",
"outputs" : ["Add_238_0"]
},
{
"inputs" : [ "Broadcast_220", "Add_223" ],
"name" : "Divide_224",
"op" : "Divide",
"outputs" : ["Divide_224_0"]
},
{
"inputs" : [ "Broadcast_227", "Add_230" ],
"name" : "Divide_231",
"op" : "Divide",
"outputs" : ["Divide_231_0"]
},
{
"inputs" : [ "Broadcast_235", "Add_238" ],
"name" : "Divide_239",
"op" : "Divide",
"outputs" : ["Divide_239_0"]
},
{
"inputs" : [ "Divide_231", "Add_166" ],
"name" : "Multiply_232",
"op" : "Multiply",
"outputs" : ["Multiply_232_0"]
},
{
"inputs" : [ "Divide_239", "Tanh_241" ],
"name" : "Multiply_242",
"op" : "Multiply",
"outputs" : ["Multiply_242_0"]
},
{
"inputs" : [ "Multiply_232", "Multiply_242" ],
"name" : "Add_243",
"op" : "Add",
"outputs" : ["Add_243_0"]
},
{
"inputs" : ["Add_243"],
"name" : "Tanh_244",
"op" : "Tanh",
"outputs" : ["Tanh_244_0"]
},
{
"inputs" : [ "Divide_224", "Tanh_244" ],
"name" : "Multiply_245",
"op" : "Multiply",
"outputs" : ["Multiply_245_0"]
},
{
"input_order" : [ 0, 1 ],
"inputs" : ["Multiply_245"],
"name" : "Reshape_246",
"op" : "Reshape",
"output_shape" : [ 32, 1, 100 ],
"outputs" : ["Reshape_246_0"]
},
{
"axis" : 1,
"inputs" : [ "Reshape_92", "Reshape_169", "Reshape_246" ],
"name" : "Concat_247",
"op" : "Concat",
"outputs" : ["Concat_247_0"]
},
{
"inputs" : ["Concat_247"],
"name" : "Result_248",
"op" : "Result",
"outputs" : ["Result_248_0"]
}
],
"parameters" : [
"Parameter_0", "Parameter_1", "Parameter_2", "Parameter_10", "Parameter_11",
"Parameter_47", "Parameter_48", "Parameter_55", "Parameter_56",
"Parameter_93", "Parameter_170"
],
"result" : ["Result_248"]
}]
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment