Commit 6e6c8af4 authored Feb 26, 2019 by Adam Rogowiec, committed by Michał Karzyński on Feb 26, 2019
[ONNX] Enhance LSTM support. (#2408)
parent 25c9152f
Showing 11 changed files with 250 additions and 22 deletions.
src/ngraph/frontend/onnx_import/CMakeLists.txt                        +2   -0
src/ngraph/frontend/onnx_import/core/node.cpp                         +9   -0
src/ngraph/frontend/onnx_import/op/lstm.cpp                           +0   -0
src/ngraph/frontend/onnx_import/op/matmul.cpp                         +1   -1
src/ngraph/frontend/onnx_import/op/supported_ops.md                   +1   -1
src/ngraph/frontend/onnx_import/utils/reshape.cpp                     +7   -10
src/ngraph/frontend/onnx_import/utils/reshape.hpp                     +8   -10
src/ngraph/frontend/onnx_import/utils/rnn/activation_functions.cpp    +71  -0
src/ngraph/frontend/onnx_import/utils/rnn/activation_functions.hpp    +66  -0
test/models/onnx/lstm_fwd_with_clip.onnx                              +0   -0
test/onnx_import.in.cpp                                               +85  -0
src/ngraph/frontend/onnx_import/CMakeLists.txt
@@ -177,6 +177,8 @@ add_library(onnx_import STATIC
     utils/reduction.hpp
     utils/reshape.cpp
     utils/reshape.hpp
+    utils/rnn/activation_functions.cpp
+    utils/rnn/activation_functions.hpp
     utils/variadic.hpp
 )

 set(ONNX_IMPORT_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR} CACHE INTERNAL "")
src/ngraph/frontend/onnx_import/core/node.cpp
@@ -258,6 +258,15 @@ namespace ngraph
                     name, std::move(default_value));
         }

+        template <>
+        std::vector<std::string> Node::get_attribute_value(
+            const std::string& name, std::vector<std::string> default_value) const
+        {
+            return m_pimpl->template get_attribute_value<std::vector<std::string>>(
+                name, std::move(default_value));
+        }
+
         template <>
         std::vector<Tensor> Node::get_attribute_value(
             const std::string& name, std::vector<Tensor> default_value) const
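The new std::vector<std::string> specialization is what lets importer code read string-list attributes, such as the activation names an ONNX LSTM node carries. A minimal usage sketch, assuming an onnx_import::Node in scope; the include path, helper name and defaults are illustrative, not taken from this commit:

#include <string>
#include <vector>

#include "core/node.hpp" // onnx_import::Node (include path assumed)

// Illustrative helper, not part of this commit: read the ONNX LSTM "activations"
// attribute, falling back to sigmoid/tanh/tanh when the attribute is absent.
std::vector<std::string> get_lstm_activations(const ngraph::onnx_import::Node& node)
{
    return node.get_attribute_value<std::vector<std::string>>(
        "activations", {"sigmoid", "tanh", "tanh"});
}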
src/ngraph/frontend/onnx_import/op/lstm.cpp
This diff is collapsed; its contents are not shown here.
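For reference, the ONNX LSTM operator that lstm.cpp imports is specified by the gate equations below (taken from the ONNX operator specification, not transcribed from the collapsed file). Here f, g and h are the configurable activations (sigmoid, tanh, tanh by default), W, R and B are the input, recurrence and bias weights, P are the optional peephole weights, \odot is element-wise multiplication, and the clip attribute, exercised by the new lstm_fwd_with_clip test model, bounds the activation inputs to [-clip, clip]:

i_t = f(X_t W_i^T + H_{t-1} R_i^T + P_i \odot C_{t-1} + W_{bi} + R_{bi})
f_t = f(X_t W_f^T + H_{t-1} R_f^T + P_f \odot C_{t-1} + W_{bf} + R_{bf})
c_t = g(X_t W_c^T + H_{t-1} R_c^T + W_{bc} + R_{bc})
C_t = f_t \odot C_{t-1} + i_t \odot c_t
o_t = f(X_t W_o^T + H_{t-1} R_o^T + P_o \odot C_t + W_{bo} + R_{bo})
H_t = o_t \odot h(C_t)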
src/ngraph/frontend/onnx_import/op/matmul.cpp
@@ -138,7 +138,7 @@ namespace ngraph
                     // Expand sub_dot result with single empty outermost axis, in order to
                     // later concatenate sub_dots at this axis.
-                    small_dots.at(g) = reshape::add_empty_axes(sub_dot);
+                    small_dots.at(g) = reshape::expand_dims(sub_dot);
                 }
                 // Concatenate sub_dots on groups axis.
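The renamed helper is used here exactly as the comment describes: each sub_dot gains a leading length-1 axis so the per-group results can be concatenated along it. A standalone sketch of that pattern with illustrative Parameter shapes; only expand_dims itself comes from this commit, the rest uses standard nGraph ops:

#include <memory>

#include "ngraph/op/concat.hpp"
#include "ngraph/op/parameter.hpp"
#include "utils/reshape.hpp" // onnx_import reshape helpers (include path assumed)

using namespace ngraph;

// Illustrative only: two group results of shape {2, 3} become {1, 2, 3} each
// and are concatenated along the new axis into a {2, 2, 3} result.
std::shared_ptr<Node> concat_groups_sketch()
{
    auto g0 = std::make_shared<op::Parameter>(element::f32, Shape{2, 3});
    auto g1 = std::make_shared<op::Parameter>(element::f32, Shape{2, 3});

    auto e0 = onnx_import::reshape::expand_dims(g0); // default axis = 0 -> {1, 2, 3}
    auto e1 = onnx_import::reshape::expand_dims(g1);

    return std::make_shared<op::Concat>(NodeVector{e0, e1}, 0); // -> {2, 2, 3}
}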
src/ngraph/frontend/onnx_import/op/supported_ops.md
@@ -112,7 +112,7 @@ opset versions starting from `1` to `6` and to the latest opset version.
 |------|-----------------|--------|--------|---------|
 | Erf | (9) | 284 | 442 | Need separate kernel for this in nGraph core. |
 | Pad | 1-2- | 273 | 416 | Not fully supported. |
-| LSTM | 1-7- | | 430 | Not fully supported. |
+| LSTM | 1-7- | | 476 | Mixed sequences length not supported yet. |
 | MaxUnpool | (9) | 286, 289 | 447 | |
 | LpPool | - | 291 | 437 | Unsupported by nGraph - only max/avg pooling ops. Need separate kernel. |
 | Multinomial | - | 199 | 435 | Lack of PRNG in nGraph. |
src/ngraph/frontend/onnx_import/utils/reshape.cpp
@@ -221,17 +221,14 @@ namespace ngraph
                 node, get_default_axis_vector(node->get_shape().size()), shape);
         }

-        std::shared_ptr<ngraph::Node> add_empty_axes(const std::shared_ptr<ngraph::Node>& node,
-                                                     std::size_t outermost_axes_count,
-                                                     std::size_t innermost_axes_count)
+        std::shared_ptr<ngraph::Node> expand_dims(const std::shared_ptr<ngraph::Node>& node,
+                                                  std::size_t axis)
         {
-            // Add outermost empty dimensions.
-            Shape output_shape(outermost_axes_count, 1);
-            output_shape.insert(std::end(output_shape),
-                                std::begin(node->get_shape()),
-                                std::end(node->get_shape()));
-            // Add innermost empty dimensions.
-            output_shape.insert(std::end(output_shape), innermost_axes_count, 1);
+            Shape output_shape(node->get_shape());
+            // Add empty axis at specified position.
+            auto empty_axis_it = std::begin(output_shape);
+            std::advance(empty_axis_it, axis);
+            output_shape.insert(empty_axis_it, 1);
             return std::make_shared<ngraph::op::Reshape>(
                 node, reshape::get_default_axis_vector(node->get_shape().size()), output_shape);
         }
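A quick sketch of the new behaviour for a non-default axis, with an illustrative shape: inserting the length-1 dimension at position 1 of a {2, 3} tensor yields a Reshape node of shape {2, 1, 3}. Only expand_dims comes from this commit; the Parameter and include path are assumptions for illustration:

#include <memory>

#include "ngraph/op/parameter.hpp"
#include "utils/reshape.hpp" // include path assumed

using namespace ngraph;

// Illustrative only: expand a {2, 3} tensor at axis 1.
std::shared_ptr<Node> expand_at_axis_one_sketch()
{
    auto input = std::make_shared<op::Parameter>(element::f32, Shape{2, 3});
    auto expanded = onnx_import::reshape::expand_dims(input, 1);
    // expanded->get_shape() == Shape{2, 1, 3}
    return expanded;
}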
src/ngraph/frontend/onnx_import/utils/reshape.hpp
@@ -127,19 +127,17 @@ namespace ngraph
                 return reshape(node, get_default_axis_vector(node->get_shape().size()), shape);
             }

-            /// \brief Expands node tensor shape with empty axes.
+            /// \brief Expands node tensor shape with empty axis at specified position.
             ///
-            /// \param[in] node                   The node to be expanded.
-            /// \param[in] outermost_axes_count   The number of added outermost axes.
-            ///                                   At the front of the shape.
-            /// \param[in] innermost_axes_count   The number of added innermost axes.
-            ///                                   At the end of the shape.
+            /// \param[in] node  The node to be expanded.
+            /// \param[in] axis  The position in the expanded axes where the new axis is placed.
             ///
-            /// \return The node with added empty axes.
+            /// \return The node with added empty axis.
             ///
-            std::shared_ptr<ngraph::Node> add_empty_axes(const std::shared_ptr<ngraph::Node>& node,
-                                                         std::size_t outermost_axes_count = 1,
-                                                         std::size_t innermost_axes_count = 0);
+            std::shared_ptr<ngraph::Node> expand_dims(const std::shared_ptr<ngraph::Node>& node,
+                                                      std::size_t axis = 0);

             /// \brief Split node on specified axis into multiple parts.
             ///
src/ngraph/frontend/onnx_import/utils/rnn/activation_functions.cpp
new file mode 100644
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include <functional>
#include <iterator>
#include <unordered_map>
#include "activation_functions.hpp"
#include "ngraph/op/relu.hpp"
#include "ngraph/op/sigmoid.hpp"
#include "ngraph/op/tanh.hpp"
namespace ngraph
{
    namespace onnx_import
    {
        namespace rnn
        {
            namespace detail
            {
                std::shared_ptr<ngraph::Node> sigmoid(const std::shared_ptr<ngraph::Node>& arg)
                {
                    return std::make_shared<ngraph::op::Sigmoid>(arg);
                }

                std::shared_ptr<ngraph::Node> tanh(const std::shared_ptr<ngraph::Node>& arg)
                {
                    return std::make_shared<ngraph::op::Tanh>(arg);
                }

                std::shared_ptr<ngraph::Node> relu(const std::shared_ptr<ngraph::Node>& arg)
                {
                    return std::make_shared<ngraph::op::Relu>(arg);
                }
            } // namespace detail

            ActivationFunction get_activation_func_by_name(const std::string& func_name)
            {
                using ActivationFunctionMap = std::unordered_map<std::string, ActivationFunction>;

                static ActivationFunctionMap func_map{
                    {"sigmoid", std::bind(detail::sigmoid, std::placeholders::_1)},
                    {"tanh", std::bind(detail::tanh, std::placeholders::_1)},
                    {"relu", std::bind(detail::relu, std::placeholders::_1)}};

                auto func_it = func_map.find(func_name);
                if (func_it == std::end(func_map))
                {
                    throw error::UnknownActivationFunction(func_name);
                }
                return func_it->second;
            }
        } // namespace rnn
    }     // namespace onnx_import
}         // namespace ngraph
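A minimal usage sketch of the new helper, with an illustrative input node; only get_activation_func_by_name, ActivationFunction and the error type come from this commit, the include path and shapes are assumptions:

#include <memory>

#include "ngraph/op/parameter.hpp"
#include "utils/rnn/activation_functions.hpp" // include path assumed

using namespace ngraph;

// Illustrative only: look up an activation by its lower-case name and apply it.
std::shared_ptr<Node> apply_named_activation_sketch()
{
    auto x = std::make_shared<op::Parameter>(element::f32, Shape{2, 4});

    // Unknown names (e.g. "softsign") throw rnn::error::UnknownActivationFunction.
    onnx_import::rnn::ActivationFunction func =
        onnx_import::rnn::get_activation_func_by_name("tanh");

    return func(x); // wraps x in an ngraph::op::Tanh node
}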
src/ngraph/frontend/onnx_import/utils/rnn/activation_functions.hpp
new file mode 100644
//*****************************************************************************
// Copyright 2017-2019 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#pragma once
#include <memory>
#include <string>
#include "ngraph/except.hpp"
#include "ngraph/node.hpp"
namespace ngraph
{
    namespace onnx_import
    {
        namespace rnn
        {
            namespace error
            {
                struct UnknownActivationFunction : ngraph_error
                {
                    UnknownActivationFunction(const std::string& func_name)
                        : ngraph_error{"Unknown activation function: " + func_name}
                    {
                    }
                };
            }

            namespace detail
            {
                std::shared_ptr<ngraph::Node> sigmoid(const std::shared_ptr<ngraph::Node>& arg);
                std::shared_ptr<ngraph::Node> tanh(const std::shared_ptr<ngraph::Node>& arg);
                std::shared_ptr<ngraph::Node> relu(const std::shared_ptr<ngraph::Node>& arg);
            }

            using ActivationFunction =
                std::function<std::shared_ptr<ngraph::Node>(const std::shared_ptr<ngraph::Node>&)>;

            /// \brief Gets the activation function by name.
            ///
            /// \param[in] func_name  The function name.
            ///
            /// \throws UnknownActivationFunction When provided func_name is unknown.
            ///
            /// \return The activation function object.
            ///
            ActivationFunction get_activation_func_by_name(const std::string& func_name);
        } // namespace rnn
    }     // namespace onnx_import
}         // namespace ngraph
test/models/onnx/lstm_fwd_with_clip.onnx
new file mode 100644 (binary file added)
test/onnx_import.in.cpp
@@ -1864,6 +1864,91 @@ TEST(onnx_${BACKEND_NAME}, model_top_k)
    EXPECT_TRUE(test::all_close(expected_indices_output, indices_output));
}

TEST(onnx_${BACKEND_NAME}, model_lstm_fwd_with_clip)
{
    auto function = onnx_import::import_onnx_model(
        file_util::path_join(SERIALIZED_ZOO, "onnx/lstm_fwd_with_clip.onnx"));

    Inputs inputs{};
    // X
    inputs.emplace_back(std::vector<float>{-0.455351, -0.276391, -0.185934, -0.269585});

    // W
    inputs.emplace_back(std::vector<float>{
        -0.494659f, 0.0453352f, -0.487793f, 0.417264f, -0.0175329f, 0.489074f,
        -0.446013f, 0.414029f, -0.0091708f, -0.255364f, -0.106952f, -0.266717f,
        -0.0888852f, -0.428709f, -0.283349f, 0.208792f});

    // R
    inputs.emplace_back(std::vector<float>{
        0.146626f, -0.0620289f, -0.0815302f, 0.100482f, -0.219535f, -0.306635f,
        -0.28515f, -0.314112f, -0.228172f, 0.405972f, 0.31576f, 0.281487f,
        -0.394864f, 0.42111f, -0.386624f, -0.390225f});

    // B
    inputs.emplace_back(std::vector<float>{
        0.381619f, 0.0323954f, -0.14449f, 0.420804f, -0.258721f, 0.45056f,
        -0.250755f, 0.0967895f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f});

    // P
    inputs.emplace_back(std::vector<float>{0.2345f, 0.5235f, 0.4378f, 0.3475f, 0.8927f, 0.3456f});

    Outputs expected_output{};
    // Y_data
    expected_output.emplace_back(
        std::vector<float>{-0.02280854f, 0.02744377f, -0.03516197f, 0.03875681f});
    // Y_h_data
    expected_output.emplace_back(std::vector<float>{-0.03516197f, 0.03875681f});
    // Y_c_data
    expected_output.emplace_back(std::vector<float>{-0.07415761f, 0.07395997f});

    Outputs outputs{execute(function, inputs, "${BACKEND_NAME}")};

    EXPECT_TRUE(outputs.size() == expected_output.size());
    for (std::size_t i{0}; i < expected_output.size(); ++i)
    {
        // We have to enlarge tolerance bits to 3 - it's only one bit more than default value.
        // The discrepancies may occur at most on 7th decimal position.
        EXPECT_TRUE(test::all_close_f(expected_output.at(i), outputs.at(i), 3));
    }
}

TEST(onnx_${BACKEND_NAME}, model_missing_input)
{
    onnx_import::register_operator(
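The third argument of test::all_close_f in the test above is a tolerance expressed in bits rather than an absolute epsilon. As a rough standalone illustration of the idea only (a generic ULP-style comparison, not nGraph's actual all_close_f implementation), two floats can be compared by counting how many representable float32 values separate them:

#include <cmath>
#include <cstdint>
#include <cstring>

// Generic illustration only: measure how far apart two floats are in units of
// representable float32 values and accept the pair within a small budget.
static bool close_within_ulps(float a, float b, std::int64_t max_ulps)
{
    if (std::isnan(a) || std::isnan(b))
    {
        return false;
    }
    if (std::signbit(a) != std::signbit(b))
    {
        return a == b; // only +0.0 / -0.0 compare equal across the sign boundary
    }
    std::int32_t ia;
    std::int32_t ib;
    std::memcpy(&ia, &a, sizeof a);
    std::memcpy(&ib, &b, sizeof b);
    std::int64_t distance = static_cast<std::int64_t>(ia) - static_cast<std::int64_t>(ib);
    return (distance < 0 ? -distance : distance) <= max_ulps;
}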