Commit d92ef6b6, authored Oct 22, 2019 by Adam Rogowiec, committed by Michał Karzyński on Oct 22, 2019
[Spec] Downgrade pass for Convolution op (#3776)
parent 64356bc5
Showing 4 changed files with 190 additions and 18 deletions (+190 / -18)
src/ngraph/frontend/onnx_import/op/conv.cpp    +14 -16
src/ngraph/op/convolution.cpp                   +1  -1
src/ngraph/pass/opset0_downgrade.cpp           +76  -1
test/opset_pass/convolution_opset_pass.cpp     +99  -0
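For context, the added tests below drive the new pass through nGraph's pass manager. A minimal usage sketch along the same lines (assuming f is a std::shared_ptr<Function> that already contains opset-1 convolution nodes):

    #include <memory>

    #include "ngraph/ngraph.hpp"
    #include "ngraph/pass/manager.hpp"
    #include "ngraph/pass/opset0_downgrade.hpp"

    // Sketch only: replaces op::v1::Convolution, ConvolutionBackpropData and
    // ConvolutionBackpropFilters nodes in f with their v0 equivalents.
    void downgrade_to_opset0(std::shared_ptr<ngraph::Function> f)
    {
        ngraph::pass::Manager pass_manager;
        pass_manager.register_pass<ngraph::pass::Opset0Downgrade>();
        pass_manager.run_passes(f);
    }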
src/ngraph/frontend/onnx_import/op/conv.cpp
@@ -82,14 +82,13 @@ namespace ngraph
                         filters, filters_lower_bounds, filters_upper_bounds);

                     convolution_nodes.push_back(
-                        std::make_shared<ngraph::op::Convolution>(sliced_data,
-                                                                  sliced_filters,
-                                                                  strides,
-                                                                  dilations,
-                                                                  padding_below,
-                                                                  padding_above,
-                                                                  Strides{},
-                                                                  auto_pad));
+                        std::make_shared<ngraph::op::v1::Convolution>(sliced_data,
+                                                                      sliced_filters,
+                                                                      strides,
+                                                                      padding_below,
+                                                                      padding_above,
+                                                                      dilations,
+                                                                      auto_pad));
                 }
                 std::size_t concatenation_axis = 1;
                 return std::make_shared<ngraph::op::Concat>(convolution_nodes,
...

@@ -97,14 +96,13 @@ namespace ngraph
                 }
                 else
                 {
-                    return std::make_shared<ngraph::op::Convolution>(data,
-                                                                      filters,
-                                                                      strides,
-                                                                      dilations,
-                                                                      padding_below,
-                                                                      padding_above,
-                                                                      Strides{},
-                                                                      auto_pad);
+                    return std::make_shared<ngraph::op::v1::Convolution>(data,
+                                                                         filters,
+                                                                         strides,
+                                                                         padding_below,
+                                                                         padding_above,
+                                                                         dilations,
+                                                                         auto_pad);
                 }
             }
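Note the argument reordering above: the v0 constructor takes dilations before the paddings and ends with an explicit data-dilation Strides, while the v1 constructor takes the paddings first and has no data-dilation parameter. A sketch of the mapping, using the names from the call sites above:

    // v0:  Convolution(data, filters,
    //                  strides,         // window movement strides
    //                  dilations,       // window (filter) dilation strides
    //                  padding_below,
    //                  padding_above,
    //                  Strides{},       // data dilation strides
    //                  auto_pad);
    // v1:  Convolution(data, filters,
    //                  strides,
    //                  padding_below,   // pads_begin
    //                  padding_above,   // pads_end
    //                  dilations,
    //                  auto_pad);       // no data-dilation argument in v1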
src/ngraph/op/convolution.cpp
@@ -215,7 +215,7 @@ void op::v1::ConvolutionBackpropData::validate_and_infer_types()
         forward_result_shape =
             infer_convolution_forward(this,
                                       m_data_batch_shape,
-                                      Strides(m_data_batch_shape.size() - 2, 0),
+                                      Strides(m_data_batch_shape.size() - 2, 1),
                                       m_pads_begin,
                                       m_pads_end,
                                       filters_shape,
...
src/ngraph/pass/opset0_downgrade.cpp
@@ -13,12 +13,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 //*****************************************************************************
 #include "ngraph/pass/opset0_downgrade.hpp"
 #include <cstdint>
 #include "ngraph/graph_util.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/avg_pool.hpp"
 #include "ngraph/op/broadcast.hpp"
 #include "ngraph/op/constant.hpp"
 #include "ngraph/op/convolution.hpp"
 #include "ngraph/op/get_output_element.hpp"
 #include "ngraph/op/pad.hpp"
 #include "ngraph/op/product.hpp"

@@ -27,6 +30,7 @@
 #include "ngraph/op/reshape.hpp"
 #include "ngraph/op/reverse.hpp"
 #include "ngraph/op/sum.hpp"
 #include "ngraph/pass/opset0_downgrade.hpp"

 using namespace std;
 using namespace ngraph;
@@ -103,6 +107,77 @@ bool pass::Opset0Downgrade::run_on_node(shared_ptr<Node> node)
         modified = true;
         break;
     }
     case OP_TYPEID::Convolution:
     {
         auto tmp = as_type_ptr<op::v1::Convolution>(node);
         const auto data_arg = node->input(0).get_source_output();
         const auto filters_arg = node->input(1).get_source_output();
         const PartialShape& data_arg_pshape = node->get_input_partial_shape(0);
         NGRAPH_CHECK(data_arg_pshape.rank().is_static(),
                      "Unable to convert Convolution:v1 to Convolution:v0 if data argument "
                      "rank is dynamic. Node: ",
                      *node);
         const size_t num_spatial_dims = static_cast<size_t>(data_arg_pshape.rank()) - 2;
         auto replacement_node = make_shared<op::v0::Convolution>(data_arg,
                                                                  filters_arg,
                                                                  tmp->get_strides(),
                                                                  tmp->get_dilations(),
                                                                  tmp->get_pads_begin(),
                                                                  tmp->get_pads_end(),
                                                                  Strides(num_spatial_dims, 1),
                                                                  tmp->get_auto_pad());
         replace_node(node, replacement_node);
         modified = true;
         break;
     }
     case OP_TYPEID::ConvolutionBackpropData:
     {
         auto tmp = as_type_ptr<op::v1::ConvolutionBackpropData>(node);
         const auto filters_arg = node->input(0).get_source_output();
         const auto delta_arg = node->input(1).get_source_output();
         const PartialShape& delta_arg_pshape = node->get_input_partial_shape(1);
         NGRAPH_CHECK(delta_arg_pshape.rank().is_static(),
                      "Unable to convert ConvolutionBackpropData:v1 to ConvolutionBackpropData:v0 "
                      "if delta argument rank is dynamic. Node: ",
                      *node);
         const size_t num_spatial_dims = static_cast<size_t>(delta_arg_pshape.rank()) - 2;
         auto replacement_node =
             make_shared<op::v0::ConvolutionBackpropData>(tmp->get_data_batch_shape(),
                                                          filters_arg,
                                                          delta_arg,
                                                          tmp->get_strides(),
                                                          tmp->get_dilations(),
                                                          tmp->get_pads_begin(),
                                                          tmp->get_pads_end(),
                                                          Strides(num_spatial_dims, 1));
         replace_node(node, replacement_node);
         modified = true;
         break;
     }
     case OP_TYPEID::ConvolutionBackpropFilters:
     {
         auto tmp = as_type_ptr<op::v1::ConvolutionBackpropFilters>(node);
         const auto data_arg = node->input(0).get_source_output();
         const auto delta_arg = node->input(1).get_source_output();
         const PartialShape& data_arg_pshape = node->get_input_partial_shape(0);
         NGRAPH_CHECK(data_arg_pshape.rank().is_static(),
                      "Unable to convert ConvolutionBackpropFilters:v1 to "
                      "ConvolutionBackpropFilters:v0 if data argument rank is dynamic. Node: ",
                      *node);
         const size_t num_spatial_dims = static_cast<size_t>(data_arg_pshape.rank()) - 2;
         auto replacement_node =
             make_shared<op::v0::ConvolutionBackpropFilters>(data_arg,
                                                             tmp->get_filters_shape(),
                                                             delta_arg,
                                                             tmp->get_strides(),
                                                             tmp->get_dilations(),
                                                             tmp->get_pads_begin(),
                                                             tmp->get_pads_end(),
                                                             Strides(num_spatial_dims, 1));
         replace_node(node, replacement_node);
         modified = true;
         break;
     }
     case OP_TYPEID::Pad:
     {
         auto tmp = as_type_ptr<op::v1::Pad>(node);
...
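Each case above rebuilds the data-dilation attribute that the v1 ops no longer carry: the spatial rank is the input rank minus the batch and channel dimensions, and the dilation is filled with ones. A quick check against the first test below (a sketch using the test's shapes):

    // data Shape{1, 3, 6, 9} is (N, C, H, W): rank 4, so num_spatial_dims = 4 - 2 = 2.
    // The reconstructed data dilation is Strides(2, 1), i.e. Strides{1, 1}, which is
    // exactly what opset1_convolution_downgrade_pass asserts via
    // conv_v0_node->get_data_dilation_strides().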
test/opset_pass/convolution_opset_pass.cpp
@@ -3,6 +3,7 @@
 #include "ngraph/ngraph.hpp"
 #include "ngraph/pass/manager.hpp"
 #include "ngraph/pass/opset0_downgrade.hpp"
 #include "ngraph/pass/opset1_upgrade.hpp"
 #include "util/test_control.hpp"
 #include "util/type_prop.hpp"

@@ -43,3 +44,101 @@ TEST(opset_transform, opset1_convolution_upgrade_pass)
     EXPECT_EQ(convolution_v1_node->get_auto_pad(), pad_type);
     EXPECT_EQ(convolution_v1_node->get_dilations(), dilations);
 }

 TEST(opset_transform, opset1_convolution_downgrade_pass)
 {
     auto data = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6, 9});
     auto filters = make_shared<op::Parameter>(element::f32, Shape{1, 3, 3, 3});
     CoordinateDiff pads_begin{1, 1};
     CoordinateDiff pads_end{2, 2};
     Strides strides{1, 1};
     Strides dilations{1, 1};
     op::PadType pad_type = op::PadType::EXPLICIT;

     auto convolution_v1 = make_shared<op::v1::Convolution>(
         data, filters, strides, pads_begin, pads_end, dilations, pad_type);
     auto result = make_shared<op::Result>(convolution_v1);
     auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data, filters});

     ngraph::pass::Manager pass_manager;
     pass_manager.register_pass<pass::Opset0Downgrade>();
     pass_manager.run_passes(f);

     auto conv_s0_result = f->get_results().at(0);
     auto node = conv_s0_result->input(0).get_source_output().get_node_shared_ptr();
     auto conv_v0_node = static_pointer_cast<op::v0::Convolution>(node);

     EXPECT_EQ(conv_v0_node->description(), "Convolution");
     EXPECT_EQ(conv_v0_node->get_version(), 0);
     EXPECT_EQ(conv_v0_node->get_window_movement_strides(), strides);
     EXPECT_EQ(conv_v0_node->get_window_dilation_strides(), dilations);
     EXPECT_EQ(conv_v0_node->get_padding_below(), pads_begin);
     EXPECT_EQ(conv_v0_node->get_padding_above(), pads_end);
     EXPECT_EQ(conv_v0_node->get_data_dilation_strides(), (Strides{1, 1}));
     EXPECT_EQ(conv_v0_node->get_pad_type(), pad_type);
 }

 TEST(opset_transform, opset1_convolution_backprop_data_downgrade_pass)
 {
     Shape data_batch_shape{64, 3, 100};
     auto filters = make_shared<op::Parameter>(element::f32, Shape{128, 3, 10});
     auto delta = make_shared<op::Parameter>(element::f32, Shape{64, 128, 96});
     auto strides = Strides{1};
     auto dilations = Strides{1};
     auto padding_begin = CoordinateDiff{2};
     auto padding_end = CoordinateDiff{3};

     auto conv = make_shared<op::v1::ConvolutionBackpropData>(
         data_batch_shape, filters, delta, strides, dilations, padding_begin, padding_end);
     auto result = make_shared<op::Result>(conv);
     auto f = make_shared<Function>(ResultVector{result}, ParameterVector{filters, delta});

     ngraph::pass::Manager pass_manager;
     pass_manager.register_pass<pass::Opset0Downgrade>();
     pass_manager.run_passes(f);

     auto conv_s0_result = f->get_results().at(0);
     auto node = conv_s0_result->input(0).get_source_output().get_node_shared_ptr();
     auto conv_v0_node = static_pointer_cast<op::v0::ConvolutionBackpropData>(node);

     EXPECT_EQ(conv_v0_node->description(), "ConvolutionBackpropData");
     EXPECT_EQ(conv_v0_node->get_version(), 0);
     EXPECT_EQ(conv_v0_node->get_data_batch_shape(), data_batch_shape);
     EXPECT_EQ(conv_v0_node->get_window_movement_strides_forward(), strides);
     EXPECT_EQ(conv_v0_node->get_window_dilation_strides_forward(), dilations);
     EXPECT_EQ(conv_v0_node->get_padding_below_forward(), padding_begin);
     EXPECT_EQ(conv_v0_node->get_padding_above_forward(), padding_end);
     EXPECT_EQ(conv_v0_node->get_data_dilation_strides_forward(), (Strides{1}));
 }

 TEST(opset_transform, opset1_convolution_backprop_filters_downgrade_pass)
 {
     Shape filters_shape{128, 3, 10};
     auto data = make_shared<op::Parameter>(element::f32, Shape{64, 3, 100});
     auto delta = make_shared<op::Parameter>(element::f32, Shape{64, 128, 96});
     auto strides = Strides{1};
     auto dilations = Strides{1};
     auto padding_begin = CoordinateDiff{2};
     auto padding_end = CoordinateDiff{3};

     auto conv = make_shared<op::v1::ConvolutionBackpropFilters>(
         data, filters_shape, delta, strides, dilations, padding_begin, padding_end);
     auto result = make_shared<op::Result>(conv);
     auto f = make_shared<Function>(ResultVector{result}, ParameterVector{data, delta});

     ngraph::pass::Manager pass_manager;
     pass_manager.register_pass<pass::Opset0Downgrade>();
     pass_manager.run_passes(f);

     auto conv_s0_result = f->get_results().at(0);
     auto node = conv_s0_result->input(0).get_source_output().get_node_shared_ptr();
     auto conv_v0_node = static_pointer_cast<op::v0::ConvolutionBackpropFilters>(node);

     EXPECT_EQ(conv_v0_node->description(), "ConvolutionBackpropFilters");
     EXPECT_EQ(conv_v0_node->get_version(), 0);
     EXPECT_EQ(conv_v0_node->get_filters_shape(), filters_shape);
     EXPECT_EQ(conv_v0_node->get_window_movement_strides_forward(), strides);
     EXPECT_EQ(conv_v0_node->get_window_dilation_strides_forward(), dilations);
     EXPECT_EQ(conv_v0_node->get_padding_below_forward(), padding_begin);
     EXPECT_EQ(conv_v0_node->get_padding_above_forward(), padding_end);
     EXPECT_EQ(conv_v0_node->get_data_dilation_strides_forward(), (Strides{1}));
 }