Commit eb2ce8e5
Authored Dec 06, 2019 by Tomasz Socha, committed by Michał Karzyński on Dec 06, 2019

[SPEC] Fix output_shape input in (Group)ConvolutionBackpropData ops (#4005)

parent 206bc657

Showing 6 changed files with 69 additions and 35 deletions (+69 -35):

+21 -12  src/ngraph/op/convolution.cpp
+24 -17  src/ngraph/op/fused/group_conv.cpp
+18 -3   src/ngraph/pass/opset0_downgrade.cpp
+4 -1    src/ngraph/pass/opset1_upgrade.cpp
+1 -1    test/backend/convolution.in.cpp
+1 -1    test/opset_pass/convolution_opset_pass.cpp
src/ngraph/op/convolution.cpp
@@ -205,13 +205,28 @@ op::v1::ConvolutionBackpropData::ConvolutionBackpropData(const Output<Node>& dat
 const PartialShape op::v1::ConvolutionBackpropData::get_output_shape() const
 {
-    PartialShape shape{PartialShape::dynamic()};
+    PartialShape shape{vector<Dimension>(m_strides.size() + 2)};
+    auto data_pshape = get_input_partial_shape(0);
+    if (data_pshape.rank().is_static())
+    {
+        shape[0] = data_pshape[0]; // N
+    }
+    auto filters_pshape = get_input_partial_shape(1);
+    if (filters_pshape.rank().is_static())
+    {
+        shape[1] = filters_pshape[1]; // C
+    }
     bool is_output_shape_present = get_inputs().size() == 3;
     if (is_output_shape_present)
     {
         if (auto const_op = as_type<op::Constant>(input_value(2).get_node()))
         {
-            shape = const_op->get_shape_val();
+            auto output_shape = const_op->get_shape_val();
+            // Populate spatials
+            for (int i = 0; i < output_shape.size(); ++i)
+            {
+                shape[i + 2] = output_shape[i];
+            }
         }
     }
     return shape;
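The effect of the reworked get_output_shape() is that the third input (output_shape) now carries only the spatial dimensions, while N is taken from the data shape and C from filters_pshape[1]. A minimal, self-contained sketch of that assembly, using plain std::vector<int64_t> in place of ngraph's PartialShape/Dimension (the function name, the -1 "dynamic" placeholder, and the filters layout comment are illustrative assumptions, not ngraph API):

#include <cstdint>
#include <iostream>
#include <vector>

// Illustrative stand-in for the new behaviour: start with num_spatial_dims + 2
// "dynamic" dimensions (-1 here), fill N from the data shape, C from the
// filters shape, and the spatial dims from the output_shape input.
std::vector<int64_t> assemble_output_shape(const std::vector<int64_t>& data_shape,
                                           const std::vector<int64_t>& filters_shape,
                                           const std::vector<int64_t>& spatial_output_shape)
{
    std::vector<int64_t> shape(spatial_output_shape.size() + 2, -1); // -1 == dynamic
    if (!data_shape.empty())
        shape[0] = data_shape[0]; // N
    if (filters_shape.size() > 1)
        shape[1] = filters_shape[1]; // C (assuming v1 filters laid out as {C_IN, C_OUT, spatial...})
    for (size_t i = 0; i < spatial_output_shape.size(); ++i)
        shape[i + 2] = spatial_output_shape[i]; // populate spatials
    return shape;
}

int main()
{
    // data {1, 16, 5, 5}, filters {16, 32, 3, 3}, requested spatial output {10, 10}
    auto shape = assemble_output_shape({1, 16, 5, 5}, {16, 32, 3, 3}, {10, 10});
    for (auto d : shape)
        std::cout << d << ' '; // prints: 1 32 10 10
    std::cout << '\n';
}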
@@ -270,13 +285,6 @@ void op::v1::ConvolutionBackpropData::validate_and_infer_types()
     if (is_output_shape_present)
     {
         set_input_is_relevant_to_shape(2);
-        if (output_pshape.is_static() && data_pshape.is_static())
-        {
-            auto data_shape = data_pshape.to_shape();
-            auto output_shape = output_pshape.to_shape();
-            output_shape.insert(output_shape.begin(), data_shape.begin(), data_shape.begin() + 1);
-            output_pshape = output_shape;
-        }
     }
     else
     {
@@ -295,12 +303,13 @@ void op::v1::ConvolutionBackpropData::validate_and_infer_types()
             for (size_t i = 0; i < data_spatial_rank; ++i)
             {
                 size_t tmp = m_strides[i] * (data_shape[i + 2] - 1) +
-                             ((filters_shape[i] + 2 - 1) * m_dilations[i] + 1) - m_pads_begin[i] -
+                             ((filters_shape[i + 2] - 1) * m_dilations[i] + 1) - m_pads_begin[i] -
                              m_pads_end[i] + output_padding[i];
                 output_shape.push_back(tmp);
-                output_pshape = output_shape;
             }
-            output_shape.insert(output_shape.begin(), data_shape.begin(), data_shape.begin() + 1);
+            output_shape.insert(output_shape.begin(), filters_shape.at(1));
+            output_shape.insert(output_shape.begin(), data_shape.at(0));
+            output_pshape = output_shape;
         }
     }
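The spatial size computed in the loop above is the usual transposed-convolution relation: output[i] = strides[i] * (data[i] - 1) + (filters[i] - 1) * dilations[i] + 1 - pads_begin[i] - pads_end[i] + output_padding[i]. A small self-contained check of that arithmetic in plain C++ (no ngraph types; the function name and the concrete numbers are only an example):

#include <cstddef>
#include <iostream>
#include <vector>

// Transposed-convolution spatial output size, one value per spatial dimension,
// mirroring the expression used in validate_and_infer_types() above.
std::vector<size_t> backprop_spatial_shape(const std::vector<size_t>& data_spatial,
                                           const std::vector<size_t>& filter_spatial,
                                           const std::vector<size_t>& strides,
                                           const std::vector<size_t>& dilations,
                                           const std::vector<size_t>& pads_begin,
                                           const std::vector<size_t>& pads_end,
                                           const std::vector<size_t>& output_padding)
{
    std::vector<size_t> out;
    for (size_t i = 0; i < data_spatial.size(); ++i)
    {
        size_t tmp = strides[i] * (data_spatial[i] - 1) +
                     ((filter_spatial[i] - 1) * dilations[i] + 1) - pads_begin[i] -
                     pads_end[i] + output_padding[i];
        out.push_back(tmp);
    }
    return out;
}

int main()
{
    // 2x upsampling deconvolution: 5x5 input, 3x3 kernel, stride 2, pads 1, output_padding 1
    auto out = backprop_spatial_shape({5, 5}, {3, 3}, {2, 2}, {1, 1}, {1, 1}, {1, 1}, {1, 1});
    std::cout << out[0] << "x" << out[1] << '\n'; // 2*(5-1) + 3 - 1 - 1 + 1 = 10  ->  "10x10"
}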
src/ngraph/op/fused/group_conv.cpp
@@ -192,13 +192,28 @@ op::v1::GroupConvolutionBackpropData::GroupConvolutionBackpropData(
 const PartialShape op::v1::GroupConvolutionBackpropData::get_output_shape() const
 {
-    PartialShape shape{PartialShape::dynamic()};
+    PartialShape shape{vector<Dimension>(m_strides.size() + 2)};
+    auto data_pshape = get_input_partial_shape(0);
+    if (data_pshape.rank().is_static())
+    {
+        shape[0] = data_pshape[0]; // N
+    }
+    auto filters_pshape = get_input_partial_shape(1);
+    if (filters_pshape.rank().is_static())
+    {
+        shape[1] = filters_pshape[1]; // C
+    }
     bool is_output_shape_present = get_inputs().size() == 3;
     if (is_output_shape_present)
     {
         if (auto const_op = as_type<op::Constant>(input_value(2).get_node()))
         {
-            shape = const_op->get_shape_val();
+            auto output_shape = const_op->get_shape_val();
+            // Populate spatials
+            for (int i = 0; i < output_shape.size(); ++i)
+            {
+                shape[i + 2] = output_shape[i];
+            }
         }
     }
     return shape;
@@ -257,26 +272,16 @@ void op::v1::GroupConvolutionBackpropData::validate_and_infer_types()
     if (is_output_shape_present)
     {
         set_input_is_relevant_to_shape(2);
-        if (output_pshape.is_static() && data_pshape.is_static())
-        {
-            auto data_shape = data_pshape.to_shape();
-            auto output_shape = output_pshape.to_shape();
-            output_shape.insert(output_shape.begin(), data_shape.begin(), data_shape.begin() + 1);
-            output_pshape = output_shape;
-        }
     }
     else
     {
         if (filters_pshape.is_static() && data_pshape.is_static())
         {
             auto filters_shape = filters_pshape.to_shape();
-            filters_shape.erase(filters_shape.begin(), filters_shape.begin() + 3); // remove {G, O, I}
             auto data_shape = data_pshape.to_shape();
-            data_shape.erase(data_shape.begin(), data_shape.begin() + 2); // remove {N, C}
             Shape output_shape;
-            auto data_spatial_rank = data_shape.size();
+            auto data_spatial_rank = data_shape.size() - 2;
             auto output_padding = get_output_padding();
             if (output_padding.size() == 0)
             {
@@ -284,13 +289,15 @@ void op::v1::GroupConvolutionBackpropData::validate_and_infer_types()
             }
             for (size_t i = 0; i < data_spatial_rank; ++i)
             {
-                size_t tmp = m_strides[i] * (data_shape[i] - 1) +
-                             ((filters_shape[i] - 1) * m_dilations[i] + 1) - m_pads_begin[i] -
+                size_t tmp = m_strides[i] * (data_shape[i + 2] - 1) +
+                             ((filters_shape[i + 3] - 1) * m_dilations[i] + 1) - m_pads_begin[i] -
                              m_pads_end[i] + output_padding[i];
                 output_shape.push_back(tmp);
-                output_pshape = output_shape;
             }
-            output_shape.insert(output_shape.begin(), data_shape.begin(), data_shape.begin() + 1);
+            output_shape.insert(output_shape.begin(), filters_shape.at(0) * filters_shape.at(2)); // GROUP * C_OUTPUT
+            output_shape.insert(output_shape.begin(), data_shape.at(0));
+            output_pshape = output_shape;
         }
     }
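In the grouped case the channel dimension of the result is assembled from the filters shape as filters_shape.at(0) * filters_shape.at(2) (annotated "GROUP * C_OUTPUT" in the source), and N again comes from the data shape. A quick standalone illustration with plain vectors; the 5-D filters layout {G, O, I, kH, kW} is assumed from the "remove {G, O, I}" comment above, and the spatial values correspond to a 7x7 input with a 3x3 kernel, stride 1 and no padding:

#include <cstddef>
#include <iostream>
#include <vector>

int main()
{
    // data:    {N, C, H, W}      = {1, 8, 7, 7}
    // filters: {G, O, I, kH, kW} = {4, 2, 2, 3, 3}  (assumed layout, per "remove {G, O, I}")
    std::vector<size_t> data_shape{1, 8, 7, 7};
    std::vector<size_t> filters_shape{4, 2, 2, 3, 3};
    std::vector<size_t> output_shape{9, 9}; // spatial dims as computed by the loop above

    // Prepend channels and batch exactly as the new code does.
    output_shape.insert(output_shape.begin(), filters_shape.at(0) * filters_shape.at(2)); // GROUP * C_OUTPUT
    output_shape.insert(output_shape.begin(), data_shape.at(0));                          // N

    for (auto d : output_shape)
        std::cout << d << ' '; // prints: 1 8 9 9
    std::cout << '\n';
}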
src/ngraph/pass/opset0_downgrade.cpp
@@ -135,11 +135,12 @@ namespace
    bool op_cast(shared_ptr<op::v1::ConvolutionBackpropData> node)
    {
-        auto output_shape = as_type_ptr<op::Constant>(node->input_value(2).get_node_shared_ptr());
+        auto output_shape_node =
+            as_type_ptr<op::Constant>(node->input_value(2).get_node_shared_ptr());
        const auto data_arg = node->input(0).get_source_output();
        const auto filters_arg = node->input(1).get_source_output();
        const auto strides = node->get_strides();
-        NGRAPH_CHECK(output_shape,
+        NGRAPH_CHECK(output_shape_node,
                     "Unable to convert ConvolutionBackpropData:v1 to ConvolutionBackpropData:v0 "
                     "if output_shape is not constant. Node: ",
                     *node);
@@ -155,8 +156,22 @@ namespace
                     "with output padding other than `0`. Node: ",
                     *node);
+        auto data_pshape = data_arg.get_partial_shape();
+        auto filters_pshape = filters_arg.get_partial_shape();
+        NGRAPH_CHECK(data_pshape.rank().is_static() && data_pshape[0].is_static() &&
+                         filters_pshape.rank().is_static() && filters_pshape[1].is_static(),
+                     "Unable to convert ConvolutionBackpropData:v1 to ConvolutionBackpropData:v0 "
+                     "if data shape N and filters shape C dimensions are not static. Node: ",
+                     *node);
+        // Add N and C dimenstions to output_shape
+        auto output_shape = output_shape_node->get_shape_val();
+        output_shape.insert(output_shape.begin(), static_cast<size_t>(filters_pshape[1]));
+        output_shape.insert(output_shape.begin(), static_cast<size_t>(data_pshape[0]));
        auto replacement_node =
-            make_shared<op::v0::ConvolutionBackpropData>(output_shape->get_shape_val(),
+            make_shared<op::v0::ConvolutionBackpropData>(output_shape,
                                                         filters_arg,
                                                         data_arg,
                                                         node->get_strides(),
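The downgrade pass therefore reconstructs the full v0-style shape by prepending C (taken from filters_pshape[1]) and N (taken from data_pshape[0]) onto the spatial-only constant. A minimal standalone sketch of that step with plain vectors; variable names and the concrete values (which match the downgrade test below) are illustrative:

#include <cstddef>
#include <iostream>
#include <vector>

int main()
{
    // v1 output_shape input: spatial dimensions only, e.g. {100}.
    std::vector<size_t> output_shape{100};

    size_t n = 64; // batch size, from data_pshape[0] in the pass
    size_t c = 3;  // channels, from filters_pshape[1] in the pass

    // Rebuild the full {N, C, spatial...} shape expected by v0::ConvolutionBackpropData.
    output_shape.insert(output_shape.begin(), c);
    output_shape.insert(output_shape.begin(), n);

    for (auto d : output_shape)
        std::cout << d << ' '; // prints: 64 3 100
    std::cout << '\n';
}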
src/ngraph/pass/opset1_upgrade.cpp
@@ -179,7 +179,10 @@ namespace
        auto replacement_node = make_shared<op::v1::ConvolutionBackpropData>(
            node->input_value(1), // data
            node->input_value(0), // filters
-            op::Constant::create(element::i64, Shape{data_batch_shape.size()}, data_batch_shape),
+            op::Constant::create(
+                element::i64,
+                Shape{data_batch_shape.size() - 2},
+                vector<size_t>(data_batch_shape.begin() + 2, data_batch_shape.end())),
            strides,
            pads_begin,
            pads_end,
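Going the other way, the upgrade pass now hands the v1 op only the spatial part of the old v0 data_batch_shape, which is what the new Constant above encodes. A tiny standalone sketch of that slice in plain C++ (variable names are hypothetical; the values mirror the {64, 3, 100} shape used in the tests):

#include <cstddef>
#include <iostream>
#include <vector>

int main()
{
    // v0::ConvolutionBackpropData carried the full forward-data shape {N, C, spatial...}.
    std::vector<size_t> data_batch_shape{64, 3, 100};

    // v1::ConvolutionBackpropData's output_shape input takes only the spatial dims,
    // so drop the first two entries, as the new Constant in the upgrade pass does.
    std::vector<size_t> spatial_only(data_batch_shape.begin() + 2, data_batch_shape.end());

    for (auto d : spatial_only)
        std::cout << d << ' '; // prints: 100
    std::cout << '\n';
}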
test/backend/convolution.in.cpp
@@ -175,7 +175,7 @@ NGRAPH_TEST(${BACKEND_NAME}, dyn_convolution_backprop_data)
     for (int i = 0; i < 2 * 3 * 5 * 5; i++)
         expected_result.emplace_back(i);
-    vector<int64_t> shapes = {2, 3, 5, 5};
+    vector<int64_t> shapes = {5, 5};
     // Create some tensors for input/output
     auto a = backend->create_tensor(element::f32, shape_delta);
test/opset_pass/convolution_opset_pass.cpp
@@ -78,7 +78,7 @@ TEST(opset_transform, opset1_convolution_downgrade_pass)
 TEST(opset_transform, opset1_convolution_backprop_data_downgrade_pass)
 {
-    auto data_batch_shape = op::Constant::create<int64_t>(element::i64, Shape{3}, {64, 3, 100});
+    auto data_batch_shape = op::Constant::create<int64_t>(element::i64, Shape{1}, {100});
     auto filters = make_shared<op::Parameter>(element::f32, Shape{128, 3, 10});
     auto delta = make_shared<op::Parameter>(element::f32, Shape{64, 128, 96});
     auto strides = Strides{1};