ngraph · Commit a29f6754
authored Mar 26, 2018 by Nick Korovaiko, committed by Scott Cyphers on Mar 26, 2018
add batchnorm training type tests (#746)
parent 2e8c6286
Showing 2 changed files with 90 additions and 14 deletions (+90 / -14):

src/ngraph/op/batch_norm.cpp    +7   -14
test/type_prop.cpp              +83  -0
src/ngraph/op/batch_norm.cpp
@@ -44,6 +44,7 @@ ngraph::op::BatchNorm::BatchNorm(double eps,
     }
 
     auto et = input->get_element_type();
+    Shape channel_shape{m_bn_input_shape[1]};
     const char* input_names[] = {"gamma", "beta"};
 
     for (size_t i = 0; i < 2; i++)
@@ -54,21 +55,13 @@ ngraph::op::BatchNorm::BatchNorm(double eps,
                            " isn't equal to input data's type";
             throw ngraph_error(err_msg.c_str());
         }
-    }
-
-    if ((gamma->get_shape().size() != 1) || (beta->get_shape().size() != 1))
-    {
-        throw ngraph_error("gamma and beta shoud have rank 1");
-    }
-
-    if (gamma->get_shape().size() != beta->get_shape().size())
-    {
-        throw ngraph_error("gamma and beta rank does not match");
-    }
 
-    if (gamma->get_element_type() != beta->get_element_type())
-    {
-        throw ngraph_error("gamma and beta element type does not match");
+        if (get_input_op(i)->get_shape() != channel_shape)
+        {
+            auto err_msg = std::string("The shape of ") + input_names[i] +
+                           " isn't equal to input channel's shape";
+            throw ngraph_error(err_msg.c_str());
+        }
     }
 
     add_output(input->get_element_type(), m_bn_input_shape);
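Aside, for readers skimming the diff above: the change folds the old standalone gamma/beta checks into the existing per-input loop, comparing each named input against the input tensor's element type and its channel shape (dimension 1). The following is only a rough, self-contained sketch of that validation pattern in plain C++; TensorInfo and validate_batchnorm_args are hypothetical stand-ins for illustration, not ngraph's actual API.

#include <cstddef>
#include <stdexcept>
#include <string>
#include <vector>

// Hypothetical stand-in for a tensor's static type information.
struct TensorInfo
{
    std::string element_type;        // e.g. "f32"
    std::vector<std::size_t> shape;  // e.g. {4, 3, 2, 2}
};

// Sketch of the refactored check: gamma and beta must match the input's
// element type and have the shape of the channel axis (dimension 1).
void validate_batchnorm_args(const TensorInfo& input,
                             const TensorInfo& gamma,
                             const TensorInfo& beta)
{
    if (input.shape.size() < 2)
    {
        throw std::invalid_argument("input tensor must have at least rank 2");
    }
    const std::vector<std::size_t> channel_shape{input.shape[1]};
    const TensorInfo* args[] = {&gamma, &beta};
    const char* input_names[] = {"gamma", "beta"};
    for (std::size_t i = 0; i < 2; i++)
    {
        if (args[i]->element_type != input.element_type)
        {
            throw std::invalid_argument(std::string("The element type of ") + input_names[i] +
                                        " isn't equal to input data's type");
        }
        if (args[i]->shape != channel_shape)
        {
            throw std::invalid_argument(std::string("The shape of ") + input_names[i] +
                                        " isn't equal to input channel's shape");
        }
    }
}

int main()
{
    // Input is f32 with shape {4, 3, 2, 2}, so gamma/beta must be f32 with shape {3}.
    validate_batchnorm_args({"f32", {4, 3, 2, 2}}, {"f32", {3}}, {"f32", {3}});
}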
test/type_prop.cpp
@@ -58,6 +58,89 @@ TEST(type_prop, broadcast_deduce_incorrect)
     }
 }
 
+TEST(type_prop, batchnorm_rank_less_than_2)
+{
+    auto dummy = make_shared<op::Parameter>(element::f32, Shape{1});
+    try
+    {
+        auto bc = make_shared<op::BatchNorm>(0.001, dummy, dummy, dummy);
+        FAIL() << "BatchNorm c-tor should throw for tensors whose rank is less than 2";
+    }
+    catch (const ngraph_error& error)
+    {
+        EXPECT_EQ(error.what(),
+                  std::string("input tensor to batchnorm must have tensor of at least rank 2"));
+    }
+    catch (...)
+    {
+        FAIL() << "Deduced type check failed for unexpected reason";
+    }
+}
+
+TEST(type_prop, batchnorm_zero_channel_check)
+{
+    auto dummy = make_shared<op::Parameter>(element::f32, Shape{1, 0, 2, 3});
+    try
+    {
+        auto bc = make_shared<op::BatchNorm>(0.001, dummy, dummy, dummy);
+        FAIL() << "BatchNorm c-tor should throw for tensors w/ zero-dimension channels";
+    }
+    catch (const ngraph_error& error)
+    {
+        EXPECT_EQ(error.what(),
+                  std::string("input tensor must have at least one channel axis for batch normalization"));
+    }
+    catch (...)
+    {
+        FAIL() << "Deduced type check failed for unexpected reason";
+    }
+}
+
+TEST(type_prop, batchnorm_et_check)
+{
+    auto dummy_f32 = make_shared<op::Parameter>(element::f32, Shape{3});
+    auto dummy_f64 = make_shared<op::Parameter>(element::f64, Shape{3});
+    auto param = make_shared<op::Parameter>(element::f32, Shape{4, 3, 2, 2});
+    try
+    {
+        auto bc = make_shared<op::BatchNorm>(0.001, dummy_f32, dummy_f64, param);
+        FAIL() << "BatchNorm c-tor should throw for different element types";
+    }
+    catch (const ngraph_error& error)
+    {
+        EXPECT_EQ(error.what(),
+                  std::string("The element type of beta isn't equal to input data's type"));
+    }
+    catch (...)
+    {
+        FAIL() << "Deduced type check failed for unexpected reason";
+    }
+}
+
+TEST(type_prop, batchnorm_shape_check)
+{
+    auto dummy_3 = make_shared<op::Parameter>(element::f32, Shape{3});
+    auto dummy_4 = make_shared<op::Parameter>(element::f32, Shape{4});
+    auto param = make_shared<op::Parameter>(element::f32, Shape{4, 3, 2, 2});
+    try
+    {
+        auto bc = make_shared<op::BatchNorm>(0.001, dummy_4, dummy_3, param);
+        FAIL() << "BatchNorm c-tor should throw if gamma and beta shapes don't match";
+    }
+    catch (const ngraph_error& error)
+    {
+        EXPECT_EQ(error.what(),
+                  std::string("The shape of gamma isn't equal to input channel's shape"));
+    }
+    catch (...)
+    {
+        FAIL() << "Deduced type check failed for unexpected reason";
+    }
+}
+
+TEST(type_prop, batchnorm_backprop_4d_check)
+{
+    auto dummy = make_shared<op::Parameter>(element::f32, Shape{});
...
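A side note on the test pattern above: GoogleTest's EXPECT_THROW / ASSERT_THROW macros check only the exception type, which is presumably why these tests catch the error themselves and compare what() against the exact message. Below is a minimal, self-contained sketch of that same pattern outside of ngraph; the Widget class is a hypothetical stand-in, and the test assumes GoogleTest with gtest_main linked in.

#include <gtest/gtest.h>
#include <stdexcept>
#include <string>

// Hypothetical stand-in for a constructor that validates its arguments.
struct Widget
{
    explicit Widget(int channels)
    {
        if (channels == 0)
        {
            throw std::invalid_argument("widget must have at least one channel");
        }
    }
};

TEST(example, widget_zero_channel_check)
{
    try
    {
        Widget w{0};
        FAIL() << "Widget c-tor should throw for zero channels";
    }
    catch (const std::invalid_argument& error)
    {
        // Compare the exact message, not just the exception type.
        EXPECT_EQ(error.what(), std::string("widget must have at least one channel"));
    }
    catch (...)
    {
        FAIL() << "Check failed for unexpected reason";
    }
}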