Commit ee8ddabd
authored 5 years ago by baojun, committed by Scott Cyphers 5 years ago
Add UT to reproduce issue from PR3931 (#3999)
* add ut to reproduce issue from PR#3931
* disable ut for plaidml
parent 9b56d081
Showing 2 changed files with 80 additions and 0 deletions
src/ngraph/runtime/plaidml/unit_test.manifest  +1 -0
test/backend/batch_norm.in.cpp  +79 -0
src/ngraph/runtime/plaidml/unit_test.manifest
@@ -132,6 +132,7 @@ batch_norm_fprop_b1c2h2w2
 batch_norm_fprop_b2c2h2w1
 batch_norm_fprop_b2c2d2h1w1
 batch_norm_fprop_inference_b2c2h2w1
+dyn_batch_norm_fprop_b1c2h2w2
 pad_edge_1d
 pad_edge_1d_top_neg
 pad_edge_1d_top_neg_bigger_than_tensor
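(Context: tests listed in a backend's unit_test.manifest are skipped by the NGRAPH_TEST harness for that backend, so this one-line addition is the "disable ut for plaidml" half of the commit: the new dyn_batch_norm_fprop_b1c2h2w2 test will not run on the PlaidML backend.)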
test/backend/batch_norm.in.cpp
@@ -826,3 +826,82 @@ NGRAPH_TEST(${BACKEND_NAME}, batch_norm_fprop_inference_b2c2h2w1)
     ASSERT_TRUE(ngraph::test::all_close(
         expected_result, read_vector<float>(bn_output), 1e-3f, 1e-4f));
 }
+
+NGRAPH_TEST(${BACKEND_NAME}, dyn_batch_norm_fprop_b1c2h2w2)
+{
+    // auto input_shape = Shape{1, 2, 2, 2};
+    auto input = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
+    auto mean_shape = Shape{2};
+    auto var_shape = Shape{2};
+    auto gamma_shape = Shape{2};
+    auto gamma = make_shared<op::Parameter>(element::f32, gamma_shape);
+    auto beta_shape = Shape{2};
+    auto beta = make_shared<op::Parameter>(element::f32, beta_shape);
+    double eps = 0.001;
+    auto shape_r = Shape{1, 2, 2, 2};
+    auto bn = make_shared<op::BatchNormTraining>(input, gamma, beta, eps);
+    auto output_rt = std::make_shared<op::GetOutputElement>(bn, 0);
+    auto mean_rt = std::make_shared<op::GetOutputElement>(bn, 1);
+    auto variance_rt = std::make_shared<op::GetOutputElement>(bn, 2);
+    auto shapeof_mean_rt = std::make_shared<ngraph::op::ShapeOf>(mean_rt);
+    auto rankof_mean_rt = std::make_shared<ngraph::op::ShapeOf>(shapeof_mean_rt);
+    auto rank_scalar = std::make_shared<ngraph::op::Reshape>(
+        rankof_mean_rt, ngraph::AxisVector{0}, ngraph::Shape{});
+    auto range = std::make_shared<ngraph::op::Range>(
+        ngraph::op::Constant::create(ngraph::element::i64, ngraph::Shape{}, {0}),
+        rank_scalar,
+        ngraph::op::Constant::create(ngraph::element::i64, ngraph::Shape{}, {1}));
+    auto one_bcast = std::make_shared<ngraph::op::DynBroadcast>(
+        ngraph::op::Constant::create(mean_rt->get_element_type(), ngraph::Shape{}, {1}),
+        shapeof_mean_rt,
+        range);
+    auto mean_rt_multiplied = std::make_shared<ngraph::op::Multiply>(one_bcast, mean_rt);
+    auto f = make_shared<Function>(NodeVector{output_rt, mean_rt_multiplied, variance_rt},
+                                   ParameterVector{input, gamma, beta});
+    auto backend = runtime::Backend::create("${BACKEND_NAME}", true);
+
+    // Create some tensors for input/output
+    auto _input = backend->create_tensor(element::f32, Shape{1, 2, 2, 2});
+    copy_data(_input,
+              vector<float>{0.54881352f,
+                            0.71518934f,
+                            0.60276335f,
+                            0.54488319f,
+                            0.42365479f,
+                            0.64589411f,
+                            0.4375872f,
+                            0.89177299f});
+    auto _gamma = backend->create_tensor(element::f32, gamma_shape);
+    copy_data(_gamma, vector<float>{1.0f, 1.0f});
+    auto _beta = backend->create_tensor(element::f32, beta_shape);
+    copy_data(_beta, vector<float>{0.0f, 0.0f});
+    auto bn_output = backend->create_dynamic_tensor(element::f32, PartialShape::dynamic());
+    auto result_mean = backend->create_dynamic_tensor(element::f32, PartialShape::dynamic());
+    auto result_variance = backend->create_dynamic_tensor(element::f32, PartialShape::dynamic());
+
+    vector<float> expected_result{-0.71498716f,
+                                  1.48388731f,
+                                  -0.00196938f,
+                                  -0.76693159f,
+                                  -0.91316032f,
+                                  0.23943391f,
+                                  -0.84090298f,
+                                  1.51462936f};
+    vector<float> expected_mean{0.602912f, 0.599727f};
+    vector<float> expected_variance{0.00472505f, 0.0361782f};
+
+    auto handle = backend->compile(f);
+    handle->call_with_validate({bn_output, result_mean, result_variance},
+                               {_input, _gamma, _beta});
+
+    EXPECT_TRUE(test::all_close(expected_result, read_vector<float>(bn_output), 1e-5f, 1e-6f));
+    EXPECT_TRUE(test::all_close(expected_mean, read_vector<float>(result_mean), 1e-5f, 1e-6f));
+    EXPECT_TRUE(
+        test::all_close(expected_variance, read_vector<float>(result_variance), 1e-5f, 1e-6f));
+}
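The core of the repro is the rank-agnostic broadcast idiom in the middle of the test: mean_rt has a dynamic shape, so its shape, its rank, and the set of broadcast axes are all computed at runtime before a scalar 1 is stretched across them. A condensed sketch of just that idiom, where x stands in for any node (the name is illustrative, not part of the commit):

    // Given a node x whose shape may be dynamic:
    auto shape = std::make_shared<ngraph::op::ShapeOf>(x);       // 1-D tensor holding x's shape
    auto rank_1d = std::make_shared<ngraph::op::ShapeOf>(shape); // one-element tensor holding x's rank
    auto rank = std::make_shared<ngraph::op::Reshape>(           // collapse {1} to a scalar
        rank_1d, ngraph::AxisVector{0}, ngraph::Shape{});
    auto axes = std::make_shared<ngraph::op::Range>(             // axis indices 0, 1, ..., rank-1
        ngraph::op::Constant::create(ngraph::element::i64, ngraph::Shape{}, {0}),
        rank,
        ngraph::op::Constant::create(ngraph::element::i64, ngraph::Shape{}, {1}));
    auto ones = std::make_shared<ngraph::op::DynBroadcast>(      // scalar 1 broadcast to x's shape
        ngraph::op::Constant::create(x->get_element_type(), ngraph::Shape{}, {1}),
        shape,
        axes);

Multiplying ones by mean_rt leaves the values unchanged, but it forces the backend through its dynamic-shape execution path, which is presumably where the issue from PR#3931 surfaced.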