ngraph / Commits / c732705f

Commit c732705f authored Sep 05, 2019 by mbencer
Removed unnecessary reshape, added additional asserts
parent 090f16ec
Showing 5 changed files with 101 additions and 113 deletions

src/ngraph/op/fused/normalize_l2.cpp            +44 -33
src/ngraph/op/fused/normalize_l2.hpp             +1 -0
src/ngraph/runtime/intelgpu/unit_test.manifest   +6 -5
test/backend/fused_op.in.cpp                     +6 -68
test/type_prop/normalize.cpp                    +44 -7
src/ngraph/op/fused/normalize_l2.cpp
...
...
@@ -42,55 +42,66 @@ op::NormalizeL2::NormalizeL2(const Output<Node>& data,
 void op::NormalizeL2::pre_validate_and_infer_types()
 {
-    const auto& data_pshape = get_input_partial_shape(0);
+    auto axes_node = input(1).get_source_output().get_node_shared_ptr();
+    const auto& input_pshape = get_input_partial_shape(0);
     const auto& axes_pshape = get_input_partial_shape(1);
-
-    NODE_VALIDATION_CHECK(this, data_pshape.is_static(), "Input data must be static.");
-    NODE_VALIDATION_CHECK(this, axes_pshape.is_static(), "Input axes must be static.");
+    const auto& input_rank = input_pshape.rank();
+    const auto& axes_rank = axes_pshape.rank();
 
     NODE_VALIDATION_CHECK(this,
-                          static_cast<size_t>(axes_pshape.rank()) == 1,
-                          "Input axes must have rank equals 1 (axes shape: ",
-                          axes_pshape,
-                          ").");
+                          axes_node->is_constant(),
+                          "doesn't support 'axes' input of other type than a Constant.");
+
+    if (axes_rank.is_static())
+    {
+        NODE_VALIDATION_CHECK(this,
+                              static_cast<size_t>(axes_pshape.rank()) == 1,
+                              "Input axes must have rank equals 1 (axes shape: ",
+                              axes_pshape,
+                              ").");
+
+        if (input_rank.is_static())
+        {
+            const auto reduction_axes = get_reduction_axes();
+            for (auto axis : reduction_axes)
+            {
+                NODE_VALIDATION_CHECK(this,
+                                      axis < size_t(input_rank),
+                                      "Reduction axis (",
+                                      axis,
+                                      ") is out of bounds ",
+                                      "(argument shape: ",
+                                      input_pshape,
+                                      ")");
+            }
+        }
+    }
 }
 
-NodeVector op::NormalizeL2::decompose_op() const
+AxisSet op::NormalizeL2::get_reduction_axes() const
 {
-    Output<Node> data{input_value(0)};
-    const Shape input_shape{data.get_shape()};
-
-    // Reshape to 4D tensor.
-    if (input_shape.size() < 4)
+    AxisSet axes;
+    auto axes_input_node = input_value(1).get_node_shared_ptr();
+    if (auto const_op = dynamic_pointer_cast<op::Constant>(axes_input_node))
     {
-        Shape data_shape(4 - input_shape.size(), 1);
-        copy(begin(input_shape), end(input_shape), back_inserter(data_shape));
-        data = builder::reshape(data, data_shape);
+        axes = const_op->get_axis_set_val();
     }
+    return axes;
+}
 
-    auto axes_node = input(1).get_source_output().get_node_shared_ptr();
-    NODE_VALIDATION_CHECK(this,
-                          axes_node->is_constant(),
-                          "doesn't support 'axes' input of other type than a Constant.");
+NodeVector op::NormalizeL2::decompose_op() const
+{
+    Output<Node> data{input_value(0)};
+    const Shape input_shape{data.get_shape()};
 
-    // Calculate norm over axes indicated by axes input param
-    auto axes_constant = dynamic_pointer_cast<op::Constant>(axes_node);
-    auto axes_vector = axes_constant->get_vector<size_t>();
-    AxisSet reduction_axes{axes_vector};
+    AxisSet reduction_axes = get_reduction_axes();
 
+    // Calculate l2 norm across axes determined by axes input
     auto builder_bias_mode =
         (m_eps_mode == EpsMode::MAX) ? builder::BiasMode::MAX : builder::BiasMode::ADD;
     Output<Node> norm = builder::l2_norm(data, reduction_axes, m_eps, builder_bias_mode);
-    norm = numpy_style_broadcast(norm, data.get_shape());
 
-    data = data / norm;
-
-    // get back original input tensor rank
-    if (input_shape.size() < 4)
-    {
-        data = builder::reshape(data, input_shape);
-    }
+    data = make_shared<op::Divide>(data, norm, AutoBroadcastSpec(AutoBroadcastType::NUMPY));
 
     return as_node_vector({data});
 }
...
...
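For reference, the arithmetic this decomposition expresses (sum the squared values over the requested axes, take the square root with the epsilon bias, then divide the input element-wise) can be sketched without nGraph. This is an illustration only, not part of the commit; it assumes EpsMode::ADD adds the bias to the sum of squares before the square root, which is consistent with the expected values of the normalize_across_chw_2d test further down (1..12 normalized by sqrt(650) ≈ 25.495).

// Standalone sketch (plain C++): L2-normalize 12 values over all elements,
// mirroring builder::l2_norm followed by the element-wise Divide with NUMPY
// broadcasting used in decompose_op() above.
#include <cmath>
#include <cstddef>
#include <cstdio>
#include <vector>

int main()
{
    std::vector<float> data(12);
    for (std::size_t i = 0; i < data.size(); ++i)
        data[i] = static_cast<float>(i + 1); // 1, 2, ..., 12, as in the 2-D test

    const float eps = 1e-6f; // EpsMode::ADD: bias added under the square root
    float sum_sq = 0.f;
    for (float v : data)
        sum_sq += v * v; // 650 for 1..12

    const float norm = std::sqrt(sum_sq + eps); // ~25.4951
    for (float v : data)
        std::printf("%.8f\n", v / norm); // ~0.03922323, 0.07844646, ...
    return 0;
}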
src/ngraph/op/fused/normalize_l2.hpp
...
...
@@ -54,6 +54,7 @@ namespace ngraph
            EpsMode get_eps_mode() const { return m_eps_mode; }
 
            virtual NodeVector decompose_op() const override;
            virtual void pre_validate_and_infer_types() override;
+           AxisSet get_reduction_axes() const;
 
            virtual std::shared_ptr<Node> copy_with_new_args(const NodeVector& new_args) const override;
...
...
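For context, a minimal sketch of how the newly declared get_reduction_axes() accessor could be exercised. The construction pattern is copied from the tests later in this commit; the umbrella ngraph/ngraph.hpp include and the main() wrapper are assumptions made to keep the example self-contained, and the accessor is assumed to be public, as its placement in the header suggests. Illustration only, not part of the commit.

// Sketch: build NormalizeL2 with a Constant 'axes' input and read the
// reduction axes back through the new accessor.
#include "ngraph/ngraph.hpp" // assumed umbrella header

#include <cstdint>
#include <memory>
#include <vector>

using namespace ngraph;

int main()
{
    auto data = std::make_shared<op::Parameter>(element::f32, Shape{1, 2, 3, 4});
    auto axes =
        std::make_shared<op::Constant>(element::i64, Shape{3}, std::vector<int64_t>{1, 2, 3});
    auto normalize = std::make_shared<op::NormalizeL2>(data, axes, 1e-6f, op::EpsMode::ADD);

    // The Constant is read via get_axis_set_val(), yielding AxisSet{1, 2, 3};
    // a non-Constant 'axes' input is rejected in pre_validate_and_infer_types().
    AxisSet reduction_axes = normalize->get_reduction_axes();
    (void)reduction_axes;
    return 0;
}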
src/ngraph/runtime/intelgpu/unit_test.manifest
...
...
@@ -69,13 +69,14 @@ gather_4d_indices_no_axis_2d_input
 gemm
 gemm_broadcast_input_C
 normalize_across_chw_4d
+normalize_across_empty_axes_input
 normalize_across_h_4d
+normalize_across_1axis_5d
+normalize_across_123axes_5d
 normalize_across_chw_4d_max_bias
-normalize_across_chw_3d
-normalize_across_chw_2d
-normalize_across_hw_4d
-normalize_invalid_input_tensor_rank
-normalize_output_shape_across_chw
+normalize_axes_input_not_constant
+normalize_invalid_axes_rank
+normalize_axes_out_of_bounds
 hardsigmoid
 model_erf
 model_erf_int32
...
...
test/backend/fused_op.in.cpp
...
...
@@ -582,7 +582,7 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_chw_4d)
 {
     Shape data_shape{1, 2, 3, 4};
     auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
+    const auto axes = make_shared<op::Constant>(element::i64, Shape{3}, vector<int64_t>{1, 2, 3});
     float eps{1e-6f};
     auto eps_mode = op::EpsMode::ADD;
...
...
@@ -605,73 +605,11 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_chw_4d)
     test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
 }
 
-NGRAPH_TEST(${BACKEND_NAME}, normalize_across_chw_3d)
-{
-    Shape data_shape{2, 3, 4};
-    auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
-    float eps{1e-6f};
-    auto eps_mode = op::EpsMode::ADD;
-
-    auto normalize = make_shared<op::NormalizeL2>(data, axes, eps, eps_mode);
-    auto function = make_shared<Function>(NodeVector{normalize}, ParameterVector{data});
-
-    auto test_case = test::NgraphTestCase(function, "${BACKEND_NAME}");
-
-    vector<float> input_data(shape_size(data_shape));
-    iota(begin(input_data), end(input_data), 1);
-
-    test_case.add_input<float>(input_data);
-    test_case.add_expected_output<float>(
-        data_shape, {0.01428571f, 0.02857143f, 0.04285714f, 0.05714286f, 0.07142857f, 0.08571429f,
-                     0.1f,        0.11428571f, 0.12857144f, 0.14285715f, 0.15714286f, 0.17142858f,
-                     0.18571429f, 0.2f,        0.21428572f, 0.22857143f, 0.24285714f, 0.25714287f,
-                     0.27142859f, 0.2857143f,  0.30000001f, 0.31428573f, 0.32857144f, 0.34285715f});
-
-    test_case.run(DEFAULT_FLOAT_TOLERANCE_BITS + 1);
-}
-
-NGRAPH_TEST(${BACKEND_NAME}, normalize_across_chw_2d)
-{
-    Shape data_shape{3, 4};
-    auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
-    float eps{1e-6f};
-    auto eps_mode = op::EpsMode::ADD;
-
-    auto normalize = make_shared<op::NormalizeL2>(data, axes, eps, eps_mode);
-    auto function = make_shared<Function>(NodeVector{normalize}, ParameterVector{data});
-
-    auto test_case = test::NgraphTestCase(function, "${BACKEND_NAME}");
-
-    vector<float> input_data(shape_size(data_shape));
-    iota(begin(input_data), end(input_data), 1);
-
-    test_case.add_input<float>(input_data);
-    test_case.add_expected_output<float>(
-        data_shape, {0.03922323f, 0.07844646f, 0.11766968f, 0.15689291f, 0.19611613f, 0.23533936f,
-                     0.2745626f,  0.31378582f, 0.35300905f, 0.39223227f, 0.43145549f, 0.47067872f});
-
-    test_case.run();
-}
-
 NGRAPH_TEST(${BACKEND_NAME}, normalize_across_empty_axes_input)
 {
     Shape data_shape{1, 2, 3, 4};
     auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    const auto axes = make_shared<op::Constant>(element::u64, Shape{0}, vector<int64_t>{});
+    const auto axes = make_shared<op::Constant>(element::i64, Shape{0}, vector<int64_t>{});
     float eps{1e-6f};
     auto eps_mode = op::EpsMode::ADD;
...
...
@@ -695,7 +633,7 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_h_4d)
 {
     Shape data_shape{1, 2, 3, 4};
     auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    const auto axes = make_shared<op::Constant>(element::u64, Shape{1}, vector<int64_t>{1});
+    const auto axes = make_shared<op::Constant>(element::i64, Shape{1}, vector<int64_t>{1});
     float eps{1e-6f};
     auto eps_mode = op::EpsMode::ADD;
...
...
@@ -721,7 +659,7 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_1axis_5d)
 {
     Shape data_shape{1, 2, 2, 2, 3};
     auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    const auto axes = make_shared<op::Constant>(element::u64, Shape{1}, vector<int64_t>{1});
+    const auto axes = make_shared<op::Constant>(element::i64, Shape{1}, vector<int64_t>{1});
     float eps{1e-6f};
     auto eps_mode = op::EpsMode::ADD;
...
...
@@ -747,7 +685,7 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_123axes_5d)
 {
     Shape data_shape{1, 2, 2, 2, 3};
     auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
+    const auto axes = make_shared<op::Constant>(element::i64, Shape{3}, vector<int64_t>{1, 2, 3});
     float eps{1e-6f};
     auto eps_mode = op::EpsMode::ADD;
...
...
@@ -773,7 +711,7 @@ NGRAPH_TEST(${BACKEND_NAME}, normalize_across_chw_4d_max_bias)
 {
     Shape data_shape{1, 2, 3, 4};
     auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
+    const auto axes = make_shared<op::Constant>(element::i64, Shape{3}, vector<int64_t>{1, 2, 3});
     float eps{5000};
     auto eps_mode = op::EpsMode::MAX;
...
...
test/type_prop/normalize.cpp
...
...
@@ -21,11 +21,37 @@
 using namespace std;
 using namespace ngraph;
 
+TEST(type_prop, normalize_axes_input_not_constant)
+{
+    Shape data_shape{1, 2, 3, 4};
+    auto data = make_shared<op::Parameter>(element::f32, data_shape);
+    auto axes = make_shared<op::Parameter>(element::u64, Shape{1});
+    float eps{1e-6f};
+    auto eps_mode = op::EpsMode::ADD;
+
+    try
+    {
+        auto normalize = make_shared<op::NormalizeL2>(data, axes, eps, eps_mode);
+        // Should have thrown, so fail if it didn't
+        FAIL() << "Invalid input tensor rank.";
+    }
+    catch (const NodeValidationFailure& error)
+    {
+        EXPECT_HAS_SUBSTRING(
+            error.what(),
+            std::string("doesn't support 'axes' input of other type than a Constant."));
+    }
+    catch (...)
+    {
+        FAIL() << "Deduced type check failed for unexpected reason";
+    }
+}
+
 TEST(type_prop, normalize_invalid_axes_rank)
 {
     Shape data_shape{1, 2, 3, 4};
     auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    auto axes = make_shared<op::Parameter>(element::u64, Shape{1, 2});
+    const auto axes = make_shared<op::Constant>(element::i64, Shape{1, 2}, vector<int64_t>{1, 2});
     float eps{1e-6f};
     auto eps_mode = op::EpsMode::ADD;
...
...
@@ -45,15 +71,26 @@ TEST(type_prop, normalize_invalid_axes_rank)
     }
 }
 
-TEST(type_prop, normalize_output_shape_across_chw)
+TEST(type_prop, normalize_axes_out_of_bounds)
 {
-    Shape data_shape{2, 3, 4};
+    Shape data_shape{1, 2, 3, 4};
     auto data = make_shared<op::Parameter>(element::f32, data_shape);
-    const auto axes = make_shared<op::Constant>(element::u64, Shape{3}, vector<int64_t>{1, 2, 3});
+    const auto axes = make_shared<op::Constant>(element::i64, Shape{2}, vector<int64_t>{3, 4});
     float eps{1e-6f};
     auto eps_mode = op::EpsMode::ADD;
 
-    auto normalize = make_shared<op::NormalizeL2>(data, axes, eps, eps_mode);
-    EXPECT_EQ(normalize->get_element_type(), element::f32);
-    EXPECT_EQ(normalize->get_shape(), (Shape{2, 3, 4}));
+    try
+    {
+        auto normalize = make_shared<op::NormalizeL2>(data, axes, eps, eps_mode);
+        // Should have thrown, so fail if it didn't
+        FAIL() << "Invalid input tensor rank.";
+    }
+    catch (const NodeValidationFailure& error)
+    {
+        EXPECT_HAS_SUBSTRING(error.what(), std::string("Reduction axis ("));
+    }
+    catch (...)
+    {
+        FAIL() << "Deduced type check failed for unexpected reason";
+    }
 }