submodule / ngraph · Commits

Commit 22e9847e, authored Oct 10, 2019 by Amy Zhuang, committed by Scott Cyphers on Oct 10, 2019
Migrate #3736 from master. (#3745)

* Use Eigen kernel for REFLECT mode Pad.
* Do not call is_optimized_et.
parent a68a3fa9
Showing 5 changed files, with 123 additions and 21 deletions (+123 −21):

* src/ngraph/runtime/cpu/builder/pad.cpp (+9 −3)
* src/ngraph/runtime/cpu/cpu_emitter.cpp (+19 −15)
* src/ngraph/runtime/cpu/cpu_kernels.hpp (+3 −0)
* src/ngraph/runtime/cpu/kernel/pad.cpp (+2 −0)
* src/ngraph/runtime/cpu/kernel/pad.hpp (+90 −3)
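The first commit-message bullet is the heart of the change: instead of falling back to the reference implementation, REFLECT-mode Pad now goes through the same Eigen kernel path, with each output element generated from a reflected input coordinate. The sketch below is a minimal, self-contained illustration of that technique using plain Eigen (it is not the ngraph kernel; the 1-D shape and padding amounts are invented for illustration):

```cpp
#include <unsupported/Eigen/CXX11/Tensor>
#include <iostream>

int main()
{
    // 1-D input: 1 2 3 4
    Eigen::Tensor<float, 1, Eigen::RowMajor> in(4);
    in.setValues({1.f, 2.f, 3.f, 4.f});

    // Reflect-pad to length 9 (2 below, 3 above). Each output element is
    // produced by a generator that folds the output index back into [0, n).
    const long n = in.dimension(0);
    const long p_below = 2;
    Eigen::Tensor<float, 1, Eigen::RowMajor> out(9);

    auto generator = [&](const Eigen::array<Eigen::DenseIndex, 1>& idx) {
        long pos = idx[0] - p_below;        // position relative to the original data
        long period = 2 * n - 2;            // reflection repeats with this period
        long m = ((pos % period) + period) % period;
        return in(m < n ? m : period - m);  // fold the mirrored half back
    };
    out = out.generate(generator);          // same generate() pattern as the kernel below

    for (long i = 0; i < out.dimension(0); ++i)
        std::cout << out(i) << ' ';         // prints: 3 2 1 2 3 4 3 2 1
    std::cout << '\n';
}
```

The generator only ever sees output coordinates, so arbitrary padding amounts reduce to the same modular arithmetic; the real kernel in pad.hpp applies this per dimension for any rank.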
src/ngraph/runtime/cpu/builder/pad.cpp

@@ -50,7 +50,8 @@ namespace ngraph
     auto padding_above = pad->get_padding_above();
     auto pad_mode = pad->get_pad_mode();
-    if (pad_mode == ngraph::op::PadMode::CONSTANT)
+    if (pad_mode == ngraph::op::PadMode::CONSTANT ||
+        pad_mode == ngraph::op::PadMode::REFLECT)
     {
         std::function<decltype(runtime::cpu::kernel::pad_and_slice<float, 1>)> kernel;

@@ -65,6 +66,7 @@ namespace ngraph
                 out_shape,
                 padding_below,
                 padding_above,
+                pad_mode,
                 arg_buffer_index,
                 padding_value_index,
                 out_buffer_index](CPURuntimeContext* ctx,

@@ -76,6 +78,7 @@ namespace ngraph
                    out_shape,
                    CoordinateDiff(padding_below.begin(), padding_below.end()),
                    CoordinateDiff(padding_above.begin(), padding_above.end()),
+                   pad_mode,
                    ectx->arena);
         };
         functors.emplace_back(functor);

@@ -123,7 +126,8 @@ namespace ngraph
     auto padding_above = pad->get_padding_above();
     auto pad_mode = pad->get_pad_mode();
-    if (pad_mode == ngraph::op::PadMode::CONSTANT)
+    if (pad_mode == ngraph::op::PadMode::CONSTANT ||
+        pad_mode == ngraph::op::PadMode::REFLECT)
     {
         std::function<decltype(runtime::cpu::kernel::pad_and_slice<float, 1>)> kernel;

@@ -132,7 +136,8 @@ namespace ngraph
             arg_shape.size(),
             runtime::cpu::kernel::pad_and_slice);
-        auto functor = [kernel, arg_shape, out_shape, padding_below, padding_above](
+        auto functor =
+            [kernel, arg_shape, out_shape, padding_below, padding_above, pad_mode](
                 const std::vector<void*>& inputs, std::vector<void*>& outputs) {
                 kernel(inputs[0],
                        outputs[0],

@@ -141,6 +146,7 @@ namespace ngraph
                        out_shape,
                        CoordinateDiff(padding_below.begin(), padding_below.end()),
                        CoordinateDiff(padding_above.begin(), padding_above.end()),
+                       pad_mode,
                        0);
             };
         return functor;
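For context, the builder wraps a rank-specialized kernel in a std::function and captures the call arguments in a lambda functor; the change above simply widens the dispatch condition to REFLECT and adds pad_mode to the captured state so it reaches the kernel. A minimal sketch of that pattern, using stand-in names (pad_and_slice_stub, PadMode) rather than the real ngraph types:

```cpp
#include <functional>
#include <iostream>
#include <vector>

enum class PadMode { CONSTANT, EDGE, REFLECT, SYMMETRIC };

// Stand-in for a rank-templated kernel such as pad_and_slice<ElementType, Rank>.
template <typename ElementType, unsigned Rank>
void pad_and_slice_stub(const ElementType* in, ElementType* out, PadMode mode)
{
    std::cout << "rank " << Rank << ", reflect=" << (mode == PadMode::REFLECT) << "\n";
    out[0] = in[0]; // a real kernel would pad/slice here
}

int main()
{
    PadMode pad_mode = PadMode::REFLECT;

    // Type-erase one rank specialization, as the builder does with
    // std::function<decltype(pad_and_slice<float, 1>)>.
    std::function<decltype(pad_and_slice_stub<float, 2>)> kernel =
        pad_and_slice_stub<float, 2>;

    // The functor captures pad_mode, so execution no longer assumes CONSTANT.
    auto functor = [kernel, pad_mode](const std::vector<float>& in,
                                      std::vector<float>& out) {
        kernel(in.data(), out.data(), pad_mode);
    };

    std::vector<float> in{1.f}, out{0.f};
    functor(in, out);
    std::cout << out[0] << "\n";
}
```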
src/ngraph/runtime/cpu/cpu_emitter.cpp

@@ -3107,21 +3107,6 @@ namespace ngraph
             auto arg0_shape = args[0].get_shape();
             auto result_shape = out[0].get_shape();
-            if (arg0_shape.size() == 4 && args[0].get_element_type() == element::f32 &&
-                pad->get_pad_mode() == ngraph::op::PadMode::CONSTANT)
-            {
-                writer << "cpu::kernel::pad_4d_float32(" << args[0].get_name() << ",\n"
-                       << "                " << out[0].get_name() << ",\n"
-                       << "                " << args[1].get_name() << ",\n"
-                       << "                {" << join(arg0_shape) << "},\n"
-                       << "                {" << join(result_shape) << "},\n"
-                       << "                {" << join(pad->get_padding_below()) << "},\n"
-                       << "                {" << join(pad->get_padding_above()) << "}, 0);\n";
-            }
-            else
-            {
             std::string pad_mode_string;
             switch (pad->get_pad_mode())
             {

@@ -3138,6 +3123,25 @@ namespace ngraph
                 pad_mode_string = "ngraph::op::PadMode::SYMMETRIC";
                 break;
             }
+            if (arg0_shape.size() == 4 && args[0].get_element_type() == element::f32 &&
+                (pad->get_pad_mode() == ngraph::op::PadMode::CONSTANT ||
+                 pad->get_pad_mode() == ngraph::op::PadMode::REFLECT))
+            {
+                writer << "cpu::kernel::pad_4d_float32(" << args[0].get_name() << ",\n"
+                       << "                " << out[0].get_name() << ",\n"
+                       << "                " << args[1].get_name() << ",\n"
+                       << "                {" << join(arg0_shape) << "},\n"
+                       << "                {" << join(result_shape) << "},\n"
+                       << "                {" << join(pad->get_padding_below()) << "},\n"
+                       << "                {" << join(pad->get_padding_above()) << "},\n"
+                       << "                " << pad_mode_string << ",\n"
+                       << "                0);\n";
+            }
+            else
+            {
             writer << "reference::pad<" << out[0].get_type() << ">(" << args[0].get_name() << ",\n";
             writer << "                " << args[1].get_name() << ",\n";
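The emitter change follows the same shape as the builder change: the pad-mode string produced by the existing switch is now streamed into the generated pad_4d_float32 call, and the 4-D float32 fast path accepts REFLECT as well as CONSTANT. Below is a self-contained sketch of that codegen pattern; the enum, join helper, tensor names, and shapes are invented stand-ins, not the real emitter:

```cpp
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

enum class PadMode { CONSTANT, EDGE, REFLECT, SYMMETRIC };

// Stand-in for ngraph's join(): "1, 3, 8, 8"
std::string join(const std::vector<int>& v)
{
    std::ostringstream os;
    for (size_t i = 0; i < v.size(); ++i)
        os << (i ? ", " : "") << v[i];
    return os.str();
}

int main()
{
    PadMode mode = PadMode::REFLECT;

    // Map the mode to its source-text name, as the emitter's switch does.
    std::string pad_mode_string;
    switch (mode)
    {
    case PadMode::CONSTANT: pad_mode_string = "ngraph::op::PadMode::CONSTANT"; break;
    case PadMode::EDGE: pad_mode_string = "ngraph::op::PadMode::EDGE"; break;
    case PadMode::REFLECT: pad_mode_string = "ngraph::op::PadMode::REFLECT"; break;
    case PadMode::SYMMETRIC: pad_mode_string = "ngraph::op::PadMode::SYMMETRIC"; break;
    }

    std::vector<int> arg_shape{1, 3, 8, 8}, out_shape{1, 3, 10, 10};
    std::vector<int> below{0, 0, 1, 1}, above{0, 0, 1, 1};

    // Stream one call that now also carries the mode and the arena.
    std::ostringstream writer;
    writer << "cpu::kernel::pad_4d_float32(arg0, out0, arg1,\n"
           << "    {" << join(arg_shape) << "},\n"
           << "    {" << join(out_shape) << "},\n"
           << "    {" << join(below) << "},\n"
           << "    {" << join(above) << "},\n"
           << "    " << pad_mode_string << ", 0);\n";
    std::cout << writer.str();
}
```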
src/ngraph/runtime/cpu/cpu_kernels.hpp

@@ -21,6 +21,8 @@
 #include <random>
 #include <vector>
+#include "ngraph/op/pad.hpp"
+
 // CBLAS types and wrappers
 namespace cblas

@@ -146,6 +148,7 @@ namespace ngraph
                    const Shape& output_shape,
                    const CoordinateDiff& padding_below,
                    const CoordinateDiff& padding_above,
+                   const ngraph::op::PadMode pad_mode,
                    int arena);
 void reduce_sum_all_1d_float32(float* input,
src/ngraph/runtime/cpu/kernel/pad.cpp

@@ -31,6 +31,7 @@ namespace ngraph
                          const Shape& output_shape,
                          const CoordinateDiff& padding_below,
                          const CoordinateDiff& padding_above,
+                         const ngraph::op::PadMode pad_mode,
                          int arena)
         {
             pad_and_slice<float, 4>(input,

@@ -40,6 +41,7 @@ namespace ngraph
                                     output_shape,
                                     padding_below,
                                     padding_above,
+                                    pad_mode,
                                     arena);
         }
     }
src/ngraph/runtime/cpu/kernel/pad.hpp

@@ -67,15 +67,19 @@ namespace ngraph
                        const Shape& output_shape,
                        const CoordinateDiff& padding_below,
                        const CoordinateDiff& padding_above,
+                       const ngraph::op::PadMode pad_mode,
                        int arena)
     {
-        Eigen::array<Eigen::Index, Rank> out_dims, in_dims;
+        Eigen::array<Eigen::Index, Rank> out_dims, in_dims, temp_dims;
         Eigen::array<Eigen::IndexPair<size_t>, Rank> padding;
         Eigen::array<Eigen::Index, Rank> indices;
+        bool has_negative_below_padding = false;
         for (int i = 0; i < Rank; i++)
         {
             out_dims[i] = output_shape[i];
+            temp_dims[i] = output_shape[i];
             in_dims[i] = input_shape[i];
             padding[i] = {

@@ -88,6 +92,8 @@ namespace ngraph
             {
                 NGRAPH_CHECK(padding_below[i] > INT_MIN);
                 indices[i] = -padding_below[i];
+                temp_dims[i] -= padding_below[i];
+                has_negative_below_padding = true;
             }
             else
             {

@@ -97,13 +103,94 @@ namespace ngraph
         Eigen::TensorMap<Eigen::Tensor<ElementType, Rank, Eigen::RowMajor>> out(
             static_cast<ElementType*>(output), out_dims);
+        Eigen::TensorMap<Eigen::Tensor<ElementType, Rank, Eigen::RowMajor>> temp(
+            static_cast<ElementType*>(output), temp_dims);
         Eigen::TensorMap<Eigen::Tensor<ElementType, Rank, Eigen::RowMajor>> in(
             static_cast<ElementType*>(input), in_dims);
-        out.device(ngraph::runtime::cpu::executor::GetCPUExecutor().get_device(arena)) =
-            in.pad(padding, *static_cast<ElementType*>(pad_value))
+        if (pad_mode == ngraph::op::PadMode::CONSTANT)
+        {
+            out.device(ngraph::runtime::cpu::executor::GetCPUExecutor().get_device(arena)) =
+                in.pad(padding, *static_cast<ElementType*>(pad_value))
                     .slice(indices, out_dims);
+        }
+        else
+        {
+            // clang-format off
+            // PadMode::REFLECT
+            // We should have dim >= 2 for each dim.
+            // Example:
+            //
+            // Input shape: [4]
+            // Padding: 6 below, 13 above
+            // Output shape: [23]
+            //
+            // Input:           1 2 3 4
+            // Expected output: 1 2 3 4 3 2 1 2 3 4 3 2 1 2 3 4 3 2 1 2 3 4 3
+            // Pattern: ... | original n elements | middle (n - 2) elements of original n in reverse order |
+            //              | original n elements | middle (n - 2) elements of original n in reverse order | ...
+            //          | 1 2 3 4 | 3 2 | 1 2 3 4 | 3 2 | 1 2 3 4 | 3 2 | 1 2 3 4 | 3
+            // clang-format on
+            auto generator = [&](const Eigen::array<Eigen::DenseIndex, Rank>& out_index) {
+                Eigen::array<Eigen::DenseIndex, Rank> in_index;
+                for (size_t i = 0; i < Rank; i++)
+                {
+                    auto origin_length = in_dims[i];
+                    auto p_below = padding_below[i] >= 0 ? padding_below[i] : 0;
+                    if (out_index[i] < p_below)
+                    {
+                        // padding below
+                        auto reverse = p_below - out_index[i];
+                        auto res = reverse % (origin_length * 2 - 2);
+                        if (res <= origin_length - 2)
+                        {
+                            // copy one of the middle n-2 items
+                            in_index[i] = res;
+                        }
+                        else
+                        {
+                            // copy one of the n items
+                            in_index[i] = origin_length * 2 - 2 - res;
+                        }
+                    }
+                    else if (out_index[i] < in_dims[i] + p_below)
+                    {
+                        // original
+                        in_index[i] = out_index[i] - p_below;
+                    }
+                    else
+                    {
+                        // padding above
+                        auto pos = out_index[i] - in_dims[i] - p_below;
+                        auto res = pos % (origin_length * 2 - 2);
+                        if (res < origin_length - 2)
+                        {
+                            // copy one of the middle n-2 items
+                            in_index[i] = origin_length - 2 - res;
+                        }
+                        else
+                        {
+                            // copy one of the n items
+                            in_index[i] = res - (origin_length - 2);
+                        }
+                    }
+                }
+                return in(in_index);
+            };
+            if (has_negative_below_padding)
+            {
+                out.device(
+                    ngraph::runtime::cpu::executor::GetCPUExecutor().get_device(arena)) =
+                    temp.generate(generator).slice(indices, out_dims);
+            }
+            else
+            {
+                out.device(
+                    ngraph::runtime::cpu::executor::GetCPUExecutor().get_device(arena)) =
+                    out.generate(generator);
+            }
+        }
     }

     template <typename ElementType>
     void pad_ref(const void* arg0,
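To make the reflect pattern documented in the kernel's comment concrete, here is a small standalone check (plain C++, no Eigen or ngraph) that applies the same three-branch index mapping as the generator above to the comment's own example, input 1 2 3 4 with padding 6 below and 13 above, and reproduces the expected 23-element output:

```cpp
#include <iostream>
#include <vector>

int main()
{
    const std::vector<int> in{1, 2, 3, 4};
    const long n = static_cast<long>(in.size()); // origin_length
    const long p_below = 6;
    const long p_above = 13;
    const long out_len = n + p_below + p_above;  // 23

    std::vector<int> out(out_len);
    for (long o = 0; o < out_len; ++o)
    {
        long in_index;
        if (o < p_below)
        {
            // padding below: walk backwards with period 2n - 2
            long reverse = p_below - o;
            long res = reverse % (n * 2 - 2);
            in_index = (res <= n - 2) ? res : n * 2 - 2 - res;
        }
        else if (o < n + p_below)
        {
            // original data
            in_index = o - p_below;
        }
        else
        {
            // padding above: same period, mirrored
            long pos = o - n - p_below;
            long res = pos % (n * 2 - 2);
            in_index = (res < n - 2) ? n - 2 - res : res - (n - 2);
        }
        out[o] = in[in_index];
    }

    for (int v : out)
        std::cout << v << ' ';
    std::cout << '\n';
    // Prints: 1 2 3 4 3 2 1 2 3 4 3 2 1 2 3 4 3 2 1 2 3 4 3
}
```

This is also why the comment requires each dimension to be at least 2: the reflection period 2n − 2 would be zero for n = 1.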