ngraph commit eb83f267
Authored May 08, 2019 by Nishant Patel; committed by Scott Cyphers, May 08, 2019
Standalone codegen. Ops {Q}MaxPool and {Q}AvgPool (#2867)
Parent: 10b43d55
Showing 4 changed files with 290 additions and 151 deletions:

    src/ngraph/runtime/cpu/cpu_emitter.cpp                        +37   -38
    src/ngraph/runtime/cpu/mkldnn_emitter.cpp                      +0    -65
    src/ngraph/runtime/cpu/mkldnn_emitter.hpp                      +0    -12
    src/ngraph/runtime/cpu/pass/cpu_mkldnn_primitive_build.cpp     +253  -36
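In broad terms, the diff moves the {Q}MaxPool and {Q}AvgPool ops from the runtime MKL-DNN primitive-build path (BUILD_PRIMITIVE_DECL specializations calling cpu::mkldnn_utils helpers against a runtime ctx) to the standalone-codegen path, where the pass constructs a primitive-build string and the emitter writes calls against a codegen context (cg_ctx). Below is a minimal, self-contained sketch of the emitted-call pattern visible in the hunks that follow; it is not nGraph code, and the concrete values and names flagged in the comments are hypothetical placeholders.

    // Minimal sketch (not the nGraph emitter itself) of the call pattern the CPU
    // emitter now writes into the standalone-codegen output for an MKL-DNN pooling op.
    // The values below (index 2, deps {0, 1}, "arg0"/"out0") are placeholders; in the
    // real emitter they come from emit_build_primitives(), args[0].get_name() and
    // out[0].get_name().
    #include <cstddef>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    int main()
    {
        std::size_t max_pool_index = 2;          // filled in by emit_build_primitives() in nGraph
        std::vector<std::size_t> deps = {0, 1};  // primitive dependencies: input and output memories
        std::string arg_name = "arg0";           // stand-in for args[0].get_name()
        std::string out_name = "out0";           // stand-in for out[0].get_name()

        // The generated code talks to the codegen context (cg_ctx) instead of the old
        // cpu::mkldnn_utils::*(ctx, ...) runtime helpers.
        std::ostringstream writer;
        writer << "cg_ctx->set_memory_ptr(" << deps[0] << ", " << arg_name << ");\n";
        writer << "cg_ctx->set_memory_ptr(" << deps[1] << ", " << out_name << ");\n";
        writer << "cg_ctx->mkldnn_invoke_primitive(" << max_pool_index << ");\n";

        std::cout << writer.str();
        return 0;
    }

The same three calls (two set_memory_ptr plus one mkldnn_invoke_primitive) are emitted for MaxPool, QuantizedMaxPool, AvgPool and QuantizedAvgPool; only the index variable name differs between the hunks.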
src/ngraph/runtime/cpu/cpu_emitter.cpp
@@ -2548,17 +2548,16 @@ namespace ngraph
                 if (runtime::cpu::mkldnn_utils::use_mkldnn_kernel(node))
                 {
-                    auto& mkldnn_emitter = external_function->get_mkldnn_emitter();
-                    size_t max_pool_index = external_function->get_primitive_index(node);
-                    auto& deps = mkldnn_emitter->get_primitive_deps(max_pool_index);
-
-                    writer << "cpu::mkldnn_utils::set_memory_ptr(ctx, " << to_string(deps[0])
-                           << ", " << args[0].get_name() << ");\n";
-                    writer << "cpu::mkldnn_utils::set_memory_ptr(ctx, " << to_string(deps[1])
-                           << ", " << out[0].get_name() << ");\n";
-                    writer << "cpu::mkldnn_utils::mkldnn_invoke_primitive(ctx, "
-                           << to_string(max_pool_index) << ");\n";
+                    size_t max_pool_index;
+                    std::vector<std::size_t> deps;
+                    emit_build_primitives(external_function, node, writer, max_pool_index, deps);
+
+                    writer << "cg_ctx->set_memory_ptr(" << to_string(deps[0]) << ", "
+                           << args[0].get_name() << ");\n";
+                    writer << "cg_ctx->set_memory_ptr(" << to_string(deps[1]) << ", "
+                           << out[0].get_name() << ");\n";
+                    writer << "cg_ctx->mkldnn_invoke_primitive(" << to_string(max_pool_index)
+                           << ");\n";
                 }
                 else
                 {
@@ -2581,16 +2580,16 @@ namespace ngraph
             {
                 if (runtime::cpu::mkldnn_utils::use_mkldnn_kernel(node))
                 {
-                    auto& mkldnn_emitter = external_function->get_mkldnn_emitter();
-                    size_t qmax_pool_index = external_function->get_primitive_index(node);
-                    auto& deps = mkldnn_emitter->get_primitive_deps(qmax_pool_index);
-
-                    writer << "cpu::mkldnn_utils::set_memory_ptr(ctx, " << to_string(deps[0])
-                           << ", " << args[0].get_name() << ");\n";
-                    writer << "cpu::mkldnn_utils::set_memory_ptr(ctx, " << to_string(deps[1])
-                           << ", " << out[0].get_name() << ");\n";
-                    writer << "cpu::mkldnn_utils::mkldnn_invoke_primitive(ctx, "
-                           << to_string(qmax_pool_index) << ");\n";
+                    size_t max_pool_index;
+                    std::vector<std::size_t> deps;
+                    emit_build_primitives(external_function, node, writer, max_pool_index, deps);
+
+                    writer << "cg_ctx->set_memory_ptr(" << to_string(deps[0]) << ", "
+                           << args[0].get_name() << ");\n";
+                    writer << "cg_ctx->set_memory_ptr(" << to_string(deps[1]) << ", "
+                           << out[0].get_name() << ");\n";
+                    writer << "cg_ctx->mkldnn_invoke_primitive(" << to_string(max_pool_index)
+                           << ");\n";
                 }
                 else
                 {
@@ -2603,15 +2602,16 @@ namespace ngraph
             {
                 if (runtime::cpu::mkldnn_utils::use_mkldnn_kernel(node))
                 {
-                    auto& mkldnn_emitter = external_function->get_mkldnn_emitter();
-                    size_t qavg_pool_index = external_function->get_primitive_index(node);
-                    auto& deps = mkldnn_emitter->get_primitive_deps(qavg_pool_index);
-
-                    writer << "cpu::mkldnn_utils::set_memory_ptr(ctx, " << to_string(deps[0])
-                           << ", " << args[0].get_name() << ");\n";
-                    writer << "cpu::mkldnn_utils::set_memory_ptr(ctx, " << to_string(deps[1])
-                           << ", " << out[0].get_name() << ");\n";
-                    writer << "cpu::mkldnn_utils::mkldnn_invoke_primitive(ctx, "
-                           << to_string(qavg_pool_index) << ");\n";
+                    size_t avg_pool_index;
+                    std::vector<std::size_t> deps;
+                    emit_build_primitives(external_function, node, writer, avg_pool_index, deps);
+
+                    writer << "cg_ctx->set_memory_ptr(" << to_string(deps[0]) << ", "
+                           << args[0].get_name() << ");\n";
+                    writer << "cg_ctx->set_memory_ptr(" << to_string(deps[1]) << ", "
+                           << out[0].get_name() << ");\n";
+                    writer << "cg_ctx->mkldnn_invoke_primitive(" << to_string(avg_pool_index)
+                           << ");\n";
                 }
                 else
                 {
@@ -2746,17 +2746,16 @@ namespace ngraph
                 if (runtime::cpu::mkldnn_utils::use_mkldnn_kernel(node))
                 {
-                    auto& mkldnn_emitter = external_function->get_mkldnn_emitter();
-                    size_t avg_pool_index = external_function->get_primitive_index(node);
-                    auto& deps = mkldnn_emitter->get_primitive_deps(avg_pool_index);
-
-                    writer << "cpu::mkldnn_utils::set_memory_ptr(ctx, " << to_string(deps[0])
-                           << ", " << args[0].get_name() << ");\n";
-                    writer << "cpu::mkldnn_utils::set_memory_ptr(ctx, " << to_string(deps[1])
-                           << ", " << out[0].get_name() << ");\n";
-                    writer << "cpu::mkldnn_utils::mkldnn_invoke_primitive(ctx, "
-                           << to_string(avg_pool_index) << ");\n";
+                    size_t avg_pool_index;
+                    std::vector<std::size_t> deps;
+                    emit_build_primitives(external_function, node, writer, avg_pool_index, deps);
+
+                    writer << "cg_ctx->set_memory_ptr(" << to_string(deps[0]) << ", "
+                           << args[0].get_name() << ");\n";
+                    writer << "cg_ctx->set_memory_ptr(" << to_string(deps[1]) << ", "
+                           << out[0].get_name() << ");\n";
+                    writer << "cg_ctx->mkldnn_invoke_primitive(" << to_string(avg_pool_index)
+                           << ");\n";
                 }
                 else
                 {
src/ngraph/runtime/cpu/mkldnn_emitter.cpp
@@ -276,39 +276,6 @@ size_t MKLDNNEmitter::build_dequantization(const ngraph::Node* node,
     return dequantize_index;
 }
 
-size_t MKLDNNEmitter::build_quantized_max_pool(const ngraph::Node* node)
-{
-    auto qmax_pool = static_cast<const ngraph::op::QuantizedMaxPool*>(node);
-    auto input_desc = mkldnn_utils::get_input_mkldnn_md(node, 0);
-    auto result_desc = mkldnn_utils::get_output_mkldnn_md(node, 0);
-    size_t qmax_pool_index =
-        this->build_pooling_forward(mkldnn::algorithm::pooling_max,
-                                    input_desc,
-                                    result_desc,
-                                    qmax_pool->get_window_movement_strides(),
-                                    qmax_pool->get_window_shape(),
-                                    qmax_pool->get_padding_below(),
-                                    qmax_pool->get_padding_above());
-    return qmax_pool_index;
-}
-
-size_t MKLDNNEmitter::build_quantized_avg_pool(const ngraph::Node* node)
-{
-    auto qavg_pool = static_cast<const ngraph::op::QuantizedAvgPool*>(node);
-    auto input_desc = mkldnn_utils::get_input_mkldnn_md(node, 0);
-    auto result_desc = mkldnn_utils::get_output_mkldnn_md(node, 0);
-    size_t qavg_pool_index =
-        this->build_pooling_forward((qavg_pool->get_include_padding_in_avg_computation()
-                                         ? mkldnn::algorithm::pooling_avg_include_padding
-                                         : mkldnn::algorithm::pooling_avg_exclude_padding),
-                                    input_desc,
-                                    result_desc,
-                                    qavg_pool->get_window_movement_strides(),
-                                    qavg_pool->get_window_shape(),
-                                    qavg_pool->get_padding_below(),
-                                    qavg_pool->get_padding_above());
-    return qavg_pool_index;
-}
-
 mkldnn::memory::format MKLDNNEmitter::query_convolution_forward_weight_format(
     const mkldnn::memory::desc& input_data_desc,
     const mkldnn::memory::desc& weights_desc_any,
@@ -659,38 +626,6 @@ void MKLDNNEmitter::build_convolution_backward_data(
                                  *mkldnn_primitives[result_index]);
 }
 
-size_t MKLDNNEmitter::build_pooling_forward(mkldnn::algorithm pooling_algorithm,
-                                            const mkldnn::memory::desc& input_desc,
-                                            const mkldnn::memory::desc& result_desc,
-                                            const ngraph::Strides& window_strides,
-                                            const ngraph::Shape& window_shape,
-                                            const ngraph::Shape& padding_below,
-                                            const ngraph::Shape& padding_above)
-{
-    size_t input_index = build_memory_primitive(input_desc);
-    size_t result_index = build_memory_primitive(result_desc);
-
-    size_t primitive_index = insert_primitive(new mkldnn::pooling_forward(
-        {{mkldnn::prop_kind::forward_inference,
-          pooling_algorithm,
-          input_desc,
-          result_desc,
-          mkldnn::memory::dims(window_strides.begin(), window_strides.end()),
-          mkldnn::memory::dims(window_shape.begin(), window_shape.end()),
-          mkldnn::memory::dims(padding_below.begin(), padding_below.end()),
-          mkldnn::memory::dims(padding_above.begin(), padding_above.end()),
-          mkldnn::padding_kind::zero},
-         executor::global_cpu_engine},
-        *m_mkldnn_primitives[input_index],
-        *m_mkldnn_primitives[result_index]));
-
-    NGRAPH_CHECK(m_primitive_deps.find(primitive_index) == m_primitive_deps.end(),
-                 "Dependencies already created for node");
-    m_primitive_deps[primitive_index] = {input_index, result_index};
-    return primitive_index;
-}
-
 void MKLDNNEmitter::build_pooling_forward(std::vector<mkldnn::primitive*>& mkldnn_primitives,
                                           const mkldnn::pooling_forward::desc& pool_desc,
                                           const std::vector<size_t>& deps,
src/ngraph/runtime/cpu/mkldnn_emitter.hpp
@@ -421,14 +421,6 @@ namespace ngraph
                 }
             }
 
-            size_t build_pooling_forward(mkldnn::algorithm pooling_algorithm,
-                                         const mkldnn::memory::desc& input_desc,
-                                         const mkldnn::memory::desc& result_desc,
-                                         const ngraph::Strides& window_strides,
-                                         const ngraph::Shape& window_shape,
-                                         const ngraph::Shape& padding_below,
-                                         const ngraph::Shape& padding_above);
-
             template <typename OP>
             mkldnn::pooling_forward::desc
                 get_max_pooling_forward_desc(const ngraph::Node* node, bool training)
@@ -822,10 +814,6 @@ namespace ngraph
                                             const std::vector<size_t>& deps,
                                             size_t bounded_relu_index);
 
-            size_t build_quantized_max_pool(const ngraph::Node* node);
-
-            size_t build_quantized_avg_pool(const ngraph::Node* node);
-
             size_t build_dequantization(const ngraph::Node* node,
                                         const mkldnn::memory::desc& input_desc,
                                         const mkldnn::memory::desc& result_desc);
src/ngraph/runtime/cpu/pass/cpu_mkldnn_primitive_build.cpp
@@ -1156,33 +1156,260 @@ namespace ngraph
                                                                  mkldnn_emitter, node);
             }
 
-            template <>
-            size_t MKLDNNPrimitiveBuildPass::BUILD_PRIMITIVE_DECL(MaxPool)
+            template <typename OP, bool is_training>
+            void construct_primitive_build_string_max_pool(
+                ngraph::runtime::cpu::MKLDNNEmitter& mkldnn_emitter,
+                ngraph::Node* node,
+                std::string& construct_string,
+                std::vector<size_t>& deps,
+                size_t& index,
+                std::ofstream& desc_file)
             {
+                auto pool = static_cast<const OP*>(node);
+                CodeWriter writer;
+                std::vector<mkldnn::memory::desc> descs = {};
+                auto window_shape = pool->get_window_shape();
+                auto window_strides = pool->get_window_movement_strides();
+                auto padding_below = pool->get_padding_below();
+                auto padding_above = pool->get_padding_above();
+
+                if (is_training)
+                {
+                    auto diff_dst_desc = mkldnn_utils::get_input_mkldnn_md(node, 1);
+                    auto diff_src_desc = mkldnn_utils::get_output_mkldnn_md(node, 0);
+                    descs.push_back(diff_dst_desc);
+                    descs.push_back(diff_src_desc);
+                }
+                else
+                {
-                auto max_pool = static_cast<const ngraph::op::MaxPool*>(node);
+                    auto input_desc = mkldnn_utils::get_input_mkldnn_md(node, 0);
+                    auto result_desc = mkldnn_utils::get_output_mkldnn_md(node, 0);
+                    descs.push_back(input_desc);
+                    descs.push_back(result_desc);
+                }
-                return mkldnn_emitter.build_pooling_forward(mkldnn::algorithm::pooling_max,
-                                                            input_desc,
-                                                            result_desc,
-                                                            max_pool->get_window_movement_strides(),
-                                                            max_pool->get_window_shape(),
-                                                            max_pool->get_padding_below(),
-                                                            max_pool->get_padding_above());
+
+                index = mkldnn_emitter.reserve_primitive_space_cg(3);
+                deps = mkldnn_emitter.get_primitive_deps_cg(index);
+                auto desc_index = mkldnn_emitter.get_mkldnn_descriptors_size();
+                mkldnn_emitter.reserve_descriptor_space(descs.size());
+                serialize_memory_descs(desc_file, descs, deps[0]);
+
+                writer << "\n// build Maxpool primitive descriptor\n";
+                writer << "auto max_pool_desc = ";
+                if (is_training)
+                {
+                    writer << "mkldnn::pooling_forward::desc(mkldnn::prop_kind::forward_"
+                              "training,\n";
+                }
+                else
+                {
+                    writer << "mkldnn::pooling_forward::desc(mkldnn::prop_kind::forward_"
+                              "inference,\n";
+                }
+                writer << "mkldnn::algorithm::pooling_max,\n"
+                          "*cg_ctx->mkldnn_descriptors["
+                       << desc_index
+                       << "],\n"
+                          "*cg_ctx->mkldnn_descriptors["
+                       << desc_index + 1 << "],\n";
+                writer << "mkldnn::memory::dims{";
+                if (window_strides.size() > 1)
+                {
+                    for (auto i = 0; i < window_strides.size() - 1; i++)
+                    {
+                        writer << std::to_string(window_strides[i]) << ", ";
+                    }
+                }
+                writer << std::to_string(window_strides[window_strides.size() - 1]);
+                writer << "},\n";
+                writer << "mkldnn::memory::dims{";
+                if (window_shape.size() > 1)
+                {
+                    for (auto i = 0; i < window_shape.size() - 1; i++)
+                    {
+                        writer << std::to_string(window_shape[i]) << ", ";
+                    }
+                }
+                writer << std::to_string(window_shape[window_shape.size() - 1]);
+                writer << "},\n";
+                writer << "mkldnn::memory::dims{";
+                if (padding_below.size() > 1)
+                {
+                    for (auto i = 0; i < padding_below.size() - 1; i++)
+                    {
+                        writer << std::to_string(padding_below[i]) << ", ";
+                    }
+                }
+                writer << std::to_string(padding_below[padding_below.size() - 1]);
+                writer << "},\n";
+                writer << "mkldnn::memory::dims{";
+                if (padding_above.size() > 1)
+                {
+                    for (auto i = 0; i < padding_above.size() - 1; i++)
+                    {
+                        writer << std::to_string(padding_above[i]) << ", ";
+                    }
+                }
+                writer << std::to_string(padding_above[padding_above.size() - 1]);
+                writer << "},\n";
+                writer << "mkldnn::padding_kind::zero);\n";
+
+                writer << "mkldnn::primitive* prim;\n";
+                writer << "prim = new mkldnn::pooling_forward({max_pool_desc, "
+                          "cg_ctx->global_cpu_engine},"
+                          "*cg_ctx->mkldnn_primitives["
+                       << std::to_string(deps[0])
+                       << "],\n"
+                          "*cg_ctx->mkldnn_primitives["
+                       << std::to_string(deps[1]) << "]);\n";
+                writer << "cg_ctx->mkldnn_primitives[" << std::to_string(index) << "] = prim;\n";
+
+                construct_string = writer.get_code();
             }
+
+            template <typename OP, bool is_training>
+            void construct_primitive_build_string_avg_pool(
+                ngraph::runtime::cpu::MKLDNNEmitter& mkldnn_emitter,
+                ngraph::Node* node,
+                std::string& construct_string,
+                std::vector<size_t>& deps,
+                size_t& index,
+                std::ofstream& desc_file)
+            {
+                auto pool = static_cast<const OP*>(node);
+                auto input_desc = mkldnn_utils::get_input_mkldnn_md(node, 0);
+                auto result_desc = mkldnn_utils::get_output_mkldnn_md(node, 0);
+                auto window_shape = pool->get_window_shape();
+                auto window_strides = pool->get_window_movement_strides();
+                auto padding_below = pool->get_padding_below();
+                auto padding_above = pool->get_padding_above();
+                auto include_padding_in_avg_computation =
+                    pool->get_include_padding_in_avg_computation();
+
+                CodeWriter writer;
+                std::vector<mkldnn::memory::desc> descs = {input_desc, result_desc};
+
+                index = mkldnn_emitter.reserve_primitive_space_cg(3);
+                deps = mkldnn_emitter.get_primitive_deps_cg(index);
+                auto desc_index = mkldnn_emitter.get_mkldnn_descriptors_size();
+                mkldnn_emitter.reserve_descriptor_space(descs.size());
+                serialize_memory_descs(desc_file, descs, deps[0]);
+
+                writer << "\n// build Avgpool primitive descriptor\n";
+                writer << "auto avg_pool_desc = ";
+                if (is_training)
+                {
+                    writer << "mkldnn::pooling_forward::desc(mkldnn::prop_kind::forward_"
+                              "training,\n";
+                }
+                else
+                {
+                    writer << "mkldnn::pooling_forward::desc(mkldnn::prop_kind::forward_"
+                              "inference,\n";
+                }
+                if (include_padding_in_avg_computation)
+                {
+                    writer << "mkldnn::algorithm::pooling_avg_include_padding,\n";
+                }
+                else
+                {
+                    writer << "mkldnn::algorithm::pooling_avg_exclude_padding,\n";
+                }
+                writer << "*cg_ctx->mkldnn_descriptors["
+                       << desc_index
+                       << "],\n"
+                          "*cg_ctx->mkldnn_descriptors["
+                       << desc_index + 1 << "],\n";
+                writer << "mkldnn::memory::dims{";
+                if (window_strides.size() > 1)
+                {
+                    for (auto i = 0; i < window_strides.size() - 1; i++)
+                    {
+                        writer << std::to_string(window_strides[i]) << ", ";
+                    }
+                }
+                writer << std::to_string(window_strides[window_strides.size() - 1]);
+                writer << "},\n";
+                writer << "mkldnn::memory::dims{";
+                if (window_shape.size() > 1)
+                {
+                    for (auto i = 0; i < window_shape.size() - 1; i++)
+                    {
+                        writer << std::to_string(window_shape[i]) << ", ";
+                    }
+                }
+                writer << std::to_string(window_shape[window_shape.size() - 1]);
+                writer << "},\n";
+                writer << "mkldnn::memory::dims{";
+                if (padding_below.size() > 1)
+                {
+                    for (auto i = 0; i < padding_below.size() - 1; i++)
+                    {
+                        writer << std::to_string(padding_below[i]) << ", ";
+                    }
+                }
+                writer << std::to_string(padding_below[padding_below.size() - 1]);
+                writer << "},\n";
+                writer << "mkldnn::memory::dims{";
+                if (padding_above.size() > 1)
+                {
+                    for (auto i = 0; i < padding_above.size() - 1; i++)
+                    {
+                        writer << std::to_string(padding_above[i]) << ", ";
+                    }
+                }
+                writer << std::to_string(padding_above[padding_above.size() - 1]);
+                writer << "},\n";
+                writer << "mkldnn::padding_kind::zero);\n";
+
+                writer << "mkldnn::primitive* prim;\n";
+                writer << "prim = new mkldnn::pooling_forward({avg_pool_desc, "
+                          "cg_ctx->global_cpu_engine},"
+                          "*cg_ctx->mkldnn_primitives["
+                       << std::to_string(deps[0])
+                       << "],\n"
+                          "*cg_ctx->mkldnn_primitives["
+                       << std::to_string(deps[1]) << "]);\n";
+                writer << "cg_ctx->mkldnn_primitives[" << std::to_string(index) << "] = prim;\n";
+
+                construct_string = writer.get_code();
+            }
+
+            template <>
+            void MKLDNNPrimitiveBuildPass::CONSTRUCT_PRIMITIVE_BUILD_STRING_DECL(MaxPool)
+            {
+                construct_primitive_build_string_max_pool<MaxPool, false>(
+                    mkldnn_emitter, node, construct_string, deps, index, desc_file);
+            }
 
             template <>
-            size_t MKLDNNPrimitiveBuildPass::BUILD_PRIMITIVE_DECL(QuantizedMaxPool)
+            void MKLDNNPrimitiveBuildPass::CONSTRUCT_PRIMITIVE_BUILD_STRING_DECL(QuantizedMaxPool)
             {
-                return mkldnn_emitter.build_quantized_max_pool(node);
+                construct_primitive_build_string_max_pool<QuantizedMaxPool, false>(
+                    mkldnn_emitter, node, construct_string, deps, index, desc_file);
             }
 
             template <>
+            void MKLDNNPrimitiveBuildPass::CONSTRUCT_PRIMITIVE_BUILD_STRING_DECL(AvgPool)
+            {
+                construct_primitive_build_string_avg_pool<AvgPool, false>(
+                    mkldnn_emitter, node, construct_string, deps, index, desc_file);
+            }
+
+            template <>
-            size_t MKLDNNPrimitiveBuildPass::BUILD_PRIMITIVE_DECL(QuantizedAvgPool)
+            void MKLDNNPrimitiveBuildPass::CONSTRUCT_PRIMITIVE_BUILD_STRING_DECL(QuantizedAvgPool)
             {
-                return mkldnn_emitter.build_quantized_avg_pool(node);
+                construct_primitive_build_string_avg_pool<QuantizedAvgPool, false>(
+                    mkldnn_emitter, node, construct_string, deps, index, desc_file);
             }
 
             template <>
@@ -1202,25 +1429,6 @@ namespace ngraph
                                                             max_pool->get_padding_above());
             }
 
-            template <>
-            size_t MKLDNNPrimitiveBuildPass::BUILD_PRIMITIVE_DECL(AvgPool)
-            {
-                auto input_desc = mkldnn_utils::get_input_mkldnn_md(node, 0);
-                auto result_desc = mkldnn_utils::get_output_mkldnn_md(node, 0);
-                auto avg_pool = static_cast<const ngraph::op::AvgPool*>(node);
-
-                return mkldnn_emitter.build_pooling_forward(
-                    (avg_pool->get_include_padding_in_avg_computation()
-                         ? mkldnn::algorithm::pooling_avg_include_padding
-                         : mkldnn::algorithm::pooling_avg_exclude_padding),
-                    input_desc,
-                    result_desc,
-                    avg_pool->get_window_movement_strides(),
-                    avg_pool->get_window_shape(),
-                    avg_pool->get_padding_below(),
-                    avg_pool->get_padding_above());
-            }
-
             template <>
             size_t MKLDNNPrimitiveBuildPass::BUILD_PRIMITIVE_DECL(AvgPoolBackprop)
             {
@@ -1467,7 +1675,6 @@ static const PrimitiveBuildOpMap prim_build_dispatcher{
     {TI(ConvolutionBackpropData),
      &MKLDNNPrimitiveBuildPass::build_primitive<ConvolutionBackpropData>},
     {TI(ConvolutionBackpropFilters),
      &MKLDNNPrimitiveBuildPass::build_primitive<ConvolutionBackpropFilters>},
-    {TI(MaxPool), &MKLDNNPrimitiveBuildPass::build_primitive<MaxPool>},
     {TI(MaxPoolWithIndices), &MKLDNNPrimitiveBuildPass::build_primitive<MaxPoolWithIndices>},
     {TI(MaxPoolBackprop), &MKLDNNPrimitiveBuildPass::build_primitive<MaxPoolBackprop>},
     {TI(MaxPoolWithIndicesBackprop),
@@ -1531,6 +1738,12 @@ static const PrimitiveBuildStringConstructOpMap prim_build_string_construct_disp
     {TI(QuantizedConvolutionBiasSignedAdd),
      &MKLDNNPrimitiveBuildPass::construct_primitive_build_string<QuantizedConvolutionBiasSignedAdd>},
+    {TI(MaxPool), &MKLDNNPrimitiveBuildPass::construct_primitive_build_string<MaxPool>},
+    {TI(QuantizedMaxPool),
+     &MKLDNNPrimitiveBuildPass::construct_primitive_build_string<QuantizedMaxPool>},
+    {TI(AvgPool), &MKLDNNPrimitiveBuildPass::construct_primitive_build_string<AvgPool>},
+    {TI(QuantizedAvgPool),
+     &MKLDNNPrimitiveBuildPass::construct_primitive_build_string<QuantizedAvgPool>},
 };
 
 // Check if the node builds primitives at first iteration.
@@ -1558,7 +1771,11 @@ static bool in_new_map(const std::shared_ptr<Node>& node)
         std::dynamic_pointer_cast<ngraph::op::QuantizedConvolutionBiasAdd>(node) ||
         std::dynamic_pointer_cast<ngraph::op::QuantizedConvolutionBiasSignedAdd>(node) ||
         std::dynamic_pointer_cast<ngraph::op::GroupConvolution>(node) ||
-        std::dynamic_pointer_cast<ngraph::op::GroupConvolutionBias>(node))
+        std::dynamic_pointer_cast<ngraph::op::GroupConvolutionBias>(node) ||
+        std::dynamic_pointer_cast<ngraph::op::MaxPool>(node) ||
+        std::dynamic_pointer_cast<ngraph::op::QuantizedMaxPool>(node) ||
+        std::dynamic_pointer_cast<ngraph::op::AvgPool>(node) ||
+        std::dynamic_pointer_cast<ngraph::op::QuantizedAvgPool>(node))
     {
         return true;
     }