Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in / Register
Toggle navigation
O
opencv
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Packages
Packages
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
submodule
opencv
Commits
7bc01760
Commit
7bc01760
authored
7 years ago
by
Li Peng
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Add OpenCL support for the Power, TanH and ChannelsPReLU layers
Signed-off-by:
Li Peng
<
peng.li@intel.com
>
parent
4189214d
master
4.3.0
4.2.0
4.1.2
4.1.2-openvino
4.1.1
4.1.1-openvino
4.1.0
4.1.0-openvino
4.0.1
4.0.1-openvino
4.0.0
4.0.0-rc
4.0.0-openvino
4.0.0-beta
4.0.0-alpha
3.4.10
3.4.9
3.4.8
3.4.7
3.4.6
3.4.5
3.4.4
3.4.3
3.4.3-openvino
3.4.2
3.4.2-openvino
3.4.1
3.4.1-cvsdk
No related merge requests found
Show whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
90 additions
and
8 deletions
+90
-8
elementwise_layers.cpp
modules/dnn/src/layers/elementwise_layers.cpp
+75
-8
activations.cl
modules/dnn/src/opencl/activations.cl
+9
-0
test_layers.cpp
modules/dnn/test/test_layers.cpp
+6
-0
No files found.
modules/dnn/src/layers/elementwise_layers.cpp
View file @
7bc01760
...
...
@@ -267,7 +267,6 @@ struct ReLUFunctor
bool
applyOCL
(
InputArrayOfArrays
inps
,
OutputArrayOfArrays
outs
,
OutputArrayOfArrays
internals
)
{
size_t
wgSize
=
ocl
::
Device
::
getDefault
().
maxWorkGroupSize
();
std
::
vector
<
UMat
>
inputs
;
std
::
vector
<
UMat
>
outputs
;
...
...
@@ -287,7 +286,7 @@ struct ReLUFunctor
kernel
.
set
(
2
,
ocl
::
KernelArg
::
PtrWriteOnly
(
dst
));
size_t
gSize
=
src
.
total
();
CV_Assert
(
kernel
.
run
(
1
,
&
gSize
,
&
wgSize
,
false
));
CV_Assert
(
kernel
.
run
(
1
,
&
gSize
,
NULL
,
false
));
}
return
true
;
...
...
@@ -395,8 +394,28 @@ struct TanHFunctor
#ifdef HAVE_OPENCL
bool applyOCL(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
{
    // Element-wise tanh forward pass on the OpenCL device.
    // Returns true on success; false lets the caller fall back to the CPU path.
    // NOTE: the scraped diff left the removed stub ("return false;") merged
    // into the body, making everything after it unreachable — removed here.
    std::vector<UMat> inputs;
    std::vector<UMat> outputs;
    inps.getUMatVector(inputs);
    outs.getUMatVector(outputs);

    // Build option injects the element-type macro (T) taken from the first input.
    String buildopt = oclGetTMacro(inputs[0]);

    for (size_t i = 0; i < inputs.size(); i++)
    {
        UMat& src = inputs[i];
        UMat& dst = outputs[i];

        ocl::Kernel kernel("TanHForward", ocl::dnn::activations_oclsrc, buildopt);
        if (kernel.empty())
            return false;  // kernel failed to build — use the CPU fallback

        kernel.set(0, (int)src.total());
        kernel.set(1, ocl::KernelArg::PtrReadOnly(src));
        kernel.set(2, ocl::KernelArg::PtrWriteOnly(dst));

        // One work-item per element; let the runtime pick the local size (NULL).
        size_t gSize = src.total();
        CV_Assert(kernel.run(1, &gSize, NULL, false));
    }
    return true;
}
#endif
...
...
@@ -594,8 +613,31 @@ struct PowerFunctor
#ifdef HAVE_OPENCL
bool applyOCL(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
{
    // Element-wise power forward pass on the OpenCL device:
    // dst = (shift + scale * src) ^ power (per the PowForward kernel arguments).
    // Returns true on success; false lets the caller fall back to the CPU path.
    // NOTE: the scraped diff left the removed stub ("return false;") merged
    // into the body, making everything after it unreachable — removed here.
    std::vector<UMat> inputs;
    std::vector<UMat> outputs;
    inps.getUMatVector(inputs);
    outs.getUMatVector(outputs);

    // Build option injects the element-type macro (T) taken from the first input.
    String buildopt = oclGetTMacro(inputs[0]);

    for (size_t i = 0; i < inputs.size(); i++)
    {
        UMat& src = inputs[i];
        UMat& dst = outputs[i];

        ocl::Kernel kernel("PowForward", ocl::dnn::activations_oclsrc, buildopt);
        if (kernel.empty())
            return false;  // kernel failed to build — use the CPU fallback

        kernel.set(0, (int)src.total());
        kernel.set(1, ocl::KernelArg::PtrReadOnly(src));
        kernel.set(2, ocl::KernelArg::PtrWriteOnly(dst));
        kernel.set(3, (float)power);
        kernel.set(4, (float)scale);
        kernel.set(5, (float)shift);

        // One work-item per element; let the runtime pick the local size (NULL).
        size_t gSize = src.total();
        CV_Assert(kernel.run(1, &gSize, NULL, false));
    }
    return true;
}
#endif
...
...
@@ -624,9 +666,11 @@ struct ChannelsPReLUFunctor
{
typedef
ChannelsPReLULayer
Layer
;
Mat
scale
;
UMat
scale_umat
;
// Wraps the per-channel slope matrix. A read-only UMat view of the slopes is
// cached up front so the OpenCL path does not have to re-wrap the Mat on
// every forward call.
explicit ChannelsPReLUFunctor(const Mat& scale_ = Mat()) : scale(scale_)
{
    scale_umat = scale.getUMat(ACCESS_READ);
}
void
apply
(
const
float
*
srcptr
,
float
*
dstptr
,
int
len
,
size_t
planeSize
,
int
cn0
,
int
cn1
)
const
...
...
@@ -669,8 +713,31 @@ struct ChannelsPReLUFunctor
#ifdef HAVE_OPENCL
bool applyOCL(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
{
    // Channel-wise PReLU forward pass on the OpenCL device:
    // negative inputs are scaled by the per-channel slope in scale_umat.
    // Returns true on success; false lets the caller fall back to the CPU path.
    // NOTE: the scraped diff left the removed stub ("return false;") merged
    // into the body, making everything after it unreachable — removed here.
    std::vector<UMat> inputs;
    std::vector<UMat> outputs;
    inps.getUMatVector(inputs);
    outs.getUMatVector(outputs);

    // Build option injects the element-type macro (T) taken from the first input.
    String buildopt = oclGetTMacro(inputs[0]);

    for (size_t i = 0; i < inputs.size(); i++)
    {
        UMat& src = inputs[i];
        UMat& dst = outputs[i];

        ocl::Kernel kernel("PReLUForward", ocl::dnn::activations_oclsrc, buildopt);
        if (kernel.empty())
            return false;  // kernel failed to build — use the CPU fallback

        kernel.set(0, (int)src.total());
        kernel.set(1, (int)src.size[1]);               // channel count (NCHW axis 1)
        kernel.set(2, (int)total(shape(src), 2));      // plane size: product of dims from axis 2 on
        kernel.set(3, ocl::KernelArg::PtrReadOnly(src));
        kernel.set(4, ocl::KernelArg::PtrWriteOnly(dst));
        kernel.set(5, ocl::KernelArg::PtrReadOnly(scale_umat));

        // One work-item per element; let the runtime pick the local size (NULL).
        size_t gSize = src.total();
        CV_Assert(kernel.run(1, &gSize, NULL, false));
    }
    return true;
}
#endif
...
...
This diff is collapsed.
Click to expand it.
modules/dnn/src/opencl/activations.cl
View file @
7bc01760
...
...
@@ -54,6 +54,15 @@ __kernel void ReLUForward(const int count, __global const T* in, __global T* out
#
endif
}
/* Channel-wise PReLU:
 *   out[i] = in[i]            if in[i] > 0
 *   out[i] = in[i] * slope[c] otherwise,
 * where c is the channel of flat index i in an N x channels x plane_size layout. */
__kernel void PReLUForward(const int count, const int channels, const int plane_size,
                           __global const T* in, __global T* out,
                           __global const T* slope_data)
{
    int index = get_global_id(0);
    if (index < count)
    {
        int c = (index / plane_size) % channels;
        out[index] = in[index] > 0 ? in[index] : in[index] * slope_data[c];
    }
}
__kernel
void
TanHForward
(
const
int
count,
__global
T*
in,
__global
T*
out
)
{
int
index
=
get_global_id
(
0
)
;
if
(
index
<
count
)
...
...
This diff is collapsed.
Click to expand it.
modules/dnn/test/test_layers.cpp
View file @
7bc01760
...
...
@@ -331,6 +331,12 @@ TEST(Layer_Test_PReLU, Accuracy)
testLayerUsingCaffeModels
(
"layer_prelu_fc"
,
DNN_TARGET_CPU
,
true
,
false
);
}
// OpenCL counterpart of the CPU PReLU accuracy test: runs the same Caffe
// reference models against the DNN_TARGET_OPENCL target.
OCL_TEST(Layer_Test_PReLU, Accuracy)
{
    testLayerUsingCaffeModels("layer_prelu", DNN_TARGET_OPENCL, true);
    testLayerUsingCaffeModels("layer_prelu_fc", DNN_TARGET_OPENCL, true, false);
}
//template<typename XMat>
//static void test_Layer_Concat()
//{
...
...
This diff is collapsed.
Click to expand it.
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment