opencv / Commits / ace0701a

Commit ace0701a
authored Jun 29, 2017 by Maksim Shabunin
Merge pull request #9019 from alalek:dnn_trace

parents ca962214 ed103833

Showing 36 changed files with 224 additions and 24 deletions
modules/dnn/src/caffe/caffe_importer.cpp            +5   -1
modules/dnn/src/caffe/caffe_io.cpp                  +1   -1
modules/dnn/src/caffe/caffe_io.hpp                  +1   -1
modules/dnn/src/dnn.cpp                             +107 -11
modules/dnn/src/init.cpp                            +2   -0
modules/dnn/src/layers/batch_norm_layer.cpp         +3   -0
modules/dnn/src/layers/blank_layer.cpp              +3   -0
modules/dnn/src/layers/concat_layer.cpp             +3   -0
modules/dnn/src/layers/convolution_layer.cpp        +6   -0
modules/dnn/src/layers/crop_layer.cpp               +3   -0
modules/dnn/src/layers/detection_output_layer.cpp   +3   -0
modules/dnn/src/layers/elementwise_layers.cpp       +2   -0
modules/dnn/src/layers/eltwise_layer.cpp            +3   -0
modules/dnn/src/layers/flatten_layer.cpp            +3   -0
modules/dnn/src/layers/fully_connected_layer.cpp    +3   -0
modules/dnn/src/layers/lrn_layer.cpp                +3   -0
modules/dnn/src/layers/max_unpooling_layer.cpp      +3   -0
modules/dnn/src/layers/mvn_layer.cpp                +3   -0
modules/dnn/src/layers/normalize_bbox_layer.cpp     +3   -0
modules/dnn/src/layers/padding_layer.cpp            +3   -0
modules/dnn/src/layers/permute_layer.cpp            +3   -0
modules/dnn/src/layers/pooling_layer.cpp            +3   -0
modules/dnn/src/layers/prior_box_layer.cpp          +3   -0
modules/dnn/src/layers/recurrent_layers.cpp         +6   -0
modules/dnn/src/layers/reshape_layer.cpp            +3   -0
modules/dnn/src/layers/scale_layer.cpp              +3   -0
modules/dnn/src/layers/shift_layer.cpp              +3   -0
modules/dnn/src/layers/slice_layer.cpp              +3   -0
modules/dnn/src/layers/softmax_layer.cpp            +3   -0
modules/dnn/src/layers/split_layer.cpp              +3   -0
modules/dnn/src/precomp.hpp                         +1   -0
modules/dnn/src/tensorflow/tf_importer.cpp          +1   -1
modules/dnn/src/tensorflow/tf_io.cpp                +1   -1
modules/dnn/src/tensorflow/tf_io.hpp                +1   -1
modules/dnn/src/torch/torch_importer.cpp            +6   -0
samples/dnn/caffe_googlenet.cpp                     +18  -7
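Nearly every hunk below follows one pattern: the first statements of a function become trace macros from OpenCV's instrumentation framework (opencv2/core/utils/trace.hpp). CV_TRACE_FUNCTION() opens a trace region spanning the enclosing call, and CV_TRACE_ARG_VALUE() attaches a named string argument (here, a layer's name or type) to that region; both compile down to near no-ops when tracing is disabled. A minimal sketch of the pattern, using a hypothetical MyLayer class that is not part of this commit:

#include <opencv2/core.hpp>
#include <opencv2/core/utils/trace.hpp>

class MyLayer
{
public:
    cv::String name;

    void forward()
    {
        CV_TRACE_FUNCTION();                             // trace region covering this whole call
        CV_TRACE_ARG_VALUE(name, "name", name.c_str());  // attach the layer name to the region
        // ... actual layer computation goes here ...
    }
};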
modules/dnn/src/caffe/caffe_importer.cpp

@@ -43,7 +43,7 @@
 using namespace cv;
 using namespace cv::dnn;

-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "caffe.pb.h"
 #include <iostream>
@@ -82,6 +82,8 @@ public:
     CaffeImporter(const char *pototxt, const char *caffeModel)
     {
+        CV_TRACE_FUNCTION();
+
         ReadNetParamsFromTextFileOrDie(pototxt, &net);

         if (caffeModel && caffeModel[0])
@@ -264,6 +266,8 @@ public:
     void populateNet(Net dstNet)
     {
+        CV_TRACE_FUNCTION();
+
         int layersSize = net.layer_size();
         layerCounter.clear();
         addedBlobs.clear();

modules/dnn/src/caffe/caffe_io.cpp

@@ -87,7 +87,7 @@
 //
 //M*/

-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include <google/protobuf/io/coded_stream.h>
 #include <google/protobuf/io/zero_copy_stream_impl.h>
 #include <google/protobuf/text_format.h>

modules/dnn/src/caffe/caffe_io.hpp

@@ -89,7 +89,7 @@
 #ifndef __OPENCV_DNN_CAFFE_IO_HPP__
 #define __OPENCV_DNN_CAFFE_IO_HPP__

-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "caffe.pb.h"
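One fix here is independent of tracing: the protobuf guard changes from #if HAVE_PROTOBUF to #ifdef HAVE_PROTOBUF (the same edit recurs in the TensorFlow sources further down). The distinction matters if HAVE_PROTOBUF is emitted as a valueless #define, which is what CMake's #cmakedefine produces; that is an assumption about the build setup, but it would explain the edit:

// Assuming the config header defines the macro with no value, as #cmakedefine emits:
#define HAVE_PROTOBUF

// #if HAVE_PROTOBUF   // preprocessing error: the controlling expression expands to nothing
#ifdef HAVE_PROTOBUF   // fine: only asks whether the macro is defined
// protobuf-dependent code
#endif

And when the macro is not defined at all, #if HAVE_PROTOBUF silently evaluates to 0, hiding the code instead of failing loudly.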
modules/dnn/src/dnn.cpp

@@ -90,6 +90,7 @@ static String toString(const T &v)
 Mat blobFromImage(const Mat& image, double scalefactor, const Size& size,
                   const Scalar& mean, bool swapRB)
 {
+    CV_TRACE_FUNCTION();
     std::vector<Mat> images(1, image);
     return blobFromImages(images, scalefactor, size, mean, swapRB);
 }
@@ -97,6 +98,7 @@ Mat blobFromImage(const Mat& image, double scalefactor, const Size& size,
 Mat blobFromImages(const std::vector<Mat>& images_, double scalefactor, Size size,
                    const Scalar& mean_, bool swapRB)
 {
+    CV_TRACE_FUNCTION();
    std::vector<Mat> images = images_;
    for (int i = 0; i < images.size(); i++)
    {
@@ -207,6 +209,8 @@ class BackendWrapManager
 public:
     Ptr<BackendWrapper> wrap(const Mat& m, int backendId, int targetId)
     {
+        CV_TRACE_FUNCTION();
+
         CV_Assert(backendId != DNN_BACKEND_DEFAULT);
         std::map<void*, Ptr<BackendWrapper> >::iterator hostsIt;
@@ -261,6 +265,8 @@ public:
     void reset()
     {
+        CV_TRACE_FUNCTION();
+
         hostWrappers.clear();
         extraWrappers.clear();
     }
@@ -321,6 +327,8 @@ struct LayerData
     LayerData(int _id, const String &_name, const String &_type, LayerParams &_params)
         : id(_id), name(_name), type(_type), params(_params), flag(0)
     {
+        CV_TRACE_FUNCTION();
+
         //add logging info
         params.name = name;
         params.type = type;
@@ -349,6 +357,9 @@ struct LayerData
     Ptr<Layer> getLayerInstance()
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+
         if (layerInstance)
             return layerInstance;
@@ -500,6 +511,8 @@ public:
     void allocateBlobsForLayer(LayerData &ld, const LayerShapes& layerShapes,
                                std::vector<LayerPin>& pinsForInternalBlobs)
     {
+        CV_TRACE_FUNCTION();
+
         pinsForInternalBlobs.clear();
         std::vector<Mat>& outputBlobs = ld.outputBlobs,
@@ -578,6 +591,8 @@ public:
     // Clear internal state. Calls before an every reallocation.
     void reset()
     {
+        CV_TRACE_FUNCTION();
+
         refCounter.clear();
         reuseMap.clear();
         memHosts.clear();
@@ -639,6 +654,8 @@ struct Net::Impl
     void compileHalide()
     {
+        CV_TRACE_FUNCTION();
+
         CV_Assert(preferableBackend == DNN_BACKEND_HALIDE);
         HalideScheduler scheduler(halideConfigFile);
@@ -666,6 +683,8 @@ struct Net::Impl
     void clear()
     {
+        CV_TRACE_FUNCTION();
+
         MapIdToLayerData::iterator it;
         for (it = layers.begin(); it != layers.end(); it++)
         {
@@ -694,6 +713,8 @@ struct Net::Impl
     void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>())
     {
+        CV_TRACE_FUNCTION();
+
         if (!netWasAllocated || this->blobsToKeep != blobsToKeep_)
         {
             clear();
@@ -862,6 +883,8 @@ struct Net::Impl
     void computeNetOutputLayers()
     {
+        CV_TRACE_FUNCTION();
+
         netOutputs.clear();
         MapIdToLayerData::iterator it;
@@ -883,6 +906,8 @@ struct Net::Impl
     void initBackend()
     {
+        CV_TRACE_FUNCTION();
+
         backendWrapper.reset();
         if (preferableBackend == DNN_BACKEND_DEFAULT)
         {
@@ -953,6 +978,8 @@ struct Net::Impl
     void allocateLayer(int lid, const LayersShapesMap& layersShapes)
     {
+        CV_TRACE_FUNCTION();
+
         LayerData &ld = layers[lid];
         //already allocated
@@ -1026,6 +1053,8 @@ struct Net::Impl
     void fuseLayers(const std::vector<LayerPin>& blobsToKeep_)
     {
+        CV_TRACE_FUNCTION();
+
         // scan through all the layers. If there is convolution layer followed by the activation layer,
         // we try to embed this activation into the convolution and disable separate execution of the activation
         std::vector<String> outnames;
@@ -1094,6 +1123,8 @@ struct Net::Impl
     void allocateLayers(const std::vector<LayerPin>& blobsToKeep_)
     {
+        CV_TRACE_FUNCTION();
+
         MapIdToLayerData::iterator it;
         for (it = layers.begin(); it != layers.end(); it++)
             it->second.flag = 0;
@@ -1131,6 +1162,8 @@ struct Net::Impl
     void forwardLayer(LayerData &ld)
     {
+        CV_TRACE_FUNCTION();
+
         Ptr<Layer> layer = ld.layerInstance;
         if (preferableBackend == DNN_BACKEND_DEFAULT ||
@@ -1159,6 +1192,8 @@ struct Net::Impl
     void forwardToLayer(LayerData &ld, bool clearFlags = true)
     {
+        CV_TRACE_FUNCTION();
+
         if (clearFlags)
         {
             MapIdToLayerData::iterator it;
@@ -1186,6 +1221,8 @@ struct Net::Impl
     void forwardAll()
     {
+        CV_TRACE_FUNCTION();
+
         forwardToLayer(layers.rbegin()->second, true);
     }
@@ -1247,6 +1284,8 @@ struct Net::Impl
     Mat getBlob(const LayerPin& pin)
     {
+        CV_TRACE_FUNCTION();
+
         if (!pin.valid())
             CV_Error(Error::StsObjectNotFound, "Requested blob not found");
@@ -1285,6 +1324,8 @@ Net::~Net()
 int Net::addLayer(const String &name, const String &type, LayerParams &params)
 {
+    CV_TRACE_FUNCTION();
+
     if (name.find('.') != String::npos)
     {
         CV_Error(Error::StsBadArg, "Added layer name \"" + name + "\" must not contain dot symbol");
@@ -1306,6 +1347,8 @@ int Net::addLayer(const String &name, const String &type, LayerParams &params)
 int Net::addLayerToPrev(const String &name, const String &type, LayerParams &par
 {
+    CV_TRACE_FUNCTION();
+
     int prvLid = impl->lastLayerId;
     int newLid = this->addLayer(name, type, params);
     this->connect(prvLid, 0, newLid, 0);
@@ -1314,11 +1357,15 @@ int Net::addLayerToPrev(const String &name, const String &type, LayerParams &par
 void Net::connect(int outLayerId, int outNum, int inpLayerId, int inpNum)
 {
+    CV_TRACE_FUNCTION();
+
     impl->connect(outLayerId, outNum, inpLayerId, inpNum);
 }

 void Net::connect(String _outPin, String _inPin)
 {
+    CV_TRACE_FUNCTION();
+
     LayerPin outPin = impl->getPinByAlias(_outPin);
     LayerPin inpPin = impl->getPinByAlias(_inPin);
@@ -1329,6 +1376,8 @@ void Net::connect(String _outPin, String _inPin)
 Mat Net::forward(const String& outputName)
 {
+    CV_TRACE_FUNCTION();
+
     String layerName = outputName;

     if (layerName.empty())
@@ -1342,6 +1391,8 @@ Mat Net::forward(const String& outputName)
 void Net::forward(std::vector<Mat>& outputBlobs, const String& outputName)
 {
+    CV_TRACE_FUNCTION();
+
     impl->setUpNet();
     String layerName = outputName;
@@ -1359,6 +1410,8 @@ void Net::forward(std::vector<Mat>& outputBlobs, const String& outputName)
 void Net::forward(std::vector<Mat>& outputBlobs,
                   const std::vector<String>& outBlobNames)
 {
+    CV_TRACE_FUNCTION();
+
     std::vector<LayerPin> pins;
     for (int i = 0; i < outBlobNames.size(); i++)
     {
@@ -1381,6 +1434,8 @@ void Net::forward(std::vector<Mat>& outputBlobs,
 void Net::forward(std::vector<std::vector<Mat> >& outputBlobs,
                   const std::vector<String>& outBlobNames)
 {
+    CV_TRACE_FUNCTION();
+
     std::vector<LayerPin> pins;
     for (int i = 0; i < outBlobNames.size(); i++)
     {
@@ -1407,6 +1462,9 @@ void Net::forward(std::vector<std::vector<Mat> >& outputBlobs,
 void Net::setPreferableBackend(int backendId)
 {
+    CV_TRACE_FUNCTION();
+    CV_TRACE_ARG(backendId);
+
     impl->netWasAllocated = impl->netWasAllocated &&
                             impl->preferableBackend == backendId;
     impl->preferableBackend = backendId;
@@ -1414,6 +1472,9 @@ void Net::setPreferableBackend(int backendId)
 void Net::setPreferableTarget(int targetId)
 {
+    CV_TRACE_FUNCTION();
+    CV_TRACE_ARG(targetId);
+
     impl->netWasAllocated = impl->netWasAllocated &&
                             impl->preferableTarget == targetId;
     impl->preferableTarget = targetId;
@@ -1421,11 +1482,16 @@ void Net::setPreferableTarget(int targetId)
 void Net::setInputsNames(const std::vector<String> &inputBlobNames)
 {
+    CV_TRACE_FUNCTION();
+
     impl->netInputLayer->setNames(inputBlobNames);
 }

 void Net::setInput(const Mat &blob_, const String& name)
 {
+    CV_TRACE_FUNCTION();
+    CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
     LayerPin pin;
     pin.lid = 0;
     pin.oid = impl->resolvePinOutputName(impl->getLayerData(pin.lid), name);
@@ -1595,6 +1661,8 @@ void Net::getLayerShapes(const ShapesVec& netInputShapes,
 int64 Net::getFLOPS(const std::vector<MatShape>& netInputShapes) const
 {
+    CV_TRACE_FUNCTION();
+
     int64 flops = 0;
     std::vector<int> ids;
     std::vector<std::vector<MatShape> > inShapes, outShapes;
@@ -1670,6 +1738,8 @@ void Net::getMemoryConsumption(const int layerId,
                                const std::vector<MatShape>& netInputShapes,
                                size_t& weights, size_t& blobs) const
 {
+    CV_TRACE_FUNCTION();
+
     Impl::MapIdToLayerData::iterator layer = impl->layers.find(layerId);
     CV_Assert(layer != impl->layers.end());
@@ -1692,6 +1762,8 @@ void Net::getMemoryConsumption(const int layerId,
 void Net::getMemoryConsumption(const std::vector<MatShape>& netInputShapes,
                                size_t& weights, size_t& blobs) const
 {
+    CV_TRACE_FUNCTION();
+
     std::vector<int> layerIds;
     std::vector<size_t> w, b;
     getMemoryConsumption(netInputShapes, layerIds, w, b);
@@ -1723,6 +1795,8 @@ void Net::getMemoryConsumption(const std::vector<MatShape>& netInputShapes,
                                std::vector<int>& layerIds, std::vector<size_t>& weights,
                                std::vector<size_t>& blobs) const
 {
+    CV_TRACE_FUNCTION();
+
     layerIds.clear();
     weights.clear();
     blobs.clear();
@@ -1762,6 +1836,9 @@ void Net::getMemoryConsumption(const MatShape& netInputShape, std::vector<int>&
 void Net::setHalideScheduler(const String& scheduler)
 {
+    CV_TRACE_FUNCTION();
+    CV_TRACE_ARG_VALUE(scheduler, "scheduler", scheduler.c_str());
+
     impl->halideConfigFile = scheduler;
 }
@@ -1810,6 +1887,8 @@ void Layer::applyHalideScheduler(Ptr<BackendNode>& node, const std::vector<Mat*>
                                  const std::vector<Mat> &outputs, int targetId) const
 {
 #ifdef HAVE_HALIDE
+    CV_TRACE_FUNCTION();
+
     Halide::Var x("x"), y("y"), c("c"), n("n"), co("co"), ci("ci"),
                 xo("xo"), xi("xi"), yo("yo"), yi("yi"), tile("tile");
     Halide::Func& top = node.dynamicCast<HalideBackendNode>()->funcs.back();
@@ -1891,6 +1970,8 @@ static void vecToPVec(const std::vector<T> &v, std::vector<T*> &pv)
 void Layer::finalize(const std::vector<Mat> &inputs, std::vector<Mat> &outputs)
 {
+    CV_TRACE_FUNCTION();
+
     std::vector<Mat*> inputsp;
     vecToPVec(inputs, inputsp);
     this->finalize(inputsp, outputs);
@@ -1903,6 +1984,8 @@ void Layer::finalize(const std::vector<Mat*> &input, std::vector<Mat> &output)
 std::vector<Mat> Layer::finalize(const std::vector<Mat> &inputs)
 {
+    CV_TRACE_FUNCTION();
+
     std::vector<Mat> outputs;
     this->finalize(inputs, outputs);
     return outputs;
@@ -1910,6 +1993,8 @@ std::vector<Mat> Layer::finalize(const std::vector<Mat> &inputs)
 void Layer::forward(const std::vector<Mat> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
 {
+    CV_TRACE_FUNCTION();
+
     std::vector<Mat*> inputsp;
     vecToPVec(inputs, inputsp);
     this->forward(inputsp, outputs, internals);
@@ -1917,6 +2002,8 @@ void Layer::forward(const std::vector<Mat> &inputs, std::vector<Mat> &outputs, s
 void Layer::run(const std::vector<Mat> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
 {
+    CV_TRACE_FUNCTION();
+
     std::vector<Mat*> inputsp;
     vecToPVec(inputs, inputsp);
     this->finalize(inputsp, outputs);
@@ -1972,32 +2059,41 @@ static LayerFactory_Impl& getLayerFactoryImpl()
     return *instance;
 }

-void LayerFactory::registerLayer(const String &_type, Constuctor constructor)
+void LayerFactory::registerLayer(const String &type, Constuctor constructor)
 {
+    CV_TRACE_FUNCTION();
+    CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+
     cv::AutoLock lock(getLayerFactoryMutex());
-    String type = _type.toLowerCase();
-    LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type);
+    String type_ = type.toLowerCase();
+    LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type_);

     if (it != getLayerFactoryImpl().end() && it->second != constructor)
     {
-        CV_Error(cv::Error::StsBadArg, "Layer \"" + type + "\" already was registered");
+        CV_Error(cv::Error::StsBadArg, "Layer \"" + type_ + "\" already was registered");
     }

-    getLayerFactoryImpl().insert(std::make_pair(type, constructor));
+    getLayerFactoryImpl().insert(std::make_pair(type_, constructor));
 }

-void LayerFactory::unregisterLayer(const String &_type)
+void LayerFactory::unregisterLayer(const String &type)
 {
+    CV_TRACE_FUNCTION();
+    CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+
     cv::AutoLock lock(getLayerFactoryMutex());
-    String type = _type.toLowerCase();
-    getLayerFactoryImpl().erase(type);
+    String type_ = type.toLowerCase();
+    getLayerFactoryImpl().erase(type_);
 }

-Ptr<Layer> LayerFactory::createLayerInstance(const String &_type, LayerParams& params)
+Ptr<Layer> LayerFactory::createLayerInstance(const String &type, LayerParams& params)
 {
+    CV_TRACE_FUNCTION();
+    CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+
     cv::AutoLock lock(getLayerFactoryMutex());
-    String type = _type.toLowerCase();
-    LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type);
+    String type_ = type.toLowerCase();
+    LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type_);

     if (it != getLayerFactoryImpl().end())
     {
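The LayerFactory hunks do slightly more than add macros: the public parameter is renamed from _type to type, and the lowercased local becomes type_, so that CV_TRACE_ARG_VALUE(type, "type", type.c_str()) records the identifier exactly as the caller wrote it, before case folding. A sketch of the same idiom in a hypothetical lookup helper (not from this commit):

#include <opencv2/core.hpp>
#include <opencv2/core/utils/trace.hpp>

// Trace the caller-visible argument first, then derive the normalized local
// under a different name so the trace macro cannot pick up the folded value.
bool hasLayer(const cv::String& type)
{
    CV_TRACE_FUNCTION();
    CV_TRACE_ARG_VALUE(type, "type", type.c_str()); // records the original spelling

    cv::String type_ = type.toLowerCase();          // normalized copy for the lookup
    return type_ == "convolution";                  // stand-in for the real map lookup
}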
modules/dnn/src/init.cpp

@@ -60,6 +60,8 @@ Mutex* __initialization_mutex_initializer = &getInitializationMutex();
 void initializeLayerFactory()
 {
+    CV_TRACE_FUNCTION();
+
     CV_DNN_REGISTER_LAYER_CLASS(Slice, SliceLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Split, SplitLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Concat, ConcatLayer);
modules/dnn/src/layers/batch_norm_layer.cpp

@@ -104,6 +104,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(blobs.size() >= 2);
         CV_Assert(inputs.size() == 1);

modules/dnn/src/layers/blank_layer.cpp

@@ -64,6 +64,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (int i = 0, n = outputs.size(); i < n; ++i)
             if (outputs[i].data != inputs[i]->data)
                 inputs[i]->copyTo(outputs[i]);

modules/dnn/src/layers/concat_layer.cpp

@@ -96,6 +96,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int cAxis = clamp(axis, inputs[0]->dims);
         Mat& outMat = outputs[0];
         std::vector<Range> ranges(outputs[0].dims, Range::all());

modules/dnn/src/layers/convolution_layer.cpp

@@ -627,6 +627,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         /*printf("conv %s: input (%d x %d x %d x %d), kernel (%d x %d), pad (%d x %d), stride (%d x %d), dilation (%d x %d)\n",
                name.c_str(), inputs[0]->size[0], inputs[0]->size[1], inputs[0]->size[2], inputs[0]->size[3],
                kernel.width, kernel.height, pad.width, pad.height,
@@ -1013,6 +1016,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int outCn = blobs[0].size[0];
         int inpCn = inputs[0]->size[1];
         bool is1x1flag = is1x1();

modules/dnn/src/layers/crop_layer.cpp

@@ -135,6 +135,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         Mat &input = *inputs[0];
         Mat &output = outputs[0];

modules/dnn/src/layers/detection_output_layer.cpp

@@ -206,6 +206,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const float* locationData = inputs[0]->ptr<float>();
         const float* confidenceData = inputs[1]->ptr<float>();
         const float* priorData = inputs[2]->ptr<float>();

modules/dnn/src/layers/elementwise_layers.cpp

@@ -156,6 +156,8 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+
         for (size_t i = 0; i < inputs.size(); i++)
         {
             const Mat &src = *inputs[i];

modules/dnn/src/layers/eltwise_layer.cpp

@@ -251,6 +251,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(outputs.size() == 1);
         const int nstripes = getNumThreads();
         EltwiseInvoker::run((const Mat**)&inputs[0], (int)inputs.size(), outputs[0],

modules/dnn/src/layers/flatten_layer.cpp

@@ -106,6 +106,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t i = 0; i < inputs.size(); i++)
         {
             MatShape outShape = shape(outputs[i]);

modules/dnn/src/layers/fully_connected_layer.cpp

@@ -233,6 +233,9 @@ public:
     void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int axisCan = clamp(axis, input[0]->dims);
         int outerSize = input[0]->total(0, axisCan);

modules/dnn/src/layers/lrn_layer.cpp

@@ -86,6 +86,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(inputs.size() == outputs.size());
         for (int i = 0; i < inputs.size(); i++)
         {

modules/dnn/src/layers/max_unpooling_layer.cpp

@@ -57,6 +57,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(inputs.size() == 2);
         Mat& input = *inputs[0];
         Mat& indices = *inputs[1];

modules/dnn/src/layers/mvn_layer.cpp

@@ -62,6 +62,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t inpIdx = 0; inpIdx < inputs.size(); inpIdx++)
         {
             Mat &inpBlob = *inputs[inpIdx];

modules/dnn/src/layers/normalize_bbox_layer.cpp

@@ -142,6 +142,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         checkInputs(inputs);
         Mat& buffer = internals[0], sumChannelMultiplier = internals[1],

modules/dnn/src/layers/padding_layer.cpp

@@ -61,6 +61,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (int i = 0; i < inputs.size(); i++)
         {
             outputs[i] = paddingValue;

modules/dnn/src/layers/permute_layer.cpp

@@ -245,6 +245,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         size_t k, ninputs = inputs.size();
         if (!_needsPermute)
         {

modules/dnn/src/layers/pooling_layer.cpp

@@ -106,6 +106,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t ii = 0; ii < inputs.size(); ii++)
         {
             switch (type)

modules/dnn/src/layers/prior_box_layer.cpp

@@ -228,6 +228,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int _layerWidth = inputs[0]->size[3];
         int _layerHeight = inputs[0]->size[2];

modules/dnn/src/layers/recurrent_layers.cpp

@@ -221,6 +221,9 @@ public:
     void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const Mat &Wh = blobs[0];
         const Mat &Wx = blobs[1];
         const Mat &bias = blobs[2];
@@ -406,6 +409,9 @@ public:
     void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         Mat xTs = input[0]->reshape(1, numSamplesTotal);
         Mat oTs = output[0].reshape(1, numSamplesTotal);
         Mat hTs = produceH ? output[1].reshape(1, numSamplesTotal) : Mat();

modules/dnn/src/layers/reshape_layer.cpp

@@ -196,6 +196,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t i = 0; i < inputs.size(); i++)
         {
             Mat srcBlob = *inputs[i];

modules/dnn/src/layers/scale_layer.cpp

@@ -45,6 +45,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(blobs.size() == 1 + hasBias);
         for (size_t ii = 0; ii < outputs.size(); ii++)

modules/dnn/src/layers/shift_layer.cpp

@@ -38,6 +38,9 @@ public:
     virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(inputs.size() > 0);
         CV_Assert(blobs.size() > 0);

modules/dnn/src/layers/slice_layer.cpp

@@ -118,6 +118,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const Mat& inpMat = *inputs[0];
         std::vector<Range> ranges(inpMat.dims, Range::all());
         int cAxis = clamp(axis, inpMat.dims);

modules/dnn/src/layers/softmax_layer.cpp

@@ -84,6 +84,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const Mat &src = *inputs[0];
         Mat &dst = outputs[0];

modules/dnn/src/layers/split_layer.cpp

@@ -80,6 +80,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t i = 0; i < outputs.size(); i++)
         {
             CV_Assert(inputs[0]->total() == outputs[i].total());
modules/dnn/src/precomp.hpp

@@ -40,6 +40,7 @@
 //M*/

 #include <opencv2/core.hpp>
+#include <opencv2/core/utils/trace.hpp>
 #include "cvconfig.h"
 #include <opencv2/dnn.hpp>
 #include <opencv2/dnn/all_layers.hpp>

modules/dnn/src/tensorflow/tf_importer.cpp

@@ -13,7 +13,7 @@ Implementation of Tensorflow models parser
 using namespace cv;
 using namespace cv::dnn;

-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "graph.pb.h"
 #include <iostream>

modules/dnn/src/tensorflow/tf_io.cpp

@@ -9,7 +9,7 @@
 Implementation of various functions which are related to Tensorflow models reading.
 */

-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include <google/protobuf/io/coded_stream.h>
 #include <google/protobuf/io/zero_copy_stream_impl.h>
 #include <google/protobuf/text_format.h>

modules/dnn/src/tensorflow/tf_io.hpp

@@ -11,7 +11,7 @@ Declaration of various functions which are related to Tensorflow models reading.
 #ifndef __OPENCV_DNN_TF_IO_HPP__
 #define __OPENCV_DNN_TF_IO_HPP__

-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "graph.pb.h"
modules/dnn/src/torch/torch_importer.cpp

@@ -115,6 +115,8 @@ struct TorchImporter : public ::cv::dnn::Importer
     TorchImporter(String filename, bool isBinary)
     {
+        CV_TRACE_FUNCTION();
+
         rootModule = curModule = NULL;
         moduleCounter = 0;
@@ -966,6 +968,8 @@ struct TorchImporter : public ::cv::dnn::Importer
     void populateNet(Net net_)
     {
+        CV_TRACE_FUNCTION();
+
         if (rootModule == NULL)
         {
             rootModule = new Module("Sequential");
@@ -1014,6 +1018,8 @@ Mat readTorchBlob(const String&, bool)
 Net readNetFromTorch(const String &model, bool isBinary)
 {
+    CV_TRACE_FUNCTION();
+
     Ptr<Importer> importer = createTorchImporter(model, isBinary);
     Net net;
     if (importer)
samples/dnn/caffe_googlenet.cpp

@@ -41,6 +41,7 @@
 #include <opencv2/dnn.hpp>
 #include <opencv2/imgproc.hpp>
 #include <opencv2/highgui.hpp>
+#include <opencv2/core/utils/trace.hpp>

 using namespace cv;
 using namespace cv::dnn;
@@ -84,6 +85,8 @@ static std::vector<String> readClassNames(const char *filename = "synset_words.t
 int main(int argc, char **argv)
 {
+    CV_TRACE_FUNCTION();
+
     String modelTxt = "bvlc_googlenet.prototxt";
     String modelBin = "bvlc_googlenet.caffemodel";
     String imageFile = (argc > 1) ? argv[1] : "space_shuttle.jpg";
@@ -117,13 +120,20 @@ int main(int argc, char **argv)
                                    Scalar(104, 117, 123));   //Convert Mat to batch of images
     //! [Prepare blob]

-    //! [Set input blob]
-    net.setInput(inputBlob, "data");        //set the network input
-    //! [Set input blob]
-
-    //! [Make forward pass]
-    Mat prob = net.forward("prob");         //compute output
-    //! [Make forward pass]
+    Mat prob;
+    cv::TickMeter t;
+    for (int i = 0; i < 10; i++)
+    {
+        CV_TRACE_REGION("forward");
+        //! [Set input blob]
+        net.setInput(inputBlob, "data");        //set the network input
+        //! [Set input blob]
+        t.start();
+        //! [Make forward pass]
+        prob = net.forward("prob");             //compute output
+        //! [Make forward pass]
+        t.stop();
+    }

     //! [Gather output]
     int classId;
@@ -136,6 +146,7 @@ int main(int argc, char **argv)
     std::cout << "Best class: #" << classId << " '" << classNames.at(classId) << "'" << std::endl;
     std::cout << "Probability: " << classProb * 100 << "%" << std::endl;
     //! [Print results]
+    std::cout << "Time: " << (double)t.getTimeMilli() / t.getCounter() << " ms (average from " << t.getCounter() << " iterations)" << std::endl;

     return 0;
 } //main
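The reworked sample now runs ten forward passes, scoping each in a named CV_TRACE_REGION and averaging the time with cv::TickMeter. A standalone sketch of the same idiom, where doWork() is a placeholder for net.forward("prob"):

#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/core/utils/trace.hpp>

static void doWork() { /* stand-in for the real workload */ }

int main()
{
    cv::TickMeter t;
    for (int i = 0; i < 10; i++)
    {
        CV_TRACE_REGION("forward");   // named region, visible in the trace log
        t.start();
        doWork();
        t.stop();
    }
    std::cout << t.getTimeMilli() / t.getCounter() << " ms per iteration" << std::endl;
    return 0;
}

Collecting an actual trace also requires the framework to be active at run time; per OpenCV's tracing documentation (not shown in this diff), that is done by setting the OPENCV_TRACE=1 environment variable, which by default writes an OpenCVTrace.txt log in the working directory.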