opencv / Commits / ed103833

Commit ed103833, authored Jun 28, 2017 by Alexander Alekhin

    dnn: added trace macros

Parent: bbb14d37

Showing 36 changed files with 219 additions and 19 deletions (+219 -19)
modules/dnn/src/caffe/caffe_importer.cpp            +5   -1
modules/dnn/src/caffe/caffe_io.cpp                  +1   -1
modules/dnn/src/caffe/caffe_io.hpp                  +1   -1
modules/dnn/src/dnn.cpp                             +107 -11
modules/dnn/src/init.cpp                            +2   -0
modules/dnn/src/layers/batch_norm_layer.cpp         +3   -0
modules/dnn/src/layers/blank_layer.cpp              +3   -0
modules/dnn/src/layers/concat_layer.cpp             +3   -0
modules/dnn/src/layers/convolution_layer.cpp        +6   -0
modules/dnn/src/layers/crop_layer.cpp               +3   -0
modules/dnn/src/layers/detection_output_layer.cpp   +3   -0
modules/dnn/src/layers/elementwise_layers.cpp       +2   -0
modules/dnn/src/layers/eltwise_layer.cpp            +3   -0
modules/dnn/src/layers/flatten_layer.cpp            +3   -0
modules/dnn/src/layers/fully_connected_layer.cpp    +3   -0
modules/dnn/src/layers/lrn_layer.cpp                +3   -0
modules/dnn/src/layers/max_unpooling_layer.cpp      +3   -0
modules/dnn/src/layers/mvn_layer.cpp                +3   -0
modules/dnn/src/layers/normalize_bbox_layer.cpp     +3   -0
modules/dnn/src/layers/padding_layer.cpp            +3   -0
modules/dnn/src/layers/permute_layer.cpp            +3   -0
modules/dnn/src/layers/pooling_layer.cpp            +3   -0
modules/dnn/src/layers/prior_box_layer.cpp          +3   -0
modules/dnn/src/layers/recurrent_layers.cpp         +6   -0
modules/dnn/src/layers/reshape_layer.cpp            +3   -0
modules/dnn/src/layers/scale_layer.cpp              +3   -0
modules/dnn/src/layers/shift_layer.cpp              +3   -0
modules/dnn/src/layers/slice_layer.cpp              +3   -0
modules/dnn/src/layers/softmax_layer.cpp            +3   -0
modules/dnn/src/layers/split_layer.cpp              +3   -0
modules/dnn/src/precomp.hpp                         +1   -0
modules/dnn/src/tensorflow/tf_importer.cpp          +1   -1
modules/dnn/src/tensorflow/tf_io.cpp                +1   -1
modules/dnn/src/tensorflow/tf_io.hpp                +1   -1
modules/dnn/src/torch/torch_importer.cpp            +6   -0
samples/dnn/caffe_googlenet.cpp                     +13  -2
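Across the thirty layer implementations the change is mechanical, so one hedged sketch is enough to show the pattern before the per-file diffs. The class below is hypothetical and only illustrates how this commit uses the macros; the macro names come from the new opencv2/core/utils/trace.hpp include, and every Layer::forward() below follows the same shape.

    #include <vector>
    #include <opencv2/core.hpp>
    #include <opencv2/core/utils/trace.hpp>

    // Hypothetical layer-like class, not part of the commit; it mirrors the
    // instrumentation added to each forward() implementation in this diff.
    class ExampleLayer
    {
    public:
        cv::String name;

        void forward(std::vector<cv::Mat*> &inputs, std::vector<cv::Mat> &outputs)
        {
            CV_TRACE_FUNCTION();                             // opens a trace region covering this call
            CV_TRACE_ARG_VALUE(name, "name", name.c_str());  // attaches the layer name to that region

            outputs.resize(inputs.size());                   // placeholder work
            for (size_t i = 0; i < inputs.size(); i++)
                inputs[i]->copyTo(outputs[i]);
        }
    };

When a trace is not being collected the macros are designed to be near-zero-cost, which is presumably why they can sit at the top of every hot path; the commit itself does not spell this out.
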
modules/dnn/src/caffe/caffe_importer.cpp

@@ -43,7 +43,7 @@
 using namespace cv;
 using namespace cv::dnn;
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "caffe.pb.h"
 #include <iostream>

@@ -82,6 +82,8 @@ public:
     CaffeImporter(const char *pototxt, const char *caffeModel)
     {
+        CV_TRACE_FUNCTION();
+
         ReadNetParamsFromTextFileOrDie(pototxt, &net);
         if (caffeModel && caffeModel[0])

@@ -264,6 +266,8 @@ public:
     void populateNet(Net dstNet)
     {
+        CV_TRACE_FUNCTION();
+
         int layersSize = net.layer_size();
         layerCounter.clear();
         addedBlobs.clear();
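A side note on the `#if HAVE_PROTOBUF` to `#ifdef HAVE_PROTOBUF` change, which recurs in the protobuf-related files below: the commit does not state the motivation, but the two forms behave differently when the macro is defined without a value, as feature-test macros coming from a build system often are. A minimal, standalone illustration (not taken from the repository):

    #define HAVE_PROTOBUF            // defined, but with an empty value

    #ifdef HAVE_PROTOBUF             // fine: only asks whether the macro is defined at all
    // protobuf-dependent code
    #endif

    // #if HAVE_PROTOBUF             // would not compile here: #if needs the macro to
    // #endif                        // expand to an integral expression (e.g. 1)

`#if` also silently evaluates an undefined macro as 0, so `#ifdef` states the intent more directly.
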
modules/dnn/src/caffe/caffe_io.cpp

@@ -87,7 +87,7 @@
 //
 //M*/
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include <google/protobuf/io/coded_stream.h>
 #include <google/protobuf/io/zero_copy_stream_impl.h>
 #include <google/protobuf/text_format.h>
modules/dnn/src/caffe/caffe_io.hpp

@@ -89,7 +89,7 @@
 #ifndef __OPENCV_DNN_CAFFE_IO_HPP__
 #define __OPENCV_DNN_CAFFE_IO_HPP__
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "caffe.pb.h"
modules/dnn/src/dnn.cpp

Every hunk in this file follows the same pattern: trace macros are inserted at the top of the function body. A representative hunk:

@@ -90,6 +90,7 @@ static String toString(const T &v)
 Mat blobFromImage(const Mat& image, double scalefactor, const Size& size,
                   const Scalar& mean, bool swapRB)
 {
+    CV_TRACE_FUNCTION();
     std::vector<Mat> images(1, image);
     return blobFromImages(images, scalefactor, size, mean, swapRB);
 }

The remaining hunks, with only the inserted lines shown:

@@ -97,6 +98,7 @@     blobFromImages(const std::vector<Mat>& images_, double scalefactor, Size size, const Scalar& mean_, bool swapRB)   + CV_TRACE_FUNCTION();
@@ -207,6 +209,8 @@   BackendWrapManager::wrap(const Mat& m, int backendId, int targetId)   + CV_TRACE_FUNCTION();
@@ -261,6 +265,8 @@   BackendWrapManager::reset()   + CV_TRACE_FUNCTION();
@@ -321,6 +327,8 @@   LayerData::LayerData(int _id, const String &_name, const String &_type, LayerParams &_params)   + CV_TRACE_FUNCTION();
@@ -349,6 +357,9 @@   LayerData::getLayerInstance()   + CV_TRACE_FUNCTION(); CV_TRACE_ARG_VALUE(type, "type", type.c_str());
@@ -500,6 +511,8 @@   allocateBlobsForLayer(LayerData& ld, const LayerShapes& layerShapes, std::vector<LayerPin>& pinsForInternalBlobs)   + CV_TRACE_FUNCTION();
@@ -578,6 +591,8 @@   reset() (memory/blob manager; "Clear internal state. Calls before an every reallocation.")   + CV_TRACE_FUNCTION();
@@ -639,6 +654,8 @@   Net::Impl::compileHalide()   + CV_TRACE_FUNCTION();
@@ -666,6 +683,8 @@   Net::Impl::clear()   + CV_TRACE_FUNCTION();
@@ -694,6 +713,8 @@   Net::Impl::setUpNet(const std::vector<LayerPin>& blobsToKeep_)   + CV_TRACE_FUNCTION();
@@ -862,6 +883,8 @@   Net::Impl::computeNetOutputLayers()   + CV_TRACE_FUNCTION();
@@ -883,6 +906,8 @@   Net::Impl::initBackend()   + CV_TRACE_FUNCTION();
@@ -953,6 +978,8 @@   Net::Impl::allocateLayer(int lid, const LayersShapesMap& layersShapes)   + CV_TRACE_FUNCTION();
@@ -1026,6 +1053,8 @@  Net::Impl::fuseLayers(const std::vector<LayerPin>& blobsToKeep_)   + CV_TRACE_FUNCTION();
@@ -1094,6 +1123,8 @@  Net::Impl::allocateLayers(const std::vector<LayerPin>& blobsToKeep_)   + CV_TRACE_FUNCTION();
@@ -1131,6 +1162,8 @@  Net::Impl::forwardLayer(LayerData& ld)   + CV_TRACE_FUNCTION();
@@ -1159,6 +1192,8 @@  Net::Impl::forwardToLayer(LayerData& ld, bool clearFlags = true)   + CV_TRACE_FUNCTION();
@@ -1186,6 +1221,8 @@  Net::Impl::forwardAll()   + CV_TRACE_FUNCTION();
@@ -1247,6 +1284,8 @@  Net::Impl::getBlob(const LayerPin& pin)   + CV_TRACE_FUNCTION();
@@ -1285,6 +1324,8 @@  Net::addLayer(const String &name, const String &type, LayerParams &params)   + CV_TRACE_FUNCTION();
@@ -1306,6 +1347,8 @@  Net::addLayerToPrev(const String &name, const String &type, LayerParams &params)   + CV_TRACE_FUNCTION();
@@ -1314,11 +1357,15 @@ Net::connect(int outLayerId, int outNum, int inpLayerId, int inpNum) and Net::connect(String _outPin, String _inPin)   + CV_TRACE_FUNCTION(); in each
@@ -1329,6 +1376,8 @@  Net::forward(const String& outputName)   + CV_TRACE_FUNCTION();
@@ -1342,6 +1391,8 @@  Net::forward(std::vector<Mat>& outputBlobs, const String& outputName)   + CV_TRACE_FUNCTION();
@@ -1359,6 +1410,8 @@  Net::forward(std::vector<Mat>& outputBlobs, const std::vector<String>& outBlobNames)   + CV_TRACE_FUNCTION();
@@ -1381,6 +1434,8 @@  Net::forward(std::vector<std::vector<Mat> >& outputBlobs, const std::vector<String>& outBlobNames)   + CV_TRACE_FUNCTION();
@@ -1407,6 +1462,9 @@  Net::setPreferableBackend(int backendId)   + CV_TRACE_FUNCTION(); CV_TRACE_ARG(backendId);
@@ -1414,6 +1472,9 @@  Net::setPreferableTarget(int targetId)   + CV_TRACE_FUNCTION(); CV_TRACE_ARG(targetId);
@@ -1421,11 +1482,16 @@ Net::setInputsNames(const std::vector<String> &inputBlobNames)   + CV_TRACE_FUNCTION();
                        Net::setInput(const Mat &blob_, const String& name)   + CV_TRACE_FUNCTION(); CV_TRACE_ARG_VALUE(name, "name", name.c_str());
@@ -1595,6 +1661,8 @@  Net::getFLOPS(const std::vector<MatShape>& netInputShapes) const   + CV_TRACE_FUNCTION();
@@ -1670,6 +1738,8 @@  Net::getMemoryConsumption(const int layerId, const std::vector<MatShape>& netInputShapes, size_t& weights, size_t& blobs) const   + CV_TRACE_FUNCTION();
@@ -1692,6 +1762,8 @@  Net::getMemoryConsumption(const std::vector<MatShape>& netInputShapes, size_t& weights, size_t& blobs) const   + CV_TRACE_FUNCTION();
@@ -1723,6 +1795,8 @@  Net::getMemoryConsumption(const std::vector<MatShape>& netInputShapes, std::vector<int>& layerIds, std::vector<size_t>& weights, std::vector<size_t>& blobs) const   + CV_TRACE_FUNCTION();
@@ -1762,6 +1836,9 @@  Net::setHalideScheduler(const String& scheduler)   + CV_TRACE_FUNCTION(); CV_TRACE_ARG_VALUE(scheduler, "scheduler", scheduler.c_str());
@@ -1810,6 +1887,8 @@  Layer::applyHalideScheduler(Ptr<BackendNode>& node, const std::vector<Mat*>&, const std::vector<Mat>& outputs, int targetId) const, inside #ifdef HAVE_HALIDE   + CV_TRACE_FUNCTION();
@@ -1891,6 +1970,8 @@  Layer::finalize(const std::vector<Mat> &inputs, std::vector<Mat> &outputs)   + CV_TRACE_FUNCTION();
@@ -1903,6 +1984,8 @@  Layer::finalize(const std::vector<Mat> &inputs)   + CV_TRACE_FUNCTION();
@@ -1910,6 +1993,8 @@  Layer::forward(const std::vector<Mat> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)   + CV_TRACE_FUNCTION();
@@ -1917,6 +2002,8 @@  Layer::run(const std::vector<Mat> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)   + CV_TRACE_FUNCTION();

@@ -1972,32 +2059,41 @@ static LayerFactory_Impl& getLayerFactoryImpl()
In the three LayerFactory methods (registerLayer, unregisterLayer, createLayerInstance) the parameter `_type` is renamed to `type`, the lower-cased local becomes `type_` (used in find/erase/insert and in the error message), and each method is instrumented. registerLayer after the change; the other two receive the same treatment:

 void LayerFactory::registerLayer(const String &type, Constuctor constructor)
 {
+    CV_TRACE_FUNCTION();
+    CV_TRACE_ARG_VALUE(type, "type", type.c_str());
+
     cv::AutoLock lock(getLayerFactoryMutex());
     String type_ = type.toLowerCase();
     LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type_);
     if (it != getLayerFactoryImpl().end() && it->second != constructor)
     {
         CV_Error(cv::Error::StsBadArg, "Layer \"" + type_ + "\" already was registered");
     }
     getLayerFactoryImpl().insert(std::make_pair(type_, constructor));
 }
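Two different argument-tracing macros appear in the dnn.cpp hunks. A distilled view of how this commit uses them, with trimmed bodies and free functions standing in for the cv::dnn::Net methods (only the signatures and macro calls are taken from the hunks above):

    #include <opencv2/core.hpp>
    #include <opencv2/core/utils/trace.hpp>

    void setPreferableBackend(int backendId)
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG(backendId);          // plain argument: recorded under its own identifier
        // ... (the real method updates impl->preferableBackend)
    }

    void setInput(const cv::Mat &blob_, const cv::String &name)
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG_VALUE(name, "name", name.c_str());   // explicit id, display name, and value
        (void)blob_;
        // ... (the real method resolves the input pin)
    }

The three-argument form is used wherever the interesting value is a cv::String (layer names, layer types, the Halide scheduler path); the commit passes `.c_str()` in every such case.
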
modules/dnn/src/init.cpp

@@ -60,6 +60,8 @@ Mutex* __initialization_mutex_initializer = &getInitializationMutex();
 void initializeLayerFactory()
 {
+    CV_TRACE_FUNCTION();
+
     CV_DNN_REGISTER_LAYER_CLASS(Slice,  SliceLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Split,  SplitLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Concat, ConcatLayer);
modules/dnn/src/layers/batch_norm_layer.cpp

@@ -104,6 +104,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(blobs.size() >= 2);
         CV_Assert(inputs.size() == 1);
modules/dnn/src/layers/blank_layer.cpp

@@ -64,6 +64,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (int i = 0, n = outputs.size(); i < n; ++i)
             if (outputs[i].data != inputs[i]->data)
                 inputs[i]->copyTo(outputs[i]);
modules/dnn/src/layers/concat_layer.cpp

@@ -96,6 +96,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int cAxis = clamp(axis, inputs[0]->dims);
         Mat& outMat = outputs[0];
         std::vector<Range> ranges(outputs[0].dims, Range::all());
modules/dnn/src/layers/convolution_layer.cpp

@@ -624,6 +624,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         /*printf("conv %s: input (%d x %d x %d x %d), kernel (%d x %d), pad (%d x %d), stride (%d x %d), dilation (%d x %d)\n",
                name.c_str(), inputs[0]->size[0], inputs[0]->size[1], inputs[0]->size[2], inputs[0]->size[3],
                kernel.width, kernel.height, pad.width, pad.height,

@@ -1006,6 +1009,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int outCn = blobs[0].size[0];
         int inpCn = inputs[0]->size[1];
         bool is1x1flag = is1x1();
modules/dnn/src/layers/crop_layer.cpp

@@ -135,6 +135,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         Mat &input = *inputs[0];
         Mat &output = outputs[0];
modules/dnn/src/layers/detection_output_layer.cpp

@@ -206,6 +206,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const float* locationData = inputs[0]->ptr<float>();
         const float* confidenceData = inputs[1]->ptr<float>();
         const float* priorData = inputs[2]->ptr<float>();
modules/dnn/src/layers/elementwise_layers.cpp

@@ -156,6 +156,8 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+
         for (size_t i = 0; i < inputs.size(); i++)
         {
             const Mat &src = *inputs[i];
modules/dnn/src/layers/eltwise_layer.cpp

@@ -251,6 +251,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(outputs.size() == 1);
         const int nstripes = getNumThreads();
         EltwiseInvoker::run((const Mat**)&inputs[0], (int)inputs.size(), outputs[0],
modules/dnn/src/layers/flatten_layer.cpp

@@ -106,6 +106,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t i = 0; i < inputs.size(); i++)
         {
             MatShape outShape = shape(outputs[i]);
modules/dnn/src/layers/fully_connected_layer.cpp

@@ -233,6 +233,9 @@ public:
     void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int axisCan = clamp(axis, input[0]->dims);
         int outerSize = input[0]->total(0, axisCan);
modules/dnn/src/layers/lrn_layer.cpp

@@ -86,6 +86,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(inputs.size() == outputs.size());
         for (int i = 0; i < inputs.size(); i++)
         {
modules/dnn/src/layers/max_unpooling_layer.cpp

@@ -57,6 +57,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(inputs.size() == 2);
         Mat& input = *inputs[0];
         Mat& indices = *inputs[1];
modules/dnn/src/layers/mvn_layer.cpp

@@ -62,6 +62,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
    {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t inpIdx = 0; inpIdx < inputs.size(); inpIdx++)
         {
             Mat &inpBlob = *inputs[inpIdx];
modules/dnn/src/layers/normalize_bbox_layer.cpp

@@ -142,6 +142,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         checkInputs(inputs);
         Mat& buffer = internals[0], sumChannelMultiplier = internals[1],
modules/dnn/src/layers/padding_layer.cpp

@@ -61,6 +61,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (int i = 0; i < inputs.size(); i++)
         {
             outputs[i] = paddingValue;
modules/dnn/src/layers/permute_layer.cpp

@@ -245,6 +245,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         size_t k, ninputs = inputs.size();
         if (!_needsPermute)
         {
modules/dnn/src/layers/pooling_layer.cpp

@@ -106,6 +106,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t ii = 0; ii < inputs.size(); ii++)
         {
             switch (type)
modules/dnn/src/layers/prior_box_layer.cpp

@@ -227,6 +227,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int _layerWidth = inputs[0]->size[3];
         int _layerHeight = inputs[0]->size[2];
modules/dnn/src/layers/recurrent_layers.cpp

@@ -220,6 +220,9 @@ public:
     void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const Mat &Wh = blobs[0];
         const Mat &Wx = blobs[1];
         const Mat &bias = blobs[2];

@@ -404,6 +407,9 @@ public:
     void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         Mat xTs = input[0]->reshape(1, numSamplesTotal);
         Mat oTs = output[0].reshape(1, numSamplesTotal);
         Mat hTs = produceH ? output[1].reshape(1, numSamplesTotal) : Mat();
modules/dnn/src/layers/reshape_layer.cpp

@@ -196,6 +196,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t i = 0; i < inputs.size(); i++)
         {
             Mat srcBlob = *inputs[i];
modules/dnn/src/layers/scale_layer.cpp

@@ -45,6 +45,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(blobs.size() == 1 + hasBias);
         for (size_t ii = 0; ii < outputs.size(); ii++)
modules/dnn/src/layers/shift_layer.cpp

@@ -38,6 +38,9 @@ public:
     virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(inputs.size() > 0);
         CV_Assert(blobs.size() > 0);
modules/dnn/src/layers/slice_layer.cpp

@@ -118,6 +118,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const Mat& inpMat = *inputs[0];
         std::vector<Range> ranges(inpMat.dims, Range::all());
         int cAxis = clamp(axis, inpMat.dims);
modules/dnn/src/layers/softmax_layer.cpp

@@ -84,6 +84,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const Mat &src = *inputs[0];
         Mat &dst = outputs[0];
modules/dnn/src/layers/split_layer.cpp

@@ -80,6 +80,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t i = 0; i < outputs.size(); i++)
         {
             CV_Assert(inputs[0]->total() == outputs[i].total());
modules/dnn/src/precomp.hpp

@@ -40,6 +40,7 @@
 //M*/
 #include <opencv2/core.hpp>
+#include <opencv2/core/utils/trace.hpp>
 #include "cvconfig.h"
 #include <opencv2/dnn.hpp>
 #include <opencv2/dnn/all_layers.hpp>
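The new include in precomp.hpp is what makes the CV_TRACE_* macros visible to every translation unit in the module. A self-contained sketch of using the same header outside OpenCV's own sources, under the assumption that this is the 3.3-era Trace API and that collection is switched on at run time via the OPENCV_TRACE environment variable (verify both against your build's documentation):

    #include <opencv2/core.hpp>
    #include <opencv2/core/utils/trace.hpp>

    static void heavyWork()
    {
        CV_TRACE_FUNCTION();                  // appears as one region per call in the trace
        cv::Mat a = cv::Mat::ones(1024, 1024, CV_32F), b;
        for (int i = 0; i < 10; i++)
        {
            CV_TRACE_REGION("multiply");      // named sub-region, same idea as the sample change below
            cv::multiply(a, a, b);
        }
    }

    int main()
    {
        heavyWork();
        return 0;
    }
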
modules/dnn/src/tensorflow/tf_importer.cpp

@@ -13,7 +13,7 @@ Implementation of Tensorflow models parser
 using namespace cv;
 using namespace cv::dnn;
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "graph.pb.h"
 #include <iostream>
modules/dnn/src/tensorflow/tf_io.cpp

@@ -9,7 +9,7 @@
 Implementation of various functions which are related to Tensorflow models reading.
 */
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include <google/protobuf/io/coded_stream.h>
 #include <google/protobuf/io/zero_copy_stream_impl.h>
 #include <google/protobuf/text_format.h>
modules/dnn/src/tensorflow/tf_io.hpp

@@ -11,7 +11,7 @@ Declaration of various functions which are related to Tensorflow models reading.
 #ifndef __OPENCV_DNN_TF_IO_HPP__
 #define __OPENCV_DNN_TF_IO_HPP__
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "graph.pb.h"
modules/dnn/src/torch/torch_importer.cpp

@@ -115,6 +115,8 @@ struct TorchImporter : public ::cv::dnn::Importer
     TorchImporter(String filename, bool isBinary)
     {
+        CV_TRACE_FUNCTION();
+
         rootModule = curModule = NULL;
         moduleCounter = 0;

@@ -966,6 +968,8 @@ struct TorchImporter : public ::cv::dnn::Importer
     void populateNet(Net net_)
     {
+        CV_TRACE_FUNCTION();
+
         if (rootModule == NULL)
         {
             rootModule = new Module("Sequential");

@@ -1014,6 +1018,8 @@ Mat readTorchBlob(const String&, bool)
 Net readNetFromTorch(const String &model, bool isBinary)
 {
+    CV_TRACE_FUNCTION();
+
     Ptr<Importer> importer = createTorchImporter(model, isBinary);
     Net net;
     if (importer)
samples/dnn/caffe_googlenet.cpp

@@ -41,6 +41,7 @@
 #include <opencv2/dnn.hpp>
 #include <opencv2/imgproc.hpp>
 #include <opencv2/highgui.hpp>
+#include <opencv2/core/utils/trace.hpp>
 using namespace cv;
 using namespace cv::dnn;

@@ -84,6 +85,8 @@ static std::vector<String> readClassNames(const char *filename = "synset_words.t
 int main(int argc, char **argv)
 {
+    CV_TRACE_FUNCTION();
+
     String modelTxt = "bvlc_googlenet.prototxt";
     String modelBin = "bvlc_googlenet.caffemodel";
     String imageFile = (argc > 1) ? argv[1] : "space_shuttle.jpg";

@@ -117,13 +120,20 @@ int main(int argc, char **argv)
                                   Scalar(104, 117, 123));   //Convert Mat to batch of images
     //! [Prepare blob]
+    Mat prob;
+    cv::TickMeter t;
+    for (int i = 0; i < 10; i++)
+    {
+        CV_TRACE_REGION("forward");
         //! [Set input blob]
         net.setInput(inputBlob, "data");        //set the network input
         //! [Set input blob]
+        t.start();
         //! [Make forward pass]
-        Mat prob = net.forward("prob");         //compute output
+        prob = net.forward("prob");             //compute output
         //! [Make forward pass]
+        t.stop();
+    }
     //! [Gather output]
     int classId;

@@ -136,6 +146,7 @@ int main(int argc, char **argv)
     std::cout << "Best class: #" << classId << " '" << classNames.at(classId) << "'" << std::endl;
     std::cout << "Probability: " << classProb * 100 << "%" << std::endl;
     //! [Print results]
+    std::cout << "Time: " << (double)t.getTimeMilli() / t.getCounter() << " ms (average from " << t.getCounter() << " iterations)" << std::endl;
     return 0;
 } //main
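To actually see the regions added above when running the sample, tracing has to be enabled at run time. If memory serves, the Trace framework that these macros belong to is controlled through the OPENCV_TRACE environment variable, with the log written to the working directory; treat the exact variable name and output location as an assumption to check against your OpenCV build:

    OPENCV_TRACE=1 ./caffe_googlenet space_shuttle.jpg

The binary name and image path here are placeholders. The resulting trace then contains one "forward" region per loop iteration of the sample, together with the per-function and per-layer regions added throughout the dnn module above.
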