Commit ace0701a, authored Jun 29, 2017 by Maksim Shabunin

Merge pull request #9019 from alalek:dnn_trace

Parents: ca962214, ed103833

Showing 36 changed files with 117 additions and 13 deletions (+117 -13)
modules/dnn/src/caffe/caffe_importer.cpp            +5  -1
modules/dnn/src/caffe/caffe_io.cpp                  +1  -1
modules/dnn/src/caffe/caffe_io.hpp                  +1  -1
modules/dnn/src/dnn.cpp                             +0  -0
modules/dnn/src/init.cpp                            +2  -0
modules/dnn/src/layers/batch_norm_layer.cpp         +3  -0
modules/dnn/src/layers/blank_layer.cpp              +3  -0
modules/dnn/src/layers/concat_layer.cpp             +3  -0
modules/dnn/src/layers/convolution_layer.cpp        +6  -0
modules/dnn/src/layers/crop_layer.cpp               +3  -0
modules/dnn/src/layers/detection_output_layer.cpp   +3  -0
modules/dnn/src/layers/elementwise_layers.cpp       +2  -0
modules/dnn/src/layers/eltwise_layer.cpp            +3  -0
modules/dnn/src/layers/flatten_layer.cpp            +3  -0
modules/dnn/src/layers/fully_connected_layer.cpp    +3  -0
modules/dnn/src/layers/lrn_layer.cpp                +3  -0
modules/dnn/src/layers/max_unpooling_layer.cpp      +3  -0
modules/dnn/src/layers/mvn_layer.cpp                +3  -0
modules/dnn/src/layers/normalize_bbox_layer.cpp     +3  -0
modules/dnn/src/layers/padding_layer.cpp            +3  -0
modules/dnn/src/layers/permute_layer.cpp            +3  -0
modules/dnn/src/layers/pooling_layer.cpp            +3  -0
modules/dnn/src/layers/prior_box_layer.cpp          +3  -0
modules/dnn/src/layers/recurrent_layers.cpp         +6  -0
modules/dnn/src/layers/reshape_layer.cpp            +3  -0
modules/dnn/src/layers/scale_layer.cpp              +3  -0
modules/dnn/src/layers/shift_layer.cpp              +3  -0
modules/dnn/src/layers/slice_layer.cpp              +3  -0
modules/dnn/src/layers/softmax_layer.cpp            +3  -0
modules/dnn/src/layers/split_layer.cpp              +3  -0
modules/dnn/src/precomp.hpp                         +1  -0
modules/dnn/src/tensorflow/tf_importer.cpp          +1  -1
modules/dnn/src/tensorflow/tf_io.cpp                +1  -1
modules/dnn/src/tensorflow/tf_io.hpp                +1  -1
modules/dnn/src/torch/torch_importer.cpp            +6  -0
samples/dnn/caffe_googlenet.cpp                     +18 -7
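All of the per-layer edits below apply the same two-line tracing preamble at the top of each layer's forward() implementation; only the surrounding layer code differs from file to file. The following stand-alone sketch shows where the macros from opencv2/core/utils/trace.hpp end up; the SketchLayer struct and its body are illustrative placeholders, not code from this commit (real layers inherit the 'name' field from cv::dnn::Layer).

    #include <vector>
    #include <opencv2/core.hpp>
    #include <opencv2/core/utils/trace.hpp>  // header added to precomp.hpp by this merge

    // Illustrative stand-in for a dnn layer class (not part of the commit).
    struct SketchLayer
    {
        cv::String name;

        void forward(std::vector<cv::Mat*> &inputs, std::vector<cv::Mat> &outputs,
                     std::vector<cv::Mat> & /*internals*/)
        {
            CV_TRACE_FUNCTION();                             // opens a trace region for this call
            CV_TRACE_ARG_VALUE(name, "name", name.c_str());  // tags the region with the layer name

            for (size_t i = 0; i < inputs.size() && i < outputs.size(); i++)
                inputs[i]->copyTo(outputs[i]);               // placeholder body
        }
    };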
modules/dnn/src/caffe/caffe_importer.cpp
View file @
ace0701a
...
...
@@ -43,7 +43,7 @@
using
namespace
cv
;
using
namespace
cv
::
dnn
;
#if HAVE_PROTOBUF
#if
def
HAVE_PROTOBUF
#include "caffe.pb.h"
#include <iostream>
...
...
@@ -82,6 +82,8 @@ public:
CaffeImporter
(
const
char
*
pototxt
,
const
char
*
caffeModel
)
{
CV_TRACE_FUNCTION
();
ReadNetParamsFromTextFileOrDie
(
pototxt
,
&
net
);
if
(
caffeModel
&&
caffeModel
[
0
])
...
...
@@ -264,6 +266,8 @@ public:
void
populateNet
(
Net
dstNet
)
{
CV_TRACE_FUNCTION
();
int
layersSize
=
net
.
layer_size
();
layerCounter
.
clear
();
addedBlobs
.
clear
();
...
...
modules/dnn/src/caffe/caffe_io.cpp

@@ -87,7 +87,7 @@
 //
 //M*/
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include <google/protobuf/io/coded_stream.h>
 #include <google/protobuf/io/zero_copy_stream_impl.h>
 #include <google/protobuf/text_format.h>
modules/dnn/src/caffe/caffe_io.hpp

@@ -89,7 +89,7 @@
 #ifndef __OPENCV_DNN_CAFFE_IO_HPP__
 #define __OPENCV_DNN_CAFFE_IO_HPP__
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "caffe.pb.h"
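The one-character guard change above (repeated in the TensorFlow sources further down) swaps #if for #ifdef. As a general C/C++ preprocessor point: #if HAVE_PROTOBUF needs the macro to expand to an integer constant, silently treats an undefined macro as 0, and fails outright if the macro is defined with no value, whereas #ifdef only tests whether the macro is defined at all. A minimal illustration, not taken from the commit:

    // Hypothetical configuration header: the macro is defined but carries no value.
    #define HAVE_PROTOBUF

    #ifdef HAVE_PROTOBUF   // robust: only asks whether the macro is defined
    // protobuf-dependent code goes here
    #endif
    // "#if HAVE_PROTOBUF" would be ill-formed in this situation, because after
    // macro expansion the directive is left with an empty expression.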
modules/dnn/src/dnn.cpp

(diff collapsed in this view)
modules/dnn/src/init.cpp

@@ -60,6 +60,8 @@ Mutex* __initialization_mutex_initializer = &getInitializationMutex();
 void initializeLayerFactory()
 {
+    CV_TRACE_FUNCTION();
+
     CV_DNN_REGISTER_LAYER_CLASS(Slice, SliceLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Split, SplitLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Concat, ConcatLayer);
modules/dnn/src/layers/batch_norm_layer.cpp

@@ -104,6 +104,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(blobs.size() >= 2);
         CV_Assert(inputs.size() == 1);
modules/dnn/src/layers/blank_layer.cpp

@@ -64,6 +64,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (int i = 0, n = outputs.size(); i < n; ++i)
             if (outputs[i].data != inputs[i]->data)
                 inputs[i]->copyTo(outputs[i]);
modules/dnn/src/layers/concat_layer.cpp

@@ -96,6 +96,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int cAxis = clamp(axis, inputs[0]->dims);
         Mat& outMat = outputs[0];
         std::vector<Range> ranges(outputs[0].dims, Range::all());
modules/dnn/src/layers/convolution_layer.cpp

@@ -627,6 +627,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         /*printf("conv %s: input (%d x %d x %d x %d), kernel (%d x %d), pad (%d x %d), stride (%d x %d), dilation (%d x %d)\n",
                name.c_str(), inputs[0]->size[0], inputs[0]->size[1], inputs[0]->size[2], inputs[0]->size[3],
                kernel.width, kernel.height, pad.width, pad.height,

@@ -1013,6 +1016,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int outCn = blobs[0].size[0];
         int inpCn = inputs[0]->size[1];
         bool is1x1flag = is1x1();
modules/dnn/src/layers/crop_layer.cpp

@@ -135,6 +135,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         Mat &input = *inputs[0];
         Mat &output = outputs[0];
modules/dnn/src/layers/detection_output_layer.cpp

@@ -206,6 +206,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const float* locationData = inputs[0]->ptr<float>();
         const float* confidenceData = inputs[1]->ptr<float>();
         const float* priorData = inputs[2]->ptr<float>();
modules/dnn/src/layers/elementwise_layers.cpp

@@ -156,6 +156,8 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+
         for (size_t i = 0; i < inputs.size(); i++)
         {
             const Mat &src = *inputs[i];
modules/dnn/src/layers/eltwise_layer.cpp

@@ -251,6 +251,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(outputs.size() == 1);
         const int nstripes = getNumThreads();
         EltwiseInvoker::run((const Mat**)&inputs[0], (int)inputs.size(), outputs[0],
modules/dnn/src/layers/flatten_layer.cpp

@@ -106,6 +106,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t i = 0; i < inputs.size(); i++)
         {
             MatShape outShape = shape(outputs[i]);
modules/dnn/src/layers/fully_connected_layer.cpp

@@ -233,6 +233,9 @@ public:
     void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int axisCan = clamp(axis, input[0]->dims);
         int outerSize = input[0]->total(0, axisCan);
modules/dnn/src/layers/lrn_layer.cpp

@@ -86,6 +86,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(inputs.size() == outputs.size());
         for (int i = 0; i < inputs.size(); i++)
         {
modules/dnn/src/layers/max_unpooling_layer.cpp

@@ -57,6 +57,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(inputs.size() == 2);
         Mat& input = *inputs[0];
         Mat& indices = *inputs[1];
modules/dnn/src/layers/mvn_layer.cpp

@@ -62,6 +62,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t inpIdx = 0; inpIdx < inputs.size(); inpIdx++)
         {
             Mat &inpBlob = *inputs[inpIdx];
modules/dnn/src/layers/normalize_bbox_layer.cpp

@@ -142,6 +142,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         checkInputs(inputs);
         Mat& buffer = internals[0], sumChannelMultiplier = internals[1],
modules/dnn/src/layers/padding_layer.cpp

@@ -61,6 +61,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (int i = 0; i < inputs.size(); i++)
         {
             outputs[i] = paddingValue;
modules/dnn/src/layers/permute_layer.cpp

@@ -245,6 +245,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         size_t k, ninputs = inputs.size();
         if (!_needsPermute)
         {
modules/dnn/src/layers/pooling_layer.cpp

@@ -106,6 +106,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t ii = 0; ii < inputs.size(); ii++)
         {
             switch (type)
modules/dnn/src/layers/prior_box_layer.cpp

@@ -228,6 +228,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         int _layerWidth = inputs[0]->size[3];
         int _layerHeight = inputs[0]->size[2];
modules/dnn/src/layers/recurrent_layers.cpp

@@ -221,6 +221,9 @@ public:
     void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const Mat &Wh = blobs[0];
         const Mat &Wx = blobs[1];
         const Mat &bias = blobs[2];

@@ -406,6 +409,9 @@ public:
     void forward(std::vector<Mat*> &input, std::vector<Mat> &output, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         Mat xTs = input[0]->reshape(1, numSamplesTotal);
         Mat oTs = output[0].reshape(1, numSamplesTotal);
         Mat hTs = produceH ? output[1].reshape(1, numSamplesTotal) : Mat();
modules/dnn/src/layers/reshape_layer.cpp

@@ -196,6 +196,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t i = 0; i < inputs.size(); i++)
         {
             Mat srcBlob = *inputs[i];
modules/dnn/src/layers/scale_layer.cpp

@@ -45,6 +45,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(blobs.size() == 1 + hasBias);
         for (size_t ii = 0; ii < outputs.size(); ii++)
modules/dnn/src/layers/shift_layer.cpp

@@ -38,6 +38,9 @@ public:
     virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         CV_Assert(inputs.size() > 0);
         CV_Assert(blobs.size() > 0);
modules/dnn/src/layers/slice_layer.cpp

@@ -118,6 +118,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const Mat& inpMat = *inputs[0];
         std::vector<Range> ranges(inpMat.dims, Range::all());
         int cAxis = clamp(axis, inpMat.dims);
modules/dnn/src/layers/softmax_layer.cpp

@@ -84,6 +84,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         const Mat &src = *inputs[0];
         Mat &dst = outputs[0];
modules/dnn/src/layers/split_layer.cpp

@@ -80,6 +80,9 @@ public:
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
+        CV_TRACE_FUNCTION();
+        CV_TRACE_ARG_VALUE(name, "name", name.c_str());
+
         for (size_t i = 0; i < outputs.size(); i++)
         {
             CV_Assert(inputs[0]->total() == outputs[i].total());
modules/dnn/src/precomp.hpp

@@ -40,6 +40,7 @@
 //M*/
 #include <opencv2/core.hpp>
+#include <opencv2/core/utils/trace.hpp>
 #include "cvconfig.h"
 #include <opencv2/dnn.hpp>
 #include <opencv2/dnn/all_layers.hpp>
modules/dnn/src/tensorflow/tf_importer.cpp

@@ -13,7 +13,7 @@ Implementation of Tensorflow models parser
 using namespace cv;
 using namespace cv::dnn;
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "graph.pb.h"
 #include <iostream>
modules/dnn/src/tensorflow/tf_io.cpp

@@ -9,7 +9,7 @@
 Implementation of various functions which are related to Tensorflow models reading.
 */
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include <google/protobuf/io/coded_stream.h>
 #include <google/protobuf/io/zero_copy_stream_impl.h>
 #include <google/protobuf/text_format.h>
modules/dnn/src/tensorflow/tf_io.hpp

@@ -11,7 +11,7 @@ Declaration of various functions which are related to Tensorflow models reading.
 #ifndef __OPENCV_DNN_TF_IO_HPP__
 #define __OPENCV_DNN_TF_IO_HPP__
-#if HAVE_PROTOBUF
+#ifdef HAVE_PROTOBUF
 #include "graph.pb.h"
modules/dnn/src/torch/torch_importer.cpp

@@ -115,6 +115,8 @@ struct TorchImporter : public ::cv::dnn::Importer
     TorchImporter(String filename, bool isBinary)
     {
+        CV_TRACE_FUNCTION();
+
         rootModule = curModule = NULL;
         moduleCounter = 0;

@@ -966,6 +968,8 @@ struct TorchImporter : public ::cv::dnn::Importer
     void populateNet(Net net_)
     {
+        CV_TRACE_FUNCTION();
+
         if (rootModule == NULL)
         {
             rootModule = new Module("Sequential");

@@ -1014,6 +1018,8 @@ Mat readTorchBlob(const String&, bool)
 Net readNetFromTorch(const String &model, bool isBinary)
 {
+    CV_TRACE_FUNCTION();
+
     Ptr<Importer> importer = createTorchImporter(model, isBinary);
     Net net;
     if (importer)
samples/dnn/caffe_googlenet.cpp

@@ -41,6 +41,7 @@
 #include <opencv2/dnn.hpp>
 #include <opencv2/imgproc.hpp>
 #include <opencv2/highgui.hpp>
+#include <opencv2/core/utils/trace.hpp>
 using namespace cv;
 using namespace cv::dnn;

@@ -84,6 +85,8 @@ static std::vector<String> readClassNames(const char *filename = "synset_words.t
 int main(int argc, char **argv)
 {
+    CV_TRACE_FUNCTION();
+
     String modelTxt = "bvlc_googlenet.prototxt";
     String modelBin = "bvlc_googlenet.caffemodel";
     String imageFile = (argc > 1) ? argv[1] : "space_shuttle.jpg";

@@ -117,13 +120,20 @@ int main(int argc, char **argv)
                                    Scalar(104, 117, 123)); //Convert Mat to batch of images
     //! [Prepare blob]
-    //! [Set input blob]
-    net.setInput(inputBlob, "data");        //set the network input
-    //! [Set input blob]
-
-    //! [Make forward pass]
-    Mat prob = net.forward("prob");         //compute output
-    //! [Make forward pass]
+    Mat prob;
+    cv::TickMeter t;
+    for (int i = 0; i < 10; i++)
+    {
+        CV_TRACE_REGION("forward");
+        //! [Set input blob]
+        net.setInput(inputBlob, "data");        //set the network input
+        //! [Set input blob]
+        t.start();
+        //! [Make forward pass]
+        prob = net.forward("prob");             //compute output
+        //! [Make forward pass]
+        t.stop();
+    }
     //! [Gather output]
     int classId;

@@ -136,6 +146,7 @@ int main(int argc, char **argv)
     std::cout << "Best class: #" << classId << " '" << classNames.at(classId) << "'" << std::endl;
     std::cout << "Probability: " << classProb * 100 << "%" << std::endl;
     //! [Print results]
+    std::cout << "Time: " << (double)t.getTimeMilli() / t.getCounter() << " ms (average from " << t.getCounter() << " iterations)" << std::endl;
     return 0;
 } //main
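For reference, the timing pattern introduced in the sample above (a named trace region plus a cv::TickMeter average over repeated forward passes) can be lifted into a small self-contained form. This is only a sketch: doWork() and kIters stand in for the sample's net.forward("prob") call and its 10 iterations, and are not part of the commit.

    #include <iostream>
    #include <opencv2/core.hpp>
    #include <opencv2/core/utility.hpp>      // cv::TickMeter
    #include <opencv2/core/utils/trace.hpp>  // CV_TRACE_* macros

    static void doWork() { /* stand-in for net.forward("prob") in the sample */ }

    int main()
    {
        CV_TRACE_FUNCTION();               // same top-level region the sample adds to main()

        const int kIters = 10;             // the sample also averages over 10 runs
        cv::TickMeter t;
        for (int i = 0; i < kIters; i++)
        {
            CV_TRACE_REGION("forward");    // named region, as in the sample's loop body
            t.start();
            doWork();
            t.stop();
        }
        std::cout << "Time: " << (double)t.getTimeMilli() / t.getCounter()
                  << " ms (average from " << t.getCounter() << " iterations)" << std::endl;
        return 0;
    }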