Commit 8d8f3bca authored Mar 04, 2018 by Dmitry Kurtaev

Update links to OpenCV's face detection network
Parent: d5afa894
Showing 5 changed files with 2343 additions and 16 deletions
- modules/dnn/misc/face_detector_accuracy.py (+1, -5)
- samples/dnn/CMakeLists.txt (+19, -10)
- samples/dnn/README.md (+28, -0)
- samples/dnn/face_detector/opencv_face_detector.pbtxt (+2294, -0)
- samples/dnn/js_face_recognition.html (+1, -1)
modules/dnn/misc/face_detector_accuracy.py

@@ -17,7 +17,6 @@ parser = argparse.ArgumentParser(
         'using COCO evaluation tool, http://cocodataset.org/#detections-eval')
 parser.add_argument('--proto', help='Path to .prototxt of Caffe model or .pbtxt of TensorFlow graph')
 parser.add_argument('--model', help='Path to .caffemodel trained in Caffe or .pb from TensorFlow')
-parser.add_argument('--caffe', help='Indicate that tested model is from Caffe. Otherwise model from TensorFlow is expected.', action='store_true')
 parser.add_argument('--cascade', help='Optional path to trained Haar cascade as '
                                       'an additional model for evaluation')
 parser.add_argument('--ann', help='Path to text file with ground truth annotations')

@@ -141,10 +140,7 @@ with open('annotations.json', 'wt') as f:
 ### Obtain detections ##########################################################
 detections = []
 if args.proto and args.model:
-    if args.caffe:
-        net = cv.dnn.readNetFromCaffe(args.proto, args.model)
-    else:
-        net = cv.dnn.readNetFromTensorflow(args.model, args.proto)
+    net = cv.dnn.readNet(args.proto, args.model)

     def detect(img, imageId):
         imgWidth = img.shape[1]
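The change above works because `cv.dnn.readNet` infers the source framework from the file extensions, so the script no longer needs the explicit `--caffe` switch. A minimal sketch of the call, with placeholder file names rather than anything shipped by this commit:

```python
import cv2 as cv

# Example paths only: .prototxt/.caffemodel selects the Caffe importer,
# .pbtxt/.pb selects the TensorFlow importer.
proto = 'opencv_face_detector.pbtxt'
model = 'opencv_face_detector_uint8.pb'

# readNet deduces the framework from the extensions, replacing the
# separate readNetFromCaffe / readNetFromTensorflow branches.
net = cv.dnn.readNet(proto, model)
```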
samples/dnn/CMakeLists.txt

@@ -13,23 +13,32 @@ if(NOT BUILD_EXAMPLES OR NOT OCV_DEPENDENCIES_FOUND)
   return()
 endif()

-# Model branch name: dnn_samples_face_detector_20170830
-set(DNN_FACE_DETECTOR_MODEL_COMMIT "b2bfc75f6aea5b1f834ff0f0b865a7c18ff1459f")
-set(DNN_FACE_DETECTOR_MODEL_HASH "afbb6037fd180e8d2acb3b58ca737b9e")
-set(DNN_FACE_DETECTOR_MODEL_NAME "res10_300x300_ssd_iter_140000.caffemodel")
-set(DNN_FACE_DETECTOR_MODEL_DOWNLOAD_DIR "${CMAKE_CURRENT_LIST_DIR}/face_detector")
-if(COMMAND ocv_download)
-  ocv_download(FILENAME ${DNN_FACE_DETECTOR_MODEL_NAME}
-               HASH ${DNN_FACE_DETECTOR_MODEL_HASH}
-               URL
-                 "$ENV{OPENCV_DNN_MODELS_URL}"
-                 "${OPENCV_DNN_MODELS_URL}"
-                 "https://raw.githubusercontent.com/opencv/opencv_3rdparty/${DNN_FACE_DETECTOR_MODEL_COMMIT}/"
-               DESTINATION_DIR ${DNN_FACE_DETECTOR_MODEL_DOWNLOAD_DIR}
-               ID DNN_FACE_DETECTOR
-               RELATIVE_URL
-               STATUS res)
-endif()
+function(download_net name commit hash)
+  set(DNN_FACE_DETECTOR_MODEL_DOWNLOAD_DIR "${CMAKE_CURRENT_LIST_DIR}/face_detector")
+  if(COMMAND ocv_download)
+    ocv_download(FILENAME ${name}
+                 HASH ${hash}
+                 URL
+                   "$ENV{OPENCV_DNN_MODELS_URL}"
+                   "${OPENCV_DNN_MODELS_URL}"
+                   "https://raw.githubusercontent.com/opencv/opencv_3rdparty/${commit}/"
+                 DESTINATION_DIR ${DNN_FACE_DETECTOR_MODEL_DOWNLOAD_DIR}
+                 ID DNN_FACE_DETECTOR
+                 RELATIVE_URL
+                 STATUS res)
+  endif()
+endfunction()
+
+# Model branch name: dnn_samples_face_detector_20180205_fp16
+download_net("res10_300x300_ssd_iter_140000_fp16.caffemodel"
+             "19512576c112aa2c7b6328cb0e8d589a4a90a26d"
+             "f737f886e33835410c69e3ccfe0720a1")
+# Model branch name: dnn_samples_face_detector_20180220_uint8
+download_net("opencv_face_detector_uint8.pb"
+             "7b425df276ba2161b8edaab0f0756f4a735d61b9"
+             "56acf81f55d9b9e96c3347bc65409b9e")

 project(dnn_samples)

 ocv_include_modules_recurse(${OPENCV_DNN_SAMPLES_REQUIRED_DEPS})
 file(GLOB_RECURSE dnn_samples RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp)
samples/dnn/README.md

@@ -14,6 +14,34 @@
| [Faster-RCNN](https://github.com/rbgirshick/py-faster-rcnn) | `1.0` | `800x600` | `102.9801, 115.9465, 122.7717` | BGR |
| [R-FCN](https://github.com/YuwenXiong/py-R-FCN) | `1.0` | `800x600` | `102.9801, 115.9465, 122.7717` | BGR |

#### Face detection
[An original model](https://github.com/opencv/opencv/tree/master/samples/dnn/face_detector) with single-precision floating-point weights has been quantized using the [TensorFlow framework](https://www.tensorflow.org/).
To achieve the best accuracy, run the model on BGR images resized to `300x300`, applying mean subtraction of `(104, 177, 123)` for the blue, green and red channels respectively.

The following accuracy metrics were obtained with the [COCO object detection evaluation tool](http://cocodataset.org/#detections-eval) on the [FDDB dataset](http://vis-www.cs.umass.edu/fddb/) (see the [script](https://github.com/opencv/opencv/blob/master/modules/dnn/misc/face_detector_accuracy.py)), both when resizing inputs to `300x300` and when keeping the original image sizes.
```
AP - Average Precision | FP32/FP16 | UINT8 | FP32/FP16 | UINT8 |
AR - Average Recall | 300x300 | 300x300 | any size | any size |
--------------------------------------------------|-----------|----------------|-----------|----------------|
AP @[ IoU=0.50:0.95 | area= all | maxDets=100 ] | 0.408 | 0.408 | 0.378 | 0.328 (-0.050) |
AP @[ IoU=0.50 | area= all | maxDets=100 ] | 0.849 | 0.849 | 0.797 | 0.790 (-0.007) |
AP @[ IoU=0.75 | area= all | maxDets=100 ] | 0.251 | 0.251 | 0.208 | 0.140 (-0.068) |
AP @[ IoU=0.50:0.95 | area= small | maxDets=100 ] | 0.050 | 0.051 (+0.001) | 0.107 | 0.070 (-0.037) |
AP @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] | 0.381 | 0.379 (-0.002) | 0.380 | 0.368 (-0.012) |
AP @[ IoU=0.50:0.95 | area= large | maxDets=100 ] | 0.455 | 0.455 | 0.412 | 0.337 (-0.075) |
AR @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] | 0.299 | 0.299 | 0.279 | 0.246 (-0.033) |
AR @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] | 0.482 | 0.482 | 0.476 | 0.436 (-0.040) |
AR @[ IoU=0.50:0.95 | area= all | maxDets=100 ] | 0.496 | 0.496 | 0.491 | 0.451 (-0.040) |
AR @[ IoU=0.50:0.95 | area= small | maxDets=100 ] | 0.189 | 0.193 (+0.004) | 0.284 | 0.232 (-0.052) |
AR @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] | 0.481 | 0.480 (-0.001) | 0.470 | 0.458 (-0.012) |
AR @[ IoU=0.50:0.95 | area= large | maxDets=100 ] | 0.528 | 0.528 | 0.520 | 0.462 (-0.058) |
```
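The preprocessing described above maps directly onto `cv.dnn.blobFromImage`. A minimal sketch of running the detector with those parameters; the model files, the test image and the 0.5 confidence threshold are example choices, not part of this commit:

```python
import cv2 as cv
import numpy as np

# Example model pair; readNet infers the framework from the extensions.
net = cv.dnn.readNet('opencv_face_detector.pbtxt', 'opencv_face_detector_uint8.pb')

img = cv.imread('example.jpg')        # OpenCV loads images as BGR already
h, w = img.shape[:2]

# 300x300 input, mean (104, 177, 123) for B, G, R; no channel swap needed.
blob = cv.dnn.blobFromImage(img, 1.0, (300, 300), (104, 177, 123),
                            swapRB=False, crop=False)
net.setInput(blob)
out = net.forward()                   # shape [1, 1, N, 7]: id, label, score, x1, y1, x2, y2

for det in out[0, 0]:
    score = float(det[2])
    if score > 0.5:                   # illustrative threshold
        x1, y1, x2, y2 = (det[3:7] * np.array([w, h, w, h])).astype(int)
        cv.rectangle(img, (int(x1), int(y1)), (int(x2), int(y2)), (0, 255, 0), 2)
```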
### Classification
| Model | Scale | Size WxH| Mean subtraction | Channels order |
|---------------|-------|-----------|--------------------|-------|
...
...
samples/dnn/face_detector/opencv_face_detector.pbtxt (new file, mode 0 → 100644)
node {
name: "data"
op: "Placeholder"
attr {
key: "dtype"
value {
type: DT_FLOAT
}
}
}
node {
name: "data_bn/FusedBatchNorm"
op: "FusedBatchNorm"
input: "data:0"
input: "data_bn/gamma"
input: "data_bn/beta"
input: "data_bn/mean"
input: "data_bn/std"
attr {
key: "epsilon"
value {
f: 1.00099996416e-05
}
}
}
node {
name: "data_scale/Mul"
op: "Mul"
input: "data_bn/FusedBatchNorm"
input: "data_scale/mul"
}
node {
name: "data_scale/BiasAdd"
op: "BiasAdd"
input: "data_scale/Mul"
input: "data_scale/add"
}
node {
name: "Pad"
op: "Pad"
input: "data_scale/BiasAdd"
input: "Pad/paddings"
}
node {
name: "conv1_h/Conv2D"
op: "Conv2D"
input: "Pad"
input: "conv1_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "VALID"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 2
i: 2
i: 1
}
}
}
}
node {
name: "conv1_h/BiasAdd"
op: "BiasAdd"
input: "conv1_h/Conv2D"
input: "conv1_h/bias"
}
node {
name: "conv1_bn_h/FusedBatchNorm"
op: "FusedBatchNorm"
input: "conv1_h/BiasAdd"
input: "conv1_bn_h/gamma"
input: "conv1_bn_h/beta"
input: "conv1_bn_h/mean"
input: "conv1_bn_h/std"
attr {
key: "epsilon"
value {
f: 1.00099996416e-05
}
}
}
node {
name: "conv1_scale_h/Mul"
op: "Mul"
input: "conv1_bn_h/FusedBatchNorm"
input: "conv1_scale_h/mul"
}
node {
name: "conv1_scale_h/BiasAdd"
op: "BiasAdd"
input: "conv1_scale_h/Mul"
input: "conv1_scale_h/add"
}
node {
name: "Relu"
op: "Relu"
input: "conv1_scale_h/BiasAdd"
}
node {
name: "conv1_pool/MaxPool"
op: "MaxPool"
input: "Relu"
attr {
key: "ksize"
value {
list {
i: 1
i: 3
i: 3
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 2
i: 2
i: 1
}
}
}
}
node {
name: "layer_64_1_conv1_h/Conv2D"
op: "Conv2D"
input: "conv1_pool/MaxPool"
input: "layer_64_1_conv1_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "layer_64_1_bn2_h/FusedBatchNorm"
op: "BiasAdd"
input: "layer_64_1_conv1_h/Conv2D"
input: "layer_64_1_conv1_h/Conv2D_bn_offset"
}
node {
name: "layer_64_1_scale2_h/Mul"
op: "Mul"
input: "layer_64_1_bn2_h/FusedBatchNorm"
input: "layer_64_1_scale2_h/mul"
}
node {
name: "layer_64_1_scale2_h/BiasAdd"
op: "BiasAdd"
input: "layer_64_1_scale2_h/Mul"
input: "layer_64_1_scale2_h/add"
}
node {
name: "Relu_1"
op: "Relu"
input: "layer_64_1_scale2_h/BiasAdd"
}
node {
name: "layer_64_1_conv2_h/Conv2D"
op: "Conv2D"
input: "Relu_1"
input: "layer_64_1_conv2_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "add"
op: "Add"
input: "layer_64_1_conv2_h/Conv2D"
input: "conv1_pool/MaxPool"
}
node {
name: "layer_128_1_bn1_h/FusedBatchNorm"
op: "FusedBatchNorm"
input: "add"
input: "layer_128_1_bn1_h/gamma"
input: "layer_128_1_bn1_h/beta"
input: "layer_128_1_bn1_h/mean"
input: "layer_128_1_bn1_h/std"
attr {
key: "epsilon"
value {
f: 1.00099996416e-05
}
}
}
node {
name: "layer_128_1_scale1_h/Mul"
op: "Mul"
input: "layer_128_1_bn1_h/FusedBatchNorm"
input: "layer_128_1_scale1_h/mul"
}
node {
name: "layer_128_1_scale1_h/BiasAdd"
op: "BiasAdd"
input: "layer_128_1_scale1_h/Mul"
input: "layer_128_1_scale1_h/add"
}
node {
name: "Relu_2"
op: "Relu"
input: "layer_128_1_scale1_h/BiasAdd"
}
node {
name: "layer_128_1_conv_expand_h/Conv2D"
op: "Conv2D"
input: "Relu_2"
input: "layer_128_1_conv_expand_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 2
i: 2
i: 1
}
}
}
}
node {
name: "layer_128_1_conv1_h/Conv2D"
op: "Conv2D"
input: "Relu_2"
input: "layer_128_1_conv1_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 2
i: 2
i: 1
}
}
}
}
node {
name: "layer_128_1_bn2/FusedBatchNorm"
op: "BiasAdd"
input: "layer_128_1_conv1_h/Conv2D"
input: "layer_128_1_conv1_h/Conv2D_bn_offset"
}
node {
name: "layer_128_1_scale2/Mul"
op: "Mul"
input: "layer_128_1_bn2/FusedBatchNorm"
input: "layer_128_1_scale2/mul"
}
node {
name: "layer_128_1_scale2/BiasAdd"
op: "BiasAdd"
input: "layer_128_1_scale2/Mul"
input: "layer_128_1_scale2/add"
}
node {
name: "Relu_3"
op: "Relu"
input: "layer_128_1_scale2/BiasAdd"
}
node {
name: "layer_128_1_conv2/Conv2D"
op: "Conv2D"
input: "Relu_3"
input: "layer_128_1_conv2/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "add_1"
op: "Add"
input: "layer_128_1_conv2/Conv2D"
input: "layer_128_1_conv_expand_h/Conv2D"
}
node {
name: "layer_256_1_bn1/FusedBatchNorm"
op: "FusedBatchNorm"
input: "add_1"
input: "layer_256_1_bn1/gamma"
input: "layer_256_1_bn1/beta"
input: "layer_256_1_bn1/mean"
input: "layer_256_1_bn1/std"
attr {
key: "epsilon"
value {
f: 1.00099996416e-05
}
}
}
node {
name: "layer_256_1_scale1/Mul"
op: "Mul"
input: "layer_256_1_bn1/FusedBatchNorm"
input: "layer_256_1_scale1/mul"
}
node {
name: "layer_256_1_scale1/BiasAdd"
op: "BiasAdd"
input: "layer_256_1_scale1/Mul"
input: "layer_256_1_scale1/add"
}
node {
name: "Relu_4"
op: "Relu"
input: "layer_256_1_scale1/BiasAdd"
}
node {
name: "Pad_1"
op: "Pad"
input: "Relu_4"
input: "Pad_1/paddings"
}
node {
name: "layer_256_1_conv_expand/Conv2D"
op: "Conv2D"
input: "Relu_4"
input: "layer_256_1_conv_expand/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 2
i: 2
i: 1
}
}
}
}
node {
name: "conv4_3_norm/l2_normalize"
op: "L2Normalize"
input: "Relu_4:0"
}
node {
name: "conv4_3_norm/mul_1"
op: "Mul"
input: "conv4_3_norm/l2_normalize"
input: "conv4_3_norm/mul"
}
node {
name: "conv4_3_norm_mbox_loc/Conv2D"
op: "Conv2D"
input: "conv4_3_norm/mul_1"
input: "conv4_3_norm_mbox_loc/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv4_3_norm_mbox_loc/BiasAdd"
op: "BiasAdd"
input: "conv4_3_norm_mbox_loc/Conv2D"
input: "conv4_3_norm_mbox_loc/bias"
}
node {
name: "flatten/Reshape"
op: "Flatten"
input: "conv4_3_norm_mbox_loc/BiasAdd"
}
node {
name: "conv4_3_norm_mbox_conf/Conv2D"
op: "Conv2D"
input: "conv4_3_norm/mul_1"
input: "conv4_3_norm_mbox_conf/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv4_3_norm_mbox_conf/BiasAdd"
op: "BiasAdd"
input: "conv4_3_norm_mbox_conf/Conv2D"
input: "conv4_3_norm_mbox_conf/bias"
}
node {
name: "flatten_6/Reshape"
op: "Flatten"
input: "conv4_3_norm_mbox_conf/BiasAdd"
}
node {
name: "layer_256_1_conv1/Conv2D"
op: "Conv2D"
input: "Pad_1"
input: "layer_256_1_conv1/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "VALID"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 2
i: 2
i: 1
}
}
}
}
node {
name: "layer_256_1_bn2/FusedBatchNorm"
op: "BiasAdd"
input: "layer_256_1_conv1/Conv2D"
input: "layer_256_1_conv1/Conv2D_bn_offset"
}
node {
name: "layer_256_1_scale2/Mul"
op: "Mul"
input: "layer_256_1_bn2/FusedBatchNorm"
input: "layer_256_1_scale2/mul"
}
node {
name: "layer_256_1_scale2/BiasAdd"
op: "BiasAdd"
input: "layer_256_1_scale2/Mul"
input: "layer_256_1_scale2/add"
}
node {
name: "Relu_5"
op: "Relu"
input: "layer_256_1_scale2/BiasAdd"
}
node {
name: "layer_256_1_conv2/Conv2D"
op: "Conv2D"
input: "Relu_5"
input: "layer_256_1_conv2/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "add_2"
op: "Add"
input: "layer_256_1_conv2/Conv2D"
input: "layer_256_1_conv_expand/Conv2D"
}
node {
name: "layer_512_1_bn1/FusedBatchNorm"
op: "FusedBatchNorm"
input: "add_2"
input: "layer_512_1_bn1/gamma"
input: "layer_512_1_bn1/beta"
input: "layer_512_1_bn1/mean"
input: "layer_512_1_bn1/std"
attr {
key: "epsilon"
value {
f: 1.00099996416e-05
}
}
}
node {
name: "layer_512_1_scale1/Mul"
op: "Mul"
input: "layer_512_1_bn1/FusedBatchNorm"
input: "layer_512_1_scale1/mul"
}
node {
name: "layer_512_1_scale1/BiasAdd"
op: "BiasAdd"
input: "layer_512_1_scale1/Mul"
input: "layer_512_1_scale1/add"
}
node {
name: "Relu_6"
op: "Relu"
input: "layer_512_1_scale1/BiasAdd"
}
node {
name: "layer_512_1_conv_expand_h/Conv2D"
op: "Conv2D"
input: "Relu_6"
input: "layer_512_1_conv_expand_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "layer_512_1_conv1_h/Conv2D"
op: "Conv2D"
input: "Relu_6"
input: "layer_512_1_conv1_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "layer_512_1_bn2_h/FusedBatchNorm"
op: "BiasAdd"
input: "layer_512_1_conv1_h/Conv2D"
input: "layer_512_1_conv1_h/Conv2D_bn_offset"
}
node {
name: "layer_512_1_scale2_h/Mul"
op: "Mul"
input: "layer_512_1_bn2_h/FusedBatchNorm"
input: "layer_512_1_scale2_h/mul"
}
node {
name: "layer_512_1_scale2_h/BiasAdd"
op: "BiasAdd"
input: "layer_512_1_scale2_h/Mul"
input: "layer_512_1_scale2_h/add"
}
node {
name: "Relu_7"
op: "Relu"
input: "layer_512_1_scale2_h/BiasAdd"
}
node {
name: "layer_512_1_conv2_h/convolution/SpaceToBatchND"
op: "SpaceToBatchND"
input: "Relu_7"
input: "layer_512_1_conv2_h/convolution/SpaceToBatchND/block_shape"
input: "layer_512_1_conv2_h/convolution/SpaceToBatchND/paddings"
attr {
key: "Tblock_shape"
value {
type: DT_INT32
}
}
}
node {
name: "layer_512_1_conv2_h/convolution"
op: "Conv2D"
input: "layer_512_1_conv2_h/convolution/SpaceToBatchND"
input: "layer_512_1_conv2_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "VALID"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "layer_512_1_conv2_h/convolution/BatchToSpaceND"
op: "BatchToSpaceND"
input: "layer_512_1_conv2_h/convolution"
input: "layer_512_1_conv2_h/convolution/BatchToSpaceND/block_shape"
input: "layer_512_1_conv2_h/convolution/BatchToSpaceND/crops"
attr {
key: "Tblock_shape"
value {
type: DT_INT32
}
}
attr {
key: "Tcrops"
value {
type: DT_INT32
}
}
}
node {
name: "add_3"
op: "Add"
input: "layer_512_1_conv2_h/convolution/BatchToSpaceND"
input: "layer_512_1_conv_expand_h/Conv2D"
}
node {
name: "last_bn_h/FusedBatchNorm"
op: "FusedBatchNorm"
input: "add_3"
input: "last_bn_h/gamma"
input: "last_bn_h/beta"
input: "last_bn_h/mean"
input: "last_bn_h/std"
attr {
key: "epsilon"
value {
f: 1.00099996416e-05
}
}
}
node {
name: "last_scale_h/Mul"
op: "Mul"
input: "last_bn_h/FusedBatchNorm"
input: "last_scale_h/mul"
}
node {
name: "last_scale_h/BiasAdd"
op: "BiasAdd"
input: "last_scale_h/Mul"
input: "last_scale_h/add"
}
node {
name: "last_relu"
op: "Relu"
input: "last_scale_h/BiasAdd"
}
node {
name: "conv6_1_h/Conv2D"
op: "Conv2D"
input: "last_relu"
input: "conv6_1_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv6_1_h/BiasAdd"
op: "BiasAdd"
input: "conv6_1_h/Conv2D"
input: "conv6_1_h/bias"
}
node {
name: "conv6_1_h/Relu"
op: "Relu"
input: "conv6_1_h/BiasAdd"
}
node {
name: "conv6_2_h/Conv2D"
op: "Conv2D"
input: "conv6_1_h/Relu"
input: "conv6_2_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 2
i: 2
i: 1
}
}
}
}
node {
name: "conv6_2_h/BiasAdd"
op: "BiasAdd"
input: "conv6_2_h/Conv2D"
input: "conv6_2_h/bias"
}
node {
name: "conv6_2_h/Relu"
op: "Relu"
input: "conv6_2_h/BiasAdd"
}
node {
name: "conv7_1_h/Conv2D"
op: "Conv2D"
input: "conv6_2_h/Relu"
input: "conv7_1_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv7_1_h/BiasAdd"
op: "BiasAdd"
input: "conv7_1_h/Conv2D"
input: "conv7_1_h/bias"
}
node {
name: "conv7_1_h/Relu"
op: "Relu"
input: "conv7_1_h/BiasAdd"
}
node {
name: "Pad_2"
op: "Pad"
input: "conv7_1_h/Relu"
input: "Pad_2/paddings"
}
node {
name: "conv7_2_h/Conv2D"
op: "Conv2D"
input: "Pad_2"
input: "conv7_2_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "VALID"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 2
i: 2
i: 1
}
}
}
}
node {
name: "conv7_2_h/BiasAdd"
op: "BiasAdd"
input: "conv7_2_h/Conv2D"
input: "conv7_2_h/bias"
}
node {
name: "conv7_2_h/Relu"
op: "Relu"
input: "conv7_2_h/BiasAdd"
}
node {
name: "conv8_1_h/Conv2D"
op: "Conv2D"
input: "conv7_2_h/Relu"
input: "conv8_1_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv8_1_h/BiasAdd"
op: "BiasAdd"
input: "conv8_1_h/Conv2D"
input: "conv8_1_h/bias"
}
node {
name: "conv8_1_h/Relu"
op: "Relu"
input: "conv8_1_h/BiasAdd"
}
node {
name: "conv8_2_h/Conv2D"
op: "Conv2D"
input: "conv8_1_h/Relu"
input: "conv8_2_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv8_2_h/BiasAdd"
op: "BiasAdd"
input: "conv8_2_h/Conv2D"
input: "conv8_2_h/bias"
}
node {
name: "conv8_2_h/Relu"
op: "Relu"
input: "conv8_2_h/BiasAdd"
}
node {
name: "conv9_1_h/Conv2D"
op: "Conv2D"
input: "conv8_2_h/Relu"
input: "conv9_1_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv9_1_h/BiasAdd"
op: "BiasAdd"
input: "conv9_1_h/Conv2D"
input: "conv9_1_h/bias"
}
node {
name: "conv9_1_h/Relu"
op: "Relu"
input: "conv9_1_h/BiasAdd"
}
node {
name: "conv9_2_h/Conv2D"
op: "Conv2D"
input: "conv9_1_h/Relu"
input: "conv9_2_h/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv9_2_h/BiasAdd"
op: "BiasAdd"
input: "conv9_2_h/Conv2D"
input: "conv9_2_h/bias"
}
node {
name: "conv9_2_h/Relu"
op: "Relu"
input: "conv9_2_h/BiasAdd"
}
node {
name: "conv9_2_mbox_loc/Conv2D"
op: "Conv2D"
input: "conv9_2_h/Relu"
input: "conv9_2_mbox_loc/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv9_2_mbox_loc/BiasAdd"
op: "BiasAdd"
input: "conv9_2_mbox_loc/Conv2D"
input: "conv9_2_mbox_loc/bias"
}
node {
name: "flatten_5/Reshape"
op: "Flatten"
input: "conv9_2_mbox_loc/BiasAdd"
}
node {
name: "conv9_2_mbox_conf/Conv2D"
op: "Conv2D"
input: "conv9_2_h/Relu"
input: "conv9_2_mbox_conf/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv9_2_mbox_conf/BiasAdd"
op: "BiasAdd"
input: "conv9_2_mbox_conf/Conv2D"
input: "conv9_2_mbox_conf/bias"
}
node {
name: "flatten_11/Reshape"
op: "Flatten"
input: "conv9_2_mbox_conf/BiasAdd"
}
node {
name: "conv8_2_mbox_loc/Conv2D"
op: "Conv2D"
input: "conv8_2_h/Relu"
input: "conv8_2_mbox_loc/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv8_2_mbox_loc/BiasAdd"
op: "BiasAdd"
input: "conv8_2_mbox_loc/Conv2D"
input: "conv8_2_mbox_loc/bias"
}
node {
name: "flatten_4/Reshape"
op: "Flatten"
input: "conv8_2_mbox_loc/BiasAdd"
}
node {
name: "conv8_2_mbox_conf/Conv2D"
op: "Conv2D"
input: "conv8_2_h/Relu"
input: "conv8_2_mbox_conf/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv8_2_mbox_conf/BiasAdd"
op: "BiasAdd"
input: "conv8_2_mbox_conf/Conv2D"
input: "conv8_2_mbox_conf/bias"
}
node {
name: "flatten_10/Reshape"
op: "Flatten"
input: "conv8_2_mbox_conf/BiasAdd"
}
node {
name: "conv7_2_mbox_loc/Conv2D"
op: "Conv2D"
input: "conv7_2_h/Relu"
input: "conv7_2_mbox_loc/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv7_2_mbox_loc/BiasAdd"
op: "BiasAdd"
input: "conv7_2_mbox_loc/Conv2D"
input: "conv7_2_mbox_loc/bias"
}
node {
name: "flatten_3/Reshape"
op: "Flatten"
input: "conv7_2_mbox_loc/BiasAdd"
}
node {
name: "conv7_2_mbox_conf/Conv2D"
op: "Conv2D"
input: "conv7_2_h/Relu"
input: "conv7_2_mbox_conf/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv7_2_mbox_conf/BiasAdd"
op: "BiasAdd"
input: "conv7_2_mbox_conf/Conv2D"
input: "conv7_2_mbox_conf/bias"
}
node {
name: "flatten_9/Reshape"
op: "Flatten"
input: "conv7_2_mbox_conf/BiasAdd"
}
node {
name: "conv6_2_mbox_loc/Conv2D"
op: "Conv2D"
input: "conv6_2_h/Relu"
input: "conv6_2_mbox_loc/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv6_2_mbox_loc/BiasAdd"
op: "BiasAdd"
input: "conv6_2_mbox_loc/Conv2D"
input: "conv6_2_mbox_loc/bias"
}
node {
name: "flatten_2/Reshape"
op: "Flatten"
input: "conv6_2_mbox_loc/BiasAdd"
}
node {
name: "conv6_2_mbox_conf/Conv2D"
op: "Conv2D"
input: "conv6_2_h/Relu"
input: "conv6_2_mbox_conf/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "conv6_2_mbox_conf/BiasAdd"
op: "BiasAdd"
input: "conv6_2_mbox_conf/Conv2D"
input: "conv6_2_mbox_conf/bias"
}
node {
name: "flatten_8/Reshape"
op: "Flatten"
input: "conv6_2_mbox_conf/BiasAdd"
}
node {
name: "fc7_mbox_loc/Conv2D"
op: "Conv2D"
input: "last_relu"
input: "fc7_mbox_loc/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "fc7_mbox_loc/BiasAdd"
op: "BiasAdd"
input: "fc7_mbox_loc/Conv2D"
input: "fc7_mbox_loc/bias"
}
node {
name: "flatten_1/Reshape"
op: "Flatten"
input: "fc7_mbox_loc/BiasAdd"
}
node {
name: "mbox_loc"
op: "ConcatV2"
input: "flatten/Reshape"
input: "flatten_1/Reshape"
input: "flatten_2/Reshape"
input: "flatten_3/Reshape"
input: "flatten_4/Reshape"
input: "flatten_5/Reshape"
input: "mbox_loc/axis"
}
node {
name: "fc7_mbox_conf/Conv2D"
op: "Conv2D"
input: "last_relu"
input: "fc7_mbox_conf/weights"
attr {
key: "dilations"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
attr {
key: "padding"
value {
s: "SAME"
}
}
attr {
key: "strides"
value {
list {
i: 1
i: 1
i: 1
i: 1
}
}
}
}
node {
name: "fc7_mbox_conf/BiasAdd"
op: "BiasAdd"
input: "fc7_mbox_conf/Conv2D"
input: "fc7_mbox_conf/bias"
}
node {
name: "flatten_7/Reshape"
op: "Flatten"
input: "fc7_mbox_conf/BiasAdd"
}
node {
name: "mbox_conf"
op: "ConcatV2"
input: "flatten_6/Reshape"
input: "flatten_7/Reshape"
input: "flatten_8/Reshape"
input: "flatten_9/Reshape"
input: "flatten_10/Reshape"
input: "flatten_11/Reshape"
input: "mbox_conf/axis"
}
node {
name: "mbox_conf_reshape"
op: "Reshape"
input: "mbox_conf"
input: "reshape_before_softmax"
}
node {
name: "mbox_conf_softmax"
op: "Softmax"
input: "mbox_conf_reshape"
attr {
key: "axis"
value {
i: 2
}
}
}
node {
name: "mbox_conf_flatten"
op: "Flatten"
input: "mbox_conf_softmax"
}
node {
name: "PriorBox_0"
op: "PriorBox"
input: "conv4_3_norm/mul_1"
input: "data"
attr {
key: "aspect_ratio"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 1
}
}
float_val: 2.0
}
}
}
attr {
key: "clip"
value {
b: false
}
}
attr {
key: "flip"
value {
b: true
}
}
attr {
key: "max_size"
value {
i: 60
}
}
attr {
key: "min_size"
value {
i: 30
}
}
attr {
key: "offset"
value {
f: 0.5
}
}
attr {
key: "step"
value {
f: 8.0
}
}
attr {
key: "variance"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 4
}
}
float_val: 0.10000000149
float_val: 0.10000000149
float_val: 0.20000000298
float_val: 0.20000000298
}
}
}
}
node {
name: "PriorBox_1"
op: "PriorBox"
input: "last_relu"
input: "data"
attr {
key: "aspect_ratio"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 2
}
}
float_val: 2.0
float_val: 3.0
}
}
}
attr {
key: "clip"
value {
b: false
}
}
attr {
key: "flip"
value {
b: true
}
}
attr {
key: "max_size"
value {
i: 111
}
}
attr {
key: "min_size"
value {
i: 60
}
}
attr {
key: "offset"
value {
f: 0.5
}
}
attr {
key: "step"
value {
f: 16.0
}
}
attr {
key: "variance"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 4
}
}
float_val: 0.10000000149
float_val: 0.10000000149
float_val: 0.20000000298
float_val: 0.20000000298
}
}
}
}
node {
name: "PriorBox_2"
op: "PriorBox"
input: "conv6_2_h/Relu"
input: "data"
attr {
key: "aspect_ratio"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 2
}
}
float_val: 2.0
float_val: 3.0
}
}
}
attr {
key: "clip"
value {
b: false
}
}
attr {
key: "flip"
value {
b: true
}
}
attr {
key: "max_size"
value {
i: 162
}
}
attr {
key: "min_size"
value {
i: 111
}
}
attr {
key: "offset"
value {
f: 0.5
}
}
attr {
key: "step"
value {
f: 32.0
}
}
attr {
key: "variance"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 4
}
}
float_val: 0.10000000149
float_val: 0.10000000149
float_val: 0.20000000298
float_val: 0.20000000298
}
}
}
}
node {
name: "PriorBox_3"
op: "PriorBox"
input: "conv7_2_h/Relu"
input: "data"
attr {
key: "aspect_ratio"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 2
}
}
float_val: 2.0
float_val: 3.0
}
}
}
attr {
key: "clip"
value {
b: false
}
}
attr {
key: "flip"
value {
b: true
}
}
attr {
key: "max_size"
value {
i: 213
}
}
attr {
key: "min_size"
value {
i: 162
}
}
attr {
key: "offset"
value {
f: 0.5
}
}
attr {
key: "step"
value {
f: 64.0
}
}
attr {
key: "variance"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 4
}
}
float_val: 0.10000000149
float_val: 0.10000000149
float_val: 0.20000000298
float_val: 0.20000000298
}
}
}
}
node {
name: "PriorBox_4"
op: "PriorBox"
input: "conv8_2_h/Relu"
input: "data"
attr {
key: "aspect_ratio"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 1
}
}
float_val: 2.0
}
}
}
attr {
key: "clip"
value {
b: false
}
}
attr {
key: "flip"
value {
b: true
}
}
attr {
key: "max_size"
value {
i: 264
}
}
attr {
key: "min_size"
value {
i: 213
}
}
attr {
key: "offset"
value {
f: 0.5
}
}
attr {
key: "step"
value {
f: 100.0
}
}
attr {
key: "variance"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 4
}
}
float_val: 0.10000000149
float_val: 0.10000000149
float_val: 0.20000000298
float_val: 0.20000000298
}
}
}
}
node {
name: "PriorBox_5"
op: "PriorBox"
input: "conv9_2_h/Relu"
input: "data"
attr {
key: "aspect_ratio"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 1
}
}
float_val: 2.0
}
}
}
attr {
key: "clip"
value {
b: false
}
}
attr {
key: "flip"
value {
b: true
}
}
attr {
key: "max_size"
value {
i: 315
}
}
attr {
key: "min_size"
value {
i: 264
}
}
attr {
key: "offset"
value {
f: 0.5
}
}
attr {
key: "step"
value {
f: 300.0
}
}
attr {
key: "variance"
value {
tensor {
dtype: DT_FLOAT
tensor_shape {
dim {
size: 4
}
}
float_val: 0.10000000149
float_val: 0.10000000149
float_val: 0.20000000298
float_val: 0.20000000298
}
}
}
}
node {
name: "mbox_priorbox"
op: "ConcatV2"
input: "PriorBox_0"
input: "PriorBox_1"
input: "PriorBox_2"
input: "PriorBox_3"
input: "PriorBox_4"
input: "PriorBox_5"
input: "mbox_loc/axis"
}
node {
name: "detection_out"
op: "DetectionOutput"
input: "mbox_loc"
input: "mbox_conf_flatten"
input: "mbox_priorbox"
attr {
key: "background_label_id"
value {
i: 0
}
}
attr {
key: "code_type"
value {
s: "CENTER_SIZE"
}
}
attr {
key: "confidence_threshold"
value {
f: 0.00999999977648
}
}
attr {
key: "keep_top_k"
value {
i: 200
}
}
attr {
key: "nms_threshold"
value {
f: 0.449999988079
}
}
attr {
key: "num_classes"
value {
i: 2
}
}
attr {
key: "share_location"
value {
b: true
}
}
attr {
key: "top_k"
value {
i: 400
}
}
}
node {
name: "reshape_before_softmax"
op: "Const"
attr {
key: "value"
value {
tensor {
dtype: DT_INT32
tensor_shape {
dim {
size: 3
}
}
int_val: 0
int_val: -1
int_val: 2
}
}
}
}
library {
}
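The text graph above is intended to be paired with the frozen `opencv_face_detector_uint8.pb` weights fetched by the CMake script earlier in this commit. A minimal loading sketch, assuming both files sit in the working directory:

```python
import cv2 as cv

# Assumed local paths: the .pb weights come from the download_net() call in
# samples/dnn/CMakeLists.txt, the .pbtxt is the text graph shown above.
net = cv.dnn.readNetFromTensorflow('opencv_face_detector_uint8.pb',
                                   'opencv_face_detector.pbtxt')
print(net.getLayerNames()[:5])  # inspect the first few imported layers
```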
samples/dnn/js_face_recognition.html

@@ -70,7 +70,7 @@ function recognize(face) {
 function loadModels(callback) {
   var utils = new Utils('');
   var proto = 'https://raw.githubusercontent.com/opencv/opencv/master/samples/dnn/face_detector/deploy.prototxt';
-  var weights = 'https://github.com/opencv/opencv_3rdparty/raw/19512576c112aa2c7b6328cb0e8d589a4a90a26d/res10_300x300_ssd_iter_140000_fp16.caffemodel';
+  var weights = 'https://raw.githubusercontent.com/opencv/opencv_3rdparty/dnn_samples_face_detector_20180205_fp16/res10_300x300_ssd_iter_140000_fp16.caffemodel';
   var recognModel = 'https://raw.githubusercontent.com/pyannote/pyannote-data/master/openface.nn4.small2.v1.t7';
   utils.createFileFromUrl('face_detector.prototxt', proto, () => {
     document.getElementById('status').innerHTML = 'Downloading face_detector.caffemodel';
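For reference, the branch-based weights URL introduced here can also be fetched directly; a short sketch, where the local file name is just an example:

```python
import urllib.request

# Direct download of the fp16 weights referenced by the updated sample link.
url = ('https://raw.githubusercontent.com/opencv/opencv_3rdparty/'
       'dnn_samples_face_detector_20180205_fp16/'
       'res10_300x300_ssd_iter_140000_fp16.caffemodel')
urllib.request.urlretrieve(url, 'res10_300x300_ssd_iter_140000_fp16.caffemodel')
```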