Commit 3ac6a20f authored by berak, committed by Alexander Alekhin

Merge pull request #1499 from berak:face_mace

* face: add a mace filter for authentication
parent 52619555
@@ -158,3 +158,13 @@
year={2003},
publisher={ACM}
}
@article{Savvides04,
title={Cancelable Biometric Filters for Face Recognition},
author={Savvides, Marios and Kumar, B. V. K. Vijaya and Khosla, P. K.},
journal={International Conference on Pattern Recognition (ICPR)},
volume={03},
pages={922-925},
year={2004},
publisher={IEEE}
}
@@ -378,5 +378,6 @@ protected:
#include "opencv2/face/facemarkLBF.hpp"
#include "opencv2/face/facemarkAAM.hpp"
#include "opencv2/face/face_alignment.hpp"
#include "opencv2/face/mace.hpp"
#endif // __OPENCV_FACE_HPP__
// This file is part of the OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef __mace_h_onboard__
#define __mace_h_onboard__
#include "opencv2/core.hpp"
namespace cv {
namespace face {
//! @addtogroup face
//! @{
/**
@brief Minimum Average Correlation Energy Filter
useful for authentication with (cancellable) biometric features.
(it needs only a few positive train images (10-50), no negatives at all, and is also robust to noise/salting)
see also: @cite Savvides04
this implementation is largely based on: https://code.google.com/archive/p/pam-face-authentication (GSOC 2009)
use it like:
@code
Ptr<face::MACE> mace = face::MACE::create(64);
vector<Mat> pos_images = ...
mace->train(pos_images);
Mat query = ...
bool same = mace->same(query);
@endcode
you can also use two-factor authentication, with an additional passphrase:
@code
String owners_passphrase = "ilikehotdogs";
Ptr<face::MACE> mace = face::MACE::create(64);
mace->salt(owners_passphrase);
vector<Mat> pos_images = ...
mace->train(pos_images);
// now, users have to give a valid passphrase, along with the image:
Mat query = ...
cout << "enter passphrase: ";
string pass;
getline(cin, pass);
mace->salt(pass);
bool same = mace->same(query);
@endcode
save/load your model:
@code
Ptr<face::MACE> mace = face::MACE::create(64);
mace->train(pos_images);
mace->save("my_mace.xml");
// later:
Ptr<MACE> reloaded = MACE::load("my_mace.xml");
reloaded->same(some_image);
@endcode
*/
class CV_EXPORTS_W MACE : public cv::Algorithm
{
public:
/**
@brief optionally encrypt images with a random convolution
@param passphrase a crc64 of this string is used to seed the random kernel generation
*/
CV_WRAP virtual void salt(const cv::String &passphrase) = 0;
/**
@brief train it on positive features
compute the mace filter: `h = D(-1) * X * (X(+) * D(-1) * X)(-1) * C`, where D is the diagonal matrix holding the average power spectrum of the training images, X holds the training image DFTs as columns, X(+) is the conjugate transpose of X, and C is a column vector of ones (the desired correlation peaks)
also calculate a minimal threshold for this class, the smallest self-similarity from the train images
@param images a vector<Mat> with the train images
*/
CV_WRAP virtual void train(cv::InputArrayOfArrays images) = 0;
/**
@brief correlate the query image against the trained filter and threshold against the minimal class value
@param query a Mat with the query image
*/
CV_WRAP virtual bool same(cv::InputArray query) const = 0;
/**
@brief load a MACE instance from a pre-serialized FileStorage
@param filename path to the serialized model file
@param objname (optional) top-level node in the FileStorage
*/
CV_WRAP static cv::Ptr<MACE> load(const String &filename, const String &objname=String());
/**
@brief constructor
@param IMGSIZE images will get resized to this (should be an even number)
*/
CV_WRAP static cv::Ptr<MACE> create(int IMGSIZE=64);
};
//! @}
}/* namespace face */
}/* namespace cv */
#endif // __mace_h_onboard__
@@ -23,3 +23,6 @@ target_link_libraries(facerec_fisherfaces opencv_face opencv_core opencv_imgproc
add_executable(facerec_lbph facerec_lbph.cpp)
target_link_libraries(facerec_lbph opencv_face opencv_core opencv_imgproc opencv_highgui)
add_executable(mace_webcam mace_webcam.cpp)
target_link_libraries(mace_webcam opencv_face opencv_core opencv_imgproc opencv_highgui opencv_videoio)
// This file is part of the OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "opencv2/videoio.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/objdetect.hpp"
#include "opencv2/face/mace.hpp"
#include <iostream>
using namespace cv;
using namespace cv::face;
using namespace std;
enum STATE {
NEUTRAL,
RECORD,
PREDICT
};
const char *help =
"press 'r' to record images. once N train images have been recorded, the mace filter gets trained\n"
"press 'p' to predict (in twofactor mode, it will switch back to neutral after each prediction attempt)\n"
"press 's' to save a trained model\n"
"press 'esc' to quit\n"
"any other key will reset to the neutral state\n";
int main(int argc, char **argv) {
CommandLineParser parser(argc, argv,
"{ help h usage ? || show this help message }"
"{ cascade c || (required) path to a cascade file for face detection }"
"{ pre p || load a pretrained mace filter file, saved from previous session (e.g. my.xml.gz) }"
"{ num n |50| num train images }"
"{ size s |64| image size }"
"{ twofactor t || pass phrase(text) for 2 factor authentification.\n"
" (random convolute images seeded with the crc of this)\n"
" users will get prompted to guess the secrect, additional to the image. }"
);
String cascade = parser.get<String>("cascade");
if (parser.has("help") || cascade.empty()) {
parser.printMessage();
return 1;
} else {
cout << help << endl;
}
String defname = "mace.xml.gz";
String pre = parser.get<String>("pre");
String two = parser.get<String>("twofactor");
int N = parser.get<int>("num");
int Z = parser.get<int>("size");
int state = NEUTRAL;
Ptr<MACE> mace;
if (! pre.empty()) { // load pretrained model, if available
mace = MACE::load(pre);
if (mace->empty()) {
cerr << "loading the MACE failed !" << endl;
return -1;
}
state = PREDICT;
} else {
mace = MACE::create(Z);
if (! two.empty()) {
cout << "'" << two << "' initial passphrase" << endl;
mace->salt(two);
}
}
CascadeClassifier head(cascade);
if (head.empty()) {
cerr << "loading the cascade failed !" << endl;
return -2;
}
VideoCapture cap(0);
if (! cap.isOpened()) {
cerr << "VideoCapture could not be opened !" << endl;
return -3;
}
vector<Mat> train_img;
while(1) {
Mat frame;
cap >> frame;
vector<Rect> rects;
head.detectMultiScale(frame,rects);
if (rects.size()>0) {
Scalar col = Scalar(0,120,0);
if (state == RECORD) {
if (train_img.size() >= size_t(N)) {
mace->train(train_img);
train_img.clear();
state = PREDICT;
} else {
train_img.push_back(frame(rects[0]).clone());
}
col = Scalar(200,0,0);
}
if (state == PREDICT) {
if (! two.empty()) { // prompt for secret on console
cout << "enter passphrase: ";
string pass;
getline(cin, pass);
mace->salt(pass);
state = NEUTRAL;
cout << "'" << pass << "' : ";
}
bool same = mace->same(frame(rects[0]));
if (same) col = Scalar(0,220,220);
else col = Scalar(60,60,60);
if (! two.empty()) {
cout << (same ? "accepted." : "denied.") << endl;
}
}
rectangle(frame, rects[0], col, 2);
}
imshow("MACE",frame);
int k = waitKey(10);
switch (k) {
case -1 : break;
case 27 : return 0;
default : state = NEUTRAL; break;
case 'r': state = RECORD; break;
case 'p': state = PREDICT; break;
case 's': mace->save(defname); break;
}
}
return 0;
}
#include "precomp.hpp"
#include "opencv2/face/mace.hpp"
namespace cv {
namespace face {
//
//! Rearrange the quadrants of Fourier image
//! so that the origin is at the image center
//
static void shiftDFT(const Mat &src, Mat &dst)
{
Size size = src.size();
if (dst.empty() || (dst.size().width != size.width || dst.size().height != size.height))
{
dst.create(src.size(), src.type());
}
int cx = size.width/2;
int cy = size.height/2; // image center
Mat q1 = src(Rect(0, 0, cx,cy));
Mat q2 = src(Rect(cx,0, cx,cy));
Mat q3 = src(Rect(cx,cy,cx,cy));
Mat q4 = src(Rect(0, cy,cx,cy));
Mat d1 = dst(Rect(0, 0, cx,cy));
Mat d2 = dst(Rect(cx,0, cx,cy));
Mat d3 = dst(Rect(cx,cy,cx,cy));
Mat d4 = dst(Rect(0, cy,cx,cy));
if (src.data != dst.data){
q3.copyTo(d1);
q4.copyTo(d2);
q1.copyTo(d3);
q2.copyTo(d4);
} else {
Mat tmp;
q3.copyTo(tmp);
q1.copyTo(d3);
tmp.copyTo(d1);
q4.copyTo(tmp);
q2.copyTo(d4);
tmp.copyTo(d2);
}
}
// Computes 64-bit "cyclic redundancy check" sum, as specified in ECMA-182
static uint64 crc64( const uchar* data, size_t size, uint64 crc0=0 )
{
static uint64 table[256];
static bool initialized = false;
if( !initialized )
{
for( int i = 0; i < 256; i++ )
{
uint64 c = i;
for( int j = 0; j < 8; j++ )
c = ((c & 1) ? CV_BIG_UINT(0xc96c5795d7870f42) : 0) ^ (c >> 1);
table[i] = c;
}
initialized = true;
}
uint64 crc = ~crc0;
for( size_t idx = 0; idx < size; idx++ )
crc = table[(uchar)crc ^ data[idx]] ^ (crc >> 8);
return ~crc;
}
struct MACEImpl : MACE {
Mat_<Vec2d> maceFilter; // filled from compute()
Mat convFilter; // optional random convolution (cancellable)
int IMGSIZE; // images will get resized to this
double threshold; // minimal "sameness" threshold from the train images
MACEImpl(int siz) : IMGSIZE(siz), threshold(DBL_MAX) {}
void salt(const String &passphrase) {
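// derive a deterministic seed from the passphrase via crc64, so the same phrase
// always reproduces the same random convolution kernel (and a different phrase
// yields a different, incompatible one)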
theRNG().state = ((int64)crc64((uchar*)passphrase.c_str(), passphrase.size()));
convFilter.create(IMGSIZE, IMGSIZE, CV_64F);
randn(convFilter, 0, 1.0/(IMGSIZE*IMGSIZE));
}
Mat dftImage(Mat img) const {
Mat gray;
resize(img, gray, Size(IMGSIZE,IMGSIZE)) ;
if (gray.channels() > 1)
cvtColor(gray, gray, COLOR_BGR2GRAY);
equalizeHist(gray, gray);
gray.convertTo(gray, CV_64F);
if (! convFilter.empty()) { // optional, but unfortunately, it has to happen after resize/equalize ops.
filter2D(gray, gray, CV_64F, convFilter);
}
Mat input[2] = {gray, Mat(gray.size(), gray.type(), 0.0)};
Mat complexInput;
merge(input, 2, complexInput);
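// the image occupies only the top-left quadrant of a 2*IMGSIZE buffer;
// the zero padding makes the DFT-based correlation behave like a linear
// (non-circular) correlation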
Mat_<Vec2d> dftImg(IMGSIZE*2, IMGSIZE*2, 0.0);
complexInput.copyTo(dftImg(Rect(0,0,IMGSIZE,IMGSIZE)));
dft(dftImg, dftImg);
return dftImg;
}
// compute the mace filter: `h = D(-1) * X * (X(+) * D(-1) * X)(-1) * C`
void compute(std::vector<Mat> images) {
return compute(images, false);
}
void compute(std::vector<Mat> images, bool isdft) {
int size = (int)images.size();
int IMGSIZE_2X = IMGSIZE * 2;
int TOTALPIXEL = IMGSIZE_2X * IMGSIZE_2X;
Mat_<Vec2d> D(TOTALPIXEL, 1, 0.0);
Mat_<Vec2d> S(TOTALPIXEL, size, 0.0);
Mat_<Vec2d> SPLUS(size, TOTALPIXEL, 0.0);
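// S gets one training image DFT per column (flattened), SPLUS its conjugate
// transpose, and D accumulates the power spectrum summed over all images
// (the diagonal energy matrix in the MACE formula)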
for (int i=0; i<size; i++) {
Mat_<Vec2d> dftImg = isdft ? images[i] : dftImage(images[i]);
for (int l=0; l<IMGSIZE_2X; l++) {
for (int m=0; m<IMGSIZE_2X; m++) {
int j = l * IMGSIZE_2X + m;
Vec2d s = dftImg(l, m);
S(j, i) = s;
SPLUS(i, j) = Vec2d(s[0], -s[1]);
D(j, 0)[0] += (s[0]*s[0]) + (s[1]*s[1]);
}
}
}
Mat sq; cv::sqrt(D, sq);
Mat_<Vec2d> DINV = TOTALPIXEL * size / sq;
Mat_<Vec2d> DINV_S(TOTALPIXEL, size, 0.0);
Mat_<Vec2d> SPLUS_DINV(size, TOTALPIXEL, 0.0);
for (int l=0; l<size; l++) {
for (int m=0; m<TOTALPIXEL; m++) {
SPLUS_DINV(l, m)[0] = SPLUS(l,m)[0] * DINV(m,0)[0];
SPLUS_DINV(l, m)[1] = SPLUS(l,m)[1] * DINV(m,0)[1];
DINV_S(m, l)[0] = S(m,l)[0] * DINV(m,0)[0];
DINV_S(m, l)[1] = S(m,l)[1] * DINV(m,0)[1];
}
}
Mat_<Vec2d> SPLUS_DINV_S = SPLUS_DINV * S;
Mat_<Vec2d> SPLUS_DINV_S_INV(size, size);
Mat_<double> SPLUS_DINV_S_INV_1(2*size, 2*size);
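// invert the complex size x size matrix via its real 2*size x 2*size
// block representation [Re  Im; -Im  Re], so cv::invert can be used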
for (int l=0; l<size; l++) {
for (int m=0; m<size; m++) {
Vec2d s = SPLUS_DINV_S(l, m);
SPLUS_DINV_S_INV_1(l, m) = s[0];
SPLUS_DINV_S_INV_1(l+size, m+size) = s[0];
SPLUS_DINV_S_INV_1(l, m+size) = s[1];
SPLUS_DINV_S_INV_1(l+size, m) = -s[1];
}
}
invert(SPLUS_DINV_S_INV_1, SPLUS_DINV_S_INV_1);
for (int l=0; l<size; l++) {
for (int m=0; m<size; m++) {
SPLUS_DINV_S_INV(l, m) = Vec2d(SPLUS_DINV_S_INV_1(l,m), SPLUS_DINV_S_INV_1(l,m+size));
}
}
Mat_<Vec2d> Hmace = DINV_S * SPLUS_DINV_S_INV;
Mat_<Vec2d> C(size,1, Vec2d(1,0));
maceFilter = Mat(Hmace * C).reshape(2,IMGSIZE_2X);
}
// get the lowest (worst) positive train correlation,
// our lower bound threshold for the "same()" test later
double computeThreshold(const std::vector<Mat> &images, bool isdft) const {
double best=DBL_MAX;
for (size_t i=0; i<images.size(); i++) {
double d = correlate(images[i], isdft);
if (d < best) {
best = d;
}
}
return best;
}
// convolve the mace filter and the dft image,
// calculate the peak to sidelobe ratio
// on the real part of the inverse dft
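// roughly: PSR = (center value - mean(sidelobe)) / stddev(sidelobe), where the
// sidelobe is an annular region around the correlation peak; the result is
// additionally scaled by the peak correlation plane energy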
double correlate(const Mat &img) const {
return correlate(img, false);
}
double correlate(const Mat &img, bool isdft) const {
if (maceFilter.empty()) return -1; // not trained.
int IMGSIZE_2X = IMGSIZE * 2;
Mat dftImg = isdft ? img : dftImage(img);
mulSpectrums(dftImg, maceFilter, dftImg, DFT_ROWS, true);
dft(dftImg, dftImg, DFT_INVERSE|DFT_SCALE, 0);
Mat chn[2];
split(dftImg, chn);
Mat_<double> re;
shiftDFT(chn[0], re);
double m1,M1;
minMaxLoc(re, &m1, &M1, 0, 0);
double peakCorrPlaneEnergy = M1 / sqrt(sum(re)[0]);
re -= m1;
double value=0;
double num=0;
int rad1=int(floor((double)(45.0/64.0)*(double)IMGSIZE));
int rad2=int(floor((double)(27.0/64.0)*(double)IMGSIZE));
// cache a few pow's and sqrts
std::vector<double> r2(IMGSIZE_2X);
Mat_<double> radtab(IMGSIZE_2X,IMGSIZE_2X);
for (int l=0; l<IMGSIZE_2X; l++) {
r2[l] = (l-IMGSIZE) * (l-IMGSIZE);
}
for (int l=0; l<IMGSIZE_2X; l++) {
for (int m=l+1; m<IMGSIZE_2X; m++) {
double rad = sqrt(r2[m] + r2[l]);
radtab(l,m) = radtab(m,l) = rad;
}
}
// mean of the sidelobe area:
for (int l=0; l<IMGSIZE_2X; l++) {
for (int m=0; m<IMGSIZE_2X; m++) {
double rad = radtab(l,m);
if (rad < rad1) {
if (rad > rad2) {
value += re(l,m);
num++;
}
}
}
}
value /= num;
// normalize it
double std2=0;
for (int l=0; l<IMGSIZE_2X; l++) {
for (int m=0; m<IMGSIZE_2X; m++) {
double rad = radtab(l,m);
if (rad < rad1) {
if (rad > rad2) {
double d = (value - re(l,m));
std2 += d * d;
}
}
}
}
std2 /= num;
std2 = sqrt(std2);
double sca = re(IMGSIZE, IMGSIZE);
double peakToSideLobeRatio = (sca - value) / std2;
return 100.0 * peakToSideLobeRatio * peakCorrPlaneEnergy;
}
// MACE interface
void train(InputArrayOfArrays input) {
std::vector<Mat> images, dftImg;
input.getMatVector(images);
for (size_t i=0; i<images.size(); i++) { // cache dft images
dftImg.push_back(dftImage(images[i]));
}
compute(dftImg, true);
threshold = computeThreshold(dftImg, true);
}
bool same(InputArray img) const {
return correlate(img.getMat()) >= threshold;
}
// cv::Algorithm:
bool empty() const {
return maceFilter.empty() || IMGSIZE == 0;
}
String getDefaultName () const {
return String("MACE");
}
void clear() {
maceFilter.release();
convFilter.release();
}
void write(cv::FileStorage &fs) const {
fs << "mace" << maceFilter;
fs << "conv" << convFilter;
fs << "threshold" << threshold;
}
void read(const cv::FileNode &fn) {
fn["mace"] >> maceFilter;
fn["conv"] >> convFilter;
fn["threshold"] >> threshold;
IMGSIZE = maceFilter.cols/2;
}
};
cv::Ptr<MACE> MACE::create(int siz) {
return makePtr<MACEImpl>(siz);
}
cv::Ptr<MACE> MACE::load(const String &filename, const String &objname) {
return Algorithm::load<MACE>(filename, objname);
}
} /* namespace face */
} /* namespace cv */
// This file is part of the OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "test_precomp.hpp"
#include <fstream>
namespace opencv_test { namespace {
//
// train on one person, and test against the other
//
#define TESTSET_NAMES testing::Values("david","dudek")
const string TRACKING_DIR = "tracking";
const string FOLDER_IMG = "data";
class MaceTest
{
public:
MaceTest(string _video, bool salt);
void run();
protected:
vector<Rect> boxes(const string &fn);
vector<Mat> samples(const string &name, int N,int off=0);
int found(const string &vid);
Ptr<MACE> mace;
string video; // train
string vidA; // test
int nSampsTest;
int nSampsTrain;
int nStep;
bool salt;
};
MaceTest::MaceTest(string _video, bool use_salt)
{
int Z = 64; // window size
mace = MACE::create(Z);
video = _video;
if (video=="david") { vidA="dudek"; }
if (video=="dudek") { vidA="david"; }
nStep = 2;
nSampsTest = 5;
nSampsTrain = 35;
salt = use_salt;
}
vector<Rect> MaceTest::boxes(const string &fn)
{
std::ifstream in(fn.c_str());
int x,y,w,h;
char sep;
vector<Rect> _boxes;
while (in.good() && (in >> x >> sep >> y >> sep >> w >> sep >> h))
{
_boxes.push_back( Rect(x,y,w,h) );
}
return _boxes;
}
void MaceTest::run()
{
vector<Mat> sam_train = samples(video, nSampsTrain, 0);
if (salt) mace->salt(video); // "owner's" salt with "two factor"
mace->train(sam_train);
int self_ok = found(video);
if (salt) mace->salt(vidA); // "other's" salt
int false_A = found(vidA);
ASSERT_GE(self_ok, nSampsTest/2); // it may miss positives
ASSERT_EQ(false_A, 0); // but *absolutely* no false positives allowed.
}
int MaceTest::found(const string &vid)
{
vector<Mat> sam_test = samples(vid, nSampsTest, (1+nStep*nSampsTrain));
int hits = 0;
for (size_t i=0; i<sam_test.size(); i++)
{
hits += mace->same(sam_test[i]);
}
return hits;
}
vector<Mat> MaceTest::samples(const string &name, int N, int off)
{
string folder = cvtest::TS::ptr()->get_data_path() + TRACKING_DIR + "/" + name;
string vid = folder + "/" + FOLDER_IMG + "/" + name + ".webm";
string anno = folder + "/gt.txt";
vector<Rect> bb = boxes(anno);
int startFrame = (name=="david") ? 300 : 0;
VideoCapture c;
EXPECT_TRUE(c.open(vid));
vector<Mat> samps;
while (samps.size() < size_t(N))
{
int frameNo = startFrame + off;
c.set(CAP_PROP_POS_FRAMES, frameNo);
Mat frame;
c >> frame;
Rect r = bb[off];
off += nStep;
samps.push_back(frame(r));
}
c.release();
return samps;
}
//[TESTDATA]
PARAM_TEST_CASE(MACE_, string)
{
string dataset;
virtual void SetUp()
{
dataset = GET_PARAM(0);
}
};
TEST_P(MACE_, unsalted)
{
MaceTest test(dataset, false); test.run();
}
TEST_P(MACE_, salted)
{
MaceTest test(dataset, true); test.run();
}
INSTANTIATE_TEST_CASE_P(Face, MACE_, TESTSET_NAMES);
}} // namespace