Commit bd901eb5 authored by Andrey Kamaev

Reverted r8721 and r8725 (issue #2080)

parent 74707ec7
@@ -7,73 +7,22 @@
 #include <vector>
 
-namespace cv
-{
 class DetectionBasedTracker
 {
     public:
         struct Parameters
         {
+            int minObjectSize;
+            int maxObjectSize;
+            double scaleFactor;
             int maxTrackLifetime;
+            int minNeighbors;
             int minDetectionPeriod; //the minimal time between run of the big object detector (on the whole frame) in ms (1000 mean 1 sec), default=0
 
             Parameters();
         };
 
-        class IDetector
-        {
-            public:
-                IDetector():
-                    minObjSize(96, 96),
-                    maxObjSize(INT_MAX, INT_MAX),
-                    minNeighbours(2),
-                    scaleFactor(1.1f)
-                {}
-                virtual void detect(const cv::Mat& Image, std::vector<cv::Rect>& objects) = 0;
-                void setMinObjectSize(const cv::Size& min)
-                {
-                    minObjSize = min;
-                }
-                void setMaxObjectSize(const cv::Size& max)
-                {
-                    maxObjSize = max;
-                }
-                cv::Size getMinObjectSize() const
-                {
-                    return minObjSize;
-                }
-                cv::Size getMaxObjectSize() const
-                {
-                    return maxObjSize;
-                }
-                float getScaleFactor()
-                {
-                    return scaleFactor;
-                }
-                void setScaleFactor(float value)
-                {
-                    scaleFactor = value;
-                }
-                int getMinNeighbours()
-                {
-                    return minNeighbours;
-                }
-                void setMinNeighbours(int value)
-                {
-                    minNeighbours = value;
-                }
-                virtual ~IDetector() {}
-            protected:
-                cv::Size minObjSize;
-                cv::Size maxObjSize;
-                int minNeighbours;
-                float scaleFactor;
-        };
-
-        DetectionBasedTracker(cv::Ptr<IDetector> MainDetector, cv::Ptr<IDetector> TrackingDetector, const Parameters& params);
+        DetectionBasedTracker(const std::string& cascadeFilename, const Parameters& params);
         virtual ~DetectionBasedTracker();
 
         virtual bool run();
@@ -95,6 +44,7 @@ class DetectionBasedTracker
         cv::Ptr<SeparateDetectionWork> separateDetectionWork;
         friend void* workcycleObjectDetectorFunction(void* p);
 
         struct InnerParameters
         {
             int numLastPositionsToTrack;
@@ -140,11 +90,13 @@ class DetectionBasedTracker
         std::vector<float> weightsPositionsSmoothing;
         std::vector<float> weightsSizesSmoothing;
 
-        cv::Ptr<IDetector> cascadeForTracking;
+        cv::CascadeClassifier cascadeForTracking;
 
         void updateTrackedObjects(const std::vector<cv::Rect>& detectedObjects);
         cv::Rect calcTrackedObjectPositionToShow(int i) const;
         void detectInRegion(const cv::Mat& img, const cv::Rect& r, std::vector<cv::Rect>& detectedObjectsInRegions);
 };
-} //end of cv namespace
 
 #endif
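For reference, the hunk above swaps the public constructor back from the removed cv::Ptr<IDetector> pair to the restored cascade-file form. A minimal usage sketch of the restored interface, assuming an OpenCV 2.4-era build with the contrib module; the cascade path and parameter values below are illustrative, not part of the commit:

```cpp
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/contrib/detection_based_tracker.hpp>

#include <vector>

int main()
{
    // Restored API: the tracker loads the cascade itself from a file path.
    DetectionBasedTracker::Parameters params;
    params.maxTrackLifetime = 5; // illustrative; how long a lost object keeps being tracked

    // Cascade path is illustrative; point it at an existing LBP/Haar face cascade.
    DetectionBasedTracker tracker("lbpcascade_frontalface.xml", params);

    if (!tracker.run()) // starts the background detection thread
        return 1;

    cv::VideoCapture capture(0);
    cv::Mat frame, gray;
    std::vector<cv::Rect> faces;

    while (capture.read(frame))
    {
        cv::cvtColor(frame, gray, CV_BGR2GRAY);
        tracker.process(gray);     // feed the current frame to the tracker
        tracker.getObjects(faces); // fetch the smoothed face rectangles

        for (size_t i = 0; i < faces.size(); i++)
            cv::rectangle(frame, faces[i], CV_RGB(0, 255, 0));

        cv::imshow("faces", frame);
        if (cv::waitKey(30) >= 0)
            break;
    }

    tracker.stop();
    return 0;
}
```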
@@ -91,7 +91,7 @@ public class FdActivity extends Activity {
                     ad.setMessage("Fatal error: can't open camera!");
                     ad.setButton("OK", new DialogInterface.OnClickListener() {
                         public void onClick(DialogInterface dialog, int which) {
                             dialog.dismiss();
                             finish();
                         }
                     });
...
@@ -3,7 +3,7 @@
 #
 # ----------------------------------------------------------------------------
-SET(OPENCV_CPP_SAMPLES_REQUIRED_DEPS opencv_core_vision_api opencv_core opencv_flann opencv_imgproc
+SET(OPENCV_CPP_SAMPLES_REQUIRED_DEPS opencv_core opencv_flann opencv_imgproc
     opencv_highgui opencv_ml opencv_video opencv_objdetect opencv_photo opencv_nonfree
     opencv_features2d opencv_calib3d opencv_legacy opencv_contrib opencv_stitching opencv_videostab)
...
#include <opencv2/core/core.hpp> // Basic OpenCV structures (cv::Mat, Scalar)
#include <opencv2/highgui/highgui.hpp> // OpenCV window I/O
#include <opencv2/core_vision_api/tracker.hpp>
#include <stdio.h>
#include <string>
#include <vector>
using namespace std;
using namespace cv;
const string WindowName = "Face Detection example";
const Scalar RectColor = CV_RGB(0,255,0);
int main()
{
namedWindow(WindowName);
cv::moveWindow(WindowName, 100, 100);
Mat Viewport;
Mat ReferenceFrame = imread("board.jpg");
if (ReferenceFrame.empty())
{
printf("Error: Cannot load input image\n");
return 1;
}
cv::Ptr<nv::Tracker> tracker = nv::Algorithm::create<nv::Tracker>("nv::Tracker::OpticalFlow");
tracker->initialize();
// First frame for initialization
tracker->feed(ReferenceFrame);
nv::Tracker::TrackedObjectHandler obj = tracker->addObject(cv::Rect(100,100, 200, 200));
while(true)
{
tracker->feed(ReferenceFrame);
if (obj->getStatus() == nv::Tracker::LOST_STATUS)
break;
cv::Rect currentLocation = obj->getLocation();
ReferenceFrame.copyTo(Viewport);
rectangle(Viewport, currentLocation, RectColor);
imshow(WindowName, Viewport);
if (cvWaitKey(30) >= 0) break;
}
return 0;
}
#if 0 //defined(__linux__) || defined(LINUX) || defined(__APPLE__) || defined(ANDROID)
#include <opencv2/imgproc/imgproc.hpp> // Gaussian Blur
#include <opencv2/core/core.hpp> // Basic OpenCV structures (cv::Mat, Scalar)
#include <opencv2/highgui/highgui.hpp> // OpenCV window I/O
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/contrib/detection_based_tracker.hpp>
#include <stdio.h>
#include <string>
#include <vector>
using namespace std;
using namespace cv;
const string WindowName = "Face Detection example";
class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
{
public:
CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
IDetector(),
Detector(detector)
{
CV_Assert(!detector.empty());
}
void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
{
Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
}
virtual ~CascadeDetectorAdapter()
{}
private:
CascadeDetectorAdapter();
cv::Ptr<cv::CascadeClassifier> Detector;
};
int main(int argc, char* argv[])
{
namedWindow(WindowName);
VideoCapture VideoStream(0);
if (!VideoStream.isOpened())
{
printf("Error: Cannot open video stream from camera\n");
return 1;
}
std::string cascadeFrontalfilename = "../../data/lbpcascades/lbpcascade_frontalface.xml";
cv::Ptr<cv::CascadeClassifier> cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
cv::Ptr<DetectionBasedTracker::IDetector> MainDetector = new CascadeDetectorAdapter(cascade);
cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
cv::Ptr<DetectionBasedTracker::IDetector> TrackingDetector = new CascadeDetectorAdapter(cascade);
DetectionBasedTracker::Parameters params;
DetectionBasedTracker Detector(MainDetector, TrackingDetector, params);
if (!Detector.run())
{
printf("Error: Detector initialization failed\n");
return 2;
}
Mat ReferenceFrame;
Mat GrayFrame;
vector<Rect> Faces;
while(true)
{
VideoStream >> ReferenceFrame;
cvtColor(ReferenceFrame, GrayFrame, COLOR_RGB2GRAY);
Detector.process(GrayFrame);
Detector.getObjects(Faces);
for (size_t i = 0; i < Faces.size(); i++)
{
rectangle(ReferenceFrame, Faces[i], CV_RGB(0,255,0));
}
imshow(WindowName, ReferenceFrame);
if (cvWaitKey(30) >= 0) break;
}
Detector.stop();
return 0;
}
#else
#include <stdio.h>
int main()
{
printf("This sample works for UNIX or ANDROID only\n");
return 0;
}
#endif
@@ -43,6 +43,8 @@
 #define LOGE(...) do{} while(0)
 #endif
 
 using namespace cv;
 using namespace std;
@@ -61,31 +63,9 @@ static void usage()
     LOGE0("\t (e.g.\"opencv/data/lbpcascades/lbpcascade_frontalface.xml\" ");
 }
 
-class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
-{
-    public:
-        CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
-            Detector(detector)
-        {
-            CV_Assert(!detector.empty());
-        }
-
-        void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
-        {
-            Detector->detectMultiScale(Image, objects, 1.1, 3, 0, minObjSize, maxObjSize);
-        }
-
-        virtual ~CascadeDetectorAdapter()
-        {}
-
-    private:
-        CascadeDetectorAdapter();
-        cv::Ptr<cv::CascadeClassifier> Detector;
-};
-
 static int test_FaceDetector(int argc, char *argv[])
 {
-    if (argc < 4)
-    {
+    if (argc < 4) {
         usage();
         return -1;
     }
@@ -100,14 +80,12 @@ static int test_FaceDetector(int argc, char *argv[])
     vector<Mat> images;
     {
         char filename[256];
-        for(int n=1; ; n++)
-        {
+        for(int n=1; ; n++) {
             snprintf(filename, sizeof(filename), filepattern, n);
             LOGD("filename='%s'", filename);
             Mat m0;
             m0=imread(filename);
-            if (m0.empty())
-            {
+            if (m0.empty()) {
                 LOGI0("Cannot read the file --- break");
                 break;
             }
@@ -116,15 +94,10 @@ static int test_FaceDetector(int argc, char *argv[])
         LOGD("read %d images", (int)images.size());
     }
 
+    DetectionBasedTracker::Parameters params;
     std::string cascadeFrontalfilename=cascadefile;
-    cv::Ptr<cv::CascadeClassifier> cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
-    cv::Ptr<DetectionBasedTracker::IDetector> MainDetector = new CascadeDetectorAdapter(cascade);
-    cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
-    cv::Ptr<DetectionBasedTracker::IDetector> TrackingDetector = new CascadeDetectorAdapter(cascade);
-
-    DetectionBasedTracker::Parameters params;
-    DetectionBasedTracker fd(MainDetector, TrackingDetector, params);
+    DetectionBasedTracker fd(cascadeFrontalfilename, params);
 
     fd.run();
@@ -135,13 +108,12 @@ static int test_FaceDetector(int argc, char *argv[])
     double freq=getTickFrequency();
     int num_images=images.size();
-    for(int n=1; n <= num_images; n++)
-    {
+    for(int n=1; n <= num_images; n++) {
         int64 tcur=getTickCount();
         int64 dt=tcur-tprev;
         tprev=tcur;
         double t_ms=((double)dt)/freq * 1000.0;
-        LOGD("\n\nSTEP n=%d from prev step %f ms\n", n, t_ms);
+        LOGD("\n\nSTEP n=%d from prev step %f ms\n\n", n, t_ms);
         m=images[n-1];
         CV_Assert(! m.empty());
         cvtColor(m, gray, CV_BGR2GRAY);
@@ -151,8 +123,11 @@ static int test_FaceDetector(int argc, char *argv[])
         vector<Rect> result;
         fd.getObjects(result);
-        for(size_t i=0; i < result.size(); i++)
-        {
+        for(size_t i=0; i < result.size(); i++) {
             Rect r=result[i];
             CV_Assert(r.area() > 0);
             Point tl=r.tl();
@@ -161,14 +136,14 @@ static int test_FaceDetector(int argc, char *argv[])
             rectangle(m, tl, br, color, 3);
         }
     }
-    char outfilename[256];
-    for(int n=1; n <= num_images; n++)
     {
+        char outfilename[256];
+        for(int n=1; n <= num_images; n++) {
             snprintf(outfilename, sizeof(outfilename), outfilepattern, n);
             LOGD("outfilename='%s'", outfilename);
             m=images[n-1];
             imwrite(outfilename, m);
+        }
     }
     fd.stop();
@@ -176,6 +151,8 @@ static int test_FaceDetector(int argc, char *argv[])
     return 0;
 }
 
 int main(int argc, char *argv[])
 {
     return test_FaceDetector(argc, argv);
...