Commit 88742e0e authored by Alexander Alekhin

Merge pull request #899 from mshabunin:pr718

parents cac3c9a2 9d9c0f33
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2014, Itseez Inc, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Itseez Inc or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef OPENCV_DATASETS_TRACK_ALOV_HPP
#define OPENCV_DATASETS_TRACK_ALOV_HPP
#include <string>
#include <vector>
#include "opencv2/datasets/dataset.hpp"
#include "opencv2/datasets/util.hpp"
using namespace std;
namespace cv
{
namespace datasets
{
//! @addtogroup datasets_track
//! @{
struct TRACK_alovObj : public Object
{
int id;
std::string imagePath;
vector <Point2f> gtbb;
};
const string sectionNames[] = { "01-Light", "02-SurfaceCover", "03-Specularity", "04-Transparency", "05-Shape", "06-MotionSmoothness", "07-MotionCoherence",
"08-Clutter", "09-Confusion", "10-LowContrast", "11-Occlusion", "12-MovingCamera", "13-ZoomingCamera", "14-LongDuration" };
const int sectionSizes[] = { 33, 15, 18, 20, 24, 22, 12, 15, 37, 23, 34, 22, 29, 10 };
class CV_EXPORTS TRACK_alov : public Dataset
{
public:
static Ptr<TRACK_alov> create();
virtual void load(const std::string &path) = 0;
//Load only frames with annotations (~every 5th frame)
virtual void loadAnnotatedOnly(const std::string &path) = 0;
virtual int getDatasetsNum() = 0;
virtual int getDatasetLength(int id) = 0;
virtual bool initDataset(int id) = 0;
virtual bool getNextFrame(Mat &frame) = 0;
virtual vector <Point2f> getNextGT() = 0;
//Get frame/GT by datasetID (1..N) and frameID (1..K)
virtual bool getFrame(Mat &frame, int datasetID, int frameID) = 0;
virtual vector <Point2f> getGT(int datasetID, int frameID) = 0;
protected:
vector <vector <Ptr<TRACK_alovObj> > > data;
int activeDatasetID;
int frameCounter;
};
//! @}
}
}
#endif
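
For reference, a minimal, hedged usage sketch of the TRACK_alov interface declared above; the dataset path, dataset ID and window name are illustrative assumptions, not part of the module.

    // Minimal usage sketch (assumptions: ALOV300++ unpacked at /path/to/ALOV300, dataset ID 1 exists)
    #include "opencv2/datasets/track_alov.hpp"
    #include <opencv2/imgproc.hpp>
    #include <opencv2/highgui.hpp>
    #include <iostream>

    int main()
    {
        using namespace cv;
        using namespace cv::datasets;

        Ptr<TRACK_alov> alov = TRACK_alov::create();
        alov->load("/path/to/ALOV300");                 // or loadAnnotatedOnly() for annotated frames only
        std::cout << "datasets: " << alov->getDatasetsNum() << std::endl;

        alov->initDataset(1);                           // dataset IDs are 1-based
        Mat frame;
        while (alov->getNextFrame(frame))
        {
            std::vector<Point2f> gt = alov->getNextGT(); // annotation corners; may be empty for unannotated frames
            for (size_t i = 0; i < gt.size(); i++)
                circle(frame, Point((int)gt[i].x, (int)gt[i].y), 3, Scalar(0, 255, 0), -1);
            imshow("ALOV300++", frame);
            if (waitKey(1) == 'q')
                break;
        }
        return 0;
    }
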
set(the_description "Tracking API")
ocv_define_module(tracking opencv_imgproc opencv_core opencv_video opencv_highgui opencv_plot OPTIONAL opencv_datasets WRAP python)
ocv_define_module(tracking opencv_imgproc opencv_core opencv_video opencv_highgui opencv_dnn opencv_plot OPTIONAL opencv_datasets WRAP python)
\ No newline at end of file
......@@ -93,3 +93,10 @@
keywords={computer vision;feature extraction;image colour analysis;image representation;image sequences;adaptive color attributes;benchmark color sequences;color features;color representations;computer vision;image description;real-time visual tracking;tracking-by-detection framework;Color;Computational modeling;Covariance matrices;Image color analysis;Kernel;Target tracking;Visualization;Adaptive Dimensionality Reduction;Appearance Model;Color Features;Visual Tracking},
doi={10.1109/CVPR.2014.143},
}
@inproceedings{GOTURN,
title={Learning to Track at 100 FPS with Deep Regression Networks},
author={Held, David and Thrun, Sebastian and Savarese, Silvio},
booktitle = {European Conference on Computer Vision (ECCV)},
year = {2016}
}
......@@ -343,3 +343,73 @@ PERF_TEST_P(tracking, tld, testing::Combine(TESTSET_NAMES, SEGMENTS))
SANITY_CHECK( bbs_mat, 15, ERROR_RELATIVE );
}
PERF_TEST_P(tracking, GOTURN, testing::Combine(TESTSET_NAMES, SEGMENTS))
{
string video = get<0>(GetParam());
int segmentId = get<1>(GetParam());
int startFrame;
string prefix;
string suffix;
string datasetMeta = getDataPath(TRACKING_DIR + "/" + video + "/" + video + ".yml");
checkData(datasetMeta, startFrame, prefix, suffix);
int gtStartFrame = startFrame;
vector<Rect> gtBBs;
string gtFile = getDataPath(TRACKING_DIR + "/" + video + "/gt.txt");
if (!getGroundTruth(gtFile, gtBBs))
FAIL() << "Ground truth file " << gtFile << " can not be read" << endl;
int bbCounter = (int)gtBBs.size();
Mat frame;
bool initialized = false;
vector<Rect> bbs;
Ptr<Tracker> tracker = Tracker::create("GOTURN");
string folder = TRACKING_DIR + "/" + video + "/" + FOLDER_IMG;
int numSegments = (sizeof(SEGMENTS) / sizeof(int));
int endFrame = 0;
getSegment(segmentId, numSegments, bbCounter, startFrame, endFrame);
Rect currentBBi = gtBBs[startFrame - gtStartFrame];
Rect2d currentBB(currentBBi);
TEST_CYCLE_N(1)
{
VideoCapture c;
c.open(getDataPath(TRACKING_DIR + "/" + video + "/" + FOLDER_IMG + "/" + video + ".webm"));
c.set(CAP_PROP_POS_FRAMES, startFrame);
for (int frameCounter = startFrame; frameCounter < endFrame; frameCounter++)
{
c >> frame;
if (frame.empty())
{
break;
}
if (!initialized)
{
if (!tracker->init(frame, currentBB))
{
FAIL() << "Could not initialize tracker" << endl;
return;
}
initialized = true;
}
else if (initialized)
{
tracker->update(frame, currentBB);
}
bbs.push_back(currentBB);
}
}
//save the bounding boxes in a Mat
Mat bbs_mat((int)bbs.size(), 4, CV_32F);
getMatOfRects(bbs, bbs_mat);
SANITY_CHECK(bbs_mat, 15, ERROR_RELATIVE);
}
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
//Demo of the GOTURN tracker
//In order to use the GOTURN tracker, the GOTURN architecture goturn.prototxt and the weights goturn.caffemodel must be present in the root folder.
//There are 2 ways to get the caffemodel:
//1 - Train your own GOTURN model using <https://github.com/Auron-X/GOTURN_Training_Toolkit>
//2 - Download the pretrained caffemodel from <https://github.com/opencv/opencv_extra>
#include "opencv2/datasets/track_alov.hpp"
#include <opencv2/core/utility.hpp>
#include <opencv2/tracking.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
using namespace std;
using namespace cv;
using namespace cv::datasets;
#define NUM_TEST_FRAMES 1000
static Mat image;
static bool paused;
static bool selectObjects = false;
static bool startSelection = false;
Rect2d boundingBox;
static const char* keys =
{ "{@dataset_path |true| Dataset path }"
"{@dataset_id |1| Dataset ID }"
};
static void onMouse(int event, int x, int y, int, void*)
{
if (!selectObjects)
{
switch (event)
{
case EVENT_LBUTTONDOWN:
//set origin of the bounding box
startSelection = true;
boundingBox.x = x;
boundingBox.y = y;
boundingBox.width = boundingBox.height = 0;
break;
case EVENT_LBUTTONUP:
//set width and height of the bounding box
boundingBox.width = std::abs(x - boundingBox.x);
boundingBox.height = std::abs(y - boundingBox.y);
paused = false;
selectObjects = true;
startSelection = false;
break;
case EVENT_MOUSEMOVE:
if (startSelection && !selectObjects)
{
//draw the bounding box
Mat currentFrame;
image.copyTo(currentFrame);
rectangle(currentFrame, Point((int)boundingBox.x, (int)boundingBox.y), Point(x, y), Scalar(255, 0, 0), 2, 1);
imshow("GOTURN Tracking", currentFrame);
}
break;
}
}
}
static void help()
{
cout << "\nThis example is a simple demo of GOTURN tracking on ALOV300++ dataset"
"ALOV dataset contains videos with ID range: 1~314\n"
"-- pause video [p] and draw a bounding boxes around the targets to start the tracker\n"
"Example:\n"
"./goturnTracker <dataset_path> <dataset_id>\n"
<< endl;
cout << "\n\nHot keys: \n"
"\tq - quit the program\n"
"\tp - pause video\n";
}
int main(int argc, char *argv[])
{
CommandLineParser parser(argc, argv, keys);
string datasetRootPath = parser.get<string>(0);
int datasetID = parser.get<int>(1);
if (datasetRootPath.empty())
{
help();
return -1;
}
Mat frame;
paused = false;
namedWindow("GOTURN Tracking", 0);
setMouseCallback("GOTURN Tracking", onMouse, 0);
//Create GOTURN tracker
Ptr<Tracker> tracker = Tracker::create("GOTURN");
//Load and init the full ALOV300++ dataset with a given datasetID; as an alternative you can use loadAnnotatedOnly(..)
//to load only frames with labeled ground truth (~every 5th frame)
Ptr<cv::datasets::TRACK_alov> dataset = TRACK_alov::create();
dataset->load(datasetRootPath);
dataset->initDataset(datasetID);
//Read first frame
dataset->getNextFrame(frame);
frame.copyTo(image);
rectangle(image, boundingBox, Scalar(255, 0, 0), 2, 1);
imshow("GOTURN Tracking", image);
bool initialized = false;
paused = true;
int frameCounter = 0;
//Time measurement
int64 e3 = getTickCount();
for (;;)
{
if (!paused)
{
//Time measurement
int64 e1 = getTickCount();
if (initialized){
if (!dataset->getNextFrame(frame))
break;
frame.copyTo(image);
}
if (!initialized && selectObjects)
{
//Initialize the tracker and add targets
if (!tracker->init(frame, boundingBox))
{
cout << "Tracker Init Error!!!";
return 0;
}
rectangle(frame, boundingBox, Scalar(0, 0, 255), 2, 1);
initialized = true;
}
else if (initialized)
{
//Update all targets
if (tracker->update(frame, boundingBox))
{
rectangle(frame, boundingBox, Scalar(0, 0, 255), 2, 1);
}
}
imshow("GOTURN Tracking", frame);
frameCounter++;
//Time measurement
int64 e2 = getTickCount();
double t1 = (e2 - e1) / getTickFrequency();
cout << frameCounter << "\tframe : " << t1 * 1000.0 << "ms" << endl;
}
char c = (char)waitKey(2);
if (c == 'q')
break;
if (c == 'p')
paused = !paused;
}
//Time measurement
int64 e4 = getTickCount();
double t2 = (e4 - e3) / getTickFrequency();
cout << "Average time per frame: " << t2 * 1000.0 / frameCounter << "ms" << endl;
cout << "Average FPS: " << frameCounter / t2 << endl;
waitKey(0);
return 0;
}
/*///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "gtrTracker.hpp"
namespace cv
{
TrackerGOTURN::Params::Params(){}
void TrackerGOTURN::Params::read(const cv::FileNode& /*fn*/){}
void TrackerGOTURN::Params::write(cv::FileStorage& /*fs*/) const {}
Ptr<TrackerGOTURN> TrackerGOTURN::createTracker(const TrackerGOTURN::Params &parameters)
{
return Ptr<gtr::TrackerGOTURNImpl>(new gtr::TrackerGOTURNImpl(parameters));
}
namespace gtr
{
class TrackerGOTURNModel : public TrackerModel{
public:
TrackerGOTURNModel(TrackerGOTURN::Params){}
Rect2d getBoundingBox(){ return boundingBox_; }
void setBoudingBox(Rect2d boundingBox){ boundingBox_ = boundingBox; }
Mat getImage(){ return image_; }
void setImage(const Mat& image){ image.copyTo(image_); }
protected:
Rect2d boundingBox_;
Mat image_;
void modelEstimationImpl(const std::vector<Mat>&){}
void modelUpdateImpl(){}
};
TrackerGOTURNImpl::TrackerGOTURNImpl(const TrackerGOTURN::Params &parameters) :
params(parameters){
isInit = false;
};
void TrackerGOTURNImpl::read(const cv::FileNode& fn)
{
params.read(fn);
}
void TrackerGOTURNImpl::write(cv::FileStorage& fs) const
{
params.write(fs);
}
bool TrackerGOTURNImpl::initImpl(const Mat& image, const Rect2d& boundingBox)
{
//Make a simple model from frame and bounding box
model = Ptr<TrackerGOTURNModel>(new TrackerGOTURNModel(params));
((TrackerGOTURNModel*)static_cast<TrackerModel*>(model))->setImage(image);
((TrackerGOTURNModel*)static_cast<TrackerModel*>(model))->setBoudingBox(boundingBox);
//Load GOTURN architecture from *.prototxt and pretrained weights from *.caffemodel
String modelTxt = "goturn.prototxt";
String modelBin = "goturn.caffemodel";
Ptr<dnn::Importer> importer;
try //Import GOTURN model
{
importer = dnn::createCaffeImporter(modelTxt, modelBin);
}
catch (const cv::Exception &err) //The importer can throw; catch and report the error
{
std::cerr << err.msg << std::endl;
}
if (!importer)
{
CV_Error(Error::StsError, "GOTURN network loading error...");
}
importer->populateNet(net);
importer.release(); //The importer is no longer needed
return true;
}
bool TrackerGOTURNImpl::updateImpl(const Mat& image, Rect2d& boundingBox)
{
int INPUT_SIZE = 227;
//Using prevFrame and prevBB from the model together with curFrame, GOTURN predicts curBB
Mat curFrame = image.clone();
Mat prevFrame = ((TrackerGOTURNModel*)static_cast<TrackerModel*>(model))->getImage();
Rect2d prevBB = ((TrackerGOTURNModel*)static_cast<TrackerModel*>(model))->getBoundingBox();
Rect2d curBB;
float padTargetPatch = 2.0;
Rect2f searchPatchRect, targetPatchRect;
Point2f currCenter, prevCenter;
Mat prevFramePadded, curFramePadded;
Mat searchPatch, targetPatch;
prevCenter.x = (float)(prevBB.x + prevBB.width / 2);
prevCenter.y = (float)(prevBB.y + prevBB.height / 2);
targetPatchRect.width = (float)(prevBB.width*padTargetPatch);
targetPatchRect.height = (float)(prevBB.height*padTargetPatch);
targetPatchRect.x = (float)(prevCenter.x - prevBB.width*padTargetPatch / 2.0 + targetPatchRect.width);
targetPatchRect.y = (float)(prevCenter.y - prevBB.height*padTargetPatch / 2.0 + targetPatchRect.height);
copyMakeBorder(prevFrame, prevFramePadded, (int)targetPatchRect.height, (int)targetPatchRect.height, (int)targetPatchRect.width, (int)targetPatchRect.width, BORDER_REPLICATE);
targetPatch = prevFramePadded(targetPatchRect).clone();
copyMakeBorder(curFrame, curFramePadded, (int)targetPatchRect.height, (int)targetPatchRect.height, (int)targetPatchRect.width, (int)targetPatchRect.width, BORDER_REPLICATE);
searchPatch = curFramePadded(targetPatchRect).clone();
//Preprocess
//Resize
resize(targetPatch, targetPatch, Size(INPUT_SIZE, INPUT_SIZE));
resize(searchPatch, searchPatch, Size(INPUT_SIZE, INPUT_SIZE));
//Mean Subtract
targetPatch = targetPatch - 128;
searchPatch = searchPatch - 128;
//Convert to Float type
targetPatch.convertTo(targetPatch, CV_32F);
searchPatch.convertTo(searchPatch, CV_32F);
dnn::Blob targetBlob = dnn::Blob(targetPatch);
dnn::Blob searchBlob = dnn::Blob(searchPatch);
net.setBlob(".data1", targetBlob);
net.setBlob(".data2", searchBlob);
net.forward();
dnn::Blob res = net.getBlob("scale");
Mat resMat = res.matRefConst().reshape(1, 1);
curBB.x = targetPatchRect.x + (resMat.at<float>(0) * targetPatchRect.width / INPUT_SIZE) - targetPatchRect.width;
curBB.y = targetPatchRect.y + (resMat.at<float>(1) * targetPatchRect.height / INPUT_SIZE) - targetPatchRect.height;
curBB.width = (resMat.at<float>(2) - resMat.at<float>(0)) * targetPatchRect.width / INPUT_SIZE;
curBB.height = (resMat.at<float>(3) - resMat.at<float>(1)) * targetPatchRect.height / INPUT_SIZE;
//Predicted BB
boundingBox = curBB;
//Set new model image and BB from current frame
((TrackerGOTURNModel*)static_cast<TrackerModel*>(model))->setImage(curFrame);
((TrackerGOTURNModel*)static_cast<TrackerModel*>(model))->setBoudingBox(curBB);
return true;
}
}
}
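
For orientation, a minimal sketch of how the tracker implemented above is typically consumed through the generic Tracker factory; the video path and the initial box are illustrative assumptions, and goturn.prototxt/goturn.caffemodel are assumed to sit in the working directory as the demo notes.

    // Sketch only: driving TrackerGOTURN through the generic Tracker interface.
    // The video path and initial box below are placeholders, not part of the module.
    #include <opencv2/tracking.hpp>
    #include <opencv2/videoio.hpp>
    #include <opencv2/imgproc.hpp>
    #include <opencv2/highgui.hpp>

    int main()
    {
        using namespace cv;

        Ptr<Tracker> tracker = Tracker::create("GOTURN");  // or TrackerGOTURN::createTracker(TrackerGOTURN::Params())
        VideoCapture cap("video.avi");                      // illustrative input video
        Mat frame;
        if (!cap.read(frame))
            return 1;

        Rect2d bb(100, 100, 80, 80);                        // placeholder initial box; normally selected by the user
        if (!tracker->init(frame, bb))
            return 1;

        while (cap.read(frame))
        {
            if (tracker->update(frame, bb))
                rectangle(frame, bb, Scalar(0, 0, 255), 2);
            imshow("GOTURN", frame);
            if (waitKey(1) == 27)                           // Esc to quit
                break;
        }
        return 0;
    }
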
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef OPENCV_GOTURN_TRACKER
#define OPENCV_GOTURN_TRACKER
#include "precomp.hpp"
#include "opencv2/video/tracking.hpp"
#include "opencv2/dnn.hpp"
#include "gtrUtils.hpp"
#include "opencv2/imgproc.hpp"
#include <algorithm>
#include <limits.h>
namespace cv
{
namespace gtr
{
class TrackerGOTURNImpl : public TrackerGOTURN
{
public:
TrackerGOTURNImpl(const TrackerGOTURN::Params &parameters = TrackerGOTURN::Params());
void read(const FileNode& fn);
void write(FileStorage& fs) const;
bool initImpl(const Mat& image, const Rect2d& boundingBox);
bool updateImpl(const Mat& image, Rect2d& boundingBox);
TrackerGOTURN::Params params;
dnn::Net net;
};
}
}
#endif
/*///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "gtrUtils.hpp"
namespace cv
{
namespace gtr
{
double generateRandomLaplacian(double b, double m)
{
double t = (double)rand() / (RAND_MAX);
double n = (double)rand() / (RAND_MAX);
if (t > 0.5)
return m + b*log(n);
else
return m - b*log(n);
}
Rect2f anno2rect(vector<Point2f> annoBB)
{
Rect2f rectBB;
rectBB.x = min(annoBB[0].x, annoBB[1].x);
rectBB.y = min(annoBB[0].y, annoBB[2].y);
rectBB.width = fabs(annoBB[0].x - annoBB[1].x);
rectBB.height = fabs(annoBB[0].y - annoBB[2].y);
return rectBB;
}
vector <TrainingSample> gatherFrameSamples(Mat prevFrame, Mat currFrame, Rect2f prevBB, Rect2f currBB)
{
vector <TrainingSample> trainingSamples;
Point2f currCenter, prevCenter;
Rect2f targetPatchRect, searchPatchRect;
Mat targetPatch, searchPatch;
Mat prevFramePadded, currFramePadded;
//Crop Target Patch
//Padding
//Previous frame GT BB center
prevCenter.x = prevBB.x + prevBB.width / 2;
prevCenter.y = prevBB.y + prevBB.height / 2;
targetPatchRect.width = (float)(prevBB.width*padTarget);
targetPatchRect.height = (float)(prevBB.height*padTarget);
targetPatchRect.x = (float)(prevCenter.x - prevBB.width*padTarget / 2.0 + targetPatchRect.width);
targetPatchRect.y = (float)(prevCenter.y - prevBB.height*padTarget / 2.0 + targetPatchRect.height);
copyMakeBorder(prevFrame, prevFramePadded, (int)targetPatchRect.height, (int)targetPatchRect.height, (int)targetPatchRect.width, (int)targetPatchRect.width, BORDER_REPLICATE);
targetPatch = prevFramePadded(targetPatchRect);
for (int i = 0; i < samplesInFrame; i++)
{
TrainingSample sample;
//Current frame GT BB center
currCenter.x = (float)(currBB.x + currBB.width / 2.0);
currCenter.y = (float)(currBB.y + currBB.height / 2.0);
//Generate random Laplace-distributed offsets (scaled by the target size)
double dx, dy, ds;
dx = generateRandomLaplacian(bX, 0)*prevBB.width;
dy = generateRandomLaplacian(bY, 0)*prevBB.height;
ds = generateRandomLaplacian(bS, 1);
//Limit coefficients
dx = min(dx, (double)prevBB.width);
dx = max(dx, (double)-prevBB.width);
dy = min(dy, (double)prevBB.height);
dy = max(dy, (double)-prevBB.height);
ds = min(ds, Ymax);
ds = max(ds, Ymin);
searchPatchRect.width = (float)(prevBB.width*padSearch*ds);
searchPatchRect.height =(float)(prevBB.height*padSearch*ds);
searchPatchRect.x = (float)(currCenter.x + dx - searchPatchRect.width / 2.0 + searchPatchRect.width);
searchPatchRect.y = (float)(currCenter.y + dy - searchPatchRect.height / 2.0 + searchPatchRect.height);
copyMakeBorder(currFrame, currFramePadded, (int)searchPatchRect.height, (int)searchPatchRect.height, (int)searchPatchRect.width, (int)searchPatchRect.width, BORDER_REPLICATE);
searchPatch = currFramePadded(searchPatchRect);
//Calculate Relative GTBB in search patch
Rect2f relGTBB;
relGTBB.width = currBB.width;
relGTBB.height = currBB.height;
relGTBB.x = currBB.x - searchPatchRect.x + searchPatchRect.width;
relGTBB.y = currBB.y - searchPatchRect.y + searchPatchRect.height;
//Link to the sample struct
sample.targetPatch = targetPatch.clone();
sample.searchPatch = searchPatch.clone();
sample.targetBB = relGTBB;
trainingSamples.push_back(sample);
}
return trainingSamples;
}
}
}
#ifndef OPENCV_GTR_UTILS
#define OPENCV_GTR_UTILS
#include "precomp.hpp"
#include <vector>
#include "opencv2/highgui.hpp"
#include <opencv2/datasets/track_alov.hpp>
namespace cv
{
namespace gtr
{
//Number of samples in batch
const int samplesInBatch = 50;
//Number of samples to mine from video frame
const int samplesInFrame = 10;
//Number of samples to mine from still image
const int samplesInImage = 10;
//Padding coefficients for Target/Search Region
const double padTarget = 2.0;
const double padSearch = 2.0;
//Scale parameters for Laplace distribution for Translation/Scale
const double bX = 1.0/10;
const double bY = 1.0/10;
const double bS = 1.0/15;
//Limits of scale changes
const double Ymax = 1.4;
const double Ymin = 0.6;
//Lower boundary constraints for random samples (a sample should include at least this fraction of the target BB)
const double minX = 0.5;
const double minY = 0.5;
//Structure of sample for training
struct TrainingSample
{
Mat targetPatch;
Mat searchPatch;
//Output bounding box on search patch
Rect2f targetBB;
};
//Sample from a Laplace distribution with scale b and location m
double generateRandomLaplacian(double b, double m);
//Convert ALOV300++ annotation coordinates to a rectangular BB
Rect2f anno2rect(vector<Point2f> annoBB);
//Gather training samples from a pair of video frames
vector <TrainingSample> gatherFrameSamples(Mat prevFrame, Mat currFrame, Rect2f prevBB, Rect2f currBB);
}
}
#endif
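
A hedged sketch of how these utilities might be combined with the ALOV300++ reader to mine training pairs; it assumes compilation inside the tracking module (gtrUtils.hpp includes precomp.hpp), and the dataset path, dataset ID and frame indices are purely illustrative.

    // Sketch: mining GOTURN training samples from two annotated ALOV300++ frames.
    // Dataset path, dataset ID and frame IDs are illustrative assumptions.
    #include "gtrUtils.hpp"
    #include <iostream>

    int main()
    {
        using namespace cv;
        using namespace cv::datasets;
        using namespace cv::gtr;

        Ptr<TRACK_alov> alov = TRACK_alov::create();
        alov->loadAnnotatedOnly("/path/to/ALOV300");   // annotated frames only (~every 5th)

        Mat prevFrame, currFrame;
        alov->getFrame(prevFrame, 1, 1);               // datasetID = 1, frameID = 1
        alov->getFrame(currFrame, 1, 2);

        Rect2f prevBB = anno2rect(alov->getGT(1, 1));  // ALOV corner annotations -> axis-aligned rectangle
        Rect2f currBB = anno2rect(alov->getGT(1, 2));

        std::vector<TrainingSample> samples = gatherFrameSamples(prevFrame, currFrame, prevBB, currBB);
        // Each sample holds a target patch, a randomly shifted/scaled search patch,
        // and the ground-truth box expressed in search-patch coordinates (targetBB).
        std::cout << "mined " << samples.size() << " samples" << std::endl;
        return 0;
    }
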
......@@ -111,6 +111,7 @@ Ptr<Tracker> Tracker::create( const String& trackerType )
BOILERPLATE_CODE("MEDIANFLOW",TrackerMedianFlow);
BOILERPLATE_CODE("TLD",TrackerTLD);
BOILERPLATE_CODE("KCF",TrackerKCF);
BOILERPLATE_CODE("GOTURN", TrackerGOTURN);
return Ptr<Tracker>();
}
......
......@@ -422,6 +422,20 @@ TEST_P(OPE_Overlap, TLD)
RecordProperty( "ratioSuccess", test.getRatioSucc() );
}
TEST_P(OPE_Distance, GOTURN)
{
TrackerOPETest test(Tracker::create("GOTURN"), TrackerOPETest::DISTANCE, dataset, threshold);
test.run();
RecordProperty("ratioSuccess", test.getRatioSucc());
}
TEST_P(OPE_Overlap, GOTURN)
{
TrackerOPETest test(Tracker::create("GOTURN"), TrackerOPETest::OVERLAP, dataset, threshold);
test.run();
RecordProperty("ratioSuccess", test.getRatioSucc());
}
INSTANTIATE_TEST_CASE_P( Tracking, OPE_Distance, testing::Combine( TESTSET_NAMES, LOCATION_ERROR_THRESHOLD ) );
INSTANTIATE_TEST_CASE_P( Tracking, OPE_Overlap, testing::Combine( TESTSET_NAMES, OVERLAP_THRESHOLD ) );
......
......@@ -529,6 +529,20 @@ TEST_P(SRE_Overlap, TLD)
RecordProperty( "ratioSuccess", test.getRatioSucc() );
}
TEST_P(SRE_Distance, GOTURN)
{
TrackerSRETest test(Tracker::create("GOTURN"), TrackerSRETest::DISTANCE, dataset, shift, threshold);
test.run();
RecordProperty("ratioSuccess", test.getRatioSucc());
}
TEST_P(SRE_Overlap, GOTURN)
{
TrackerSRETest test(Tracker::create("GOTURN"), TrackerSRETest::OVERLAP, dataset, shift, threshold);
test.run();
RecordProperty("ratioSuccess", test.getRatioSucc());
}
INSTANTIATE_TEST_CASE_P( Tracking, SRE_Distance, testing::Combine( TESTSET_NAMES, SPATIAL_SHIFTS, LOCATION_ERROR_THRESHOLD ) );
INSTANTIATE_TEST_CASE_P( Tracking, SRE_Overlap, testing::Combine( TESTSET_NAMES, SPATIAL_SHIFTS, OVERLAP_THRESHOLD ) );
......
......@@ -389,7 +389,7 @@ void TrackerTRETest::checkDataTest()
gt2.close();
if( segmentIdx == ( sizeof ( SEGMENTS)/sizeof(int) ) )
endFrame = (int)bbs.size();
endFrame = (int)bbs.size();
}
void TrackerTRETest::run()
......@@ -499,6 +499,20 @@ TEST_P(TRE_Overlap, TLD)
RecordProperty( "ratioSuccess", test.getRatioSucc() );
}
TEST_P(TRE_Distance, GOTURN)
{
TrackerTRETest test(Tracker::create("GOTURN"), TrackerTRETest::DISTANCE, dataset, threshold, segment);
test.run();
RecordProperty("ratioSuccess", test.getRatioSucc());
}
TEST_P(TRE_Overlap, GOTURN)
{
TrackerTRETest test(Tracker::create("GOTURN"), TrackerTRETest::OVERLAP, dataset, threshold, segment);
test.run();
RecordProperty("ratioSuccess", test.getRatioSucc());
}
INSTANTIATE_TEST_CASE_P( Tracking, TRE_Distance, testing::Combine( TESTSET_NAMES, SEGMENTS, LOCATION_ERROR_THRESHOLD ) );
INSTANTIATE_TEST_CASE_P( Tracking, TRE_Overlap, testing::Combine( TESTSET_NAMES, SEGMENTS, OVERLAP_THRESHOLD ) );
......