Commit 54ef4c08 authored by Vadim Pisarevsky


moved some old stuff to the legacy module; merged the "compat_c.h" headers and moved them to legacy as well; moved the implementation of many non-critical/obsolete inline functions and methods to .cpp files to improve OpenCV build time
parent fbdb4f4a
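The first two hunks below drop the per-module "compat_c.h" includes from the calib3d and imgproc headers. A minimal sketch of what this means for downstream code that still relies on the obsolete C declarations (the legacy header names are taken from later hunks in this diff; whether a given project needs them at all depends on which compat symbols it uses):

    // Before: including the module header transitively pulled in its compat_c.h.
    #include "opencv2/calib3d/calib3d.hpp"

    // After: the merged compatibility declarations live in the legacy module,
    // so projects that still use them include it explicitly (assumed usage).
    #include "opencv2/legacy/compat.hpp"
    #include "opencv2/legacy/legacy.hpp"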
@@ -745,6 +745,4 @@ CV_EXPORTS_W void reprojectImageTo3D( const Mat& disparity,
 #endif
-#include "opencv2/calib3d/compat_c.h"
 #endif
@@ -780,6 +780,4 @@ CVAPI(void) cvLSHQuery(struct CvLSH* lsh, const CvMat* query_points,
 }
 #endif
-#include "opencv2/imgproc/compat_c.h"
 #endif
@@ -579,17 +579,17 @@ CVAPI(int) icvCompute3DPoint( double alpha,double betta,
                               CvStereoLineCoeff* coeffs,
                               CvPoint3D64f* point);

-CVAPI(int) icvCreateConvertMatrVect( CvMatr64d rotMatr1,
-                                     CvMatr64d transVect1,
-                                     CvMatr64d rotMatr2,
-                                     CvMatr64d transVect2,
-                                     CvMatr64d convRotMatr,
-                                     CvMatr64d convTransVect);
+CVAPI(int) icvCreateConvertMatrVect( double* rotMatr1,
+                                     double* transVect1,
+                                     double* rotMatr2,
+                                     double* transVect2,
+                                     double* convRotMatr,
+                                     double* convTransVect);

 CVAPI(int) icvConvertPointSystem(CvPoint3D64f M2,
                                  CvPoint3D64f* M1,
-                                 CvMatr64d rotMatr,
-                                 CvMatr64d transVect
+                                 double* rotMatr,
+                                 double* transVect
                                  );

 CVAPI(int) icvComputeCoeffForStereo( CvStereoCamera* stereoCamera);
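The one-for-one substitution of double* for CvMatr64d (and, in later hunks, CvVect64d) only works if those names are plain pointer typedefs rather than distinct types. A hedged reconstruction of the compatibility typedefs being dropped here, inferred from the substitution rather than quoted from the headers:

    /* Assumed definitions from the old compat headers; shown only to explain
       why the replacement is source- and binary-compatible. */
    typedef float*  CvMatr32f;
    typedef double* CvMatr64d;
    typedef float*  CvVect32f;
    typedef double* CvVect64d;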
@@ -615,17 +615,17 @@ CVAPI(int) icvComCoeffForLine( CvPoint2D64f point1,
                               CvPoint2D64f point2,
                               CvPoint2D64f point3,
                               CvPoint2D64f point4,
-                              CvMatr64d camMatr1,
-                              CvMatr64d rotMatr1,
-                              CvMatr64d transVect1,
-                              CvMatr64d camMatr2,
-                              CvMatr64d rotMatr2,
-                              CvMatr64d transVect2,
+                              double* camMatr1,
+                              double* rotMatr1,
+                              double* transVect1,
+                              double* camMatr2,
+                              double* rotMatr2,
+                              double* transVect2,
                               CvStereoLineCoeff* coeffs,
                               int* needSwapCameras);

 CVAPI(int) icvGetDirectionForPoint( CvPoint2D64f point,
-                                    CvMatr64d camMatr,
+                                    double* camMatr,
                                     CvPoint3D64f* direct);

 CVAPI(int) icvGetCrossLines(CvPoint3D64f point11,CvPoint3D64f point12,
@@ -638,15 +638,15 @@ CVAPI(int) icvComputeStereoLineCoeffs( CvPoint3D64f pointA,
                                        double gamma,
                                        CvStereoLineCoeff* coeffs);

-/*CVAPI(int) icvComputeFundMatrEpipoles ( CvMatr64d camMatr1,
-                                    CvMatr64d rotMatr1,
-                                    CvVect64d transVect1,
-                                    CvMatr64d camMatr2,
-                                    CvMatr64d rotMatr2,
-                                    CvVect64d transVect2,
+/*CVAPI(int) icvComputeFundMatrEpipoles ( double* camMatr1,
+                                    double* rotMatr1,
+                                    double* transVect1,
+                                    double* camMatr2,
+                                    double* rotMatr2,
+                                    double* transVect2,
                                     CvPoint2D64f* epipole1,
                                     CvPoint2D64f* epipole2,
-                                    CvMatr64d fundMatr);*/
+                                    double* fundMatr);*/

 CVAPI(int) icvGetAngleLine( CvPoint2D64f startPoint, CvSize imageSize,CvPoint2D64f *point1,CvPoint2D64f *point2);
@@ -656,24 +656,24 @@ CVAPI(void) icvGetCoefForPiece( CvPoint2D64f p_start,CvPoint2D64f p_end,

 /*CVAPI(void) icvGetCommonArea( CvSize imageSize,
                             CvPoint2D64f epipole1,CvPoint2D64f epipole2,
-                            CvMatr64d fundMatr,
-                            CvVect64d coeff11,CvVect64d coeff12,
-                            CvVect64d coeff21,CvVect64d coeff22,
+                            double* fundMatr,
+                            double* coeff11,double* coeff12,
+                            double* coeff21,double* coeff22,
                             int* result);*/

-CVAPI(void) icvComputeeInfiniteProject1(CvMatr64d rotMatr,
-                                        CvMatr64d camMatr1,
-                                        CvMatr64d camMatr2,
+CVAPI(void) icvComputeeInfiniteProject1(double* rotMatr,
+                                        double* camMatr1,
+                                        double* camMatr2,
                                         CvPoint2D32f point1,
                                         CvPoint2D32f *point2);

-CVAPI(void) icvComputeeInfiniteProject2(CvMatr64d rotMatr,
-                                        CvMatr64d camMatr1,
-                                        CvMatr64d camMatr2,
+CVAPI(void) icvComputeeInfiniteProject2(double* rotMatr,
+                                        double* camMatr1,
+                                        double* camMatr2,
                                         CvPoint2D32f* point1,
                                         CvPoint2D32f point2);

-CVAPI(void) icvGetCrossDirectDirect( CvVect64d direct1,CvVect64d direct2,
+CVAPI(void) icvGetCrossDirectDirect( double* direct1,double* direct2,
                                      CvPoint2D64f *cross,int* result);

 CVAPI(void) icvGetCrossPieceDirect( CvPoint2D64f p_start,CvPoint2D64f p_end,
@@ -693,20 +693,20 @@ CVAPI(void) icvGetCrossRectDirect( CvSize imageSize,
                                    int* result);

 CVAPI(void) icvProjectPointToImage( CvPoint3D64f point,
-                                    CvMatr64d camMatr,CvMatr64d rotMatr,CvVect64d transVect,
+                                    double* camMatr,double* rotMatr,double* transVect,
                                     CvPoint2D64f* projPoint);

 CVAPI(void) icvGetQuadsTransform( CvSize imageSize,
-                                  CvMatr64d camMatr1,
-                                  CvMatr64d rotMatr1,
-                                  CvVect64d transVect1,
-                                  CvMatr64d camMatr2,
-                                  CvMatr64d rotMatr2,
-                                  CvVect64d transVect2,
+                                  double* camMatr1,
+                                  double* rotMatr1,
+                                  double* transVect1,
+                                  double* camMatr2,
+                                  double* rotMatr2,
+                                  double* transVect2,
                                   CvSize* warpSize,
                                   double quad1[4][2],
                                   double quad2[4][2],
-                                  CvMatr64d fundMatr,
+                                  double* fundMatr,
                                   CvPoint3D64f* epipole1,
                                   CvPoint3D64f* epipole2
                                   );
@@ -715,7 +715,7 @@ CVAPI(void) icvGetQuadsTransformStruct( CvStereoCamera* stereoCamera);

 CVAPI(void) icvComputeStereoParamsForCameras(CvStereoCamera* stereoCamera);

-CVAPI(void) icvGetCutPiece( CvVect64d areaLineCoef1,CvVect64d areaLineCoef2,
+CVAPI(void) icvGetCutPiece( double* areaLineCoef1,double* areaLineCoef2,
                             CvPoint2D64f epipole,
                             CvSize imageSize,
                             CvPoint2D64f* point11,CvPoint2D64f* point12,

@@ -726,14 +726,14 @@ CVAPI(void) icvGetMiddleAnglePoint( CvPoint2D64f basePoint,
                                     CvPoint2D64f point1,CvPoint2D64f point2,
                                     CvPoint2D64f* midPoint);

-CVAPI(void) icvGetNormalDirect(CvVect64d direct,CvPoint2D64f point,CvVect64d normDirect);
+CVAPI(void) icvGetNormalDirect(double* direct,CvPoint2D64f point,double* normDirect);

 CVAPI(double) icvGetVect(CvPoint2D64f basePoint,CvPoint2D64f point1,CvPoint2D64f point2);

-CVAPI(void) icvProjectPointToDirect( CvPoint2D64f point,CvVect64d lineCoeff,
+CVAPI(void) icvProjectPointToDirect( CvPoint2D64f point,double* lineCoeff,
                                      CvPoint2D64f* projectPoint);

-CVAPI(void) icvGetDistanceFromPointToDirect( CvPoint2D64f point,CvVect64d lineCoef,double*dist);
+CVAPI(void) icvGetDistanceFromPointToDirect( CvPoint2D64f point,double* lineCoef,double*dist);

 CVAPI(IplImage*) icvCreateIsometricImage( IplImage* src, IplImage* dst,
                                           int desired_depth, int desired_num_channels );
@@ -1096,6 +1096,11 @@ CVAPI(void) cvInitPerspectiveTransform( CvSize size, const CvPoint2D32f vertex[4

 /*************************** View Morphing Functions ************************/

+typedef struct CvMatrix3
+{
+    float m[3][3];
+} CvMatrix3;
+
 /* The order of the function corresponds to the order they should appear in
    the view morphing pipeline */
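The CvMatrix3 definition added above is a plain aggregate, so callers of the view morphing C API can fill it directly. A small illustrative sketch (assumed usage, not taken from the diff):

    /* CvMatrix3 is just a 3x3 float array wrapped in a struct; identity shown here. */
    CvMatrix3 m = {{ {1.f, 0.f, 0.f},
                     {0.f, 1.f, 0.f},
                     {0.f, 0.f, 1.f} }};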
......
@@ -42,34 +42,20 @@
 // Function cvCreateBGStatModel creates and returns initialized BG model.
 // Parameters:
 //      first_frame - frame from video sequence
 //      model_type  - type of BG model (CV_BG_MODEL_MOG, CV_BG_MODEL_FGD, ...)
 //      parameters  - (optional) if NULL the default parameters of the algorithm will be used
-CvBGStatModel* cvCreateBGStatModel( IplImage* first_frame, int model_type, void* params )
+static CvBGStatModel* cvCreateBGStatModel( IplImage* first_frame, int model_type, void* params )
 {
     CvBGStatModel* bg_model = NULL;
     if( model_type == CV_BG_MODEL_FGD || model_type == CV_BG_MODEL_FGD_SIMPLE )
         bg_model = cvCreateFGDStatModel( first_frame, (CvFGDStatModelParams*)params );
     else if( model_type == CV_BG_MODEL_MOG )
         bg_model = cvCreateGaussianBGModel( first_frame, (CvGaussBGStatModelParams*)params );
     return bg_model;
 }

-void cvReleaseBGStatModel( CvBGStatModel** bg_model )
-{
-    if( bg_model && *bg_model && (*bg_model)->release )
-        (*bg_model)->release( bg_model );
-}
-
-int cvUpdateBGStatModel( IplImage* current_frame,
-                         CvBGStatModel* bg_model,
-                         double learningRate )
-{
-    return bg_model && bg_model->update ? bg_model->update( current_frame, bg_model, learningRate ) : 0;
-}

 /* FOREGROUND DETECTOR INTERFACE */
 class CvFGDetectorBase : public CvFGDetector
 {
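For reference, a minimal usage sketch of the C background-subtraction interface that the now-static factory above wraps (hedged: the header choice and the default-parameter behaviour are assumptions about typical usage, not part of this diff):

    #include "opencv2/video/background_segm.hpp"

    static void demo( IplImage* first_frame, IplImage* next_frame )
    {
        /* Pick a concrete model directly, as the static factory does internally. */
        CvBGStatModel* bg = cvCreateGaussianBGModel( first_frame, NULL );

        /* A negative learning rate is assumed to mean "use the model's default". */
        cvUpdateBGStatModel( next_frame, bg, -1 );

        /* bg->foreground holds the FG mask, bg->background the running estimate. */
        cvReleaseBGStatModel( &bg );
    }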
......
@@ -50,8 +50,14 @@
 #endif

 #include "opencv2/legacy/legacy.hpp"
-#include "opencv2/video/tracking.hpp"
 #include "opencv2/core/internal.hpp"
+#include "opencv2/video/tracking.hpp"
+#include "opencv2/video/background_segm.hpp"
+#include "opencv2/legacy/blobtrack.hpp"
+#include "opencv2/legacy/compat.hpp"
 #include "_matrix.h"

 typedef unsigned short ushort;
......
@@ -340,7 +340,6 @@ CVAPI(CvSeq*) cvSegmentFGMask( CvArr *fgmask, int poly1Hull0 CV_DEFAULT(1),
                                CvMemStorage* storage CV_DEFAULT(0),
                                CvPoint offset CV_DEFAULT(cvPoint(0,0)));

 #ifdef __cplusplus
 }
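For context, cvSegmentFGMask in the hunk above is the helper that turns a foreground mask into segmented regions. A hedged one-call sketch using the default arguments shown in the declaration (the header choice is an assumption):

    #include "opencv2/video/background_segm.hpp"

    static void segment_demo( IplImage* fgmask )
    {
        /* Defaults from the declaration above: poly1Hull0 = 1, storage = 0, offset = (0,0). */
        CvSeq* regions = cvSegmentFGMask( fgmask );
        (void)regions;   /* sequence of segmented foreground regions */
    }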
......
@@ -40,6 +40,20 @@
 #include "precomp.hpp"

+void cvReleaseBGStatModel( CvBGStatModel** bg_model )
+{
+    if( bg_model && *bg_model && (*bg_model)->release )
+        (*bg_model)->release( bg_model );
+}
+
+int cvUpdateBGStatModel( IplImage* current_frame,
+                         CvBGStatModel* bg_model,
+                         double learningRate )
+{
+    return bg_model && bg_model->update ? bg_model->update( current_frame, bg_model, learningRate ) : 0;
+}
+
 // Function cvRefineForegroundMaskBySegm preforms FG post-processing based on segmentation
 // (all pixels of the segment will be classified as FG if majority of pixels of the region are FG).
 // parameters:
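The two wrappers added above stay model-agnostic because every concrete model registers its own release/update callbacks in the common CvBGStatModel header fields. A hedged sketch of that registration pattern, with purely illustrative names (MyBGModel, myRelease, myUpdate and createMyModel are not OpenCV symbols; the field list is assumed to come from the CV_BG_STAT_MODEL_FIELDS() macro in the background_segm header):

    #include <string.h>                               /* memset */
    #include "opencv2/video/background_segm.hpp"      /* assumed location of the BG model types */

    typedef struct MyBGModel
    {
        CV_BG_STAT_MODEL_FIELDS();   /* type, release, update, background, foreground, ... */
        int frame_count;             /* model-specific state */
    } MyBGModel;

    static void CV_CDECL myRelease( CvBGStatModel** model )
    {
        cvFree( model );                              /* cvReleaseBGStatModel dispatches here */
    }

    static int CV_CDECL myUpdate( IplImage* curr, CvBGStatModel* model, double learningRate )
    {
        (void)curr; (void)learningRate;
        ((MyBGModel*)model)->frame_count++;           /* cvUpdateBGStatModel dispatches here */
        return 1;
    }

    static CvBGStatModel* createMyModel( IplImage* first_frame )
    {
        MyBGModel* m = (MyBGModel*)cvAlloc( sizeof(*m) );
        memset( m, 0, sizeof(*m) );
        (void)first_frame;
        m->release = myRelease;                       /* registration that the wrappers rely on */
        m->update  = myUpdate;
        return (CvBGStatModel*)m;
    }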
......
@@ -52,7 +52,6 @@
 #endif

 #include "opencv2/video/tracking.hpp"
-#include "opencv2/video/blobtrack.hpp"
 #include "opencv2/video/background_segm.hpp"
 #include "opencv2/imgproc/imgproc.hpp"
 #include "opencv2/core/internal.hpp"
......