Commit 5ae550c6 authored by Maksim Shabunin's avatar Maksim Shabunin

Merge pull request #11320 from mshabunin:gstreamer-cpp

parents e82af627 53098323
......@@ -40,6 +40,8 @@
//M*/
#include "precomp.hpp"
#include <iostream>
using namespace std;
#include "cap_intelperc.hpp"
#include "cap_dshow.hpp"
......@@ -200,12 +202,6 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index)
TRY_OPEN(capture, cvCreateCameraCapture_V4L(index))
#endif
#ifdef HAVE_GSTREAMER
TRY_OPEN(capture, cvCreateCapture_GStreamer(CV_CAP_GSTREAMER_V4L2, reinterpret_cast<char *>(index)))
TRY_OPEN(capture, cvCreateCapture_GStreamer(CV_CAP_GSTREAMER_V4L, reinterpret_cast<char *>(index)))
#endif
if (pref) break; // CAP_VFW or CAP_V4L or CAP_V4L2
case CAP_FIREWIRE:
......@@ -221,11 +217,6 @@ CV_IMPL CvCapture * cvCreateCameraCapture (int index)
TRY_OPEN(capture, cvCreateCameraCapture_CMU(index))
#endif
#if defined(HAVE_GSTREAMER) && 0
// Re-enable again when gstreamer 1394 support will land in the backend code
TRY_OPEN(capture, cvCreateCapture_GStreamer(CV_CAP_GSTREAMER_1394, 0))
#endif
if (pref) break; // CAP_FIREWIRE
#ifdef HAVE_MIL
......@@ -330,12 +321,6 @@ CV_IMPL CvCapture * cvCreateFileCaptureWithPreference (const char * filename, in
if (apiPreference) break;
#endif
#ifdef HAVE_GSTREAMER
case CAP_GSTREAMER:
TRY_OPEN(result, cvCreateCapture_GStreamer (CV_CAP_GSTREAMER_FILE, filename))
if (apiPreference) break;
#endif
#if defined(HAVE_QUICKTIME) || defined(HAVE_QTKIT)
case CAP_QT:
TRY_OPEN(result, cvCreateFileCapture_QT (filename))
......@@ -463,6 +448,9 @@ static Ptr<IVideoCapture> IVideoCapture_create(int index)
{
int domains[] =
{
#ifdef HAVE_GSTREAMER
CAP_GSTREAMER,
#endif
#ifdef HAVE_DSHOW
CAP_DSHOW,
#endif
......@@ -490,7 +478,8 @@ static Ptr<IVideoCapture> IVideoCapture_create(int index)
// try every possibly installed camera API
for (int i = 0; domains[i] >= 0; i++)
{
#if defined(HAVE_DSHOW) || \
#if defined(HAVE_GSTREAMER) || \
defined(HAVE_DSHOW) || \
defined(HAVE_INTELPERC) || \
defined(WINRT_VIDEO) || \
defined(HAVE_GPHOTO2) || \
......@@ -499,6 +488,11 @@ static Ptr<IVideoCapture> IVideoCapture_create(int index)
switch (domains[i])
{
#ifdef HAVE_GSTREAMER
case CAP_GSTREAMER:
capture = createGStreamerCapture(index);
break;
#endif
#ifdef HAVE_DSHOW
case CAP_DSHOW:
capture = makePtr<VideoCapture_DShow>(index);
......@@ -536,6 +530,14 @@ static Ptr<IVideoCapture> IVideoCapture_create(const String& filename, int apiPr
{
bool useAny = (apiPreference == CAP_ANY);
Ptr<IVideoCapture> capture;
#ifdef HAVE_GSTREAMER
if (useAny || apiPreference == CAP_GSTREAMER)
{
capture = createGStreamerCapture(filename);
if (capture && capture->isOpened())
return capture;
}
#endif
#ifdef HAVE_XINE
if (useAny || apiPreference == CAP_XINE)
{
......
......@@ -48,6 +48,8 @@
* \brief Use GStreamer to read/write video
*/
#include "precomp.hpp"
#include <iostream>
using namespace std;
#ifndef _MSC_VER
#include <unistd.h>
#endif
......@@ -72,7 +74,7 @@
#ifdef NDEBUG
#define CV_WARN(message)
#else
#define CV_WARN(message) fprintf(stderr, "warning: %s (%s:%d)\n", message, __FILE__, __LINE__)
#define CV_WARN(message) fprintf(stderr, "OpenCV | GStreamer warning: %s (%s:%d)\n", message, __FILE__, __LINE__)
#endif
#if GST_VERSION_MAJOR == 0
......@@ -100,6 +102,7 @@ inline char *realpath(const char *path, char *resolved_path)
void toFraction(double decimal, double &numerator, double &denominator);
void handleMessage(GstElement * pipeline);
using namespace cv;
static cv::Mutex gst_initializer_mutex;
......@@ -120,116 +123,118 @@ private:
gst_initializer()
{
gst_init(NULL, NULL);
guint major, minor, micro, nano;
gst_version(&major, &minor, &micro, &nano);
if (GST_VERSION_MAJOR == major)
{
CV_WARN("incompatible gstreamer version");
}
// gst_debug_set_active(1);
// gst_debug_set_colored(1);
// gst_debug_set_default_threshold(GST_LEVEL_INFO);
}
};
/*!
* \brief The CvCapture_GStreamer class
* Use GStreamer to capture video
*/
class CvCapture_GStreamer CV_FINAL : public CvCapture
inline static string get_gst_propname(int propId)
{
public:
CvCapture_GStreamer() { init(); }
virtual ~CvCapture_GStreamer() { close(); }
switch (propId)
{
case CV_CAP_PROP_BRIGHTNESS: return "brightness";
case CV_CAP_PROP_CONTRAST: return "contrast";
case CV_CAP_PROP_SATURATION: return "saturation";
case CV_CAP_PROP_HUE: return "hue";
default: return string();
}
}
virtual bool open( int type, const char* filename );
virtual void close();
inline static bool is_gst_element_exists(const std::string & name)
{
GstElementFactory * testfac = gst_element_factory_find(name.c_str());
if (!testfac)
return false;
g_object_unref(G_OBJECT(testfac));
return true;
}
virtual double getProperty(int) const CV_OVERRIDE;
virtual bool setProperty(int, double) CV_OVERRIDE;
virtual bool grabFrame() CV_OVERRIDE;
virtual IplImage* retrieveFrame(int) CV_OVERRIDE;
//==================================================================================================
protected:
void init();
bool reopen();
bool isPipelinePlaying();
void startPipeline();
void stopPipeline();
void restartPipeline();
void setFilter(const char* prop, int type, int v1, int v2 = 0);
void removeFilter(const char *filter);
static void newPad(GstElement *myelement,
GstPad *pad,
gpointer data);
class GStreamerCapture : public IVideoCapture
{
private:
GstElement* pipeline;
GstElement* uridecodebin;
GstElement* v4l2src;
GstElement* color;
GstElement* sink;
#if GST_VERSION_MAJOR > 0
GstSample* sample;
#endif
#else
void * sample; // unused
GstBuffer* buffer;
#endif
GstCaps* caps;
IplImage* frame;
gint64 duration;
gint width;
gint height;
gint channels;
double fps;
bool isPosFramesSupported;
bool isPosFramesEmulated;
gint64 emulatedFrameNumber;
bool isOutputByteBuffer;
public:
GStreamerCapture();
~GStreamerCapture();
virtual bool grabFrame();
virtual bool retrieveFrame(int /*unused*/, OutputArray dst);
virtual double getProperty(int propId) const;
virtual bool setProperty(int propId, double value);
virtual bool isOpened() const;
virtual int getCaptureDomain(); // Return the type of the capture object: CAP_VFW, etc...
bool open(int id);
bool open(const String &filename_);
static void newPad(GstElement * /*elem*/, GstPad *pad, gpointer data);
protected:
bool determineFrameDims(Size & sz);
bool isPipelinePlaying();
void startPipeline();
void stopPipeline();
void restartPipeline();
void setFilter(const char *prop, int type, int v1, int v2);
void removeFilter(const char *filter);
};
/*!
* \brief CvCapture_GStreamer::init
* inits the class
*/
void CvCapture_GStreamer::init()
{
pipeline = NULL;
uridecodebin = NULL;
v4l2src = NULL;
color = NULL;
sink = NULL;
#if GST_VERSION_MAJOR > 0
sample = NULL;
GStreamerCapture::GStreamerCapture() :
pipeline(NULL), v4l2src(NULL), sink(NULL), sample(NULL),
#if GST_VERSION_MAJOR == 0
buffer(NULL),
#endif
buffer = NULL;
caps = NULL;
frame = NULL;
duration = -1;
width = -1;
height = -1;
fps = -1;
isPosFramesSupported = false;
isPosFramesEmulated = false;
emulatedFrameNumber = -1;
isOutputByteBuffer = false;
caps(NULL),
duration(-1), width(-1), height(-1), channels(0), fps(-1),
isPosFramesSupported(false),
isPosFramesEmulated(false),
emulatedFrameNumber(-1),
isOutputByteBuffer(false)
{
}
/*!
* \brief CvCapture_GStreamer::close
* Closes the pipeline and destroys all instances
*/
void CvCapture_GStreamer::close()
GStreamerCapture::~GStreamerCapture()
{
if (isPipelinePlaying())
this->stopPipeline();
if(pipeline) {
stopPipeline();
if (pipeline && GST_IS_ELEMENT(pipeline))
{
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
gst_object_unref(GST_OBJECT(pipeline));
pipeline = NULL;
}
duration = -1;
width = -1;
height = -1;
fps = -1;
isPosFramesSupported = false;
isPosFramesEmulated = false;
emulatedFrameNumber = -1;
}
/*!
......@@ -238,7 +243,7 @@ void CvCapture_GStreamer::close()
* Grabs a sample from the pipeline, awaiting consumation by retreiveFrame.
* The pipeline is started if it was not running yet
*/
bool CvCapture_GStreamer::grabFrame()
bool GStreamerCapture::grabFrame()
{
if(!pipeline)
return false;
......@@ -254,23 +259,18 @@ bool CvCapture_GStreamer::grabFrame()
#if GST_VERSION_MAJOR == 0
if(buffer)
gst_buffer_unref(buffer);
buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
if(!buffer)
return false;
#else
if(sample)
gst_sample_unref(sample);
sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
if(!sample)
return false;
buffer = gst_sample_get_buffer(sample);
gst_sample_ref(sample);
#endif
if(!buffer)
return false;
if (isPosFramesEmulated)
emulatedFrameNumber++;
......@@ -282,146 +282,145 @@ bool CvCapture_GStreamer::grabFrame()
* \return IplImage pointer. [Transfer Full]
* Retrieve the previously grabbed buffer, and wrap it in an IPLImage structure
*/
IplImage * CvCapture_GStreamer::retrieveFrame(int)
bool GStreamerCapture::retrieveFrame(int, OutputArray dst)
{
if(!buffer)
return 0;
#if GST_VERSION_MAJOR == 0
if (!buffer)
return false;
#else
if(!sample)
return false;
#endif
Size sz;
if (!determineFrameDims(sz))
return false;
//construct a frame header if we did not have any yet
if(!frame)
// gstreamer expects us to handle the memory at this point
// so we can just wrap the raw buffer and be done with it
#if GST_VERSION_MAJOR == 0
Mat src(sz, CV_8UC1, (uchar*)GST_BUFFER_DATA(buffer));
src.copyTo(dst);
#else
GstBuffer * buf = gst_sample_get_buffer(sample);
if (!buf)
return false;
GstMapInfo info;
if (!gst_buffer_map(buf, &info, GST_MAP_READ))
{
//something weird went wrong here. abort. abort.
CV_WARN("Failed to map GStreamerbuffer to system memory");
return false;
}
{
Mat src;
if (isOutputByteBuffer)
src = Mat(Size(info.size, 1), CV_8UC1, info.data);
else
src = Mat(sz, CV_MAKETYPE(CV_8U, channels), info.data);
CV_Assert(src.isContinuous());
src.copyTo(dst);
}
gst_buffer_unmap(buf, &info);
#endif
return true;
}
bool GStreamerCapture::determineFrameDims(Size &sz)
{
#if GST_VERSION_MAJOR == 0
GstCaps* buffer_caps = gst_buffer_get_caps(buffer);
GstCaps * frame_caps = gst_buffer_get_caps(buffer);
#else
GstCaps* buffer_caps = gst_sample_get_caps(sample);
GstCaps * frame_caps = gst_sample_get_caps(sample);
#endif
// bail out in no caps
assert(gst_caps_get_size(buffer_caps) == 1);
GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
// bail out in no caps
if (!GST_CAPS_IS_SIMPLE(frame_caps))
return false;
// bail out if width or height are 0
if(!gst_structure_get_int(structure, "width", &width) ||
!gst_structure_get_int(structure, "height", &height))
{
gst_caps_unref(buffer_caps);
return 0;
}
GstStructure* structure = gst_caps_get_structure(frame_caps, 0);
// bail out if width or height are 0
if (!gst_structure_get_int(structure, "width", &width)
|| !gst_structure_get_int(structure, "height", &height))
return false;
int depth = 3;
bool height_extend = false;
sz = Size(width, height);
#if GST_VERSION_MAJOR > 0
depth = 0;
const gchar* name = gst_structure_get_name(structure);
const gchar* format = gst_structure_get_string(structure, "format");
const gchar* name = gst_structure_get_name(structure);
if (!name)
return 0;
if (!name)
return false;
// we support 11 types of data:
// video/x-raw, format=BGR -> 8bit, 3 channels
// video/x-raw, format=GRAY8 -> 8bit, 1 channel
// video/x-raw, format=UYVY -> 8bit, 2 channel
// video/x-raw, format=YUY2 -> 8bit, 2 channel
// video/x-raw, format=YVYU -> 8bit, 2 channel
// video/x-raw, format=NV12 -> 8bit, 1 channel (height is 1.5x larger than true height)
// video/x-raw, format=NV21 -> 8bit, 1 channel (height is 1.5x larger than true height)
// video/x-raw, format=YV12 -> 8bit, 1 channel (height is 1.5x larger than true height)
// video/x-raw, format=I420 -> 8bit, 1 channel (height is 1.5x larger than true height)
// video/x-bayer -> 8bit, 1 channel
// image/jpeg -> 8bit, mjpeg: buffer_size x 1 x 1
// bayer data is never decoded, the user is responsible for that
// everything is 8 bit, so we just test the caps for bit depth
if (strcasecmp(name, "video/x-raw") == 0)
// we support 11 types of data:
// video/x-raw, format=BGR -> 8bit, 3 channels
// video/x-raw, format=GRAY8 -> 8bit, 1 channel
// video/x-raw, format=UYVY -> 8bit, 2 channel
// video/x-raw, format=YUY2 -> 8bit, 2 channel
// video/x-raw, format=YVYU -> 8bit, 2 channel
// video/x-raw, format=NV12 -> 8bit, 1 channel (height is 1.5x larger than true height)
// video/x-raw, format=NV21 -> 8bit, 1 channel (height is 1.5x larger than true height)
// video/x-raw, format=YV12 -> 8bit, 1 channel (height is 1.5x larger than true height)
// video/x-raw, format=I420 -> 8bit, 1 channel (height is 1.5x larger than true height)
// video/x-bayer -> 8bit, 1 channel
// image/jpeg -> 8bit, mjpeg: buffer_size x 1 x 1
// bayer data is never decoded, the user is responsible for that
// everything is 8 bit, so we just test the caps for bit depth
if (strcasecmp(name, "video/x-raw") == 0)
{
const gchar* format = gst_structure_get_string(structure, "format");
if (!format)
return false;
if (strcasecmp(format, "BGR") == 0)
{
if (!format)
return 0;
if (strcasecmp(format, "BGR") == 0) {
depth = 3;
}
else if( (strcasecmp(format, "UYVY") == 0) || (strcasecmp(format, "YUY2") == 0) || (strcasecmp(format, "YVYU") == 0) ){
depth = 2;
}
else if( (strcasecmp(format, "NV12") == 0) || (strcasecmp(format, "NV21") == 0) || (strcasecmp(format, "YV12") == 0) || (strcasecmp(format, "I420") == 0) ){
depth = 1;
height_extend = true;
}
else if(strcasecmp(format, "GRAY8") == 0){
depth = 1;
}
channels = 3;
}
else if (strcasecmp(name, "video/x-bayer") == 0)
else if( (strcasecmp(format, "UYVY") == 0) || (strcasecmp(format, "YUY2") == 0) || (strcasecmp(format, "YVYU") == 0) )
{
depth = 1;
} else if(strcasecmp(name, "image/jpeg") == 0) {
depth = 1;
// the correct size will be set once the first frame arrives
isOutputByteBuffer = true;
channels = 2;
}
#endif
if (depth > 0) {
if(height_extend){
frame = cvCreateImageHeader(cvSize(width, height*3/2), IPL_DEPTH_8U, depth);
}else{
frame = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, depth);
}
} else {
gst_caps_unref(buffer_caps);
return 0;
else if( (strcasecmp(format, "NV12") == 0) || (strcasecmp(format, "NV21") == 0) || (strcasecmp(format, "YV12") == 0) || (strcasecmp(format, "I420") == 0) )
{
channels = 1;
sz.height = sz.height * 3 / 2;
}
else if(strcasecmp(format, "GRAY8") == 0)
{
channels = 1;
}
gst_caps_unref(buffer_caps);
}
// gstreamer expects us to handle the memory at this point
// so we can just wrap the raw buffer and be done with it
#if GST_VERSION_MAJOR == 0
frame->imageData = (char *)GST_BUFFER_DATA(buffer);
#else
// info.data ptr is valid until next grabFrame where the associated sample is unref'd
GstMapInfo info = GstMapInfo();
gboolean success = gst_buffer_map(buffer,&info, (GstMapFlags)GST_MAP_READ);
// with MJPEG streams frame size can change arbitrarily
if (isOutputByteBuffer && (size_t)info.size != (size_t)frame->imageSize)
else if (strcasecmp(name, "video/x-bayer") == 0)
{
cvReleaseImageHeader(&frame);
frame = cvCreateImageHeader(cvSize(info.size, 1), IPL_DEPTH_8U, 1);
channels = 1;
}
if (!success){
//something weird went wrong here. abort. abort.
//fprintf(stderr,"GStreamer: unable to map buffer");
return 0;
else if(strcasecmp(name, "image/jpeg") == 0)
{
// the correct size will be set once the first frame arrives
channels = 1;
isOutputByteBuffer = true;
}
frame->imageData = (char*)info.data;
gst_buffer_unmap(buffer,&info);
#else
// we support only video/x-raw, format=BGR -> 8bit, 3 channels
channels = 3;
#endif
return frame;
return true;
}
/*!
* \brief CvCapture_GStreamer::isPipelinePlaying
* \return if the pipeline is currently playing.
*/
bool CvCapture_GStreamer::isPipelinePlaying()
bool GStreamerCapture::isPipelinePlaying()
{
GstState current, pending;
GstClockTime timeout = 5*GST_SECOND;
if(!GST_IS_ELEMENT(pipeline)){
return false;
}
GstStateChangeReturn ret = gst_element_get_state(GST_ELEMENT(pipeline),&current, &pending, timeout);
if (!ret){
//fprintf(stderr, "GStreamer: unable to query pipeline state\n");
GstStateChangeReturn ret = gst_element_get_state(pipeline, &current, &pending, timeout);
if (!ret)
{
CV_WARN("GStreamer: unable to query pipeline state");
return false;
}
return current == GST_STATE_PLAYING;
}
......@@ -429,12 +428,8 @@ bool CvCapture_GStreamer::isPipelinePlaying()
* \brief CvCapture_GStreamer::startPipeline
* Start the pipeline by setting it to the playing state
*/
void CvCapture_GStreamer::startPipeline()
void GStreamerCapture::startPipeline()
{
CV_FUNCNAME("icvStartPipeline");
__BEGIN__;
//fprintf(stderr, "relinked, pausing\n");
GstStateChangeReturn status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
if (status == GST_STATE_CHANGE_ASYNC)
......@@ -447,7 +442,7 @@ void CvCapture_GStreamer::startPipeline()
handleMessage(pipeline);
gst_object_unref(pipeline);
pipeline = NULL;
CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
CV_WARN("GStreamer: unable to start pipeline");
return;
}
......@@ -456,36 +451,28 @@ void CvCapture_GStreamer::startPipeline()
//printf("state now playing\n");
handleMessage(pipeline);
__END__;
}
/*!
* \brief CvCapture_GStreamer::stopPipeline
* Stop the pipeline by setting it to NULL
*/
void CvCapture_GStreamer::stopPipeline()
void GStreamerCapture::stopPipeline()
{
CV_FUNCNAME("icvStopPipeline");
__BEGIN__;
//fprintf(stderr, "restarting pipeline, going to ready\n");
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL) ==
GST_STATE_CHANGE_FAILURE) {
CV_ERROR(CV_StsError, "GStreamer: unable to stop pipeline\n");
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
{
CV_WARN("GStreamer: unable to stop pipeline");
gst_object_unref(pipeline);
pipeline = NULL;
return;
}
__END__;
}
/*!
* \brief CvCapture_GStreamer::restartPipeline
* Restart the pipeline
*/
void CvCapture_GStreamer::restartPipeline()
void GStreamerCapture::restartPipeline()
{
handleMessage(pipeline);
......@@ -493,7 +480,6 @@ void CvCapture_GStreamer::restartPipeline()
this->startPipeline();
}
/*!
* \brief CvCapture_GStreamer::setFilter
* \param prop the property name
......@@ -502,7 +488,7 @@ void CvCapture_GStreamer::restartPipeline()
* \param v2 second value of property type requires it, else NULL
* Filter the output formats by setting appsink caps properties
*/
void CvCapture_GStreamer::setFilter(const char *prop, int type, int v1, int v2)
void GStreamerCapture::setFilter(const char *prop, int type, int v1, int v2)
{
//printf("GStreamer: setFilter \n");
if(!caps || !( GST_IS_CAPS (caps) ))
......@@ -545,13 +531,12 @@ void CvCapture_GStreamer::setFilter(const char *prop, int type, int v1, int v2)
//printf("filtering with %s\n", gst_caps_to_string(caps));
}
/*!
* \brief CvCapture_GStreamer::removeFilter
* \param filter filter to remove
* remove the specified filter from the appsink template caps
*/
void CvCapture_GStreamer::removeFilter(const char *filter)
void GStreamerCapture::removeFilter(const char *filter)
{
if(!caps)
return;
......@@ -574,9 +559,7 @@ void CvCapture_GStreamer::removeFilter(const char *filter)
* decodebin creates pads based on stream information, which is not known upfront
* on receiving the pad-added signal, we connect it to the colorspace conversion element
*/
void CvCapture_GStreamer::newPad(GstElement * /*elem*/,
GstPad *pad,
gpointer data)
void GStreamerCapture::newPad(GstElement *, GstPad *pad, gpointer data)
{
GstPad *sinkpad;
GstElement *color = (GstElement *) data;
......@@ -591,6 +574,13 @@ void CvCapture_GStreamer::newPad(GstElement * /*elem*/,
gst_object_unref (sinkpad);
}
bool GStreamerCapture::isOpened() const
{
return pipeline != NULL;
}
int GStreamerCapture::getCaptureDomain() { return CAP_GSTREAMER; }
/*!
* \brief CvCapture_GStreamer::open Open the given file with gstreamer
* \param type CvCapture type. One of CV_CAP_GSTREAMER_*
......@@ -622,40 +612,31 @@ void CvCapture_GStreamer::newPad(GstElement * /*elem*/,
* I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually still using v4l (v1)?
*
*/
bool CvCapture_GStreamer::open( int type, const char* filename )
bool GStreamerCapture::open(int id)
{
CV_FUNCNAME("cvCaptureFromCAM_GStreamer");
__BEGIN__;
if (!is_gst_element_exists("v4l2src"))
return false;
std::ostringstream desc;
desc << "v4l2src device-name=/dev/video" << id
<< " ! " << COLOR_ELEM
<< " ! appsink";
return open(desc.str());
}
bool GStreamerCapture::open(const String &filename_)
{
gst_initializer::init();
const gchar * filename = filename_.c_str();
bool file = false;
bool stream = false;
bool manualpipeline = false;
char *uri = NULL;
uridecodebin = NULL;
GstElementFactory * testfac;
GstElement* uridecodebin = NULL;
GstElement* color = NULL;
GstStateChangeReturn status;
if (type == CV_CAP_GSTREAMER_V4L){
testfac = gst_element_factory_find("v4lsrc");
if (!testfac){
return false;
}
g_object_unref(G_OBJECT(testfac));
filename = "v4lsrc ! " COLOR_ELEM " ! appsink";
}
if (type == CV_CAP_GSTREAMER_V4L2){
testfac = gst_element_factory_find("v4l2src");
if (!testfac){
return false;
}
g_object_unref(G_OBJECT(testfac));
filename = "v4l2src ! " COLOR_ELEM " ! appsink";
}
// test if we have a valid uri. If so, open it with an uridecodebin
// else, we might have a file or a manual pipeline.
// if gstreamer cannot parse the manual pipeline, we assume we were given and
......@@ -687,7 +668,6 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
CV_WARN("GStreamer: Error opening file\n");
CV_WARN(filename);
CV_WARN(uri);
close();
return false;
}
}
......@@ -736,8 +716,7 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
if(!uridecodebin)
{
//fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
close();
CV_WARN("Can not parse GStreamer URI bin");
return false;
}
}
......@@ -802,7 +781,7 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
if (!sink)
{
CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
CV_WARN("GStreamer: cannot find appsink in manual pipeline\n");
return false;
}
......@@ -822,7 +801,7 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
{
if(!gst_element_link(uridecodebin, color))
{
CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
CV_WARN("cannot link color -> sink");
gst_object_unref(pipeline);
pipeline = NULL;
return false;
......@@ -835,7 +814,7 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
if(!gst_element_link(color, sink))
{
CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
CV_WARN("GStreamer: cannot link color -> sink\n");
gst_object_unref(pipeline);
pipeline = NULL;
return false;
......@@ -844,9 +823,10 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
//TODO: is 1 single buffer really high enough?
gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
// gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
//do not emit signals: all calls will be synchronous and blocking
gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);
gst_app_sink_set_emit_signals (GST_APP_SINK(sink), FALSE);
// gst_base_sink_set_sync(GST_BASE_SINK(sink), FALSE);
#if GST_VERSION_MAJOR == 0
caps = gst_caps_new_simple("video/x-raw-rgb",
......@@ -875,6 +855,8 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
gst_caps_unref(caps);
{
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-init");
status = gst_element_set_state(GST_ELEMENT(pipeline),
file ? GST_STATE_PAUSED : GST_STATE_PLAYING);
if (status == GST_STATE_CHANGE_ASYNC)
......@@ -884,10 +866,11 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
}
if (status == GST_STATE_CHANGE_FAILURE)
{
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-error");
handleMessage(pipeline);
gst_object_unref(pipeline);
pipeline = NULL;
CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
CV_WARN("GStreamer: unable to start pipeline\n");
return false;
}
......@@ -944,12 +927,12 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
#else
#define FORMAT format_
#endif
status_ = gst_element_query_position(pipeline, FORMAT, &value_);
status_ = gst_element_query_position(sink, FORMAT, &value_);
#undef FORMAT
if (!status_ || value_ != 0 || duration < 0)
{
CV_WARN(cv::format("Cannot query video position: status=%d value=%lld duration=%lld\n",
(int)status_, (long long int)value_, (long long int)duration).c_str());
(int)status_, (long long int)value_, (long long int)duration).c_str());
isPosFramesSupported = false;
isPosFramesEmulated = true;
emulatedFrameNumber = 0;
......@@ -961,8 +944,6 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
}
__END__;
return true;
}
......@@ -975,7 +956,7 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
* For frame-based properties, we use the caps of the lasst receivef sample. This means that some properties
* are not available until a first frame was received
*/
double CvCapture_GStreamer::getProperty( int propId ) const
double GStreamerCapture::getProperty(int propId) const
{
GstFormat format;
gint64 value;
......@@ -1032,38 +1013,28 @@ double CvCapture_GStreamer::getProperty( int propId ) const
return height;
case CV_CAP_PROP_FPS:
return fps;
case CV_CAP_PROP_FOURCC:
break;
case CV_CAP_PROP_FRAME_COUNT:
return duration;
case CV_CAP_PROP_FORMAT:
case CV_CAP_PROP_MODE:
case CV_CAP_PROP_BRIGHTNESS:
case CV_CAP_PROP_CONTRAST:
case CV_CAP_PROP_SATURATION:
case CV_CAP_PROP_HUE:
if (v4l2src)
{
const gchar * propName =
propId == CV_CAP_PROP_BRIGHTNESS ? "brightness" :
propId == CV_CAP_PROP_CONTRAST ? "contrast" :
propId == CV_CAP_PROP_SATURATION ? "saturation" :
propId == CV_CAP_PROP_HUE ? "hue" : NULL;
if (propName)
string propName = get_gst_propname(propId);
if (!propName.empty())
{
gint32 value32 = 0;
g_object_get(G_OBJECT(v4l2src), propName, &value32, NULL);
return value32;
gint32 val = 0;
g_object_get(G_OBJECT(v4l2src), propName.c_str(), &val, NULL);
return static_cast<double>(val);
}
}
case CV_CAP_PROP_GAIN:
case CV_CAP_PROP_CONVERT_RGB:
break;
case CV_CAP_GSTREAMER_QUEUE_LENGTH:
if(!sink) {
CV_WARN("GStreamer: there is no sink yet");
return false;
if(!sink)
{
CV_WARN("there is no sink yet");
return 0;
}
return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
default:
......@@ -1084,13 +1055,13 @@ double CvCapture_GStreamer::getProperty( int propId ) const
* Sets the desired property id with val. If the pipeline is running,
* it is briefly stopped and started again after the property was set
*/
bool CvCapture_GStreamer::setProperty( int propId, double value )
bool GStreamerCapture::setProperty(int propId, double value)
{
GstFormat format;
GstSeekFlags flags;
const GstSeekFlags flags = (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE);
if(!pipeline) {
CV_WARN("GStreamer: no pipeline");
if(!pipeline)
{
CV_WARN("no pipeline");
return false;
}
......@@ -1098,12 +1069,10 @@ bool CvCapture_GStreamer::setProperty( int propId, double value )
if (wasPlaying)
this->stopPipeline();
switch(propId) {
switch(propId)
{
case CV_CAP_PROP_POS_MSEC:
format = GST_FORMAT_TIME;
flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_TIME,
flags, (gint64) (value * GST_MSECOND))) {
handleMessage(pipeline);
CV_WARN("GStreamer: unable to seek");
......@@ -1138,10 +1107,9 @@ bool CvCapture_GStreamer::setProperty( int propId, double value )
}
}
return false;
CV_WARN("unable to seek");
}
format = GST_FORMAT_DEFAULT;
flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_DEFAULT,
flags, (gint64) value)) {
handleMessage(pipeline);
CV_WARN("GStreamer: unable to seek");
......@@ -1152,9 +1120,7 @@ bool CvCapture_GStreamer::setProperty( int propId, double value )
return true;
}
case CV_CAP_PROP_POS_AVI_RATIO:
format = GST_FORMAT_PERCENT;
flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_PERCENT,
flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
handleMessage(pipeline);
CV_WARN("GStreamer: unable to seek");
......@@ -1195,37 +1161,34 @@ bool CvCapture_GStreamer::setProperty( int propId, double value )
} else
removeFilter("framerate");
break;
case CV_CAP_PROP_FOURCC:
case CV_CAP_PROP_FRAME_COUNT:
case CV_CAP_PROP_FORMAT:
case CV_CAP_PROP_MODE:
case CV_CAP_PROP_BRIGHTNESS:
case CV_CAP_PROP_CONTRAST:
case CV_CAP_PROP_SATURATION:
case CV_CAP_PROP_HUE:
if (v4l2src)
{
const gchar * propName =
propId == CV_CAP_PROP_BRIGHTNESS ? "brightness" :
propId == CV_CAP_PROP_CONTRAST ? "contrast" :
propId == CV_CAP_PROP_SATURATION ? "saturation" :
propId == CV_CAP_PROP_HUE ? "hue" : NULL;
if (propName)
string propName = get_gst_propname(propId);
if (!propName.empty())
{
gint32 value32 = cv::saturate_cast<gint32>(value);
g_object_set(G_OBJECT(v4l2src), propName, &value32, NULL);
gint32 val = cv::saturate_cast<gint32>(value);
g_object_set(G_OBJECT(v4l2src), propName.c_str(), &val, NULL);
return true;
}
}
return false;
case CV_CAP_PROP_GAIN:
case CV_CAP_PROP_CONVERT_RGB:
break;
case CV_CAP_GSTREAMER_QUEUE_LENGTH:
{
if(!sink)
break;
{
CV_WARN("there is no sink yet");
return false;
}
gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
break;
return true;
}
default:
CV_WARN("GStreamer: unhandled property");
}
......@@ -1236,23 +1199,24 @@ bool CvCapture_GStreamer::setProperty( int propId, double value )
return false;
}
/*!
* \brief cvCreateCapture_GStreamer
* \param type
* \param filename
* \return
*/
CvCapture* cvCreateCapture_GStreamer(int type, const char* filename )
{
CvCapture_GStreamer* capture = new CvCapture_GStreamer;
if( capture->open( type, filename ))
return capture;
Ptr<IVideoCapture> cv::createGStreamerCapture(const String& filename)
{
Ptr<GStreamerCapture> cap = makePtr<GStreamerCapture>();
if (cap && cap->open(filename))
return cap;
return Ptr<IVideoCapture>();
}
delete capture;
return 0;
Ptr<IVideoCapture> cv::createGStreamerCapture(int index)
{
Ptr<GStreamerCapture> cap = makePtr<GStreamerCapture>();
if (cap && cap->open(index))
return cap;
return Ptr<IVideoCapture>();
}
//==================================================================================================
/*!
* \brief The CvVideoWriter_GStreamer class
......@@ -1420,8 +1384,6 @@ const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename)
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
double fps, CvSize frameSize, bool is_color )
{
CV_FUNCNAME("CvVideoWriter_GStreamer::open");
// check arguments
assert (filename);
assert (fps > 0);
......@@ -1459,8 +1421,6 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
// we first try to construct a pipeline from the given string.
// if that fails, we assume it is an ordinary filename
__BEGIN__;
encodebin = gst_parse_launch(filename, &err);
manualpipeline = (encodebin != NULL);
......@@ -1469,7 +1429,7 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
#if GST_VERSION_MAJOR == 0
it = gst_bin_iterate_sources(GST_BIN(encodebin));
if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
CV_WARN("GStreamer: cannot find appsink in manual pipeline\n");
return false;
}
#else
......@@ -1503,7 +1463,7 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
gst_iterator_free (it);
if (!source){
CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
CV_WARN("GStreamer: cannot find appsrc in manual pipeline\n");
return false;
}
#endif
......@@ -1528,13 +1488,15 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
if (!videocaps){
CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this codec.");
CV_WARN("Gstreamer Opencv backend does not support this codec.");
return false;
}
//create container caps from file extension
mime = filenameToMimetype(filename);
if (!mime) {
CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this file type.");
CV_WARN("Gstreamer Opencv backend does not support this file type.");
return false;
}
#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
......@@ -1566,7 +1528,8 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
NULL);
caps = gst_caps_fixate(caps);
#else
CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer 0.10 Opencv backend does not support writing encoded MJPEG data.");
CV_WARN("Gstreamer 0.10 Opencv backend does not support writing encoded MJPEG data.");
return false;
#endif
}
else if(is_color)
......@@ -1633,7 +1596,8 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
if(!gst_element_link_many(source, encodebin, file, NULL)) {
CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
CV_WARN("GStreamer: cannot link elements\n");
return false;
}
}
......@@ -1697,7 +1661,8 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
if(stateret == GST_STATE_CHANGE_FAILURE) {
handleMessage(pipeline);
CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
CV_WARN("GStreamer: cannot put pipeline to play\n");
return false;
}
framerate = fps;
......@@ -1705,8 +1670,6 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
handleMessage(pipeline);
__END__;
return true;
}
......@@ -1721,38 +1684,37 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
*/
bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
{
CV_FUNCNAME("CvVideoWriter_GStreamer::writerFrame");
GstClockTime duration, timestamp;
GstFlowReturn ret;
int size;
__BEGIN__;
handleMessage(pipeline);
#if GST_VERSION_MAJOR > 0
if (input_pix_fmt == GST_VIDEO_FORMAT_ENCODED) {
if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U || image->height != 1) {
CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U, nChannels = 1 and height = 1.");
CV_WARN("cvWriteFrame() needs images with depth = IPL_DEPTH_8U, nChannels = 1 and height = 1.");
return false;
}
}
else
#endif
if(input_pix_fmt == GST_VIDEO_FORMAT_BGR) {
if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) {
CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
CV_WARN("cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
return false;
}
}
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
else if (input_pix_fmt == GST_VIDEO_FORMAT_GRAY8) {
if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) {
CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
CV_WARN("cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
return false;
}
}
#endif
else {
CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs BGR or grayscale images\n");
CV_WARN("cvWriteFrame() needs BGR or grayscale images\n");
return false;
}
......@@ -1765,7 +1727,7 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
buffer = gst_buffer_try_new_and_alloc (size);
if (!buffer)
{
CV_ERROR(CV_StsBadSize, "Cannot create GStreamer buffer");
CV_WARN("Cannot create GStreamer buffer");
}
memcpy(GST_BUFFER_DATA (buffer), (guint8*)image->imageData, size);
......@@ -1794,8 +1756,6 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
++num_frames;
__END__;
return true;
}
......@@ -1848,8 +1808,6 @@ void toFraction(double decimal, double &numerator, double &denominator)
*/
void handleMessage(GstElement * pipeline)
{
CV_FUNCNAME("handlemessage");
GError *err = NULL;
gchar *debug = NULL;
GstBus* bus = NULL;
......@@ -1857,7 +1815,6 @@ void handleMessage(GstElement * pipeline)
GstElement * elem = NULL;
GstMessage* msg = NULL;
__BEGIN__;
bus = gst_element_get_bus(pipeline);
while(gst_bus_have_pending(bus)) {
......@@ -1867,7 +1824,7 @@ void handleMessage(GstElement * pipeline)
if(gst_is_missing_plugin_message(msg))
{
CV_ERROR(CV_StsError, "GStreamer: your gstreamer installation is missing a required plugin\n");
CV_WARN("your gstreamer installation is missing a required plugin\n");
}
else
{
......@@ -1906,6 +1863,4 @@ void handleMessage(GstElement * pipeline)
}
gst_object_unref(GST_OBJECT(bus));
__END__
}
......@@ -139,7 +139,6 @@ CvVideoWriter* cvCreateVideoWriter_Images(const char* filename);
#define CV_CAP_GSTREAMER_V4L2 2
#define CV_CAP_GSTREAMER_FILE 3
CvCapture* cvCreateCapture_GStreamer(int type, const char *filename);
CvCapture* cvCreateFileCapture_FFMPEG_proxy(const char* filename);
......@@ -194,7 +193,11 @@ namespace cv
Ptr<IVideoCapture> createGPhoto2Capture(int index);
Ptr<IVideoCapture> createGPhoto2Capture(const String& deviceName);
Ptr<IVideoCapture> createXINECapture(const char* filename);
Ptr<IVideoCapture> createGStreamerCapture(const String& filename);
Ptr<IVideoCapture> createGStreamerCapture(int index);
}
#endif /* __VIDEOIO_H_ */
......@@ -7,6 +7,7 @@
namespace opencv_test
{
typedef tuple< string, Size, Size, int > Param;
typedef testing::TestWithParam< Param > Videoio_Gstreamer_Test;
......@@ -19,8 +20,9 @@ TEST_P(Videoio_Gstreamer_Test, test_object_structure)
int count_frames = 10;
std::ostringstream pipeline;
pipeline << "videotestsrc pattern=ball num-buffers=" << count_frames << " ! " << format;
pipeline << ", framerate=1/1, width=" << frame_size.width << ", height=" << frame_size.height << " ! appsink";
VideoCapture cap(pipeline.str(), CAP_GSTREAMER);
pipeline << ", width=" << frame_size.width << ", height=" << frame_size.height << " ! appsink";
VideoCapture cap;
ASSERT_NO_THROW(cap.open(pipeline.str(), CAP_GSTREAMER));
ASSERT_TRUE(cap.isOpened());
Mat buffer, decode_frame, gray_frame, rgb_frame;
......
......@@ -46,12 +46,61 @@
namespace opencv_test
{
// Thin wrapper around the VideoCaptureAPIs enum that adds a human-readable
// backend name; used by the tests for log output and printed parameter values.
struct VideoCaptureAPI
{
VideoCaptureAPIs api;
// Map the wrapped backend id to its enumerator name.
// NOTE: deliberately no 'default' label — compilers can then warn when a
// new VideoCaptureAPIs value is added but not handled here.
inline const char * toString() const
{
switch (api)
{
case CAP_ANY: return "CAP_ANY";
#ifdef __linux__
case CAP_V4L2: return "CAP_V4L/CAP_V4L2";
#else
case CAP_VFW: return "CAP_VFW";
#endif
case CAP_FIREWIRE: return "CAP_FIREWIRE";
case CAP_QT: return "CAP_QT";
case CAP_UNICAP: return "CAP_UNICAP";
case CAP_DSHOW: return "CAP_DSHOW";
case CAP_PVAPI: return "CAP_PVAPI";
case CAP_OPENNI: return "CAP_OPENNI";
case CAP_OPENNI_ASUS: return "CAP_OPENNI_ASUS";
case CAP_ANDROID: return "CAP_ANDROID";
case CAP_XIAPI: return "CAP_XIAPI";
case CAP_AVFOUNDATION: return "CAP_AVFOUNDATION";
case CAP_GIGANETIX: return "CAP_GIGANETIX";
case CAP_MSMF: return "CAP_MSMF";
case CAP_WINRT: return "CAP_WINRT";
case CAP_INTELPERC: return "CAP_INTELPERC";
case CAP_OPENNI2: return "CAP_OPENNI2";
case CAP_OPENNI2_ASUS: return "CAP_OPENNI2_ASUS";
case CAP_GPHOTO2: return "CAP_GPHOTO2";
case CAP_GSTREAMER: return "CAP_GSTREAMER";
case CAP_FFMPEG: return "CAP_FFMPEG";
case CAP_IMAGES: return "CAP_IMAGES";
case CAP_ARAVIS: return "CAP_ARAVIS";
case CAP_OPENCV_MJPEG: return "CAP_OPENCV_MJPEG";
case CAP_INTEL_MFX: return "CAP_INTEL_MFX";
}
// Falls through here for any enum value not covered by the switch above.
return "unknown";
}
// Implicit conversion from int keeps call sites terse (e.g. passing CAP_ANY).
VideoCaptureAPI(int api_ = CAP_ANY) : api((VideoCaptureAPIs)api_) {}
// NOTE(review): this conversion could be marked const; kept as-is.
operator int() { return api; }
};
// Stream a VideoCaptureAPI as its human-readable backend name.
inline std::ostream &operator<<(std::ostream &out, const VideoCaptureAPI & api)
{
    return out << api.toString();
}
class Videoio_Test_Base
{
protected:
string ext;
string video_file;
int apiPref;
VideoCaptureAPI apiPref;
protected:
Videoio_Test_Base() {}
virtual ~Videoio_Test_Base() {}
......@@ -60,14 +109,16 @@ protected:
// Read the next frame from 'cap' and fail the test if reading throws or
// yields an empty image; 'idx' identifies the frame for diagnostics and
// for the content check.
void checkFrameRead(int idx, VideoCapture & cap)
{
//int frameID = (int)cap.get(CAP_PROP_POS_FRAMES);
Mat img;
// Grabbing must not throw; decode failures should surface as an empty Mat.
ASSERT_NO_THROW(cap >> img);
//std::cout << "idx=" << idx << " img=" << img.size() << " frameID=" << frameID << std::endl;
ASSERT_FALSE(img.empty()) << "idx=" << idx;
// Delegate further validation of the frame's pixels to checkFrameContent.
checkFrameContent(img, idx);
}
void checkFrameSeek(int idx, VideoCapture & cap)
{
bool canSeek = cap.set(CAP_PROP_POS_FRAMES, idx);
bool canSeek = false;
ASSERT_NO_THROW(canSeek = cap.set(CAP_PROP_POS_FRAMES, idx));
if (!canSeek)
{
std::cout << "Seek to frame '" << idx << "' is not supported. SKIP." << std::endl;
......@@ -79,26 +130,15 @@ protected:
public:
void doTest()
{
if (apiPref == CAP_AVFOUNDATION)
{
// TODO: fix this backend
std::cout << "SKIP test: AVFoundation backend returns invalid frame count" << std::endl;
return;
}
else if (apiPref == CAP_VFW)
{
// TODO: fix this backend
std::cout << "SKIP test: Video for Windows backend not open files" << std::endl;
return;
}
VideoCapture cap(video_file, apiPref);
VideoCapture cap;
ASSERT_NO_THROW(cap.open(video_file, apiPref));
if (!cap.isOpened())
{
std::cout << "SKIP test: backend " << apiPref << " can't open the video: " << video_file << std::endl;
return;
}
int n_frames = (int)cap.get(CAP_PROP_FRAME_COUNT);
int n_frames = -1;
EXPECT_NO_THROW(n_frames = (int)cap.get(CAP_PROP_FRAME_COUNT));
if (n_frames > 0)
{
ASSERT_GT(n_frames, 0);
......@@ -124,7 +164,8 @@ public:
checkFrameRead(k, cap);
}
}
bool canSeek = cap.set(CAP_PROP_POS_FRAMES, 0);
bool canSeek = false;
EXPECT_NO_THROW(canSeek = cap.set(CAP_PROP_POS_FRAMES, 0));
if (!canSeek)
{
std::cout << "Seek to frame '0' is not supported. SKIP all 'seek' tests." << std::endl;
......@@ -134,7 +175,9 @@ public:
if (ext != "wmv" && ext != "h264" && ext != "h265")
{
SCOPED_TRACE("progressive seek");
ASSERT_TRUE(cap.set(CAP_PROP_POS_FRAMES, 0));
bool res = false;
EXPECT_NO_THROW(res = cap.set(CAP_PROP_POS_FRAMES, 0));
ASSERT_TRUE(res);
for (int k = 0; k < n_frames; k += 20)
{
checkFrameSeek(k, cap);
......@@ -144,7 +187,9 @@ public:
if (ext != "mpg" && ext != "wmv" && ext != "h264" && ext != "h265")
{
SCOPED_TRACE("random seek");
ASSERT_TRUE(cap.set(CAP_PROP_POS_FRAMES, 0));
bool res = false;
EXPECT_NO_THROW(res = cap.set(CAP_PROP_POS_FRAMES, 0));
ASSERT_TRUE(res);
for (int k = 0; k < 10; ++k)
{
checkFrameSeek(cvtest::TS::ptr()->get_rng().uniform(0, n_frames), cap);
......@@ -154,7 +199,7 @@ public:
};
//==================================================================================================
typedef tuple<string, int> Backend_Type_Params;
typedef tuple<string, VideoCaptureAPI> Backend_Type_Params;
class Videoio_Bunny : public Videoio_Test_Base, public testing::TestWithParam<Backend_Type_Params>
{
......@@ -168,37 +213,29 @@ public:
}
void doFrameCountTest()
{
if (apiPref == CAP_AVFOUNDATION)
{
// TODO: fix this backend
std::cout << "SKIP test: AVFoundation backend returns invalid frame count" << std::endl;
return;
}
else if (apiPref == CAP_VFW)
{
// TODO: fix this backend
std::cout << "SKIP test: Video for Windows backend not open files" << std::endl;
return;
}
VideoCapture cap(video_file, apiPref);
VideoCapture cap;
EXPECT_NO_THROW(cap.open(video_file, apiPref));
if (!cap.isOpened())
{
std::cout << "SKIP test: backend " << apiPref << " can't open the video: " << video_file << std::endl;
return;
}
EXPECT_EQ(bunny_param.getWidth() , cap.get(CAP_PROP_FRAME_WIDTH));
EXPECT_EQ(bunny_param.getHeight(), cap.get(CAP_PROP_FRAME_HEIGHT));
Size actual;
EXPECT_NO_THROW(actual = Size((int)cap.get(CAP_PROP_FRAME_WIDTH),
(int)cap.get(CAP_PROP_FRAME_HEIGHT)));
EXPECT_EQ(bunny_param.getWidth(), actual.width);
EXPECT_EQ(bunny_param.getHeight(), actual.height);
double fps_prop = cap.get(CAP_PROP_FPS);
double fps_prop = 0;
EXPECT_NO_THROW(fps_prop = cap.get(CAP_PROP_FPS));
if (fps_prop > 0)
EXPECT_NEAR(fps_prop, bunny_param.getFps(), 1);
else
std::cout << "FPS is not available. SKIP check." << std::endl;
int count_prop = 0;
count_prop = (int)cap.get(CAP_PROP_FRAME_COUNT);
EXPECT_NO_THROW(count_prop = (int)cap.get(CAP_PROP_FRAME_COUNT));
// mpg file reports 5.08 sec * 24 fps => property returns 122 frames
// but actual number of frames returned is 125
if (ext != "mpg")
......@@ -213,7 +250,7 @@ public:
while (cap.isOpened())
{
Mat frame;
cap >> frame;
EXPECT_NO_THROW(cap >> frame);
if (frame.empty())
break;
EXPECT_EQ(bunny_param.getWidth(), frame.cols);
......@@ -229,7 +266,15 @@ public:
}
};
typedef tuple<string, string, float, int> Ext_Fourcc_PSNR;
//==================================================================================================
// Parameters of one synthetic video write/read test case.
struct Ext_Fourcc_PSNR
{
string ext; // container file extension, e.g. "avi", "mkv"
string fourcc; // four-character codec code, e.g. "MJPG"
float PSNR; // PSNR bound for the test (assigned to PSNR_GT by Videoio_Synthetic)
VideoCaptureAPI api; // videoio backend used for writing/reading
};
typedef tuple<Size, Ext_Fourcc_PSNR> Size_Ext_Fourcc_PSNR;
class Videoio_Synthetic : public Videoio_Test_Base, public testing::TestWithParam<Size_Ext_Fourcc_PSNR>
......@@ -243,39 +288,27 @@ public:
// Unpack the test parameters (frame size + ext/fourcc/PSNR/backend bundle)
// and derive the temporary output file name for the synthetic video.
Videoio_Synthetic()
{
frame_size = get<0>(GetParam());
const Ext_Fourcc_PSNR p = get<1>(GetParam());
ext = p.ext;
fourcc = fourccFromString(p.fourcc);
PSNR_GT = p.PSNR;
// Temp file name embeds codec and extension, e.g. "MJPG.avi".
video_file = cv::tempfile((fourccToString(fourcc) + "." + ext).c_str());
frame_count = 100;
fps = 25.;
apiPref = p.api;
}
// Write 'frame_count' generated frames into 'video_file' using the backend
// under test; the resulting file is read back by the test body afterwards.
void SetUp()
{
Mat img(frame_size, CV_8UC3);
VideoWriter writer;
// Opening must not throw; a backend that cannot handle this
// codec/container combination trips the ASSERT below and skips the writes.
EXPECT_NO_THROW(writer.open(video_file, apiPref, fourcc, fps, frame_size, true));
ASSERT_TRUE(writer.isOpened());
for(int i = 0; i < frame_count; ++i )
{
// Each frame's content is a function of its index, so reads can be verified.
generateFrame(i, frame_count, img);
EXPECT_NO_THROW(writer << img);
}
// Release explicitly so the output file is closed before the test reads it.
EXPECT_NO_THROW(writer.release());
}
void TearDown()
{
......@@ -301,6 +334,10 @@ public:
if (fourcc == VideoWriter::fourcc('M', 'P', 'E', 'G') && ext == "mkv")
expected_frame_count.end += 1;
// Workaround for some gstreamer pipelines
if (apiPref == CAP_GSTREAMER)
expected_frame_count.start -= 1;
ASSERT_LE(expected_frame_count.start, actual);
ASSERT_GE(expected_frame_count.end, actual);
......@@ -310,22 +347,24 @@ public:
//==================================================================================================
int backend_params[] = {
static VideoCaptureAPI backend_params[] = {
#ifdef HAVE_QUICKTIME
CAP_QT,
#endif
#ifdef HAVE_AVFOUNDATION
CAP_AVFOUNDATION,
#endif
// TODO: Broken?
//#ifdef HAVE_AVFOUNDATION
// CAP_AVFOUNDATION,
//#endif
#ifdef HAVE_MSMF
CAP_MSMF,
#endif
#ifdef HAVE_VFW
CAP_VFW,
#endif
// TODO: Broken?
//#ifdef HAVE_VFW
// CAP_VFW,
//#endif
#ifdef HAVE_GSTREAMER
CAP_GSTREAMER,
......@@ -343,7 +382,7 @@ int backend_params[] = {
// CAP_INTEL_MFX
};
string bunny_params[] = {
static string bunny_params[] = {
#ifdef HAVE_VIDEO_INPUT
string("wmv"),
string("mov"),
......@@ -368,12 +407,22 @@ INSTANTIATE_TEST_CASE_P(videoio, Videoio_Bunny,
//==================================================================================================
inline Ext_Fourcc_PSNR makeParam(const char * ext, const char * fourcc, float psnr, int apipref)
// Convenience factory: bundle one synthetic-test parameter set
// (container extension, codec fourcc, PSNR bound, backend) into a struct.
inline Ext_Fourcc_PSNR makeParam(const char * ext, const char * fourcc, float psnr, VideoCaptureAPIs apipref)
{
    Ext_Fourcc_PSNR param = { string(ext), string(fourcc), psnr, VideoCaptureAPI(apipref) };
    return param;
}
Ext_Fourcc_PSNR synthetic_params[] = {
// Pretty-print a test parameter set, e.g.: FOURCC(MJPG), .avi, CAP_GSTREAMER, 30dB
inline static std::ostream &operator<<(std::ostream &out, const Ext_Fourcc_PSNR &p)
{
    out << "FOURCC(" << p.fourcc << "), ." << p.ext;
    out << ", " << p.api << ", " << p.PSNR << "dB";
    return out;
}
static Ext_Fourcc_PSNR synthetic_params[] = {
#ifdef HAVE_MSMF
#if !defined(_M_ARM)
......@@ -385,16 +434,17 @@ Ext_Fourcc_PSNR synthetic_params[] = {
makeParam("mov", "H264", 30.f, CAP_MSMF),
#endif
#ifdef HAVE_VFW
#if !defined(_M_ARM)
makeParam("wmv", "WMV1", 30.f, CAP_VFW),
makeParam("wmv", "WMV2", 30.f, CAP_VFW),
#endif
makeParam("wmv", "WMV3", 30.f, CAP_VFW),
makeParam("wmv", "WVC1", 30.f, CAP_VFW),
makeParam("avi", "H264", 30.f, CAP_VFW),
makeParam("avi", "MJPG", 30.f, CAP_VFW),
#endif
// TODO: Broken?
//#ifdef HAVE_VFW
//#if !defined(_M_ARM)
// makeParam("wmv", "WMV1", 30.f, CAP_VFW),
// makeParam("wmv", "WMV2", 30.f, CAP_VFW),
//#endif
// makeParam("wmv", "WMV3", 30.f, CAP_VFW),
// makeParam("wmv", "WVC1", 30.f, CAP_VFW),
// makeParam("avi", "H264", 30.f, CAP_VFW),
// makeParam("avi", "MJPG", 30.f, CAP_VFW),
//#endif
#ifdef HAVE_QUICKTIME
makeParam("mov", "mp4v", 30.f, CAP_QT),
......@@ -408,17 +458,18 @@ Ext_Fourcc_PSNR synthetic_params[] = {
makeParam("mkv", "MJPG", 30.f, CAP_QT),
#endif
#ifdef HAVE_AVFOUNDATION
makeParam("mov", "mp4v", 30.f, CAP_AVFOUNDATION),
makeParam("avi", "XVID", 30.f, CAP_AVFOUNDATION),
makeParam("avi", "MPEG", 30.f, CAP_AVFOUNDATION),
makeParam("avi", "IYUV", 30.f, CAP_AVFOUNDATION),
makeParam("avi", "MJPG", 30.f, CAP_AVFOUNDATION),
// TODO: Broken?
//#ifdef HAVE_AVFOUNDATION
// makeParam("mov", "mp4v", 30.f, CAP_AVFOUNDATION),
// makeParam("avi", "XVID", 30.f, CAP_AVFOUNDATION),
// makeParam("avi", "MPEG", 30.f, CAP_AVFOUNDATION),
// makeParam("avi", "IYUV", 30.f, CAP_AVFOUNDATION),
// makeParam("avi", "MJPG", 30.f, CAP_AVFOUNDATION),
makeParam("mkv", "XVID", 30.f, CAP_AVFOUNDATION),
makeParam("mkv", "MPEG", 30.f, CAP_AVFOUNDATION),
makeParam("mkv", "MJPG", 30.f, CAP_AVFOUNDATION),
#endif
// makeParam("mkv", "XVID", 30.f, CAP_AVFOUNDATION),
// makeParam("mkv", "MPEG", 30.f, CAP_AVFOUNDATION),
// makeParam("mkv", "MJPG", 30.f, CAP_AVFOUNDATION),
//#endif
#ifdef HAVE_FFMPEG
makeParam("avi", "XVID", 30.f, CAP_FFMPEG),
......@@ -432,15 +483,13 @@ Ext_Fourcc_PSNR synthetic_params[] = {
#endif
#ifdef HAVE_GSTREAMER
// makeParam("avi", "XVID", 30.f, CAP_GSTREAMER), - corrupted frames, broken indexes
makeParam("avi", "MPEG", 30.f, CAP_GSTREAMER),
makeParam("avi", "IYUV", 30.f, CAP_GSTREAMER),
makeParam("avi", "MJPG", 30.f, CAP_GSTREAMER),
makeParam("avi", "H264", 30.f, CAP_GSTREAMER),
// makeParam("mkv", "XVID", 30.f, CAP_GSTREAMER),
makeParam("mkv", "MPEG", 30.f, CAP_GSTREAMER),
makeParam("mkv", "MJPG", 30.f, CAP_GSTREAMER),
makeParam("mkv", "H264", 30.f, CAP_GSTREAMER),
#endif
makeParam("avi", "MJPG", 30.f, CAP_OPENCV_MJPEG),
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment