Commit 4f3453db authored by Dirk Van Haerenborgh

bomb commit of gstreamer videocapture and videowriter

parent 0990b42e
@@ -120,6 +120,7 @@ OCV_OPTION(WITH_NVCUVID "Include NVidia Video Decoding library support"
OCV_OPTION(WITH_EIGEN "Include Eigen2/Eigen3 support" ON)
OCV_OPTION(WITH_FFMPEG "Include FFMPEG support" ON IF (NOT ANDROID AND NOT IOS))
OCV_OPTION(WITH_GSTREAMER "Include Gstreamer support" ON IF (UNIX AND NOT APPLE AND NOT ANDROID) )
OCV_OPTION(WITH_GSTREAMER_1_X "Include Gstreamer 1.x support" OFF)
OCV_OPTION(WITH_GTK "Include GTK support" ON IF (UNIX AND NOT APPLE AND NOT ANDROID) )
OCV_OPTION(WITH_IPP "Include Intel IPP support" OFF IF (MSVC OR X86 OR X86_64) )
OCV_OPTION(WITH_JASPER "Include JPEG2K support" ON IF (NOT IOS) )
@@ -707,10 +708,12 @@ endif(DEFINED WITH_FFMPEG)
if(DEFINED WITH_GSTREAMER)
status(" GStreamer:" HAVE_GSTREAMER THEN "" ELSE NO)
if(HAVE_GSTREAMER)
status(" base:" "YES (ver ${ALIASOF_gstreamer-base-0.10_VERSION})")
status(" app:" "YES (ver ${ALIASOF_gstreamer-app-0.10_VERSION})")
status(" video:" "YES (ver ${ALIASOF_gstreamer-video-0.10_VERSION})")
endif()
status(" base:" "YES (ver ${GSTREAMER_BASE_VERSION})")
status(" video:" "YES (ver ${GSTREAMER_VIDEO_VERSION})")
status(" app:" "YES (ver ${GSTREAMER_APP_VERSION})")
status(" riff:" "YES (ver ${GSTREAMER_RIFF_VERSION})")
status(" pbutils:" "YES (ver ${GSTREAMER_PBUTILS_VERSION})")
endif(HAVE_GSTREAMER)
endif(DEFINED WITH_GSTREAMER)
if(DEFINED WITH_OPENNI)
......
@@ -4,15 +4,44 @@
# --- GStreamer ---
ocv_clear_vars(HAVE_GSTREAMER)
if(WITH_GSTREAMER)
CHECK_MODULE(gstreamer-base-0.10 HAVE_GSTREAMER)
if(HAVE_GSTREAMER)
CHECK_MODULE(gstreamer-app-0.10 HAVE_GSTREAMER)
# try to find gstreamer 0.10 first
if(WITH_GSTREAMER AND NOT WITH_GSTREAMER_1_X)
CHECK_MODULE(gstreamer-base-0.10 HAVE_GSTREAMER_BASE)
CHECK_MODULE(gstreamer-video-0.10 HAVE_GSTREAMER_VIDEO)
CHECK_MODULE(gstreamer-app-0.10 HAVE_GSTREAMER_APP)
CHECK_MODULE(gstreamer-riff-0.10 HAVE_GSTREAMER_RIFF)
CHECK_MODULE(gstreamer-pbutils-0.10 HAVE_GSTREAMER_PBUTILS)
if(HAVE_GSTREAMER_BASE AND HAVE_GSTREAMER_VIDEO AND HAVE_GSTREAMER_APP AND HAVE_GSTREAMER_RIFF AND HAVE_GSTREAMER_PBUTILS)
set(HAVE_GSTREAMER TRUE)
set(GSTREAMER_BASE_VERSION ${ALIASOF_gstreamer-base-0.10_VERSION})
set(GSTREAMER_VIDEO_VERSION ${ALIASOF_gstreamer-video-0.10_VERSION})
set(GSTREAMER_APP_VERSION ${ALIASOF_gstreamer-app-0.10_VERSION})
set(GSTREAMER_RIFF_VERSION ${ALIASOF_gstreamer-riff-0.10_VERSION})
set(GSTREAMER_PBUTILS_VERSION ${ALIASOF_gstreamer-pbutils-0.10_VERSION})
endif()
if(HAVE_GSTREAMER)
CHECK_MODULE(gstreamer-video-0.10 HAVE_GSTREAMER)
endif(WITH_GSTREAMER AND NOT WITH_GSTREAMER_1_X)
# if gstreamer 0.10 was not found, or we specified we wanted 1.x, try to find it
if(WITH_GSTREAMER_1_X OR NOT HAVE_GSTREAMER)
#check for 1.x
CHECK_MODULE(gstreamer-base-1.0 HAVE_GSTREAMER_BASE)
CHECK_MODULE(gstreamer-video-1.0 HAVE_GSTREAMER_VIDEO)
CHECK_MODULE(gstreamer-app-1.0 HAVE_GSTREAMER_APP)
CHECK_MODULE(gstreamer-riff-1.0 HAVE_GSTREAMER_RIFF)
CHECK_MODULE(gstreamer-pbutils-1.0 HAVE_GSTREAMER_PBUTILS)
if(HAVE_GSTREAMER_BASE AND HAVE_GSTREAMER_VIDEO AND HAVE_GSTREAMER_APP AND HAVE_GSTREAMER_RIFF AND HAVE_GSTREAMER_PBUTILS)
set(HAVE_GSTREAMER TRUE)
set(GSTREAMER_BASE_VERSION ${ALIASOF_gstreamer-base-1.0_VERSION})
set(GSTREAMER_VIDEO_VERSION ${ALIASOF_gstreamer-video-1.0_VERSION})
set(GSTREAMER_APP_VERSION ${ALIASOF_gstreamer-app-1.0_VERSION})
set(GSTREAMER_RIFF_VERSION ${ALIASOF_gstreamer-riff-1.0_VERSION})
set(GSTREAMER_PBUTILS_VERSION ${ALIASOF_gstreamer-pbutils-1.0_VERSION})
endif()
endif(WITH_GSTREAMER)
endif(WITH_GSTREAMER_1_X OR NOT HAVE_GSTREAMER)
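# A hypothetical configure invocation exercising the logic above: skip the
# 0.10 probe and build against the GStreamer 1.x packages directly.
#   cmake -DWITH_GSTREAMER=ON -DWITH_GSTREAMER_1_X=ON ..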
# --- unicap ---
ocv_clear_vars(HAVE_UNICAP)
......
@@ -39,25 +39,27 @@
//
//M*/
// Author: Nils Hasler <hasler@mpi-inf.mpg.de>
//
// Max-Planck-Institut Informatik
//
// this implementation was inspired by gnash's gstreamer interface
//
// use GStreamer to read a video
//
/*!
* \file cap_gstreamer.cpp
* \author Nils Hasler <hasler@mpi-inf.mpg.de>
* Max-Planck-Institut Informatik
* \author Dirk Van Haerenborgh <vhdirk@gmail.com>
*
* \brief Use GStreamer to read/write video
*/
#include "precomp.hpp"
#include <unistd.h>
#include <string.h>
#include <map>
#include <gst/gst.h>
#include <gst/gstbuffer.h>
#include <gst/video/video.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#include <gst/riff/riff-media.h>
#include <gst/pbutils/encoding-profile.h>
#include <gst/pbutils/missing-plugins.h>
//#include <gst/base/gsttypefindhelper.h>
#ifdef NDEBUG
#define CV_WARN(message)
@@ -65,9 +67,23 @@
#define CV_WARN(message) fprintf(stderr, "warning: %s (%s:%d)\n", message, __FILE__, __LINE__)
#endif
#if GST_VERSION_MAJOR > 0
#define COLOR_ELEM "videoconvert"
#else
#define COLOR_ELEM "ffmpegcolorspace"
#endif
void toFraction(double decimal, double &numerator, double &denominator);
void handleMessage(GstElement * pipeline);
static cv::Mutex gst_initializer_mutex;
class gst_initializer
/*!
* \brief The gst_initializer class
* Initializes gstreamer once in the whole process
*/
class gst_initializer
{
public:
static void init()
@@ -80,9 +96,16 @@ private:
gst_initializer()
{
gst_init(NULL, NULL);
// gst_debug_set_active(TRUE);
// gst_debug_set_colored(TRUE);
// gst_debug_set_default_threshold(GST_LEVEL_INFO);
}
};
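/*
 * The body of init() falls outside this hunk; a sketch of the once-only
 * pattern it presumably implements, given the mutex and the private
 * constructor above (assumed, not part of this diff):
 *
 *   static void init()
 *   {
 *       gst_initializer_mutex.lock();
 *       static gst_initializer init; // constructor runs gst_init() exactly once
 *       gst_initializer_mutex.unlock();
 *   }
 */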
/*!
* \brief The CvCapture_GStreamer class
* Use GStreamer to capture video
*/
class CvCapture_GStreamer : public CvCapture
{
public:
@@ -100,259 +123,486 @@ public:
protected:
void init();
bool reopen();
void handleMessage();
bool isPipelinePlaying();
void startPipeline();
void stopPipeline();
void restartPipeline();
void setFilter(const char*, int, int, int);
void setFilter(const char* prop, int type, int v1, int v2 = 0);
void removeFilter(const char *filter);
void static newPad(GstElement *myelement,
GstPad *pad,
gpointer data);
GstElement *pipeline;
GstElement *uridecodebin;
GstElement *color;
GstElement *sink;
GstBuffer *buffer;
GstCaps *caps;
IplImage *frame;
static void newPad(GstElement *myelement,
GstPad *pad,
gpointer data);
GstElement* pipeline;
GstElement* uridecodebin;
GstElement* color;
GstElement* sink;
#if GST_VERSION_MAJOR > 0
GstSample* sample;
GstMapInfo* info;
#endif
GstBuffer* buffer;
GstCaps* caps;
GstCaps* buffer_caps;
IplImage* frame;
};
/*!
* \brief CvCapture_GStreamer::init
* inits the class
*/
void CvCapture_GStreamer::init()
{
pipeline=0;
frame=0;
buffer=0;
frame=0;
pipeline = NULL;
frame = NULL;
buffer = NULL;
buffer_caps = NULL;
#if GST_VERSION_MAJOR > 0
sample = NULL;
info = new GstMapInfo;
#endif
}
void CvCapture_GStreamer::handleMessage()
/*!
* \brief CvCapture_GStreamer::close
* Closes the pipeline and destroys all instances
*/
void CvCapture_GStreamer::close()
{
GstBus* bus = gst_element_get_bus(pipeline);
while(gst_bus_have_pending(bus)) {
GstMessage* msg = gst_bus_pop(bus);
// printf("Got %s message\n", GST_MESSAGE_TYPE_NAME(msg));
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_STATE_CHANGED:
GstState oldstate, newstate, pendstate;
gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
// printf("state changed from %d to %d (%d)\n", oldstate, newstate, pendstate);
break;
case GST_MESSAGE_ERROR: {
GError *err;
gchar *debug;
gst_message_parse_error(msg, &err, &debug);
fprintf(stderr, "GStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);
g_error_free(err);
g_free(debug);
gst_element_set_state(pipeline, GST_STATE_NULL);
break;
}
case GST_MESSAGE_EOS:
// CV_WARN("NetStream has reached the end of the stream.");
break;
default:
// CV_WARN("unhandled message\n");
break;
}
if (isPipelinePlaying())
this->stopPipeline();
gst_message_unref(msg);
if(pipeline) {
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
gst_object_unref(GST_OBJECT(pipeline));
}
if(uridecodebin){
gst_object_unref(GST_OBJECT(uridecodebin));
}
if(color){
gst_object_unref(GST_OBJECT(color));
}
if(sink){
gst_object_unref(GST_OBJECT(sink));
}
if(buffer)
gst_buffer_unref(buffer);
if(frame) {
frame->imageData = 0;
cvReleaseImage(&frame);
}
if(caps){
gst_caps_unref(caps);
}
if(buffer_caps){
gst_caps_unref(buffer_caps);
}
#if GST_VERSION_MAJOR > 0
if(sample){
gst_sample_unref(sample);
}
#endif
gst_object_unref(GST_OBJECT(bus));
}
//
// start the pipeline, grab a buffer, and pause again
//
/*!
* \brief CvCapture_GStreamer::grabFrame
* \return
* Grabs a sample from the pipeline, awaiting consumption by retrieveFrame.
* The pipeline is started if it was not already running.
*/
bool CvCapture_GStreamer::grabFrame()
{
if(!pipeline)
return false;
// start the pipeline if it was not in playing state yet
if(!this->isPipelinePlaying())
this->startPipeline();
// bail out if EOS
if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
return false;
#if GST_VERSION_MAJOR == 0
if(buffer)
gst_buffer_unref(buffer);
handleMessage();
buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
#else
if(sample)
gst_sample_unref(sample);
sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
if(!sample)
return false;
buffer = gst_sample_get_buffer(sample);
#endif
if(!buffer)
return false;
return true;
}
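/*
 * A minimal usage sketch of the grab/retrieve pair through the C API this
 * class backs (the file path is hypothetical):
 *
 *   CvCapture* cap = cvCreateCapture_GStreamer(CV_CAP_GSTREAMER_FILE, "video.avi");
 *   while (cap && cvGrabFrame(cap)) {
 *       IplImage* img = cvRetrieveFrame(cap, 0); // owned by the capture, do not release
 *       // ... process img ...
 *   }
 *   cvReleaseCapture(&cap);
 */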
//
// decode buffer
//
/*!
* \brief CvCapture_GStreamer::retrieveFrame
* \return IplImage pointer. [Transfer Full]
* Retrieve the previously grabbed buffer and wrap it in an IplImage structure
*/
IplImage * CvCapture_GStreamer::retrieveFrame(int)
{
if(!buffer)
return 0;
if(!frame) {
//construct a frame header if we did not have any yet
if(!frame)
{
gint height, width;
GstCaps *buff_caps = gst_buffer_get_caps(buffer);
assert(gst_caps_get_size(buff_caps) == 1);
GstStructure* structure = gst_caps_get_structure(buff_caps, 0);
//reuse the caps ptr
if (buffer_caps)
gst_caps_unref(buffer_caps);
#if GST_VERSION_MAJOR == 0
buffer_caps = gst_buffer_get_caps(buffer);
#else
buffer_caps = gst_sample_get_caps(sample);
#endif
// bail out if no caps
assert(gst_caps_get_size(buffer_caps) == 1);
GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
// bail out if width or height are 0
if(!gst_structure_get_int(structure, "width", &width) ||
!gst_structure_get_int(structure, "height", &height))
!gst_structure_get_int(structure, "height", &height))
{
return 0;
}
int depth = 3;
#if GST_VERSION_MAJOR > 0
depth = 0;
const gchar* name = gst_structure_get_name(structure);
const gchar* format = gst_structure_get_string(structure, "format");
if (!name || !format)
return 0;
frame = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, 3);
gst_caps_unref(buff_caps);
// we support 3 types of data:
// video/x-raw, format=BGR -> 8bit, 3 channels
// video/x-raw, format=GRAY8 -> 8bit, 1 channel
// video/x-bayer -> 8bit, 1 channel
// bayer data is never decoded, the user is responsible for that
// everything is 8 bit, so we just test the caps for bit depth
if (strcasecmp(name, "video/x-raw") == 0)
{
if (strcasecmp(format, "BGR") == 0) {
depth = 3;
}
else if(strcasecmp(format, "GRAY8") == 0){
depth = 1;
}
}
else if (strcasecmp(name, "video/x-bayer") == 0)
{
depth = 1;
}
#endif
if (depth > 0) {
frame = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, depth);
}else{
return 0;
}
}
// no need to memcpy, just use gstreamer's buffer :-)
// gstreamer expects us to handle the memory at this point
// so we can just wrap the raw buffer and be done with it
#if GST_VERSION_MAJOR == 0
frame->imageData = (char *)GST_BUFFER_DATA(buffer);
//memcpy (frame->imageData, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE (buffer));
//gst_buffer_unref(buffer);
//buffer = 0;
#else
// the data ptr in GstMapInfo is only valid for the lifetime of the mapinfo object.
// TODO: check if reusing the mapinfo object is ok.
gboolean success = gst_buffer_map(buffer,info, (GstMapFlags)GST_MAP_READ);
if (!success){
//something weird went wrong here. abort. abort.
//fprintf(stderr,"GStreamer: unable to map buffer");
return 0;
}
frame->imageData = (char*)info->data;
gst_buffer_unmap(buffer,info);
#endif
return frame;
}
void CvCapture_GStreamer::restartPipeline()
/*!
* \brief CvCapture_GStreamer::isPipelinePlaying
* \return whether the pipeline is currently playing.
*/
bool CvCapture_GStreamer::isPipelinePlaying()
{
CV_FUNCNAME("icvRestartPipeline");
GstState current, pending;
GstClockTime timeout = 5*GST_SECOND;
if(!GST_IS_ELEMENT(pipeline)){
return false;
}
__BEGIN__;
GstStateChangeReturn ret = gst_element_get_state(GST_ELEMENT(pipeline),&current, &pending, timeout);
if (!ret){
//fprintf(stderr, "GStreamer: unable to query pipeline state\n");
return false;
}
return current == GST_STATE_PLAYING;
}
/*!
* \brief CvCapture_GStreamer::startPipeline
* Start the pipeline by setting it to the playing state
*/
void CvCapture_GStreamer::startPipeline()
{
CV_FUNCNAME("icvStartPipeline");
printf("restarting pipeline, going to ready\n");
__BEGIN__;
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
GST_STATE_CHANGE_FAILURE) {
//fprintf(stderr, "relinked, pausing\n");
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
GST_STATE_CHANGE_FAILURE) {
CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
gst_object_unref(pipeline);
return;
}
printf("ready, relinking\n");
//printf("state now playing\n");
handleMessage(pipeline);
__END__;
}
gst_element_unlink(uridecodebin, color);
printf("filtering with %s\n", gst_caps_to_string(caps));
gst_element_link_filtered(uridecodebin, color, caps);
/*!
* \brief CvCapture_GStreamer::stopPipeline
* Stop the pipeline by setting it to the NULL state
*/
void CvCapture_GStreamer::stopPipeline()
{
CV_FUNCNAME("icvStopPipeline");
printf("relinked, pausing\n");
__BEGIN__;
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
GST_STATE_CHANGE_FAILURE) {
CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
//fprintf(stderr, "restarting pipeline, going to ready\n");
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL) ==
GST_STATE_CHANGE_FAILURE) {
CV_ERROR(CV_StsError, "GStreamer: unable to stop pipeline\n");
gst_object_unref(pipeline);
return;
}
__END__;
}
printf("state now paused\n");
/*!
* \brief CvCapture_GStreamer::restartPipeline
* Restart the pipeline
*/
void CvCapture_GStreamer::restartPipeline()
{
handleMessage(pipeline);
__END__;
this->stopPipeline();
this->startPipeline();
}
void CvCapture_GStreamer::setFilter(const char *property, int type, int v1, int v2)
{
if(!caps) {
if(type == G_TYPE_INT)
caps = gst_caps_new_simple("video/x-raw-rgb", property, type, v1, NULL);
else
caps = gst_caps_new_simple("video/x-raw-rgb", property, type, v1, v2, NULL);
} else {
//printf("caps before setting %s\n", gst_caps_to_string(caps));
/*!
* \brief CvCapture_GStreamer::setFilter
* \param prop the property name
* \param type glib property type
* \param v1 the value
* \param v2 second value if the property type requires it, else 0
* Filter the output formats by setting appsink caps properties
*/
void CvCapture_GStreamer::setFilter(const char *prop, int type, int v1, int v2)
{
//printf("GStreamer: setFilter \n");
if(!caps || !( GST_IS_CAPS (caps) ))
{
if(type == G_TYPE_INT)
gst_caps_set_simple(caps, "video/x-raw-rgb", property, type, v1, NULL);
{
#if GST_VERSION_MAJOR == 0
caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, NULL);
#else
caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, NULL);
#endif
}
else
gst_caps_set_simple(caps, "video/x-raw-rgb", property, type, v1, v2, NULL);
{
#if GST_VERSION_MAJOR == 0
caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, v2, NULL);
#else
caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, v2, NULL);
#endif
}
}
else
{
#if GST_VERSION_MAJOR > 0
if (! gst_caps_is_writable(caps))
caps = gst_caps_make_writable (caps);
#endif
if(type == G_TYPE_INT){
gst_caps_set_simple(caps, prop, type, v1, NULL);
}else{
gst_caps_set_simple(caps, prop, type, v1, v2, NULL);
}
}
restartPipeline();
#if GST_VERSION_MAJOR > 0
caps = gst_caps_fixate(caps);
#endif
gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
//printf("filtering with %s\n", gst_caps_to_string(caps));
}
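/*
 * Illustrative calls (values are examples, not defaults): pinning a field on
 * the appsink caps restricts what the sink will accept, e.g. on 1.x
 * video/x-raw,format=BGR,width=640.
 *
 *   setFilter("width", G_TYPE_INT, 640, 0);
 *   setFilter("framerate", GST_TYPE_FRACTION, 25, 1); // fraction needs both values
 */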
/*!
* \brief CvCapture_GStreamer::removeFilter
* \param filter filter to remove
* remove the specified filter from the appsink template caps
*/
void CvCapture_GStreamer::removeFilter(const char *filter)
{
if(!caps)
return;
#if GST_VERSION_MAJOR > 0
if (! gst_caps_is_writable(caps))
caps = gst_caps_make_writable (caps);
#endif
GstStructure *s = gst_caps_get_structure(caps, 0);
gst_structure_remove_field(s, filter);
restartPipeline();
gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
}
//
// connect uridecodebin dynamically created source pads to colourconverter
//
void CvCapture_GStreamer::newPad(GstElement * /*uridecodebin*/,
GstPad *pad,
gpointer data)
/*!
* \brief CvCapture_GStreamer::newPad link dynamic pad
* \param pad
* \param data
* decodebin creates pads based on stream information, which is not known upfront
* on receiving the pad-added signal, we connect it to the colorspace conversion element
*/
void CvCapture_GStreamer::newPad(GstElement * /*elem*/,
GstPad *pad,
gpointer data)
{
GstPad *sinkpad;
GstElement *color = (GstElement *) data;
sinkpad = gst_element_get_static_pad (color, "sink");
// printf("linking dynamic pad to colourconverter %p %p\n", uridecodebin, pad);
if (!sinkpad){
//fprintf(stderr, "Gstreamer: no pad named sink\n");
return;
}
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
/*!
* \brief CvCapture_GStreamer::open Open the given file with gstreamer
* \param type CvCapture type. One of CV_CAP_GSTREAMER_*
* \param filename Filename to open in case of CV_CAP_GSTREAMER_FILE
* \return boolean. Specifies if opening was successful.
*
* In case of CV_CAP_GSTREAMER_V4L(2), a pipeline is constructed as follows:
* v4l2src ! COLOR_ELEM ! appsink
*
*
* The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
*
* - a normal filesystem path:
* e.g. video.avi or /path/to/video.avi or C:\\video.avi
- a URI:
* e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
* - a gstreamer pipeline description:
* e.g. videotestsrc ! videoconvert ! appsink
* the appsink name should be either 'appsink0' (the default) or 'opencvsink'
*
* When dealing with a file, CvCapture_GStreamer will not drop frames if the grabbing interval is
* larger than the framerate period. (Unlike the URI or manual pipeline description, which assume
* a live source.)
*
* The pipeline is only started when the first frame is grabbed, since setting pipeline properties
* would otherwise force a slow pipeline restart over and over again.
*
* TODO: the 'type' parameter is imo unneeded. for v4l2, filename 'v4l2:///dev/video0' can be used.
* I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually still using v4l (v1)?
*
*/
bool CvCapture_GStreamer::open( int type, const char* filename )
{
close();
CV_FUNCNAME("cvCaptureFromCAM_GStreamer");
__BEGIN__;
gst_initializer::init();
// if(!isInited) {
// printf("gst_init\n");
// gst_init (NULL, NULL);
// gst_debug_set_active(TRUE);
// gst_debug_set_colored(TRUE);
// gst_debug_set_default_threshold(GST_LEVEL_WARNING);
// isInited = true;
// }
bool stream = false;
bool manualpipeline = false;
char *uri = NULL;
uridecodebin = NULL;
if(type != CV_CAP_GSTREAMER_FILE) {
close();
return false;
GstElementFactory * testfac;
if (type == CV_CAP_GSTREAMER_V4L){
testfac = gst_element_factory_find("v4lsrc");
if (!testfac){
return false;
}
g_object_unref(G_OBJECT(testfac));
filename = "v4lsrc ! "COLOR_ELEM" ! appsink";
}
if (type == CV_CAP_GSTREAMER_V4L2){
testfac = gst_element_factory_find("v4l2src");
if (!testfac){
return false;
}
g_object_unref(G_OBJECT(testfac));
filename = "v4l2src ! "COLOR_ELEM" ! appsink";
}
if(!gst_uri_is_valid(filename)) {
// test if we have a valid uri. If so, open it with an uridecodebin
// else, we might have a file or a manual pipeline.
// if gstreamer cannot parse the manual pipeline, we assume we were given an
// ordinary file path.
if(!gst_uri_is_valid(filename))
{
uri = realpath(filename, NULL);
stream=false;
if(uri) {
stream = false;
if(uri)
{
uri = g_filename_to_uri(uri, NULL, NULL);
if(!uri) {
CV_WARN("GStreamer: Error opening file\n");
close();
return false;
}
} else {
}
else
{
GError *err = NULL;
//uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err);
uridecodebin = gst_parse_launch(filename, &err);
if(!uridecodebin) {
CV_WARN("GStreamer: Error opening bin\n");
close();
//fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
//close();
return false;
}
stream = true;
@@ -363,32 +613,75 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
uri = g_strdup(filename);
}
if(!uridecodebin) {
uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
bool element_from_uri = false;
if(!uridecodebin)
{
// At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
// This means that we cannot use an uridecodebin when dealing with v4l2, since setting
// capture properties will not work.
// The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
gchar * protocol = gst_uri_get_protocol(uri);
if (!strcasecmp(protocol , "v4l2"))
{
#if GST_VERSION_MAJOR == 0
uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src");
#else
uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
#endif
element_from_uri = true;
}else{
uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
}
g_free(protocol);
if(!uridecodebin) {
CV_WARN("GStreamer: Failed to create uridecodebin\n");
//fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
close();
return false;
}
}
if(manualpipeline) {
if(manualpipeline)
{
#if GST_VERSION_MAJOR == 0
GstIterator *it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
return false;
CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
return false;
}
#else
sink = gst_bin_get_by_name(GST_BIN(uridecodebin), "opencvsink");
if (!sink){
sink = gst_bin_get_by_name(GST_BIN(uridecodebin), "appsink0");
}
pipeline = uridecodebin;
} else {
pipeline = gst_pipeline_new (NULL);
color = gst_element_factory_make("ffmpegcolorspace", NULL);
if (!sink){
CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
return false;
}
#endif
pipeline = uridecodebin;
}
else
{
pipeline = gst_pipeline_new (NULL);
// videoconvert (in 0.10: ffmpegcolorspace) automatically selects the correct colorspace
// conversion based on caps.
color = gst_element_factory_make(COLOR_ELEM, NULL);
sink = gst_element_factory_make("appsink", NULL);
gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
if(element_from_uri) {
if(!gst_element_link(uridecodebin, color)) {
CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
gst_object_unref(pipeline);
return false;
}
}else{
g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
}
if(!gst_element_link(color, sink)) {
CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
@@ -397,266 +690,131 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
}
}
//TODO: is a single buffer really enough?
gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
//do not emit signals: all calls will be synchronous and blocking
gst_app_sink_set_emit_signals (GST_APP_SINK(sink), FALSE);
#if GST_VERSION_MAJOR == 0
caps = gst_caps_new_simple("video/x-raw-rgb",
"red_mask", G_TYPE_INT, 0x0000FF,
"green_mask", G_TYPE_INT, 0x00FF00,
"blue_mask", G_TYPE_INT, 0xFF0000,
NULL);
#else
// support 1 and 3 channel 8 bit data, as well as bayer (also 1 channel, 8bit)
caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
#endif
gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
gst_caps_unref(caps);
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
GST_STATE_CHANGE_FAILURE) {
CV_WARN("GStreamer: unable to set pipeline to ready\n");
gst_object_unref(pipeline);
return false;
}
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
GST_STATE_CHANGE_FAILURE) {
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
CV_WARN("GStreamer: unable to set pipeline to playing\n");
gst_object_unref(pipeline);
return false;
}
handleMessage();
//we do not start recording here just yet.
// the user probably wants to set capture properties first, so start recording whenever the first frame is requested
__END__;
return true;
}
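/*
 * Usage sketch (the pipeline string is hypothetical): opening from a manual
 * description relies on the 'opencvsink'/'appsink0' naming convention
 * documented above.
 *
 *   CvCapture* cap = cvCreateCapture_GStreamer(CV_CAP_GSTREAMER_FILE,
 *       "videotestsrc ! videoconvert ! appsink name=opencvsink");
 */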
//
//
// gstreamer image sequence writer
//
//
class CvVideoWriter_GStreamer : public CvVideoWriter
/*!
* \brief CvCapture_GStreamer::getProperty retrieve the requested property from the pipeline
* \param propId requested property
* \return property value
*
* There are two ways the properties can be retrieved. For seek-based properties we can query the pipeline.
* For frame-based properties, we use the caps of the last received sample. This means that some properties
* are not available until a first frame has been received
*/
double CvCapture_GStreamer::getProperty( int propId )
{
public:
CvVideoWriter_GStreamer() { init(); }
virtual ~CvVideoWriter_GStreamer() { close(); }
GstFormat format;
gint64 value;
gboolean status;
virtual bool open( const char* filename, int fourcc,
double fps, CvSize frameSize, bool isColor );
virtual void close();
virtual bool writeFrame( const IplImage* image );
protected:
void init();
std::map<int, char*> encs;
GstElement* source;
GstElement* file;
GstElement* enc;
GstElement* mux;
GstElement* color;
GstBuffer* buffer;
GstElement* pipeline;
int input_pix_fmt;
};
#if GST_VERSION_MAJOR == 0
#define FORMAT &format
#else
#define FORMAT format
#endif
void CvVideoWriter_GStreamer::init()
{
encs[CV_FOURCC('D','R','A','C')]=(char*)"diracenc";
encs[CV_FOURCC('H','F','Y','U')]=(char*)"ffenc_huffyuv";
encs[CV_FOURCC('J','P','E','G')]=(char*)"jpegenc";
encs[CV_FOURCC('M','J','P','G')]=(char*)"jpegenc";
encs[CV_FOURCC('M','P','1','V')]=(char*)"mpeg2enc";
encs[CV_FOURCC('M','P','2','V')]=(char*)"mpeg2enc";
encs[CV_FOURCC('T','H','E','O')]=(char*)"theoraenc";
encs[CV_FOURCC('V','P','8','0')]=(char*)"vp8enc";
encs[CV_FOURCC('H','2','6','4')]=(char*)"x264enc";
encs[CV_FOURCC('X','2','6','4')]=(char*)"x264enc";
encs[CV_FOURCC('X','V','I','D')]=(char*)"xvidenc";
encs[CV_FOURCC('F','F','Y','U')]=(char*)"y4menc";
//encs[CV_FOURCC('H','F','Y','U')]=(char*)"y4menc";
pipeline=0;
buffer=0;
}
void CvVideoWriter_GStreamer::close()
{
if (pipeline) {
gst_app_src_end_of_stream(GST_APP_SRC(source));
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
if(!pipeline) {
CV_WARN("GStreamer: no pipeline");
return false;
}
}
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
double fps, CvSize frameSize, bool is_color )
{
CV_FUNCNAME("CvVideoWriter_GStreamer::open");
__BEGIN__;
//actually doesn't support the fourcc parameter and encodes an avi with jpegenc
//we need to find a common api between backend to support fourcc for avi
//but also to choose in a common way codec and container format (ogg,dirac,matroska)
// check arguments
assert (filename);
assert (fps > 0);
assert (frameSize.width > 0 && frameSize.height > 0);
std::map<int,char*>::iterator encit;
encit=encs.find(fourcc);
if (encit==encs.end())
CV_ERROR( CV_StsUnsupportedFormat,"Gstreamer Opencv backend doesn't support this codec acutally.");
// if(!isInited) {
// gst_init (NULL, NULL);
// isInited = true;
// }
gst_initializer::init();
close();
source=gst_element_factory_make("appsrc",NULL);
file=gst_element_factory_make("filesink", NULL);
enc=gst_element_factory_make(encit->second, NULL);
mux=gst_element_factory_make("avimux", NULL);
color = gst_element_factory_make("ffmpegcolorspace", NULL);
if (!enc)
CV_ERROR( CV_StsUnsupportedFormat, "Your version of Gstreamer doesn't support this codec acutally or needed plugin missing.");
g_object_set(G_OBJECT(file), "location", filename, NULL);
pipeline = gst_pipeline_new (NULL);
GstCaps* caps;
if (is_color) {
input_pix_fmt=1;
caps= gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
frameSize.width,
frameSize.height,
(int) (fps * 1000),
1000,
1,
1);
}
else {
input_pix_fmt=0;
caps= gst_caps_new_simple("video/x-raw-gray",
"width", G_TYPE_INT, frameSize.width,
"height", G_TYPE_INT, frameSize.height,
"framerate", GST_TYPE_FRACTION, int(fps),1,
"bpp",G_TYPE_INT,8,
"depth",G_TYPE_INT,8,
NULL);
}
gst_app_src_set_caps(GST_APP_SRC(source), caps);
if (fourcc==CV_FOURCC_DEFAULT) {
gst_bin_add_many(GST_BIN(pipeline), source, color,mux, file, NULL);
if(!gst_element_link_many(source,color,enc,mux,file,NULL)) {
CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
}
}
else {
gst_bin_add_many(GST_BIN(pipeline), source, color,enc,mux, file, NULL);
if(!gst_element_link_many(source,color,enc,mux,file,NULL)) {
CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
}
}
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
GST_STATE_CHANGE_FAILURE) {
CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
}
__END__;
return true;
}
bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
{
CV_FUNCNAME("CvVideoWriter_GStreamer::writerFrame");
__BEGIN__;
if (input_pix_fmt == 1) {
if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) {
CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
}
}
else if (input_pix_fmt == 0) {
if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) {
CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
}
}
else {
assert(false);
}
int size;
size = image->imageSize;
buffer = gst_buffer_new_and_alloc (size);
//gst_buffer_set_data (buffer,(guint8*)image->imageData, size);
memcpy (GST_BUFFER_DATA(buffer),image->imageData, size);
gst_app_src_push_buffer(GST_APP_SRC(source),buffer);
//gst_buffer_unref(buffer);
//buffer = 0;
__END__;
return true;
}
CvVideoWriter* cvCreateVideoWriter_GStreamer(const char* filename, int fourcc, double fps,
CvSize frameSize, int isColor )
{
CvVideoWriter_GStreamer* wrt = new CvVideoWriter_GStreamer;
if( wrt->open(filename, fourcc, fps,frameSize, isColor))
return wrt;
delete wrt;
return 0;
}
void CvCapture_GStreamer::close()
{
if(pipeline) {
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
gst_object_unref(GST_OBJECT(pipeline));
}
if(buffer)
gst_buffer_unref(buffer);
if(frame) {
frame->imageData = 0;
cvReleaseImage(&frame);
}
}
double CvCapture_GStreamer::getProperty( int propId )
{
GstFormat format;
//GstQuery q;
gint64 value;
if(!pipeline) {
CV_WARN("GStreamer: no pipeline");
return false;
}
switch(propId) {
case CV_CAP_PROP_POS_MSEC:
format = GST_FORMAT_TIME;
if(!gst_element_query_position(sink, &format, &value)) {
CV_WARN("GStreamer: unable to query position of stream");
return false;
switch(propId) {
case CV_CAP_PROP_POS_MSEC:
format = GST_FORMAT_TIME;
status = gst_element_query_position(sink, FORMAT, &value);
if(!status) {
CV_WARN("GStreamer: unable to query position of stream");
return false;
}
return value * 1e-6; // nanoseconds to milliseconds
case CV_CAP_PROP_POS_FRAMES:
format = GST_FORMAT_DEFAULT;
if(!gst_element_query_position(sink, &format, &value)) {
status = gst_element_query_position(sink, FORMAT, &value);
if(!status) {
CV_WARN("GStreamer: unable to query position of stream");
return false;
}
return value;
case CV_CAP_PROP_POS_AVI_RATIO:
format = GST_FORMAT_PERCENT;
if(!gst_element_query_position(pipeline, &format, &value)) {
status = gst_element_query_position(sink, FORMAT, &value);
if(!status) {
CV_WARN("GStreamer: unable to query position of stream");
return false;
}
return ((double) value) / GST_FORMAT_PERCENT_MAX;
case CV_CAP_PROP_FRAME_WIDTH:
case CV_CAP_PROP_FRAME_HEIGHT:
case CV_CAP_PROP_FPS:
case CV_CAP_PROP_FRAME_WIDTH: {
if (!buffer_caps){
CV_WARN("GStreamer: unable to query width of frame; no frame grabbed yet");
return 0;
}
GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
gint width = 0;
if(!gst_structure_get_int(structure, "width", &width)){
CV_WARN("GStreamer: unable to query width of frame");
return 0;
}
return width;
break;
}
case CV_CAP_PROP_FRAME_HEIGHT: {
if (!buffer_caps){
CV_WARN("GStreamer: unable to query height of frame; no frame grabbed yet");
return 0;
}
GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
gint height = 0;
if(!gst_structure_get_int(structure, "height", &height)){
CV_WARN("GStreamer: unable to query height of frame");
return 0;
}
return height;
break;
}
case CV_CAP_PROP_FPS: {
if (!buffer_caps){
CV_WARN("GStreamer: unable to query framerate of stream; no frame grabbed yet");
return 0;
}
GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
gint num = 0, denom=1;
if(!gst_structure_get_fraction(structure, "framerate", &num, &denom)){
CV_WARN("GStreamer: unable to query framerate of stream");
return 0;
}
return (double)num/(double)denom;
break;
}
case CV_CAP_PROP_FOURCC:
break;
case CV_CAP_PROP_FRAME_COUNT:
format = GST_FORMAT_DEFAULT;
if(!gst_element_query_duration(pipeline, &format, &value)) {
status = gst_element_query_position(sink, FORMAT, &value);
if(!status) {
CV_WARN("GStreamer: unable to query position of stream");
return false;
}
@@ -672,20 +830,31 @@ double CvCapture_GStreamer::getProperty( int propId )
break;
case CV_CAP_GSTREAMER_QUEUE_LENGTH:
if(!sink) {
CV_WARN("GStreamer: there is no sink yet");
return false;
CV_WARN("GStreamer: there is no sink yet");
return false;
}
return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
default:
CV_WARN("GStreamer: unhandled property");
break;
}
#undef FORMAT
return false;
}
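/*
 * Caller-side sketch (C API): frame-based properties only become meaningful
 * once buffer_caps has been populated by a first grab, as noted above.
 *
 *   cvGrabFrame(cap);                                         // fills buffer_caps
 *   double w   = cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH);
 *   double fps = cvGetCaptureProperty(cap, CV_CAP_PROP_FPS);  // 0 before the first grab
 */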
/*!
* \brief CvCapture_GStreamer::setProperty
* \param propId
* \param value
* \return success
* Sets the desired property id with val. If the pipeline is running,
* it is briefly stopped and started again after the property was set
*/
bool CvCapture_GStreamer::setProperty( int propId, double value )
{
GstFormat format;
GstFormat format;
GstSeekFlags flags;
if(!pipeline) {
@@ -693,12 +862,17 @@ bool CvCapture_GStreamer::setProperty( int propId, double value )
return false;
}
bool wasPlaying = this->isPipelinePlaying();
if (wasPlaying)
this->stopPipeline();
switch(propId) {
case CV_CAP_PROP_POS_MSEC:
format = GST_FORMAT_TIME;
flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
flags, (gint64) (value * GST_MSECOND))) {
flags, (gint64) (value * GST_MSECOND))) {
CV_WARN("GStreamer: unable to seek");
}
break;
@@ -706,7 +880,7 @@ bool CvCapture_GStreamer::setProperty( int propId, double value )
format = GST_FORMAT_DEFAULT;
flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
flags, (gint64) value)) {
flags, (gint64) value)) {
CV_WARN("GStreamer: unable to seek");
}
break;
@@ -714,7 +888,7 @@ bool CvCapture_GStreamer::setProperty( int propId, double value )
format = GST_FORMAT_PERCENT;
flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
CV_WARN("GStreamer: unable to seek");
}
break;
@@ -732,15 +906,9 @@ bool CvCapture_GStreamer::setProperty( int propId, double value )
break;
case CV_CAP_PROP_FPS:
if(value > 0) {
int num, denom;
num = (int) value;
if(value != num) { // FIXME this supports only fractions x/1 and x/2
num = (int) (value * 2);
denom = 2;
} else
denom = 1;
setFilter("framerate", GST_TYPE_FRACTION, num, denom);
double num=0, denom = 1;
toFraction(value, num, denom);
setFilter("framerate", GST_TYPE_FRACTION, value, denom);
} else
removeFilter("framerate");
break;
@@ -763,8 +931,19 @@ bool CvCapture_GStreamer::setProperty( int propId, double value )
default:
CV_WARN("GStreamer: unhandled property");
}
if (wasPlaying)
this->startPipeline();
return false;
}
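/*
 * Seek example (the position is illustrative): this briefly pauses a playing
 * pipeline, issues a flushing, accurate seek to 5 s, and resumes playback.
 *
 *   cvSetCaptureProperty(cap, CV_CAP_PROP_POS_MSEC, 5000);
 */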
/*!
* \brief cvCreateCapture_GStreamer
* \param type
* \param filename
* \return
*/
CvCapture* cvCreateCapture_GStreamer(int type, const char* filename )
{
CvCapture_GStreamer* capture = new CvCapture_GStreamer;
@@ -775,3 +954,498 @@ CvCapture* cvCreateCapture_GStreamer(int type, const char* filename )
delete capture;
return 0;
}
/*!
* \brief The CvVideoWriter_GStreamer class
* Use Gstreamer to write video
*/
class CvVideoWriter_GStreamer : public CvVideoWriter
{
public:
CvVideoWriter_GStreamer() { init(); }
virtual ~CvVideoWriter_GStreamer() { close(); }
virtual bool open( const char* filename, int fourcc,
double fps, CvSize frameSize, bool isColor );
virtual void close();
virtual bool writeFrame( const IplImage* image );
protected:
void init();
const char* filenameToMimetype(const char* filename);
GstElement* pipeline;
GstElement* source;
GstElement* encodebin;
GstElement* file;
GstBuffer* buffer;
int input_pix_fmt;
int num_frames;
double framerate;
};
/*!
* \brief CvVideoWriter_GStreamer::init
* initialise all variables
*/
void CvVideoWriter_GStreamer::init()
{
pipeline = NULL;
source = NULL;
encodebin = NULL;
file = NULL;
buffer = NULL;
num_frames = 0;
framerate = 0;
}
/*!
* \brief CvVideoWriter_GStreamer::close
* ends the pipeline by sending EOS and destroys the pipeline and all
* elements afterwards
*/
void CvVideoWriter_GStreamer::close()
{
if (pipeline) {
GstFlowReturn ret;
ret = gst_app_src_end_of_stream(GST_APP_SRC(source));
//wait for EOS to trickle down the pipeline. This will let all elements finish properly
GstBus* bus = gst_element_get_bus(pipeline);
GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
if(msg != NULL){
gst_message_unref(msg);
}
g_object_unref(G_OBJECT(bus)); // release the bus whether or not a message arrived
gst_element_set_state (pipeline, GST_STATE_NULL);
handleMessage(pipeline);
gst_object_unref (GST_OBJECT (pipeline));
}
}
/*!
* \brief CvVideoWriter_GStreamer::filenameToMimetype
* \param filename
* \return mimetype
* Returns a container mime type for a given filename by looking at its extension
*/
const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename)
{
//get extension
const char *ext = strrchr(filename, '.');
if(!ext || ext == filename) return NULL;
ext += 1; //exclude the dot
// return a container mime based on the given extension.
// gstreamer's function returns too many possibilities, which is not useful to us
//return the appropriate mime
if (strncasecmp(ext,"avi", 3) == 0)
return (const char*)"video/x-msvideo";
if (strncasecmp(ext,"mkv", 3) == 0 || strncasecmp(ext,"mk3d",4) == 0 || strncasecmp(ext,"webm",4) == 0 )
return (const char*)"video/x-matroska";
if (strncasecmp(ext,"wmv", 3) == 0)
return (const char*)"video/x-ms-asf";
if (strncasecmp(ext,"mov", 3) == 0)
return (const char*)"video/x-quicktime";
if (strncasecmp(ext,"ogg", 3) == 0 || strncasecmp(ext,"ogv", 3) == 0)
return (const char*)"application/ogg";
if (strncasecmp(ext,"rm", 3) == 0)
return (const char*)"vnd.rn-realmedia";
if (strncasecmp(ext,"swf", 3) == 0)
return (const char*)"application/x-shockwave-flash";
if (strncasecmp(ext,"mp4", 3) == 0)
return (const char*)"video/x-quicktime, variant=(string)iso";
//default to avi
return (const char*)"video/x-msvideo";
}
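/*
 * Examples of the mapping above (filenames are hypothetical):
 *   filenameToMimetype("out.avi") -> "video/x-msvideo"
 *   filenameToMimetype("out.mkv") -> "video/x-matroska"
 *   filenameToMimetype("out.mp4") -> "video/x-quicktime, variant=(string)iso"
 *   filenameToMimetype("out.xyz") -> "video/x-msvideo" (the avi default)
 */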
/*!
* \brief CvVideoWriter_GStreamer::open
* \param filename filename to output to
* \param fourcc desired codec fourcc
* \param fps desired framerate
* \param frameSize the size of the expected frames
* \param is_color color or grayscale
* \return success
*
* We support 2 modes of operation. Either the user enters a filename and a fourcc
* code, or enters a manual pipeline description like in CvVideoCapture_Gstreamer.
* In the latter case, we just push frames on the appsrc with appropriate caps.
* In the former case, we try to deduce the correct container from the filename,
* and the correct encoder from the fourcc profile.
*
* If the file extension was not recognized, an avi container is used
*
*/
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
double fps, CvSize frameSize, bool is_color )
{
CV_FUNCNAME("CvVideoWriter_GStreamer::open");
// check arguments
assert (filename);
assert (fps > 0);
assert (frameSize.width > 0 && frameSize.height > 0);
// init gstreamer
gst_initializer::init();
// init vars
bool manualpipeline = true;
int bufsize = 0;
GError *err = NULL;
const char* mime = NULL;
GstStateChangeReturn stateret;
GstCaps* caps = NULL;
GstCaps* videocaps = NULL;
GstCaps* containercaps = NULL;
GstEncodingContainerProfile* containerprofile = NULL;
GstEncodingVideoProfile* videoprofile = NULL;
#if GST_VERSION_MAJOR == 0
GstIterator *it = NULL;
#endif
// we first try to construct a pipeline from the given string.
// if that fails, we assume it is an ordinary filename
__BEGIN__;
encodebin = gst_parse_launch(filename, &err);
if(!encodebin) {
manualpipeline = false;
}
if(manualpipeline)
{
#if GST_VERSION_MAJOR == 0
it = gst_bin_iterate_sources(GST_BIN(encodebin));
if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
return false;
}
#else
source = gst_bin_get_by_name(GST_BIN(encodebin), "opencvsrc");
if (!source){
source = gst_bin_get_by_name(GST_BIN(encodebin), "appsrc0");
}
if (!source){
CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
return false;
}
#endif
pipeline = encodebin;
}
else
{
pipeline = gst_pipeline_new (NULL);
// we just got a filename and a fourcc code.
// first, try to guess the container from the filename
//encodebin = gst_element_factory_make("encodebin", NULL);
//proxy old non existing fourcc ids. These were used in previous opencv versions,
//but do not even exist in gstreamer any more
if (fourcc == CV_FOURCC('M','P','1','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'1');
if (fourcc == CV_FOURCC('M','P','2','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'2');
if (fourcc == CV_FOURCC('D','R','A','C')) fourcc = CV_FOURCC('d', 'r', 'a' ,'c');
//create encoder caps from fourcc
videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
if (!videocaps){
CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this codec.");
}
//create container caps from file extension
mime = filenameToMimetype(filename);
if (!mime) {
CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this file type.");
}
containercaps = gst_caps_from_string(mime);
//create encodebin profile
containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
//create pipeline elements
encodebin = gst_element_factory_make("encodebin", NULL);
g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
source = gst_element_factory_make("appsrc", NULL);
file = gst_element_factory_make("filesink", NULL);
g_object_set(G_OBJECT(file), "location", filename, NULL);
}
if (is_color)
{
input_pix_fmt = GST_VIDEO_FORMAT_BGR;
bufsize = frameSize.width * frameSize.height * 3;
#if GST_VERSION_MAJOR == 0
caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
frameSize.width,
frameSize.height,
int(fps), 1,
1, 1);
#else
caps = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "BGR",
"width", G_TYPE_INT, frameSize.width,
"height", G_TYPE_INT, frameSize.height,
"framerate", GST_TYPE_FRACTION, int(fps), 1,
NULL);
caps = gst_caps_fixate(caps);
#endif
}
else
{
input_pix_fmt = GST_VIDEO_FORMAT_GRAY8;
bufsize = frameSize.width * frameSize.height;
#if GST_VERSION_MAJOR == 0
caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
frameSize.width,
frameSize.height,
int(fps), 1,
1, 1);
#else
caps = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "GRAY8",
"width", G_TYPE_INT, frameSize.width,
"height", G_TYPE_INT, frameSize.height,
"framerate", GST_TYPE_FRACTION, int(fps), 1,
NULL);
caps = gst_caps_fixate(caps);
#endif
}
gst_app_src_set_caps(GST_APP_SRC(source), caps);
gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_size (GST_APP_SRC(source), -1);
g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL);
g_object_set(G_OBJECT(source), "block", TRUE, NULL);
g_object_set(G_OBJECT(source), "is-live", FALSE, NULL);
g_object_set(G_OBJECT(source), "emit-signals", TRUE, NULL);
if(!manualpipeline)
{
g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
if(!gst_element_link_many(source, encodebin, file, NULL)) {
CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
}
}
stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
if(stateret == GST_STATE_CHANGE_FAILURE) {
CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
}
handleMessage(pipeline);
framerate = fps;
num_frames = 0;
__END__;
return true;
}
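/*
 * Usage sketch of the filename/fourcc mode (values are illustrative): the
 * container is deduced from the .avi extension and the encoder from the
 * MJPG fourcc via gst_riff_create_video_caps and encodebin.
 *
 *   CvVideoWriter* writer = cvCreateVideoWriter_GStreamer(
 *       "out.avi", CV_FOURCC('M','J','P','G'), 25.0, cvSize(640, 480), 1);
 */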
/*!
* \brief CvVideoWriter_GStreamer::writeFrame
* \param image
* \return
* Pushes the given frame on the pipeline.
* The timestamp for the buffer is generated from the framerate set in open
* and ensures a smooth video
*/
bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
{
CV_FUNCNAME("CvVideoWriter_GStreamer::writerFrame");
GstClockTime duration, timestamp;
GstFlowReturn ret;
int size;
__BEGIN__;
handleMessage(pipeline);
if (input_pix_fmt == GST_VIDEO_FORMAT_BGR) {
if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) {
CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
}
}
else if (input_pix_fmt == GST_VIDEO_FORMAT_GRAY8) {
if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) {
CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
}
}
else {
assert(false);
}
size = image->imageSize;
duration = ((double)1/framerate) * GST_SECOND;
timestamp = num_frames * duration;
//gst_app_src_push_buffer takes ownership of the buffer, so we need to supply it a copy
#if GST_VERSION_MAJOR == 0
buffer = gst_buffer_new_and_alloc (size);
memcpy(GST_BUFFER_DATA (buffer), (guint8*)image->imageData, size);
GST_BUFFER_DURATION(buffer) = duration;
GST_BUFFER_TIMESTAMP(buffer) = timestamp;
#else
buffer = gst_buffer_new_allocate (NULL, size, NULL);
GstMapInfo info;
gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_READ);
memcpy(info.data, (guint8*)image->imageData, size);
gst_buffer_unmap(buffer, &info);
GST_BUFFER_DURATION(buffer) = duration;
GST_BUFFER_PTS(buffer) = timestamp;
GST_BUFFER_DTS(buffer) = timestamp;
#endif
//set the current number in the frame
GST_BUFFER_OFFSET(buffer) = num_frames;
ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer);
if (ret != GST_FLOW_OK) {
/* something wrong, stop pushing */
assert(false);
}
//gst_debug_bin_to_dot_file (GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
++num_frames;
__END__;
return true;
}
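/*
 * The timestamp arithmetic above, worked for framerate = 25 (illustrative):
 * duration = (1/25) * GST_SECOND = 40 ms, so frame N carries PTS = N * 40 ms.
 * Buffers are therefore spaced at a constant rate regardless of how fast the
 * caller pushes frames.
 */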
/*!
* \brief cvCreateVideoWriter_GStreamer
* \param filename
* \param fourcc
* \param fps
* \param frameSize
* \param isColor
* \return
* Constructor
*/
CvVideoWriter* cvCreateVideoWriter_GStreamer(const char* filename, int fourcc, double fps,
CvSize frameSize, int isColor )
{
CvVideoWriter_GStreamer* wrt = new CvVideoWriter_GStreamer;
if( wrt->open(filename, fourcc, fps,frameSize, isColor))
return wrt;
delete wrt;
return 0;
}
// utility functions
/*!
* \brief toFraction
* \param decimal
* \param numerator
* \param denominator
* Split a floating point value into numerator and denominator
*/
void toFraction(double decimal, double &numerator, double &denominator)
{
double dummy;
double whole;
decimal = modf (decimal, &whole);
for (denominator = 1; denominator<=100; denominator++){
if (modf(denominator * decimal, &dummy) < 0.001f)
break;
}
numerator = whole * denominator + denominator * decimal; // include the whole part, not just the fraction
}
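/*
 * Worked example (with the whole part added back in above):
 * toFraction(14.5, num, denom) -> whole = 14, decimal = 0.5; denominator 2 is
 * the first value that makes denom * decimal integral, so denom = 2 and
 * num = 14 * 2 + 2 * 0.5 = 29, i.e. 14.5 becomes the GStreamer fraction 29/2.
 */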
/*!
* \brief handleMessage
* Handles gstreamer bus messages. Mainly for debugging purposes and ensuring clean shutdown on error
*/
void handleMessage(GstElement * pipeline)
{
CV_FUNCNAME("handlemessage");
GError *err = NULL;
gchar *debug = NULL;
GstBus* bus = NULL;
GstStreamStatusType tp;
GstElement * elem = NULL;
GstMessage* msg = NULL;
__BEGIN__;
bus = gst_element_get_bus(pipeline);
while(gst_bus_have_pending(bus)) {
msg = gst_bus_pop(bus);
//printf("Got %s message\n", GST_MESSAGE_TYPE_NAME(msg));
if(gst_is_missing_plugin_message(msg))
{
CV_ERROR(CV_StsError, "GStreamer: your gstreamer installation is missing a required plugin\n");
}
else
{
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_STATE_CHANGED:
GstState oldstate, newstate, pendstate;
gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
//fprintf(stderr, "state changed from %s to %s (pending: %s)\n", gst_element_state_get_name(oldstate),
// gst_element_state_get_name(newstate), gst_element_state_get_name(pendstate));
break;
case GST_MESSAGE_ERROR:
gst_message_parse_error(msg, &err, &debug);
//fprintf(stderr, "GStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
// gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);
g_error_free(err);
g_free(debug);
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
break;
case GST_MESSAGE_EOS:
//fprintf(stderr, "reached the end of the stream.");
break;
case GST_MESSAGE_STREAM_STATUS:
gst_message_parse_stream_status(msg,&tp,&elem);
//fprintf(stderr, "stream status: elem %s, %i\n", GST_ELEMENT_NAME(elem), tp);
break;
default:
//fprintf(stderr, "unhandled message\n");
break;
}
}
gst_message_unref(msg);
}
gst_object_unref(GST_OBJECT(bus));
__END__;
}