Commit 56b206dc authored by Nils Hasler

When opening a file with GStreamer:

* if the filename looks like a URI, it is opened in non-blocking mode; cvQueryFrame() may skip frames or return the same frame more than once
* if the filename looks like a plain file path, it is opened in blocking mode; cvQueryFrame() grabs consecutive frames
* otherwise the filename is interpreted as a GStreamer pipeline, as used with gst-launch; the last element of the pipeline must have the property name=to-opencv (see the usage sketch below)
parent b906ad31
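A minimal usage sketch of the three modes through the OpenCV C API this backend plugs into (assuming OpenCV was built with this GStreamer backend enabled; the file name, URL, and pipeline string are hypothetical examples, not part of the commit):

    #include <stdio.h>
    #include <opencv/highgui.h>  // cvCreateFileCapture, cvQueryFrame, cvReleaseCapture

    int main()
    {
        // 1) plain file path -> blocking mode, consecutive frames
        CvCapture *file_cap = cvCreateFileCapture("video.avi");

        // 2) URI -> non-blocking mode; frames may be skipped or repeated
        CvCapture *uri_cap = cvCreateFileCapture("http://example.com/stream.ogg");

        // 3) anything else -> parsed like a gst-launch pipeline; the element
        //    whose output OpenCV should read must carry name=to-opencv
        CvCapture *pipe_cap = cvCreateFileCapture("videotestsrc name=to-opencv");

        if(pipe_cap) {
            IplImage *img = cvQueryFrame(pipe_cap);  // grab + retrieve in one call
            if(img)
                printf("got %dx%d frame\n", img->width, img->height);
            /* img is owned by the capture; do not release it */
        }

        cvReleaseCapture(&file_cap);
        cvReleaseCapture(&uri_cap);
        cvReleaseCapture(&pipe_cap);
        return 0;
    }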
@@ -85,9 +85,9 @@ public:
    virtual bool grabFrame();
    virtual IplImage* retrieveFrame(int);

protected:
    void init();
    bool reopen();
    void handleMessage();
    void restartPipeline();
    void setFilter(const char*, int, int, int);
@@ -95,67 +95,67 @@ protected:
    void static newPad(GstElement *myelement,
                       GstPad     *pad,
                       gpointer    data);

    GstElement *pipeline;
    GstElement *uridecodebin;
    GstElement *color;
    GstElement *sink;

    GstBuffer *buffer;
    GstCaps *caps;
    IplImage *frame;
};
void CvCapture_GStreamer::init()
{
    pipeline = 0;
    frame = 0;
    buffer = 0;
    frame = 0;
}
void CvCapture_GStreamer::handleMessage()
{
    GstBus* bus = gst_element_get_bus(pipeline);

    while(gst_bus_have_pending(bus)) {
        GstMessage* msg = gst_bus_pop(bus);

//        printf("Got %s message\n", GST_MESSAGE_TYPE_NAME(msg));

        switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_STATE_CHANGED:
            GstState oldstate, newstate, pendstate;
            gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
//            printf("state changed from %d to %d (%d)\n", oldstate, newstate, pendstate);
            break;
        case GST_MESSAGE_ERROR: {
            GError *err;
            gchar *debug;
            gst_message_parse_error(msg, &err, &debug);

            fprintf(stderr, "GStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
                    gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);

            g_error_free(err);
            g_free(debug);

            gst_element_set_state(pipeline, GST_STATE_NULL);
            break;
        }
        case GST_MESSAGE_EOS:
//            CV_WARN("NetStream has reached the end of the stream.");
            break;
        default:
//            CV_WARN("unhandled message\n");
            break;
        }

        gst_message_unref(msg);
    }

    gst_object_unref(GST_OBJECT(bus));
}
//
@@ -163,34 +163,34 @@ void CvCapture_GStreamer::handleMessage()
//
bool CvCapture_GStreamer::grabFrame()
{
    if(!pipeline)
        return false;

    if(gst_app_sink_is_eos(GST_APP_SINK(sink))) {
        //printf("end of stream\n");
        return false;
    }

    if(buffer)
        gst_buffer_unref(buffer);
    handleMessage();

#ifndef HAVE_GSTREAMER_APP
    if(gst_app_sink_get_queue_length(GST_APP_SINK(sink)))
    {
//        printf("peeking buffer, %d buffers in queue\n",
        buffer = gst_app_sink_peek_buffer(GST_APP_SINK(sink));
    }
    else
#endif
    {
        buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
    }

    if(!buffer)
        return false;

    return true;
}
//
@@ -198,91 +198,91 @@ bool CvCapture_GStreamer::grabFrame()
//
IplImage * CvCapture_GStreamer::retrieveFrame(int)
{
    if(!buffer)
        return false;

    if(!frame) {
        gint height, width;
        GstCaps *buff_caps = gst_buffer_get_caps(buffer);
        assert(gst_caps_get_size(buff_caps) == 1);
        GstStructure* structure = gst_caps_get_structure(buff_caps, 0);

        if(!gst_structure_get_int(structure, "width", &width) ||
           !gst_structure_get_int(structure, "height", &height))
            return false;

        frame = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3);
        gst_caps_unref(buff_caps);
    }

    memcpy (frame->imageData, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE (buffer));
    //gst_data_copy_into (frame->imageData,GST_BUFFER_DATA(buffer));
    gst_buffer_unref(buffer);
    buffer = 0;
    return frame;
}
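In OpenCV terms, grabFrame() backs cvGrabFrame() and retrieveFrame() backs cvRetrieveFrame(); cvQueryFrame() simply calls both. A short sketch of the two-step form (assuming `cap` came from cvCreateFileCapture() as above):

    if(cvGrabFrame(cap)) {                       // pulls the next GstBuffer
        IplImage *img = cvRetrieveFrame(cap, 0); // copies it into the cached IplImage
        if(img) {
            /* img->imageData now holds the 8-bit, 3-channel frame; the image
               is reused on the next retrieve, so clone it if you keep it */
        }
    }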
void CvCapture_GStreamer::restartPipeline()
{
    CV_FUNCNAME("icvRestartPipeline");

    __BEGIN__;

    printf("restarting pipeline, going to ready\n");

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
       GST_STATE_CHANGE_FAILURE) {
        CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
        return;
    }

    printf("ready, relinking\n");

    gst_element_unlink(uridecodebin, color);
    printf("filtering with %s\n", gst_caps_to_string(caps));
    gst_element_link_filtered(uridecodebin, color, caps);

    printf("relinked, pausing\n");

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
       GST_STATE_CHANGE_FAILURE) {
        CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
        return;
    }

    printf("state now paused\n");

    __END__;
}
void CvCapture_GStreamer::setFilter(const char *property, int type, int v1, int v2)
{
    if(!caps) {
        if(type == G_TYPE_INT)
            caps = gst_caps_new_simple("video/x-raw-rgb", property, type, v1, NULL);
        else
            caps = gst_caps_new_simple("video/x-raw-rgb", property, type, v1, v2, NULL);
    } else {
        printf("caps before setting %s\n", gst_caps_to_string(caps));
        if(type == G_TYPE_INT)
            gst_caps_set_simple(caps, "video/x-raw-rgb", property, type, v1, NULL);
        else
            gst_caps_set_simple(caps, "video/x-raw-rgb", property, type, v1, v2, NULL);
    }

    restartPipeline();
}
void CvCapture_GStreamer::removeFilter(const char *filter)
{
    if(!caps)
        return;

    GstStructure *s = gst_caps_get_structure(caps, 0);
    gst_structure_remove_field(s, filter);

    restartPipeline();
}
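setFilter() and removeFilter() are what the width/height/framerate properties further down map onto: they rebuild the caps used in the filtered link between uridecodebin and the colour converter, then restart the pipeline. A sketch of the caps such a call builds (for illustration only, using the GStreamer 0.10 API as in this file):

    // What setFilter("width", G_TYPE_INT, 320, 0) effectively constructs:
    GstCaps *c = gst_caps_new_simple("video/x-raw-rgb",
                                     "width", G_TYPE_INT, 320,
                                     NULL);
    gchar *s = gst_caps_to_string(c);   // "video/x-raw-rgb, width=(int)320"
    g_print("%s\n", s);
    g_free(s);
    gst_caps_unref(c);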
@@ -298,6 +298,8 @@ void CvCapture_GStreamer::newPad(GstElement *uridecodebin,
    sinkpad = gst_element_get_static_pad (color, "sink");

//    printf("linking dynamic pad to colourconverter %p %p\n", uridecodebin, pad);

    gst_pad_link (pad, sinkpad);
@@ -309,117 +311,153 @@ bool CvCapture_GStreamer::open( int type, const char* filename )
    close();
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

    __BEGIN__;

    if(!isInited) {
//        printf("gst_init\n");
        gst_init (NULL, NULL);

//        gst_debug_set_active(TRUE);
//        gst_debug_set_colored(TRUE);
//        gst_debug_set_default_threshold(GST_LEVEL_WARNING);

        isInited = true;
    }
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    if(type != CV_CAP_GSTREAMER_FILE) {
        close();
        return false;
    }

    if(!gst_uri_is_valid(filename)) {
//        printf("file '%s' is not uri\n", filename);
        uri = realpath(filename, NULL);
        stream = false;
        if(uri) {
//            printf("is file... ? %s\n", uri);
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(!uri) {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        } else {
            GError *err = NULL;
            uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err);
            if(!uridecodebin) {
                CV_WARN("GStreamer: Error opening bin\n");
                close();
                return false;
            }
            stream = true;
//            printf("created custom bin\n");
            manualpipeline = true;
        }
    }
    else {
//        printf("file '%s' is uri\n", filename);
        stream = true;
        uri = g_strdup(filename);
    }

    if(!uridecodebin) {
//        printf("creating uridecodebin\n");
        uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
        g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL);
    }
    if(!uridecodebin) {
        CV_WARN("GStreamer: Failed to create uridecodebin\n");
        close();
        return false;
    }

//    printf("Trying to connect to stream \n");
    color = gst_element_factory_make("ffmpegcolorspace", NULL);

    //printf("%sstreaming\n", stream ? "" : "not ");
#ifdef HAVE_GSTREAMER_APP
    sink = gst_element_factory_make("appsink", NULL);
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    if (stream) {
        gst_app_sink_set_drop (GST_APP_SINK(sink), true);
    }
#else
    sink = gst_element_factory_make("opencv-appsink", NULL);
#endif
    GstCaps* caps = gst_caps_new_simple("video/x-raw-rgb",
                                        "red_mask",   G_TYPE_INT, 255,
                                        "green_mask", G_TYPE_INT, 65280,
                                        "blue_mask",  G_TYPE_INT, 16711680,
                                        NULL);
    //GstCaps *caps=gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,,368,30,1,1,1);
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);
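The mask values are easier to read in hex; the snippet below is the same caps written with hexadecimal constants, plus my reading of them (an assumption, since the caps omit an explicit endianness field):

    // 255 = 0x0000FF (red), 65280 = 0x00FF00 (green), 16711680 = 0xFF0000 (blue).
    // Read as big-endian 24-bit RGB this puts blue in the first byte in memory,
    // i.e. the B,G,R byte order OpenCV expects in IplImage::imageData.
    GstCaps *rgb = gst_caps_new_simple("video/x-raw-rgb",
                                       "red_mask",   G_TYPE_INT, 0x0000FF,
                                       "green_mask", G_TYPE_INT, 0x00FF00,
                                       "blue_mask",  G_TYPE_INT, 0xFF0000,
                                       NULL);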
    pipeline = gst_pipeline_new (NULL);

    //printf("adding stuff to pipeline\n");
    if(manualpipeline) {
        // it is easier to link elements inside the same bin
        gst_bin_add_many(GST_BIN(uridecodebin), color, sink, NULL);
        // need the pipeline around the bin because bins don't know about timing
        gst_bin_add(GST_BIN(pipeline), uridecodebin);
    }
    else {
        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
    }

    if(manualpipeline) {
        GstElement *e = gst_bin_get_by_name(GST_BIN(uridecodebin), "to-opencv");
        if(e) {
            if(!gst_element_link(e, color)) {
                //printf("catching 'pad-added' for element 'to-opencv'\n");
                g_signal_connect(e, "pad-added", G_CALLBACK(newPad), color);
            }/* else {
                printf("linked to-opencv -> color\n");
            }*/
            gst_object_unref(e);
        } else {
            CV_WARN("GStreamer: no element with 'name=to-opencv'\n");
            gst_object_unref(pipeline);
            return false;
        }
    } else {
        g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
    }

    if(!gst_element_link(color, sink)) {
        CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
        gst_object_unref(pipeline);
        return false;
    }
    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
       GST_STATE_CHANGE_FAILURE) {
        CV_WARN("GStreamer: unable to set pipeline to ready\n");
//        icvHandleMessage(capture);
//        cvReleaseCapture((CvCapture **)(void *)&capture);
        gst_object_unref(pipeline);
        return false;
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
       GST_STATE_CHANGE_FAILURE) {
        CV_WARN("GStreamer: unable to set pipeline to playing\n");
//        icvHandleMessage(capture);
//        cvReleaseCapture((CvCapture **)(void *)&capture);
        gst_object_unref(pipeline);
        return false;
    }

    handleMessage();

    __END__;

    return true;
}
#ifdef HAVE_GSTREAMER_APP
//
@@ -436,7 +474,7 @@ public:
    virtual bool open( const char* filename, int fourcc,
                       double fps, CvSize frameSize, bool isColor );
    virtual void close();
    virtual bool writeFrame( const IplImage* image );

protected:
    void init();
    std::map<int, char*> encs;
@@ -452,127 +490,127 @@ protected:
void CvVideoWriter_GStreamer::init()
{
    encs[CV_FOURCC('H','F','Y','U')] = (char*)"ffenc_huffyuv";
    encs[CV_FOURCC('D','R','A','C')] = (char*)"diracenc";
    encs[CV_FOURCC('X','V','I','D')] = (char*)"xvidenc";
    encs[CV_FOURCC('X','2','6','4')] = (char*)"x264enc";
    encs[CV_FOURCC('M','P','1','V')] = (char*)"mpeg2enc";
    //encs[CV_FOURCC('M','P','2','V')]=(char*)"mpeg2enc";
    pipeline = 0;
    buffer = 0;
}
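This map translates the FOURCC passed to cvCreateVideoWriter() into a GStreamer encoder element name. A hedged usage sketch (x264enc must be provided by the installed plugins; the file name is an example):

    CvVideoWriter *writer = cvCreateVideoWriter("out.avi",
                                                CV_FOURCC('X','2','6','4'), // -> x264enc
                                                25.0, cvSize(640, 480),
                                                1 /* isColor */);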
void CvVideoWriter_GStreamer::close()
{
    if (pipeline) {
        gst_app_src_end_of_stream(GST_APP_SRC(source));
        gst_element_set_state (pipeline, GST_STATE_NULL);
        gst_object_unref (GST_OBJECT (pipeline));
    }
}
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
                                    double fps, CvSize frameSize, bool is_color )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::open");

    __BEGIN__;

    // actually doesn't support the fourcc parameter and encodes an avi with jpegenc
    // we need to find a common api between backends to support fourcc for avi
    // but also to choose codec and container format (ogg, dirac, matroska) in a common way

    // check arguments
    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0 && frameSize.height > 0);

    std::map<int,char*>::iterator encit;
    encit = encs.find(fourcc);
    if (encit == encs.end())
        CV_ERROR( CV_StsUnsupportedFormat, "GStreamer OpenCV backend doesn't currently support this codec.");

    if(!isInited) {
        gst_init (NULL, NULL);
        isInited = true;
    }
    close();
    source = gst_element_factory_make("appsrc", NULL);
    file = gst_element_factory_make("filesink", NULL);
    enc = gst_element_factory_make(encit->second, NULL);
    mux = gst_element_factory_make("avimux", NULL);
    color = gst_element_factory_make("ffmpegcolorspace", NULL);
    if (!enc)
        CV_ERROR( CV_StsUnsupportedFormat, "Your version of GStreamer doesn't currently support this codec, or a needed plugin is missing.");

    g_object_set(G_OBJECT(file), "location", filename, NULL);
    pipeline = gst_pipeline_new (NULL);
    GstCaps* caps;
    if (is_color) {
        input_pix_fmt = 1;
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                         frameSize.width,
                                         frameSize.height,
                                         (int) (fps * 1000),
                                         1000,
                                         1,
                                         1);
    }
    else {
        input_pix_fmt = 0;
        caps = gst_caps_new_simple("video/x-raw-gray",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   "bpp", G_TYPE_INT, 8,
                                   "depth", G_TYPE_INT, 8,
                                   NULL);
    }
    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    if (fourcc == CV_FOURCC_DEFAULT) {
        gst_bin_add_many(GST_BIN(pipeline), source, color, mux, file, NULL);
        if(!gst_element_link_many(source, color, enc, mux, file, NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }
    else {
        gst_bin_add_many(GST_BIN(pipeline), source, color, enc, mux, file, NULL);
        if(!gst_element_link_many(source, color, enc, mux, file, NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
       GST_STATE_CHANGE_FAILURE) {
        CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
    }
    __END__;

    return true;
}
bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::writeFrame");

    __BEGIN__;

    if (input_pix_fmt == 1) {
        if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) {
            CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
        }
    }
    else if (input_pix_fmt == 0) {
        if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) {
            CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
        }
    }
    else {
        assert(false);
    }

    int size;
    size = image->imageSize;
    buffer = gst_buffer_new_and_alloc (size);
    //gst_buffer_set_data (buffer,(guint8*)image->imageData, size);
    memcpy (GST_BUFFER_DATA(buffer), image->imageData, size);
    gst_app_src_push_buffer(GST_APP_SRC(source), buffer);
    //gst_buffer_unref(buffer);
    //buffer = 0;

    __END__;
    return true;
}
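A usage sketch of the write path (frame dimensions must match the CvSize passed to open(), and with isColor set the frames must be 8-bit, 3-channel, as checked above; `writer` is the hypothetical handle from the earlier sketch):

    IplImage *img = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 3);
    for(int i = 0; i < 100; i++) {
        /* ... draw frame i into img ... */
        cvWriteFrame(writer, img);   // lands in writeFrame() above
    }
    cvReleaseVideoWriter(&writer);   // close(): sends EOS, stops the pipeline
    cvReleaseImage(&img);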
CvVideoWriter* cvCreateVideoWriter_GStreamer(const char* filename, int fourcc, double fps,
                                             CvSize frameSize, int isColor )
@@ -593,151 +631,151 @@ CvVideoWriter* cvCreateVideoWriter_GStreamer(const char*, int, double, CvSize, i
#endif
void CvCapture_GStreamer::close()
{
    if(pipeline) {
        gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
        gst_object_unref(GST_OBJECT(pipeline));
    }
    if(buffer)
        gst_buffer_unref(buffer);
}
double CvCapture_GStreamer::getProperty( int propId )
{
    GstFormat format;
    //GstQuery q;
    gint64 value;

    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        if(!gst_element_query_position(pipeline, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value * 1e-6; // nanoseconds to milliseconds
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        if(!gst_element_query_position(pipeline, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        if(!gst_element_query_position(pipeline, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return ((double) value) / GST_FORMAT_PERCENT_MAX;
    case CV_CAP_PROP_FRAME_WIDTH:
    case CV_CAP_PROP_FRAME_HEIGHT:
    case CV_CAP_PROP_FPS:
    case CV_CAP_PROP_FOURCC:
        break;
    case CV_CAP_PROP_FRAME_COUNT:
        format = GST_FORMAT_DEFAULT;
        if(!gst_element_query_duration(pipeline, &format, &value)) {
            CV_WARN("GStreamer: unable to query duration of stream");
            return false;
        }
        return value;
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break;
    default:
        CV_WARN("GStreamer: unhandled property");
        break;
    }
    return false;
}
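Querying from user code goes through cvGetCaptureProperty(); unhandled properties fall through to the final `return false`, so 0 comes back for them. A sketch:

    double msec   = cvGetCaptureProperty(cap, CV_CAP_PROP_POS_MSEC);    // nanoseconds / 1e6
    double nframe = cvGetCaptureProperty(cap, CV_CAP_PROP_POS_FRAMES);  // decoded-frame index
    double total  = cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_COUNT); // if the demuxer can tell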
bool CvCapture_GStreamer::setProperty( int propId, double value )
{
    GstFormat format;
    GstSeekFlags flags;

    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                    flags, (gint64) (value * GST_MSECOND))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                    flags, (gint64) value)) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                    flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_FRAME_WIDTH:
        if(value > 0)
            setFilter("width", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("width");
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        if(value > 0)
            setFilter("height", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("height");
        break;
    case CV_CAP_PROP_FPS:
        if(value > 0) {
            int num, denom;
            num = (int) value;
            if(value != num) { // FIXME this supports only fractions x/1 and x/2
                num = (int) (value * 2);
                denom = 2;
            } else
                denom = 1;
            setFilter("framerate", GST_TYPE_FRACTION, num, denom);
        } else
            removeFilter("framerate");
        break;
    case CV_CAP_PROP_FOURCC:
    case CV_CAP_PROP_FRAME_COUNT:
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break;
    default:
        CV_WARN("GStreamer: unhandled property");
    }
    return false;
}
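Seeking and the caps filter are driven from user code via cvSetCaptureProperty(). Note that this revision always returns false from setProperty(), even when the seek or filter change succeeded:

    cvSetCaptureProperty(cap, CV_CAP_PROP_POS_MSEC, 10000);   // flushing, accurate seek to 10 s
    cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, 320);  // setFilter("width", ...)
    cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 240); // setFilter("height", ...)
    cvSetCaptureProperty(cap, CV_CAP_PROP_FPS, 12.5);         // framerate 25/2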
CvCapture* cvCreateCapture_GStreamer(int type, const char* filename )
{
...