Commit 8396ae6e authored by Alexey Nikolaev's avatar Alexey Nikolaev Committed by Alexander Alekhin

Merge pull request #12893 from aleksey-nikolaev:cap-update

V4L (V4L2): Refactoring. Added missing camera properties. Fixed getting `INF` for some properties. Single-threaded as always (#12893)

* cap_v4l:
1 Added cap_properties verbalization.
2 Set Get of properties elementary refactoring.
3 Removed converting parameters to/from [0,1] range.
4 Added all known conversion from V4L2_CID_* to CV_CAP_PROP_*

* cap_v4l:
1. Removed all query for parameters range.
2. Refactored capture initialization.
3. Added selecting the input channel via CV_CAP_PROP_MODE. The default value of -1 means the channel is not changed.

* cap_v4l:
1. Refactoring of Convert To RGB

* cap_v4l:
1. Fixed use of video buffer index.
2. Removed extra memcopy for grab image.
3. Removed device closing from autosetup_capture_mode_v4l2

* cap_v4l:
1. The `goto` was eliminated
2. Fixed use of temporary buffer index for V4L2_PIX_FMT_SN9C10X
3. Fixed use of the bufferIndex
4. Removed trailing spaces and unused variables.

* cap_v4l:
1. Alias for capture->buffers[capture->bufferIndex]
2. Reduced size of data for memcpy: bytesused instead of length
3. Refactoring. Code duplication. More info for debug

* cap_v4l:
1. Added the ability to grab and retrieveFrame independently several times

* cap_v4l:
1. No need to close/reopen the device to apply new capture parameters.
2. Removed use of the device name as a flag that the capture is closed. Added a dedicated function for this check.
3. Refactoring. Added requestBuffers and createBuffers

* cap_v4l:
1. Added tryIoctl with `select`, as was done in mainloop_v4l2.
2. Fixed buffer request for device without closing the device.
3. Some static function moved to CvCaptureCAM_V4L
4. Removed unused defines

* cap_v4l:
1. Thread-safe now

* cap_v4l:
1. Fixed thread-safe destructor
2. Fixed FPS setting

* Missed break

* Removed thread-safety

* cap_v4l:
1. Reverted conversion parameters to/from [0,1] by default for backward compatibility.
2. Added setting for turn off compatibility mode: set CV_CAP_PROP_MODE to 65536
3. Most static functions moved to CvCaptureCAM_V4L
4. Refactoring of icvRetrieveFrameCAM_V4L and using of frame_allocated flag

* cap_v4l:
1. Added conversion to RGB from NV12, NV21
2. Refactoring. Removed wrappers for known format conversions.

* Added `CAP_PROP_CHANNEL` to the enum VideoCaptureProperties.
CAP_V4L migrated to use VideoCaptureProperties.

* 1. Update comments.
2. Environment variable `OPENCV_VIDEOIO_V4L_RANGE_NORMALIZED` for setting default backward compatibility mode.
3. Revert getting of `CAP_PROP_MODE` as fourcc code in backward compatibility mode.

* videoio: update cap_v4l - compatibilityMode => normalizePropRange

* videoio(test): V4L2 MJPEG test

`v4l2-ctl --list-formats` should have 'MJPG' entry

* videoio: fix buffer initialization

to avoid "munmap: Invalid argument" messages
parent 0c261acf
...@@ -169,7 +169,8 @@ enum VideoCaptureProperties { ...@@ -169,7 +169,8 @@ enum VideoCaptureProperties {
CAP_PROP_AUTOFOCUS =39, CAP_PROP_AUTOFOCUS =39,
CAP_PROP_SAR_NUM =40, //!< Sample aspect ratio: num/den (num) CAP_PROP_SAR_NUM =40, //!< Sample aspect ratio: num/den (num)
CAP_PROP_SAR_DEN =41, //!< Sample aspect ratio: num/den (den) CAP_PROP_SAR_DEN =41, //!< Sample aspect ratio: num/den (den)
CAP_PROP_BACKEND =42, //!< current backend (enum VideoCaptureAPIs). Read-only property CAP_PROP_BACKEND =42, //!< Current backend (enum VideoCaptureAPIs). Read-only property
CAP_PROP_CHANNEL =43, //!< Video input or Channel Number (only for those cameras that support)
#ifndef CV_DOXYGEN #ifndef CV_DOXYGEN
CV__CAP_PROP_LATEST CV__CAP_PROP_LATEST
#endif #endif
......
...@@ -242,10 +242,8 @@ make & enjoy! ...@@ -242,10 +242,8 @@ make & enjoy!
#define DEFAULT_V4L_HEIGHT 480 #define DEFAULT_V4L_HEIGHT 480
#define DEFAULT_V4L_FPS 30 #define DEFAULT_V4L_FPS 30
#define CHANNEL_NUMBER 1
#define MAX_CAMERAS 8 #define MAX_CAMERAS 8
// default and maximum number of V4L buffers, not including last, 'special' buffer // default and maximum number of V4L buffers, not including last, 'special' buffer
#define MAX_V4L_BUFFERS 10 #define MAX_V4L_BUFFERS 10
#define DEFAULT_V4L_BUFFERS 4 #define DEFAULT_V4L_BUFFERS 4
...@@ -253,16 +251,22 @@ make & enjoy! ...@@ -253,16 +251,22 @@ make & enjoy!
// if enabled, then bad JPEG warnings become errors and cause NULL returned instead of image // if enabled, then bad JPEG warnings become errors and cause NULL returned instead of image
#define V4L_ABORT_BADJPEG #define V4L_ABORT_BADJPEG
#define MAX_DEVICE_DRIVER_NAME 80
namespace cv { namespace cv {
/* Device Capture Objects */ /* Device Capture Objects */
/* V4L2 structure */ /* V4L2 structure */
struct buffer struct Buffer
{ {
void * start; void * start;
size_t length; size_t length;
// This is the dequeued buffer. It is used to put the buffer back in the queue.
// The buffer is valid only if capture->bufferIndex >= 0
v4l2_buffer buffer;
Buffer() : start(NULL), length(0)
{
buffer = v4l2_buffer();
}
}; };
struct CvCaptureCAM_V4L CV_FINAL : public CvCapture struct CvCaptureCAM_V4L CV_FINAL : public CvCapture
...@@ -271,10 +275,9 @@ struct CvCaptureCAM_V4L CV_FINAL : public CvCapture ...@@ -271,10 +275,9 @@ struct CvCaptureCAM_V4L CV_FINAL : public CvCapture
int deviceHandle; int deviceHandle;
int bufferIndex; int bufferIndex;
int FirstCapture; bool FirstCapture;
String deviceName; String deviceName;
char *memoryMap;
IplImage frame; IplImage frame;
__u32 palette; __u32 palette;
...@@ -285,149 +288,163 @@ struct CvCaptureCAM_V4L CV_FINAL : public CvCapture ...@@ -285,149 +288,163 @@ struct CvCaptureCAM_V4L CV_FINAL : public CvCapture
bool convert_rgb; bool convert_rgb;
bool frame_allocated; bool frame_allocated;
bool returnFrame; bool returnFrame;
// To select a video input, set cv::CAP_PROP_CHANNEL to the channel number.
// If the new channel number is less than 0, the video input will not be changed.
int channelNumber;
// Normalize properties. If set, parameters will be converted to/from the [0,1) range.
// Enabled by default (as OpenCV 3.x does).
// Value is initialized from the environment variable `OPENCV_VIDEOIO_V4L_RANGE_NORMALIZED`:
// To select real parameters mode after the device is open, set cv::CAP_PROP_MODE to 0;
// any other value reverts to the backward compatibility mode (with normalized properties).
// Range normalization affects the following parameters:
// cv::CAP_PROP_*: BRIGHTNESS,CONTRAST,SATURATION,HUE,GAIN,EXPOSURE,FOCUS,AUTOFOCUS,AUTO_EXPOSURE.
bool normalizePropRange;
/* V4L2 variables */ /* V4L2 variables */
buffer buffers[MAX_V4L_BUFFERS + 1]; Buffer buffers[MAX_V4L_BUFFERS + 1];
v4l2_capability cap; v4l2_capability capability;
v4l2_input inp; v4l2_input videoInput;
v4l2_format form; v4l2_format form;
v4l2_crop crop;
v4l2_cropcap cropcap;
v4l2_requestbuffers req; v4l2_requestbuffers req;
v4l2_buf_type type; v4l2_buf_type type;
v4l2_queryctrl queryctrl;
timeval timestamp; timeval timestamp;
/* V4L2 control variables */
Range focus, brightness, contrast, saturation, hue, gain, exposure;
bool open(int _index); bool open(int _index);
bool open(const char* deviceName); bool open(const char* deviceName);
bool isOpened() const;
virtual double getProperty(int) const CV_OVERRIDE; virtual double getProperty(int) const CV_OVERRIDE;
virtual bool setProperty(int, double) CV_OVERRIDE; virtual bool setProperty(int, double) CV_OVERRIDE;
virtual bool grabFrame() CV_OVERRIDE; virtual bool grabFrame() CV_OVERRIDE;
virtual IplImage* retrieveFrame(int) CV_OVERRIDE; virtual IplImage* retrieveFrame(int) CV_OVERRIDE;
Range getRange(int property_id) const { CvCaptureCAM_V4L();
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
return brightness;
case CV_CAP_PROP_CONTRAST:
return contrast;
case CV_CAP_PROP_SATURATION:
return saturation;
case CV_CAP_PROP_HUE:
return hue;
case CV_CAP_PROP_GAIN:
return gain;
case CV_CAP_PROP_EXPOSURE:
return exposure;
case CV_CAP_PROP_FOCUS:
return focus;
case CV_CAP_PROP_AUTOFOCUS:
return Range(0, 1);
case CV_CAP_PROP_AUTO_EXPOSURE:
return Range(0, 4);
default:
return Range(0, 255);
}
}
virtual ~CvCaptureCAM_V4L(); virtual ~CvCaptureCAM_V4L();
bool requestBuffers();
bool requestBuffers(unsigned int buffer_number);
bool createBuffers();
void releaseBuffers();
bool initCapture();
bool streaming(bool startStream);
bool setFps(int value);
bool tryIoctl(unsigned long ioctlCode, void *parameter) const;
bool controlInfo(int property_id, __u32 &v4l2id, cv::Range &range) const;
bool icvControl(__u32 v4l2id, int &value, bool isSet) const;
bool icvSetFrameSize(int _width, int _height);
bool v4l2_reset();
bool setVideoInputChannel();
bool try_palette_v4l2();
bool try_init_v4l2();
bool autosetup_capture_mode_v4l2();
void v4l2_create_frame();
bool read_frame_v4l2();
bool convertableToRgb() const;
void convertToRgb(const Buffer &currentBuffer);
void releaseFrame();
}; };
static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture );
static bool icvGrabFrameCAM_V4L( CvCaptureCAM_V4L* capture );
static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int );
static double icvGetPropertyCAM_V4L( const CvCaptureCAM_V4L* capture, int property_id );
static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, int property_id, double value );
/*********************** Implementations ***************************************/ /*********************** Implementations ***************************************/
CvCaptureCAM_V4L::CvCaptureCAM_V4L() : deviceHandle(-1), bufferIndex(-1)
{}
CvCaptureCAM_V4L::~CvCaptureCAM_V4L() { CvCaptureCAM_V4L::~CvCaptureCAM_V4L() {
icvCloseCAM_V4L(this); streaming(false);
releaseBuffers();
if(deviceHandle != -1)
close(deviceHandle);
}
bool CvCaptureCAM_V4L::isOpened() const
{
return deviceHandle != -1;
} }
static bool try_palette_v4l2(CvCaptureCAM_V4L* capture) bool CvCaptureCAM_V4L::try_palette_v4l2()
{ {
capture->form = v4l2_format(); form = v4l2_format();
capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
capture->form.fmt.pix.pixelformat = capture->palette; form.fmt.pix.pixelformat = palette;
capture->form.fmt.pix.field = V4L2_FIELD_ANY; form.fmt.pix.field = V4L2_FIELD_ANY;
capture->form.fmt.pix.width = capture->width; form.fmt.pix.width = width;
capture->form.fmt.pix.height = capture->height; form.fmt.pix.height = height;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_S_FMT, &capture->form)) if (!tryIoctl(VIDIOC_S_FMT, &form))
return false; return false;
return capture->palette == capture->form.fmt.pix.pixelformat; return palette == form.fmt.pix.pixelformat;
} }
static int try_init_v4l2(CvCaptureCAM_V4L* capture, const char *deviceName) bool CvCaptureCAM_V4L::setVideoInputChannel()
{ {
// Test device for V4L2 compatibility if(channelNumber < 0)
// Return value: return true;
// -1 then unable to open device /* Query channels number */
// 0 then detected nothing int channel = 0;
// 1 then V4L2 device if (!tryIoctl(VIDIOC_G_INPUT, &channel))
return false;
int deviceIndex; if(channel == channelNumber)
return true;
/* Open and test V4L2 device */ /* Query information about new input channel */
capture->deviceHandle = open (deviceName, O_RDWR /* required */ | O_NONBLOCK, 0); videoInput = v4l2_input();
if (-1 == capture->deviceHandle) videoInput.index = channelNumber;
{ if (!tryIoctl(VIDIOC_ENUMINPUT, &videoInput))
#ifndef NDEBUG return false;
fprintf(stderr, "(DEBUG) try_init_v4l2 open \"%s\": %s\n", deviceName, strerror(errno));
#endif //To select a video input applications store the number of the desired input in an integer
icvCloseCAM_V4L(capture); // and call the VIDIOC_S_INPUT ioctl with a pointer to this integer. Side effects are possible.
return -1; // For example inputs may support different video standards, so the driver may implicitly
} // switch the current standard.
// It is good practice to select an input before querying or negotiating any other parameters.
return tryIoctl(VIDIOC_S_INPUT, &channelNumber);
}
bool CvCaptureCAM_V4L::try_init_v4l2()
{
/* The following code sets the CHANNEL_NUMBER of the video input. Some video sources
have sub "Channel Numbers". For a typical V4L TV capture card, this is usually 1.
I myself am using a simple NTSC video input capture card that uses the value of 1.
If you are not in North America or have a different video standard, you WILL have to change
the following settings and recompile/reinstall. This set of settings is based on
the most commonly encountered input video source types (like my bttv card) */
capture->cap = v4l2_capability(); // The cv::CAP_PROP_MODE used for set the video input channel number
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QUERYCAP, &capture->cap)) if (!setVideoInputChannel())
{ {
#ifndef NDEBUG #ifndef NDEBUG
fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_QUERYCAP \"%s\": %s\n", deviceName, strerror(errno)); fprintf(stderr, "(DEBUG) V4L2: Unable to set Video Input Channel.");
#endif #endif
icvCloseCAM_V4L(capture); return false;
return 0;
} }
/* Query channels number */ // Test device for V4L2 compatibility
if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_INPUT, &deviceIndex)) capability = v4l2_capability();
if (!tryIoctl(VIDIOC_QUERYCAP, &capability))
{ {
#ifndef NDEBUG #ifndef NDEBUG
fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_G_INPUT \"%s\": %s\n", deviceName, strerror(errno)); fprintf(stderr, "(DEBUG) V4L2: Unable to query capability.");
#endif #endif
icvCloseCAM_V4L(capture); return false;
return 0;
} }
/* Query information about current input */ if ((capability.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
capture->inp = v4l2_input();
capture->inp.index = deviceIndex;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp))
{ {
#ifndef NDEBUG /* Nope. */
fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_ENUMINPUT \"%s\": %s\n", deviceName, strerror(errno)); fprintf(stderr, "VIDEOIO ERROR: V4L2: Unable to capture video memory.");
#endif return false;
icvCloseCAM_V4L(capture);
return 0;
} }
return true;
return 1;
} }
static int autosetup_capture_mode_v4l2(CvCaptureCAM_V4L* capture) { bool CvCaptureCAM_V4L::autosetup_capture_mode_v4l2()
{
//in case palette is already set and works, no need to setup. //in case palette is already set and works, no need to setup.
if(capture->palette != 0 and try_palette_v4l2(capture)){ if (palette != 0 && try_palette_v4l2()) {
return 0; return true;
} }
__u32 try_order[] = { __u32 try_order[] = {
V4L2_PIX_FMT_BGR24, V4L2_PIX_FMT_BGR24,
...@@ -437,6 +454,8 @@ static int autosetup_capture_mode_v4l2(CvCaptureCAM_V4L* capture) { ...@@ -437,6 +454,8 @@ static int autosetup_capture_mode_v4l2(CvCaptureCAM_V4L* capture) {
V4L2_PIX_FMT_YUV411P, V4L2_PIX_FMT_YUV411P,
V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_YUYV,
V4L2_PIX_FMT_UYVY, V4L2_PIX_FMT_UYVY,
V4L2_PIX_FMT_NV12,
V4L2_PIX_FMT_NV21,
V4L2_PIX_FMT_SBGGR8, V4L2_PIX_FMT_SBGGR8,
V4L2_PIX_FMT_SGBRG8, V4L2_PIX_FMT_SGBRG8,
V4L2_PIX_FMT_SN9C10X, V4L2_PIX_FMT_SN9C10X,
...@@ -444,316 +463,262 @@ static int autosetup_capture_mode_v4l2(CvCaptureCAM_V4L* capture) { ...@@ -444,316 +463,262 @@ static int autosetup_capture_mode_v4l2(CvCaptureCAM_V4L* capture) {
V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_MJPEG,
V4L2_PIX_FMT_JPEG, V4L2_PIX_FMT_JPEG,
#endif #endif
V4L2_PIX_FMT_Y16 V4L2_PIX_FMT_Y16,
V4L2_PIX_FMT_GREY
}; };
for (size_t i = 0; i < sizeof(try_order) / sizeof(__u32); i++) { for (size_t i = 0; i < sizeof(try_order) / sizeof(__u32); i++) {
capture->palette = try_order[i]; palette = try_order[i];
if (try_palette_v4l2(capture)) { if (try_palette_v4l2()) {
return 0; return true;
} }
} }
return false;
fprintf(stderr,
"VIDEOIO ERROR: V4L2: Pixel format of incoming image is unsupported by OpenCV\n");
icvCloseCAM_V4L(capture);
return -1;
} }
static void v4l2_control_range(CvCaptureCAM_V4L* cap, __u32 id) bool CvCaptureCAM_V4L::setFps(int value)
{ {
cap->queryctrl= v4l2_queryctrl(); if (!isOpened())
cap->queryctrl.id = id; return false;
if(0 != ioctl(cap->deviceHandle, VIDIOC_QUERYCTRL, &cap->queryctrl))
{
if (errno != EINVAL)
perror ("VIDIOC_QUERYCTRL");
return;
}
if (cap->queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
return;
Range range(cap->queryctrl.minimum, cap->queryctrl.maximum); v4l2_streamparm streamparm = v4l2_streamparm();
streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
streamparm.parm.capture.timeperframe.numerator = 1;
streamparm.parm.capture.timeperframe.denominator = __u32(value);
if (!tryIoctl(VIDIOC_S_PARM, &streamparm) || !tryIoctl(VIDIOC_G_PARM, &streamparm))
return false;
switch(cap->queryctrl.id) { fps = streamparm.parm.capture.timeperframe.denominator;
case V4L2_CID_BRIGHTNESS: return true;
cap->brightness = range;
break;
case V4L2_CID_CONTRAST:
cap->contrast = range;
break;
case V4L2_CID_SATURATION:
cap->saturation = range;
break;
case V4L2_CID_HUE:
cap->hue = range;
break;
case V4L2_CID_GAIN:
cap->gain = range;
break;
case V4L2_CID_EXPOSURE_ABSOLUTE:
cap->exposure = range;
break;
case V4L2_CID_FOCUS_ABSOLUTE:
cap->focus = range;
break;
}
} }
static void v4l2_scan_controls(CvCaptureCAM_V4L* capture) bool CvCaptureCAM_V4L::convertableToRgb() const
{ {
switch (palette) {
__u32 ctrl_id;
for (ctrl_id = V4L2_CID_BASE; ctrl_id < V4L2_CID_LASTP1; ctrl_id++)
{
v4l2_control_range(capture, ctrl_id);
}
for (ctrl_id = V4L2_CID_PRIVATE_BASE;;ctrl_id++)
{
errno = 0;
v4l2_control_range(capture, ctrl_id);
if (errno)
break;
}
v4l2_control_range(capture, V4L2_CID_FOCUS_ABSOLUTE);
}
static int v4l2_set_fps(CvCaptureCAM_V4L* capture) {
v4l2_streamparm setfps = v4l2_streamparm();
setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setfps.parm.capture.timeperframe.numerator = 1;
setfps.parm.capture.timeperframe.denominator = capture->fps;
return ioctl (capture->deviceHandle, VIDIOC_S_PARM, &setfps);
}
static int v4l2_num_channels(__u32 palette) {
switch(palette) {
case V4L2_PIX_FMT_YVU420: case V4L2_PIX_FMT_YVU420:
case V4L2_PIX_FMT_YUV420: case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_NV12:
case V4L2_PIX_FMT_NV21:
case V4L2_PIX_FMT_YUV411P:
#ifdef HAVE_JPEG
case V4L2_PIX_FMT_MJPEG: case V4L2_PIX_FMT_MJPEG:
case V4L2_PIX_FMT_JPEG: case V4L2_PIX_FMT_JPEG:
case V4L2_PIX_FMT_Y16: #endif
return 1;
case V4L2_PIX_FMT_YUYV: case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_UYVY: case V4L2_PIX_FMT_UYVY:
return 2; case V4L2_PIX_FMT_SBGGR8:
case V4L2_PIX_FMT_BGR24: case V4L2_PIX_FMT_SN9C10X:
case V4L2_PIX_FMT_SGBRG8:
case V4L2_PIX_FMT_RGB24: case V4L2_PIX_FMT_RGB24:
return 3; case V4L2_PIX_FMT_Y16:
case V4L2_PIX_FMT_GREY:
case V4L2_PIX_FMT_BGR24:
return true;
default: default:
return 0; break;
} }
return false;
} }
static void v4l2_create_frame(CvCaptureCAM_V4L *capture) { void CvCaptureCAM_V4L::v4l2_create_frame()
CvSize size = {capture->form.fmt.pix.width, capture->form.fmt.pix.height}; {
CvSize size = {form.fmt.pix.width, form.fmt.pix.height};
int channels = 3; int channels = 3;
int depth = IPL_DEPTH_8U; int depth = IPL_DEPTH_8U;
if (!capture->convert_rgb) { if (!convert_rgb) {
channels = v4l2_num_channels(capture->palette); switch (palette) {
case V4L2_PIX_FMT_BGR24:
switch(capture->palette) { case V4L2_PIX_FMT_RGB24:
case V4L2_PIX_FMT_MJPEG: break;
case V4L2_PIX_FMT_JPEG: case V4L2_PIX_FMT_YUYV:
size = cvSize(capture->buffers[capture->bufferIndex].length, 1); case V4L2_PIX_FMT_UYVY:
channels = 2;
break; break;
case V4L2_PIX_FMT_YVU420: case V4L2_PIX_FMT_YVU420:
case V4L2_PIX_FMT_YUV420: case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_NV12:
case V4L2_PIX_FMT_NV21:
channels = 1;
size.height = size.height * 3 / 2; // "1.5" channels size.height = size.height * 3 / 2; // "1.5" channels
break; break;
case V4L2_PIX_FMT_Y16: case V4L2_PIX_FMT_Y16:
if(!capture->convert_rgb){ depth = IPL_DEPTH_16U;
depth = IPL_DEPTH_16U; /* fallthru */
} case V4L2_PIX_FMT_GREY:
channels = 1;
break;
case V4L2_PIX_FMT_MJPEG:
case V4L2_PIX_FMT_JPEG:
default:
channels = 1;
if(bufferIndex < 0)
size = cvSize(buffers[MAX_V4L_BUFFERS].length, 1);
else
size = cvSize(buffers[bufferIndex].buffer.bytesused, 1);
break; break;
} }
} }
/* Set up Image data */ /* Set up Image data */
cvInitImageHeader(&capture->frame, size, depth, channels); cvInitImageHeader(&frame, size, depth, channels);
/* Allocate space for pixelformat we convert to. /* Allocate space for pixelformat we convert to.
* If we do not convert frame is just points to the buffer * If we do not convert frame is just points to the buffer
*/ */
if(capture->convert_rgb) { releaseFrame();
capture->frame.imageData = (char*)cvAlloc(capture->frame.imageSize); // we need memory iff convert_rgb is true
if (convert_rgb) {
frame.imageData = (char *)cvAlloc(frame.imageSize);
frame_allocated = true;
} }
capture->frame_allocated = capture->convert_rgb;
} }
static int _capture_V4L2 (CvCaptureCAM_V4L *capture) bool CvCaptureCAM_V4L::initCapture()
{ {
const char* deviceName = capture->deviceName.c_str(); if (!isOpened())
if (try_init_v4l2(capture, deviceName) != 1) { return false;
/* init of the v4l2 device is not OK */
return -1;
}
/* V4L2 control variables are zero (memset above) */
/* Scan V4L2 controls */
v4l2_scan_controls(capture);
if ((capture->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) { if (!try_init_v4l2()) {
/* Nope. */ #ifndef NDEBUG
fprintf( stderr, "VIDEOIO ERROR: V4L2: device %s is unable to capture video memory.\n",deviceName); fprintf(stderr, " try_init_v4l2 open \"%s\": %s\n", deviceName.c_str(), strerror(errno));
icvCloseCAM_V4L(capture); #endif
return -1; return false;
} }
/* The following code sets the CHANNEL_NUMBER of the video input. Some video sources
have sub "Channel Numbers". For a typical V4L TV capture card, this is usually 1.
I myself am using a simple NTSC video input capture card that uses the value of 1.
If you are not in North America or have a different video standard, you WILL have to change
the following settings and recompile/reinstall. This set of settings is based on
the most commonly encountered input video source types (like my bttv card) */
if(capture->inp.index > 0) {
capture->inp = v4l2_input();
capture->inp.index = CHANNEL_NUMBER;
/* Set only channel number to CHANNEL_NUMBER */
/* V4L2 have a status field from selected video mode */
if (-1 == ioctl (capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp))
{
fprintf (stderr, "VIDEOIO ERROR: V4L2: Aren't able to set channel number\n");
icvCloseCAM_V4L (capture);
return -1;
}
} /* End if */
/* Find Window info */ /* Find Window info */
capture->form = v4l2_format(); form = v4l2_format();
capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form)) { if (!tryIoctl(VIDIOC_G_FMT, &form)) {
fprintf( stderr, "VIDEOIO ERROR: V4L2: Could not obtain specifics of capture window.\n\n"); fprintf( stderr, "VIDEOIO ERROR: V4L2: Could not obtain specifics of capture window.\n");
icvCloseCAM_V4L(capture); return false;
return -1;
} }
if (autosetup_capture_mode_v4l2(capture) == -1) if (!autosetup_capture_mode_v4l2()) {
return -1; fprintf(stderr, "VIDEOIO ERROR: V4L2: Pixel format of incoming image is unsupported by OpenCV\n");
return false;
}
/* try to set framerate */ /* try to set framerate */
v4l2_set_fps(capture); setFps(fps);
unsigned int min; unsigned int min;
/* Buggy driver paranoia. */ /* Buggy driver paranoia. */
min = capture->form.fmt.pix.width * 2; min = form.fmt.pix.width * 2;
if (capture->form.fmt.pix.bytesperline < min) if (form.fmt.pix.bytesperline < min)
capture->form.fmt.pix.bytesperline = min; form.fmt.pix.bytesperline = min;
min = capture->form.fmt.pix.bytesperline * capture->form.fmt.pix.height; min = form.fmt.pix.bytesperline * form.fmt.pix.height;
if (form.fmt.pix.sizeimage < min)
form.fmt.pix.sizeimage = min;
if (!requestBuffers())
return false;
if (capture->form.fmt.pix.sizeimage < min) if (!createBuffers()) {
capture->form.fmt.pix.sizeimage = min; /* free capture, and returns an error code */
releaseBuffers();
return false;
}
capture->req = v4l2_requestbuffers(); v4l2_create_frame();
unsigned int buffer_number = capture->bufferSize; // reinitialize buffers
FirstCapture = true;
try_again: return true;
};
capture->req.count = buffer_number; bool CvCaptureCAM_V4L::requestBuffers()
capture->req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; {
capture->req.memory = V4L2_MEMORY_MMAP; unsigned int buffer_number = bufferSize;
while (buffer_number > 0) {
if (!requestBuffers(buffer_number))
return false;
if (req.count >= buffer_number)
break;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_REQBUFS, &capture->req)) buffer_number--;
{ fprintf(stderr, "Insufficient buffer memory on %s -- decreasing buffers\n", deviceName.c_str());
if (EINVAL == errno)
{
fprintf (stderr, "%s does not support memory mapping\n", deviceName);
} else {
perror ("VIDIOC_REQBUFS");
}
/* free capture, and returns an error code */
icvCloseCAM_V4L (capture);
return -1;
} }
if (buffer_number < 1) {
fprintf(stderr, "Insufficient buffer memory on %s\n", deviceName.c_str());
return false;
}
bufferSize = req.count;
return true;
}
if (capture->req.count < buffer_number) bool CvCaptureCAM_V4L::requestBuffers(unsigned int buffer_number)
{ {
if (buffer_number == 1) if (!isOpened())
{ return false;
fprintf (stderr, "Insufficient buffer memory on %s\n", deviceName);
/* free capture, and returns an error code */ req = v4l2_requestbuffers();
icvCloseCAM_V4L (capture); req.count = buffer_number;
return -1; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
} else { req.memory = V4L2_MEMORY_MMAP;
buffer_number--;
fprintf (stderr, "Insufficient buffer memory on %s -- decreaseing buffers\n", deviceName);
goto try_again; if (!tryIoctl(VIDIOC_REQBUFS, &req)) {
if (EINVAL == errno) {
fprintf(stderr, "%s does not support memory mapping\n", deviceName.c_str());
} else {
perror("VIDIOC_REQBUFS");
} }
return false;
} }
return true;
}
for (unsigned int n_buffers = 0; n_buffers < capture->req.count; ++n_buffers) bool CvCaptureCAM_V4L::createBuffers()
{ {
size_t maxLength = 0;
for (unsigned int n_buffers = 0; n_buffers < req.count; ++n_buffers) {
v4l2_buffer buf = v4l2_buffer(); v4l2_buffer buf = v4l2_buffer();
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP; buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers; buf.index = n_buffers;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QUERYBUF, &buf)) { if (!tryIoctl(VIDIOC_QUERYBUF, &buf)) {
perror ("VIDIOC_QUERYBUF"); perror("VIDIOC_QUERYBUF");
return false;
/* free capture, and returns an error code */
icvCloseCAM_V4L (capture);
return -1;
} }
capture->buffers[n_buffers].length = buf.length; buffers[n_buffers].length = buf.length;
capture->buffers[n_buffers].start = buffers[n_buffers].start =
mmap (NULL /* start anywhere */, mmap(NULL /* start anywhere */,
buf.length, buf.length,
PROT_READ | PROT_WRITE /* required */, PROT_READ /* required */,
MAP_SHARED /* recommended */, MAP_SHARED /* recommended */,
capture->deviceHandle, buf.m.offset); deviceHandle, buf.m.offset);
if (MAP_FAILED == capture->buffers[n_buffers].start) {
perror ("mmap");
/* free capture, and returns an error code */
icvCloseCAM_V4L (capture);
return -1;
}
if (n_buffers == 0) { if (MAP_FAILED == buffers[n_buffers].start) {
capture->buffers[MAX_V4L_BUFFERS].start = malloc( buf.length ); perror("mmap");
capture->buffers[MAX_V4L_BUFFERS].length = buf.length; return false;
} }
maxLength = maxLength > buf.length ? maxLength : buf.length;
} }
if (maxLength > 0) {
v4l2_create_frame(capture); buffers[MAX_V4L_BUFFERS].start = malloc(maxLength);
buffers[MAX_V4L_BUFFERS].length = maxLength;
// reinitialize buffers }
capture->FirstCapture = 1; return buffers[MAX_V4L_BUFFERS].start != 0;
}
return 1;
}; /* End _capture_V4L2 */
/** /**
* some properties can not be changed while the device is in streaming mode. * some properties can not be changed while the device is in streaming mode.
* this method closes and re-opens the device to re-start the stream. * this method closes and re-opens the device to re-start the stream.
* this also causes buffers to be reallocated if the frame size was changed. * this also causes buffers to be reallocated if the frame size was changed.
*/ */
static bool v4l2_reset( CvCaptureCAM_V4L* capture) { bool CvCaptureCAM_V4L::v4l2_reset()
String deviceName = capture->deviceName; {
icvCloseCAM_V4L(capture); streaming(false);
capture->deviceName = deviceName; releaseBuffers();
return _capture_V4L2(capture) == 1; return initCapture();
} }
bool CvCaptureCAM_V4L::open(int _index) bool CvCaptureCAM_V4L::open(int _index)
...@@ -800,154 +765,127 @@ bool CvCaptureCAM_V4L::open(const char* _deviceName) ...@@ -800,154 +765,127 @@ bool CvCaptureCAM_V4L::open(const char* _deviceName)
#ifndef NDEBUG #ifndef NDEBUG
fprintf(stderr, "(DEBUG) V4L: opening %s\n", _deviceName); fprintf(stderr, "(DEBUG) V4L: opening %s\n", _deviceName);
#endif #endif
FirstCapture = 1; FirstCapture = true;
width = DEFAULT_V4L_WIDTH; width = DEFAULT_V4L_WIDTH;
height = DEFAULT_V4L_HEIGHT; height = DEFAULT_V4L_HEIGHT;
width_set = height_set = 0; width_set = height_set = 0;
bufferSize = DEFAULT_V4L_BUFFERS; bufferSize = DEFAULT_V4L_BUFFERS;
fps = DEFAULT_V4L_FPS; fps = DEFAULT_V4L_FPS;
convert_rgb = true; convert_rgb = true;
frame_allocated = false;
deviceName = _deviceName; deviceName = _deviceName;
returnFrame = true; returnFrame = true;
normalizePropRange = utils::getConfigurationParameterBool("OPENCV_VIDEOIO_V4L_RANGE_NORMALIZED", true);
channelNumber = -1;
bufferIndex = -1;
deviceHandle = ::open(deviceName.c_str(), O_RDWR /* required */ | O_NONBLOCK, 0);
if (deviceHandle == -1)
return false;
return _capture_V4L2(this) == 1; return initCapture();
} }
static int read_frame_v4l2(CvCaptureCAM_V4L* capture) { bool CvCaptureCAM_V4L::read_frame_v4l2()
{
v4l2_buffer buf = v4l2_buffer(); v4l2_buffer buf = v4l2_buffer();
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP; buf.memory = V4L2_MEMORY_MMAP;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_DQBUF, &buf)) { while (!tryIoctl(VIDIOC_DQBUF, &buf)) {
switch (errno) { if (errno == EIO && !(buf.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))) {
case EAGAIN: // Maybe buffer not in the queue? Try to put there
return 0; if (!tryIoctl(VIDIOC_QBUF, &buf))
return false;
case EIO: continue;
if (!(buf.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE)))
{
if (ioctl(capture->deviceHandle, VIDIOC_QBUF, &buf) == -1)
{
return 0;
}
}
return 0;
default:
/* display the error and stop processing */
capture->returnFrame = false;
perror ("VIDIOC_DQBUF");
return -1;
} }
/* display the error and stop processing */
returnFrame = false;
perror("VIDIOC_DQBUF");
return false;
} }
assert(buf.index < capture->req.count); assert(buf.index < req.count);
assert(buffers[buf.index].length == buf.length);
memcpy(capture->buffers[MAX_V4L_BUFFERS].start, //We shouldn't use this buffer in the queue while not retrieve frame from it.
capture->buffers[buf.index].start, buffers[buf.index].buffer = buf;
capture->buffers[MAX_V4L_BUFFERS].length ); bufferIndex = buf.index;
capture->bufferIndex = MAX_V4L_BUFFERS;
//printf("got data in buff %d, len=%d, flags=0x%X, seq=%d, used=%d)\n",
// buf.index, buf.length, buf.flags, buf.sequence, buf.bytesused);
//set timestamp in capture struct to be timestamp of most recent frame //set timestamp in capture struct to be timestamp of most recent frame
capture->timestamp = buf.timestamp; timestamp = buf.timestamp;
return true;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QBUF, &buf))
perror ("VIDIOC_QBUF");
return 1;
} }
static int mainloop_v4l2(CvCaptureCAM_V4L* capture) { bool CvCaptureCAM_V4L::tryIoctl(unsigned long ioctlCode, void *parameter) const
for (;;) { {
fd_set fds; while (-1 == ioctl(deviceHandle, ioctlCode, parameter)) {
struct timeval tv; if (!(errno == EBUSY || errno == EAGAIN))
int r; return false;
FD_ZERO (&fds); fd_set fds;
FD_SET (capture->deviceHandle, &fds); FD_ZERO(&fds);
FD_SET(deviceHandle, &fds);
/* Timeout. */ /* Timeout. */
struct timeval tv;
tv.tv_sec = 10; tv.tv_sec = 10;
tv.tv_usec = 0; tv.tv_usec = 0;
r = select (capture->deviceHandle+1, &fds, NULL, NULL, &tv); int result = select(deviceHandle + 1, &fds, NULL, NULL, &tv);
if (0 == result) {
if (-1 == r) { fprintf(stderr, "select timeout\n");
if (EINTR == errno) return false;
continue;
perror ("select");
}
if (0 == r) {
fprintf (stderr, "select timeout\n");
/* end the infinite loop */
break;
} }
if (-1 == result && EINTR != errno)
int returnCode = read_frame_v4l2 (capture); perror("select");
if(returnCode == -1)
return -1;
if(returnCode == 1)
return 1;
} }
return 0; return true;
} }
static bool icvGrabFrameCAM_V4L(CvCaptureCAM_V4L* capture) { bool CvCaptureCAM_V4L::grabFrame()
if (capture->FirstCapture) { {
if (FirstCapture) {
/* Some general initialization must take place the first time through */ /* Some general initialization must take place the first time through */
/* This is just a technicality, but all buffers must be filled up before any /* This is just a technicality, but all buffers must be filled up before any
staggered SYNC is applied. SO, filler up. (see V4L HowTo) */ staggered SYNC is applied. SO, filler up. (see V4L HowTo) */
bufferIndex = -1;
for (__u32 index = 0; index < req.count; ++index) {
v4l2_buffer buf = v4l2_buffer();
{ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
for (capture->bufferIndex = 0; buf.index = index;
capture->bufferIndex < ((int)capture->req.count);
++capture->bufferIndex)
{
v4l2_buffer buf = v4l2_buffer();
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = (unsigned long)capture->bufferIndex;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QBUF, &buf)) { if (!tryIoctl(VIDIOC_QBUF, &buf)) {
perror ("VIDIOC_QBUF"); perror("VIDIOC_QBUF");
return false;
}
}
/* enable the streaming */
capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_STREAMON,
&capture->type)) {
/* error enabling the stream */
perror ("VIDIOC_STREAMON");
return false; return false;
} }
} }
if(!streaming(true)) {
/* error enabling the stream */
perror("VIDIOC_STREAMON");
return false;
}
#if defined(V4L_ABORT_BADJPEG) #if defined(V4L_ABORT_BADJPEG)
// skip first frame. it is often bad -- this is unnotied in traditional apps, // skip first frame. it is often bad -- this is unnotied in traditional apps,
// but could be fatal if bad jpeg is enabled // but could be fatal if bad jpeg is enabled
if(mainloop_v4l2(capture) != 1) if (!read_frame_v4l2())
return false; return false;
#endif #endif
/* preparation is ok */ /* preparation is ok */
capture->FirstCapture = 0; FirstCapture = false;
} }
// In the case that the grab frame was without retrieveFrame
if(mainloop_v4l2(capture) != 1) return false; if (bufferIndex >= 0) {
if (!tryIoctl(VIDIOC_QBUF, &buffers[bufferIndex].buffer))
return true; perror("VIDIOC_QBUF");
}
return read_frame_v4l2();
} }
/* /*
...@@ -1018,14 +956,6 @@ move_411_block(int yTL, int yTR, int yBL, int yBR, int u, int v, ...@@ -1018,14 +956,6 @@ move_411_block(int yTL, int yTR, int yBL, int yBR, int u, int v,
rgb[5] = LIMIT(r+yBR); rgb[5] = LIMIT(r+yBR);
} }
/* Converts from planar YUV420P to RGB24. */
static inline void
yuv420p_to_rgb24(int width, int height, uchar* src, uchar* dst, bool isYUV)
{
cvtColor(Mat(height * 3 / 2, width, CV_8U, src), Mat(height, width, CV_8UC3, dst),
isYUV ? COLOR_YUV2BGR_IYUV : COLOR_YUV2BGR_YV12);
}
// Consider a YUV411P image of 8x2 pixels. // Consider a YUV411P image of 8x2 pixels.
// //
// A plane of Y values as before. // A plane of Y values as before.
...@@ -1072,40 +1002,6 @@ yuv411p_to_rgb24(int width, int height, ...@@ -1072,40 +1002,6 @@ yuv411p_to_rgb24(int width, int height,
} }
} }
/* convert from 4:2:2 YUYV interlaced to RGB24 */
static void
yuyv_to_rgb24(int width, int height, unsigned char* src, unsigned char* dst) {
cvtColor(Mat(height, width, CV_8UC2, src), Mat(height, width, CV_8UC3, dst),
COLOR_YUV2BGR_YUYV);
}
static inline void
uyvy_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
{
cvtColor(Mat(height, width, CV_8UC2, src), Mat(height, width, CV_8UC3, dst),
COLOR_YUV2BGR_UYVY);
}
static inline void
y16_to_rgb24 (int width, int height, unsigned char* src, unsigned char* dst)
{
Mat gray8;
Mat(height, width, CV_16UC1, src).convertTo(gray8, CV_8U, 0.00390625);
cvtColor(gray8,Mat(height, width, CV_8UC3, dst),COLOR_GRAY2BGR);
}
#ifdef HAVE_JPEG
/* convert from mjpeg to rgb24 */
static bool
mjpeg_to_rgb24(int width, int height, unsigned char* src, int length, IplImage* dst) {
Mat temp = cvarrToMat(dst);
imdecode(Mat(1, length, CV_8U, src), IMREAD_COLOR, &temp);
return temp.data && temp.cols == width && temp.rows == height;
}
#endif
/* /*
* BAYER2RGB24 ROUTINE TAKEN FROM: * BAYER2RGB24 ROUTINE TAKEN FROM:
* *
...@@ -1274,12 +1170,6 @@ static void sgbrg2rgb24(long int WIDTH, long int HEIGHT, unsigned char *src, uns ...@@ -1274,12 +1170,6 @@ static void sgbrg2rgb24(long int WIDTH, long int HEIGHT, unsigned char *src, uns
} }
} }
static inline void
rgb24_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
{
cvtColor(Mat(height, width, CV_8UC3, src), Mat(height, width, CV_8UC3, dst), COLOR_RGB2BGR);
}
#define CLAMP(x) ((x)<0?0:((x)>255)?255:(x)) #define CLAMP(x) ((x)<0?0:((x)>255)?255:(x))
typedef struct { typedef struct {
...@@ -1451,446 +1341,562 @@ static int sonix_decompress(int width, int height, unsigned char *inp, unsigned ...@@ -1451,446 +1341,562 @@ static int sonix_decompress(int width, int height, unsigned char *inp, unsigned
return 0; return 0;
} }
static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int) { void CvCaptureCAM_V4L::convertToRgb(const Buffer &currentBuffer)
/* Now get what has already been captured as a IplImage return */ {
// we need memory iff convert_rgb is true cv::Size imageSize(form.fmt.pix.width, form.fmt.pix.height);
bool recreate_frame = capture->frame_allocated != capture->convert_rgb; // Not found conversion
switch (palette)
if (!capture->convert_rgb) { {
// for mjpeg streams the size might change in between, so we have to change the header case V4L2_PIX_FMT_YUV411P:
recreate_frame += capture->frame.imageSize != (int)capture->buffers[capture->bufferIndex].length; yuv411p_to_rgb24(imageSize.width, imageSize.height,
} (unsigned char*)(currentBuffer.start),
(unsigned char*)frame.imageData);
if(recreate_frame) { return;
// printf("realloc %d %zu\n", capture->frame.imageSize, capture->buffers[capture->bufferIndex].length); case V4L2_PIX_FMT_SBGGR8:
if(capture->frame_allocated) bayer2rgb24(imageSize.width, imageSize.height,
cvFree(&capture->frame.imageData); (unsigned char*)currentBuffer.start,
v4l2_create_frame(capture); (unsigned char*)frame.imageData);
} return;
if(!capture->convert_rgb) { case V4L2_PIX_FMT_SN9C10X:
capture->frame.imageData = (char*)capture->buffers[capture->bufferIndex].start; sonix_decompress_init();
return &capture->frame; sonix_decompress(imageSize.width, imageSize.height,
} (unsigned char*)currentBuffer.start,
(unsigned char*)buffers[MAX_V4L_BUFFERS].start);
switch (capture->palette) bayer2rgb24(imageSize.width, imageSize.height,
{ (unsigned char*)buffers[MAX_V4L_BUFFERS].start,
case V4L2_PIX_FMT_BGR24: (unsigned char*)frame.imageData);
memcpy((char *)capture->frame.imageData, return;
(char *)capture->buffers[capture->bufferIndex].start, case V4L2_PIX_FMT_SGBRG8:
capture->frame.imageSize); sgbrg2rgb24(imageSize.width, imageSize.height,
(unsigned char*)currentBuffer.start,
(unsigned char*)frame.imageData);
return;
default:
break; break;
}
// Converted by cvtColor or imdecode
cv::Mat destination(imageSize, CV_8UC3, frame.imageData);
switch (palette) {
case V4L2_PIX_FMT_YVU420: case V4L2_PIX_FMT_YVU420:
cv::cvtColor(cv::Mat(imageSize.height * 3 / 2, imageSize.width, CV_8U, currentBuffer.start), destination,
COLOR_YUV2BGR_YV12);
return;
case V4L2_PIX_FMT_YUV420: case V4L2_PIX_FMT_YUV420:
yuv420p_to_rgb24(capture->form.fmt.pix.width, cv::cvtColor(cv::Mat(imageSize.height * 3 / 2, imageSize.width, CV_8U, currentBuffer.start), destination,
capture->form.fmt.pix.height, COLOR_YUV2BGR_IYUV);
(unsigned char*)(capture->buffers[capture->bufferIndex].start), return;
(unsigned char*)capture->frame.imageData, case V4L2_PIX_FMT_NV12:
capture->palette == V4L2_PIX_FMT_YUV420); cv::cvtColor(cv::Mat(imageSize.height * 3 / 2, imageSize.width, CV_8U, currentBuffer.start), destination,
break; COLOR_YUV2RGB_NV12);
return;
case V4L2_PIX_FMT_YUV411P: case V4L2_PIX_FMT_NV21:
yuv411p_to_rgb24(capture->form.fmt.pix.width, cv::cvtColor(cv::Mat(imageSize.height * 3 / 2, imageSize.width, CV_8U, currentBuffer.start), destination,
capture->form.fmt.pix.height, COLOR_YUV2RGB_NV21);
(unsigned char*)(capture->buffers[capture->bufferIndex].start), return;
(unsigned char*)capture->frame.imageData);
break;
#ifdef HAVE_JPEG #ifdef HAVE_JPEG
case V4L2_PIX_FMT_MJPEG: case V4L2_PIX_FMT_MJPEG:
case V4L2_PIX_FMT_JPEG: case V4L2_PIX_FMT_JPEG:
if (!mjpeg_to_rgb24(capture->form.fmt.pix.width, cv::imdecode(Mat(1, currentBuffer.buffer.bytesused, CV_8U, currentBuffer.start), IMREAD_COLOR, &destination);
capture->form.fmt.pix.height, return;
(unsigned char*)(capture->buffers[capture->bufferIndex]
.start),
capture->buffers[capture->bufferIndex].length,
&capture->frame))
return 0;
break;
#endif #endif
case V4L2_PIX_FMT_YUYV: case V4L2_PIX_FMT_YUYV:
yuyv_to_rgb24(capture->form.fmt.pix.width, cv::cvtColor(cv::Mat(imageSize, CV_8UC2, currentBuffer.start), destination, COLOR_YUV2BGR_YUYV);
capture->form.fmt.pix.height, return;
(unsigned char*)(capture->buffers[capture->bufferIndex].start),
(unsigned char*)capture->frame.imageData);
break;
case V4L2_PIX_FMT_UYVY: case V4L2_PIX_FMT_UYVY:
uyvy_to_rgb24(capture->form.fmt.pix.width, cv::cvtColor(cv::Mat(imageSize, CV_8UC2, currentBuffer.start), destination, COLOR_YUV2BGR_UYVY);
capture->form.fmt.pix.height, return;
(unsigned char*)(capture->buffers[capture->bufferIndex].start),
(unsigned char*)capture->frame.imageData);
break;
case V4L2_PIX_FMT_SBGGR8:
bayer2rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)capture->buffers[capture->bufferIndex].start,
(unsigned char*)capture->frame.imageData);
break;
case V4L2_PIX_FMT_SN9C10X:
sonix_decompress_init();
sonix_decompress(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)capture->buffers[capture->bufferIndex].start,
(unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start);
bayer2rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start,
(unsigned char*)capture->frame.imageData);
break;
case V4L2_PIX_FMT_SGBRG8:
sgbrg2rgb24(capture->form.fmt.pix.width,
capture->form.fmt.pix.height,
(unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start,
(unsigned char*)capture->frame.imageData);
break;
case V4L2_PIX_FMT_RGB24: case V4L2_PIX_FMT_RGB24:
rgb24_to_rgb24(capture->form.fmt.pix.width, cv::cvtColor(cv::Mat(imageSize, CV_8UC3, currentBuffer.start), destination, COLOR_RGB2BGR);
capture->form.fmt.pix.height, return;
(unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start,
(unsigned char*)capture->frame.imageData);
break;
case V4L2_PIX_FMT_Y16: case V4L2_PIX_FMT_Y16:
if(capture->convert_rgb){ {
y16_to_rgb24(capture->form.fmt.pix.width, cv::Mat temp(imageSize, CV_8UC1, buffers[MAX_V4L_BUFFERS].start);
capture->form.fmt.pix.height, cv::Mat(imageSize, CV_16UC1, currentBuffer.start).convertTo(temp, CV_8U, 1.0 / 256);
(unsigned char*)capture->buffers[capture->bufferIndex].start, cv::cvtColor(temp, destination, COLOR_GRAY2BGR);
(unsigned char*)capture->frame.imageData); return;
}else{ }
memcpy((char *)capture->frame.imageData, case V4L2_PIX_FMT_GREY:
(char *)capture->buffers[capture->bufferIndex].start, cv::cvtColor(cv::Mat(imageSize, CV_8UC1, currentBuffer.start), destination, COLOR_GRAY2BGR);
capture->frame.imageSize); break;
} case V4L2_PIX_FMT_BGR24:
default:
memcpy((char *)frame.imageData, (char *)currentBuffer.start,
std::min(frame.imageSize, (int)currentBuffer.buffer.bytesused));
break; break;
} }
}
if (capture->returnFrame) static inline cv::String capPropertyName(int prop)
return(&capture->frame); {
else switch (prop) {
return 0; case cv::CAP_PROP_POS_MSEC:
return "pos_msec";
case cv::CAP_PROP_POS_FRAMES:
return "pos_frames";
case cv::CAP_PROP_POS_AVI_RATIO:
return "pos_avi_ratio";
case cv::CAP_PROP_FRAME_COUNT:
return "frame_count";
case cv::CAP_PROP_FRAME_HEIGHT:
return "height";
case cv::CAP_PROP_FRAME_WIDTH:
return "width";
case cv::CAP_PROP_CONVERT_RGB:
return "convert_rgb";
case cv::CAP_PROP_FORMAT:
return "format";
case cv::CAP_PROP_MODE:
return "mode";
case cv::CAP_PROP_FOURCC:
return "fourcc";
case cv::CAP_PROP_AUTO_EXPOSURE:
return "auto_exposure";
case cv::CAP_PROP_EXPOSURE:
return "exposure";
case cv::CAP_PROP_TEMPERATURE:
return "temperature";
case cv::CAP_PROP_FPS:
return "fps";
case cv::CAP_PROP_BRIGHTNESS:
return "brightness";
case cv::CAP_PROP_CONTRAST:
return "contrast";
case cv::CAP_PROP_SATURATION:
return "saturation";
case cv::CAP_PROP_HUE:
return "hue";
case cv::CAP_PROP_GAIN:
return "gain";
case cv::CAP_PROP_RECTIFICATION:
return "rectification";
case cv::CAP_PROP_MONOCHROME:
return "monochrome";
case cv::CAP_PROP_SHARPNESS:
return "sharpness";
case cv::CAP_PROP_GAMMA:
return "gamma";
case cv::CAP_PROP_TRIGGER:
return "trigger";
case cv::CAP_PROP_TRIGGER_DELAY:
return "trigger_delay";
case cv::CAP_PROP_WHITE_BALANCE_RED_V:
return "white_balance_red_v";
case cv::CAP_PROP_ZOOM:
return "zoom";
case cv::CAP_PROP_FOCUS:
return "focus";
case cv::CAP_PROP_GUID:
return "guid";
case cv::CAP_PROP_ISO_SPEED:
return "iso_speed";
case cv::CAP_PROP_BACKLIGHT:
return "backlight";
case cv::CAP_PROP_PAN:
return "pan";
case cv::CAP_PROP_TILT:
return "tilt";
case cv::CAP_PROP_ROLL:
return "roll";
case cv::CAP_PROP_IRIS:
return "iris";
case cv::CAP_PROP_SETTINGS:
return "dialog_settings";
case cv::CAP_PROP_BUFFERSIZE:
return "buffersize";
case cv::CAP_PROP_AUTOFOCUS:
return "autofocus";
case cv::CAP_PROP_WHITE_BALANCE_BLUE_U:
return "white_balance_blue_u";
case cv::CAP_PROP_SAR_NUM:
return "sar_num";
case cv::CAP_PROP_SAR_DEN:
return "sar_den";
default:
return "unknown";
}
} }
static inline __u32 capPropertyToV4L2(int prop) { static inline int capPropertyToV4L2(int prop)
{
switch (prop) { switch (prop) {
case CV_CAP_PROP_BRIGHTNESS: case cv::CAP_PROP_FPS:
return -1;
case cv::CAP_PROP_FOURCC:
return -1;
case cv::CAP_PROP_FRAME_COUNT:
return V4L2_CID_MPEG_VIDEO_B_FRAMES;
case cv::CAP_PROP_FORMAT:
return -1;
case cv::CAP_PROP_MODE:
return -1;
case cv::CAP_PROP_BRIGHTNESS:
return V4L2_CID_BRIGHTNESS; return V4L2_CID_BRIGHTNESS;
case CV_CAP_PROP_CONTRAST: case cv::CAP_PROP_CONTRAST:
return V4L2_CID_CONTRAST; return V4L2_CID_CONTRAST;
case CV_CAP_PROP_SATURATION: case cv::CAP_PROP_SATURATION:
return V4L2_CID_SATURATION; return V4L2_CID_SATURATION;
case CV_CAP_PROP_HUE: case cv::CAP_PROP_HUE:
return V4L2_CID_HUE; return V4L2_CID_HUE;
case CV_CAP_PROP_GAIN: case cv::CAP_PROP_GAIN:
return V4L2_CID_GAIN; return V4L2_CID_GAIN;
case CV_CAP_PROP_AUTO_EXPOSURE: case cv::CAP_PROP_EXPOSURE:
return V4L2_CID_EXPOSURE_AUTO;
case CV_CAP_PROP_EXPOSURE:
return V4L2_CID_EXPOSURE_ABSOLUTE; return V4L2_CID_EXPOSURE_ABSOLUTE;
case CV_CAP_PROP_AUTOFOCUS: case cv::CAP_PROP_CONVERT_RGB:
return V4L2_CID_FOCUS_AUTO; return -1;
case CV_CAP_PROP_FOCUS: case cv::CAP_PROP_WHITE_BALANCE_BLUE_U:
return V4L2_CID_BLUE_BALANCE;
case cv::CAP_PROP_RECTIFICATION:
return -1;
case cv::CAP_PROP_MONOCHROME:
return -1;
case cv::CAP_PROP_SHARPNESS:
return V4L2_CID_SHARPNESS;
case cv::CAP_PROP_AUTO_EXPOSURE:
return V4L2_CID_EXPOSURE_AUTO;
case cv::CAP_PROP_GAMMA:
return V4L2_CID_GAMMA;
case cv::CAP_PROP_TEMPERATURE:
return V4L2_CID_WHITE_BALANCE_TEMPERATURE;
case cv::CAP_PROP_TRIGGER:
return -1;
case cv::CAP_PROP_TRIGGER_DELAY:
return -1;
case cv::CAP_PROP_WHITE_BALANCE_RED_V:
return V4L2_CID_RED_BALANCE;
case cv::CAP_PROP_ZOOM:
return V4L2_CID_ZOOM_ABSOLUTE;
case cv::CAP_PROP_FOCUS:
return V4L2_CID_FOCUS_ABSOLUTE; return V4L2_CID_FOCUS_ABSOLUTE;
default: case cv::CAP_PROP_GUID:
return -1;
case cv::CAP_PROP_ISO_SPEED:
return V4L2_CID_ISO_SENSITIVITY;
case cv::CAP_PROP_BACKLIGHT:
return V4L2_CID_BACKLIGHT_COMPENSATION;
case cv::CAP_PROP_PAN:
return V4L2_CID_PAN_ABSOLUTE;
case cv::CAP_PROP_TILT:
return V4L2_CID_TILT_ABSOLUTE;
case cv::CAP_PROP_ROLL:
return V4L2_CID_ROTATE;
case cv::CAP_PROP_IRIS:
return V4L2_CID_IRIS_ABSOLUTE;
case cv::CAP_PROP_SETTINGS:
return -1; return -1;
case cv::CAP_PROP_BUFFERSIZE:
return -1;
case cv::CAP_PROP_AUTOFOCUS:
return V4L2_CID_FOCUS_AUTO;
case cv::CAP_PROP_SAR_NUM:
return V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_HEIGHT;
case cv::CAP_PROP_SAR_DEN:
return V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_WIDTH;
default:
break;
} }
return -1;
} }
static double icvGetPropertyCAM_V4L (const CvCaptureCAM_V4L* capture, static inline bool compatibleRange(int property_id)
int property_id ) { {
{ switch (property_id) {
v4l2_format form; case cv::CAP_PROP_BRIGHTNESS:
memset(&form, 0, sizeof(v4l2_format)); case cv::CAP_PROP_CONTRAST:
form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; case cv::CAP_PROP_SATURATION:
if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &form)) { case cv::CAP_PROP_HUE:
/* display an error message, and return an error code */ case cv::CAP_PROP_GAIN:
perror ("VIDIOC_G_FMT"); case cv::CAP_PROP_EXPOSURE:
return -1; case cv::CAP_PROP_FOCUS:
} case cv::CAP_PROP_AUTOFOCUS:
case cv::CAP_PROP_AUTO_EXPOSURE:
switch (property_id) { return true;
case CV_CAP_PROP_FRAME_WIDTH: default:
return form.fmt.pix.width; break;
case CV_CAP_PROP_FRAME_HEIGHT:
return form.fmt.pix.height;
case CV_CAP_PROP_FOURCC:
case CV_CAP_PROP_MODE:
return capture->palette;
case CV_CAP_PROP_FORMAT:
return CV_MAKETYPE(IPL2CV_DEPTH(capture->frame.depth), capture->frame.nChannels);
case CV_CAP_PROP_CONVERT_RGB:
return capture->convert_rgb;
case CV_CAP_PROP_BUFFERSIZE:
return capture->bufferSize;
}
if(property_id == CV_CAP_PROP_FPS) {
v4l2_streamparm sp = v4l2_streamparm();
sp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(capture->deviceHandle, VIDIOC_G_PARM, &sp) < 0){
fprintf(stderr, "VIDEOIO ERROR: V4L: Unable to get camera FPS\n");
return -1;
}
return sp.parm.capture.timeperframe.denominator / (double)sp.parm.capture.timeperframe.numerator;
}
/* initialize the control structure */
if(property_id == CV_CAP_PROP_POS_MSEC) {
if (capture->FirstCapture) {
return 0;
} else {
return 1000 * capture->timestamp.tv_sec + ((double) capture->timestamp.tv_usec) / 1000;
}
}
__u32 v4l2id = capPropertyToV4L2(property_id);
if(v4l2id == __u32(-1)) {
fprintf(stderr,
"VIDEOIO ERROR: V4L2: getting property #%d is not supported\n",
property_id);
return -1;
}
v4l2_control control = {v4l2id, 0};
if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_CTRL,
&control)) {
fprintf( stderr, "VIDEOIO ERROR: V4L2: ");
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
fprintf (stderr, "Brightness");
break;
case CV_CAP_PROP_CONTRAST:
fprintf (stderr, "Contrast");
break;
case CV_CAP_PROP_SATURATION:
fprintf (stderr, "Saturation");
break;
case CV_CAP_PROP_HUE:
fprintf (stderr, "Hue");
break;
case CV_CAP_PROP_GAIN:
fprintf (stderr, "Gain");
break;
case CV_CAP_PROP_AUTO_EXPOSURE:
fprintf (stderr, "Auto Exposure");
break;
case CV_CAP_PROP_EXPOSURE:
fprintf (stderr, "Exposure");
break;
case CV_CAP_PROP_AUTOFOCUS:
fprintf (stderr, "Autofocus");
break;
case CV_CAP_PROP_FOCUS:
fprintf (stderr, "Focus");
break;
}
fprintf (stderr, " is not supported by your device\n");
return -1;
}
/* get the min/max values */
Range range = capture->getRange(property_id);
/* all was OK, so convert to 0.0 - 1.0 range, and return the value */
return ((double)control.value - range.start) / range.size();
}
};
static bool icvSetControl (CvCaptureCAM_V4L* capture,
int property_id, double value) {
/* limitation of the input value */
if (value < 0.0) {
value = 0.0;
} else if (value > 1.0) {
value = 1.0;
} }
return false;
}
bool CvCaptureCAM_V4L::controlInfo(int property_id, __u32 &_v4l2id, cv::Range &range) const
{
/* initialisations */ /* initialisations */
__u32 v4l2id = capPropertyToV4L2(property_id); int v4l2id = capPropertyToV4L2(property_id);
v4l2_queryctrl queryctrl = v4l2_queryctrl();
if(v4l2id == __u32(-1)) { queryctrl.id = __u32(v4l2id);
fprintf(stderr, if (v4l2id == -1 || !tryIoctl(VIDIOC_QUERYCTRL, &queryctrl)) {
"VIDEOIO ERROR: V4L2: setting property #%d is not supported\n", fprintf(stderr, "VIDEOIO ERROR: V4L2: property %s is not supported\n", capPropertyName(property_id).c_str());
property_id);
return false; return false;
} }
_v4l2id = __u32(v4l2id);
range = cv::Range(queryctrl.minimum, queryctrl.maximum);
if (normalizePropRange) {
if (property_id == cv::CAP_PROP_AUTOFOCUS)
range = Range(0, 1);
else if (property_id == cv::CAP_PROP_AUTO_EXPOSURE)
range = Range(0, 4);
}
return true;
}
/* get the min/max values */ bool CvCaptureCAM_V4L::icvControl(__u32 v4l2id, int &value, bool isSet) const
Range range = capture->getRange(property_id); {
/* scale the value we want to set */
value = value * range.size() + range.start;
/* set which control we want to set */ /* set which control we want to set */
v4l2_control control = {v4l2id, int(value)}; v4l2_control control = v4l2_control();
control.id = v4l2id;
control.value = value;
/* The driver may clamp the value or return ERANGE, ignored here */ /* The driver may clamp the value or return ERANGE, ignored here */
if (-1 == ioctl(capture->deviceHandle, VIDIOC_S_CTRL, &control) && errno != ERANGE) { if (!tryIoctl(isSet ? VIDIOC_S_CTRL : VIDIOC_G_CTRL, &control)) {
perror ("VIDIOC_S_CTRL"); switch (errno) {
#ifndef NDEBUG
case EINVAL:
fprintf(stderr,
"The struct v4l2_control id is invalid or the value is inappropriate for the given control (i.e. "
"if a menu item is selected that is not supported by the driver according to VIDIOC_QUERYMENU).");
break;
case ERANGE:
fprintf(stderr, "The struct v4l2_control value is out of bounds.");
break;
case EACCES:
fprintf(stderr, "Attempt to set a read-only control or to get a write-only control.");
break;
#endif
default:
perror(isSet ? "VIDIOC_S_CTRL" : "VIDIOC_G_CTRL");
break;
}
return false; return false;
} }
if (!isSet)
value = control.value;
return true;
}
if(control.id == V4L2_CID_EXPOSURE_AUTO && control.value == V4L2_EXPOSURE_MANUAL) { double CvCaptureCAM_V4L::getProperty(int property_id) const
// update the control range for expose after disabling autoexposure {
// as it is not read correctly at startup switch (property_id) {
// TODO check this again as it might be fixed with Linux 4.5 case cv::CAP_PROP_FRAME_WIDTH:
v4l2_control_range(capture, V4L2_CID_EXPOSURE_ABSOLUTE); return form.fmt.pix.width;
case cv::CAP_PROP_FRAME_HEIGHT:
return form.fmt.pix.height;
case cv::CAP_PROP_FOURCC:
return palette;
case cv::CAP_PROP_FORMAT:
return CV_MAKETYPE(IPL2CV_DEPTH(frame.depth), frame.nChannels);
case cv::CAP_PROP_MODE:
if (normalizePropRange)
return palette;
return normalizePropRange;
case cv::CAP_PROP_CONVERT_RGB:
return convert_rgb;
case cv::CAP_PROP_BUFFERSIZE:
return bufferSize;
case cv::CAP_PROP_FPS:
{
v4l2_streamparm sp = v4l2_streamparm();
sp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (!tryIoctl(VIDIOC_G_PARM, &sp)) {
fprintf(stderr, "VIDEOIO ERROR: V4L: Unable to get camera FPS\n");
return -1;
}
return sp.parm.capture.timeperframe.denominator / (double)sp.parm.capture.timeperframe.numerator;
} }
case cv::CAP_PROP_POS_MSEC:
if (FirstCapture)
return 0;
/* all was OK */ return 1000 * timestamp.tv_sec + ((double)timestamp.tv_usec) / 1000;
return true; case cv::CAP_PROP_CHANNEL:
return channelNumber;
default:
{
cv::Range range;
__u32 v4l2id;
if(!controlInfo(property_id, v4l2id, range))
return -1.0;
int value = 0;
if(!icvControl(v4l2id, value, false))
return -1.0;
if (normalizePropRange && compatibleRange(property_id))
return ((double)value - range.start) / range.size();
return value;
}
}
return -1.0;
} }
static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, bool CvCaptureCAM_V4L::icvSetFrameSize(int _width, int _height)
int property_id, double value ){ {
bool retval = false; if (_width > 0)
bool possible; width_set = _width;
if (height > 0)
height_set = _height;
/* two subsequent calls setting WIDTH and HEIGHT will change /* two subsequent calls setting WIDTH and HEIGHT will change
the video size */ the video size */
if (width_set <= 0 || height_set <= 0)
return true;
width = width_set;
height = height_set;
width_set = height_set = 0;
return v4l2_reset();
}
bool CvCaptureCAM_V4L::setProperty( int property_id, double _value )
{
int value = cvRound(_value);
switch (property_id) { switch (property_id) {
case CV_CAP_PROP_FRAME_WIDTH: case cv::CAP_PROP_FRAME_WIDTH:
{ return icvSetFrameSize(value, 0);
int& width = capture->width_set; case cv::CAP_PROP_FRAME_HEIGHT:
int& height = capture->height_set; return icvSetFrameSize(0, value);
width = cvRound(value); case cv::CAP_PROP_FPS:
retval = width != 0; if (fps == static_cast<__u32>(value))
if(width !=0 && height != 0) { return true;
capture->width = width; return setFps(value);
capture->height = height; case cv::CAP_PROP_CONVERT_RGB:
retval = v4l2_reset(capture); if (bool(value)) {
width = height = 0; convert_rgb = convertableToRgb();
return convert_rgb;
} }
} convert_rgb = false;
break; return true;
case CV_CAP_PROP_FRAME_HEIGHT: case cv::CAP_PROP_FOURCC:
{ {
int& width = capture->width_set; if (palette == static_cast<__u32>(value))
int& height = capture->height_set; return true;
height = cvRound(value);
retval = height != 0; __u32 old_palette = palette;
if(width !=0 && height != 0) { palette = static_cast<__u32>(value);
capture->width = width; if (v4l2_reset())
capture->height = height; return true;
retval = v4l2_reset(capture);
width = height = 0; palette = old_palette;
} v4l2_reset();
return false;
} }
break; case cv::CAP_PROP_MODE:
case CV_CAP_PROP_FPS: normalizePropRange = bool(value);
capture->fps = value; return true;
retval = v4l2_reset(capture); case cv::CAP_PROP_BUFFERSIZE:
break; if (bufferSize == value)
case CV_CAP_PROP_CONVERT_RGB: return true;
// returns "0" for formats we do not know how to map to IplImage
possible = v4l2_num_channels(capture->palette); if (value > MAX_V4L_BUFFERS || value < 1) {
capture->convert_rgb = bool(value) && possible; fprintf(stderr, "V4L: Bad buffer size %d, buffer size must be from 1 to %d\n", value, MAX_V4L_BUFFERS);
retval = possible || !bool(value); return false;
break; }
case CV_CAP_PROP_FOURCC: bufferSize = value;
return v4l2_reset();
case cv::CAP_PROP_CHANNEL:
{ {
__u32 old_palette = capture->palette; if (value < 0) {
__u32 new_palette = static_cast<__u32>(value); channelNumber = -1;
capture->palette = new_palette; return true;
if (v4l2_reset(capture)) {
retval = true;
} else {
capture->palette = old_palette;
v4l2_reset(capture);
retval = false;
} }
if (channelNumber == value)
return true;
int old_channel = channelNumber;
channelNumber = value;
if (v4l2_reset())
return true;
channelNumber = old_channel;
v4l2_reset();
return false;
} }
break;
case CV_CAP_PROP_BUFFERSIZE:
if ((int)value > MAX_V4L_BUFFERS || (int)value < 1) {
fprintf(stderr, "V4L: Bad buffer size %d, buffer size must be from 1 to %d\n", (int)value, MAX_V4L_BUFFERS);
retval = false;
} else {
capture->bufferSize = (int)value;
if (capture->bufferIndex > capture->bufferSize) {
capture->bufferIndex = 0;
}
retval = v4l2_reset(capture);
}
break;
default: default:
retval = icvSetControl(capture, property_id, value); {
break; cv::Range range;
__u32 v4l2id;
if (!controlInfo(property_id, v4l2id, range))
return false;
if (normalizePropRange && compatibleRange(property_id))
value = cv::saturate_cast<int>(_value * range.size() + range.start);
return icvControl(v4l2id, value, true);
} }
}
return false;
}
/* return the the status */ void CvCaptureCAM_V4L::releaseFrame()
return retval; {
if (frame_allocated && frame.imageData) {
cvFree(&frame.imageData);
frame_allocated = false;
}
} }
static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture ){ void CvCaptureCAM_V4L::releaseBuffers()
/* Deallocate space - Hopefully, no leaks */ {
releaseFrame();
if (!capture->deviceName.empty()) if (buffers[MAX_V4L_BUFFERS].start) {
{ free(buffers[MAX_V4L_BUFFERS].start);
if (capture->deviceHandle != -1) buffers[MAX_V4L_BUFFERS].start = 0;
{ }
capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl(capture->deviceHandle, VIDIOC_STREAMOFF, &capture->type)) {
perror ("Unable to stop the stream");
}
for (unsigned int n_buffers = 0; n_buffers < MAX_V4L_BUFFERS; ++n_buffers) bufferIndex = -1;
{ FirstCapture = true;
if (capture->buffers[n_buffers].start) { if (!isOpened())
if (-1 == munmap (capture->buffers[n_buffers].start, capture->buffers[n_buffers].length)) { return;
perror ("munmap");
} else {
capture->buffers[n_buffers].start = 0;
}
}
}
if (capture->buffers[MAX_V4L_BUFFERS].start) for (unsigned int n_buffers = 0; n_buffers < MAX_V4L_BUFFERS; ++n_buffers) {
{ if (buffers[n_buffers].start) {
free(capture->buffers[MAX_V4L_BUFFERS].start); if (-1 == munmap(buffers[n_buffers].start, buffers[n_buffers].length)) {
capture->buffers[MAX_V4L_BUFFERS].start = 0; perror("munmap");
} else {
buffers[n_buffers].start = 0;
} }
} }
if (capture->deviceHandle != -1)
close(capture->deviceHandle);
if (capture->frame_allocated && capture->frame.imageData)
cvFree(&capture->frame.imageData);
capture->deviceName.clear(); // flag that the capture is closed
} }
//Applications can call ioctl VIDIOC_REQBUFS again to change the number of buffers,
// however this cannot succeed when any buffers are still mapped. A count value of zero
// frees all buffers, after aborting or finishing any DMA in progress, an implicit VIDIOC_STREAMOFF.
requestBuffers(0);
}; };
bool CvCaptureCAM_V4L::grabFrame() bool CvCaptureCAM_V4L::streaming(bool startStream)
{ {
return icvGrabFrameCAM_V4L( this ); if (!isOpened())
} return !startStream;
IplImage* CvCaptureCAM_V4L::retrieveFrame(int) type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
{ return tryIoctl(startStream ? VIDIOC_STREAMON : VIDIOC_STREAMOFF, &type);
return icvRetrieveFrameCAM_V4L( this, 0 );
} }
double CvCaptureCAM_V4L::getProperty( int propId ) const IplImage *CvCaptureCAM_V4L::retrieveFrame(int)
{ {
return icvGetPropertyCAM_V4L( this, propId ); if (bufferIndex < 0)
} return &frame;
bool CvCaptureCAM_V4L::setProperty( int propId, double value ) /* Now get what has already been captured as a IplImage return */
{ const Buffer &currentBuffer = buffers[bufferIndex];
return icvSetPropertyCAM_V4L( this, propId, value ); if (convert_rgb) {
if (!frame_allocated)
v4l2_create_frame();
convertToRgb(currentBuffer);
} else {
// for mjpeg streams the size might change in between, so we have to change the header
// We didn't allocate memory when not convert_rgb, but we have to recreate the header
if (frame.imageSize != (int)currentBuffer.buffer.bytesused)
v4l2_create_frame();
frame.imageData = (char *)buffers[MAX_V4L_BUFFERS].start;
memcpy(buffers[MAX_V4L_BUFFERS].start, currentBuffer.start,
std::min(buffers[MAX_V4L_BUFFERS].length, (size_t)currentBuffer.buffer.bytesused));
}
//Revert buffer to the queue
if (!tryIoctl(VIDIOC_QBUF, &buffers[bufferIndex].buffer))
perror("VIDIOC_QBUF");
bufferIndex = -1;
return &frame;
} }
} // end namespace cv } // end namespace cv
......
...@@ -11,16 +11,8 @@ ...@@ -11,16 +11,8 @@
namespace opencv_test { namespace { namespace opencv_test { namespace {
TEST(DISABLED_VideoIO_Camera, basic) static void test_readFrames(/*const*/ VideoCapture& capture, const int N = 100)
{ {
VideoCapture capture(0);
ASSERT_TRUE(capture.isOpened());
std::cout << "Camera 0 via " << capture.getBackendName() << " backend" << std::endl;
std::cout << "Frame width: " << capture.get(CAP_PROP_FRAME_WIDTH) << std::endl;
std::cout << " height: " << capture.get(CAP_PROP_FRAME_HEIGHT) << std::endl;
std::cout << "Capturing FPS: " << capture.get(CAP_PROP_FPS) << std::endl;
const int N = 100;
Mat frame; Mat frame;
int64 time0 = cv::getTickCount(); int64 time0 = cv::getTickCount();
for (int i = 0; i < N; i++) for (int i = 0; i < N; i++)
...@@ -34,7 +26,32 @@ TEST(DISABLED_VideoIO_Camera, basic) ...@@ -34,7 +26,32 @@ TEST(DISABLED_VideoIO_Camera, basic)
} }
int64 time1 = cv::getTickCount(); int64 time1 = cv::getTickCount();
printf("Processed %d frames on %.2f FPS\n", N, (N * cv::getTickFrequency()) / (time1 - time0 + 1)); printf("Processed %d frames on %.2f FPS\n", N, (N * cv::getTickFrequency()) / (time1 - time0 + 1));
}
TEST(DISABLED_VideoIO_Camera, basic)
{
VideoCapture capture(0);
ASSERT_TRUE(capture.isOpened());
std::cout << "Camera 0 via " << capture.getBackendName() << " backend" << std::endl;
std::cout << "Frame width: " << capture.get(CAP_PROP_FRAME_WIDTH) << std::endl;
std::cout << " height: " << capture.get(CAP_PROP_FRAME_HEIGHT) << std::endl;
std::cout << "Capturing FPS: " << capture.get(CAP_PROP_FPS) << std::endl;
test_readFrames(capture);
capture.release();
}
TEST(DISABLED_VideoIO_Camera, validate_V4L2_MJPEG)
{
VideoCapture capture(CAP_V4L2);
ASSERT_TRUE(capture.isOpened());
ASSERT_TRUE(capture.set(CAP_PROP_FOURCC, VideoWriter::fourcc('M', 'J', 'P', 'G')));
std::cout << "Camera 0 via " << capture.getBackendName() << " backend" << std::endl;
std::cout << "Frame width: " << capture.get(CAP_PROP_FRAME_WIDTH) << std::endl;
std::cout << " height: " << capture.get(CAP_PROP_FRAME_HEIGHT) << std::endl;
std::cout << "Capturing FPS: " << capture.get(CAP_PROP_FPS) << std::endl;
int fourcc = (int)capture.get(CAP_PROP_FOURCC);
std::cout << "FOURCC code: " << cv::format("0x%8x", fourcc) << std::endl;
test_readFrames(capture);
capture.release(); capture.release();
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment