Commit 7dce2d0b authored by StevenPuttemans

fixing the cap_pvapi interface and making sure both the AVT Manta and Prosilica
series are supported. Tested this exhaustively with both cameras on Windows and Linux.

Optimizing memory footprint by removing unused calls.

Adapted with input from Shai

Added a small example that illustrates how it should work.
parent a77a2f35
@@ -139,124 +139,125 @@ void CvCaptureCAM_PvAPI::close()
 }
 // Initialize camera input
-bool CvCaptureCAM_PvAPI::open( int index )
+bool CvCaptureCAM_PvAPI::open( int )
 {
     tPvCameraInfo cameraList[MAX_CAMERAS];
     tPvCameraInfo camInfo;
     tPvIpSettings ipSettings;
-    if (PvInitialize()) {
-    }
-    //return false;
-    Sleep(1000);
-    //close();
-    int numCameras=PvCameraList(cameraList, MAX_CAMERAS, NULL);
-    if (numCameras <= 0 || index >= numCameras)
-        return false;
-    Camera.UID = cameraList[index].UniqueId;
-    if (!PvCameraInfo(Camera.UID,&camInfo) && !PvCameraIpSettingsGet(Camera.UID,&ipSettings)) {
-        /*
-        struct in_addr addr;
-        addr.s_addr = ipSettings.CurrentIpAddress;
-        printf("Current address:\t%s\n",inet_ntoa(addr));
-        addr.s_addr = ipSettings.CurrentIpSubnet;
-        printf("Current subnet:\t\t%s\n",inet_ntoa(addr));
-        addr.s_addr = ipSettings.CurrentIpGateway;
-        printf("Current gateway:\t%s\n",inet_ntoa(addr));
-        */
-    }
-    else {
-        fprintf(stderr,"ERROR: could not retrieve camera IP settings.\n");
-        return false;
-    }
-    if (PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
-    {
-        //Set Pixel Format to BRG24 to follow conventions
-        /*Errcode = PvAttrEnumSet(Camera.Handle, "PixelFormat", "Bgr24");
-        if (Errcode != ePvErrSuccess)
-        {
-            fprintf(stderr, "PvAPI: couldn't set PixelFormat to Bgr24\n");
-            return NULL;
-        }
-        */
-        tPvUint32 frameWidth, frameHeight, frameSize;
-        unsigned long maxSize;
-        char pixelFormat[256];
-        PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
-        PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
-        PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);
-        PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
-        maxSize = 8228;
-        //PvAttrUint32Get(Camera.Handle,"PacketSize",&maxSize);
-        if (PvCaptureAdjustPacketSize(Camera.Handle,maxSize)!=ePvErrSuccess)
-            return false;
-        if (strcmp(pixelFormat, "Mono8")==0) {
-            grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 1);
-            grayframe->widthStep = (int)frameWidth;
-            frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
-            frame->widthStep = (int)frameWidth*3;
-            Camera.Frame.ImageBufferSize = frameSize;
-            Camera.Frame.ImageBuffer = grayframe->imageData;
-        }
-        else if (strcmp(pixelFormat, "Mono16")==0) {
-            grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 1);
-            grayframe->widthStep = (int)frameWidth;
-            frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 3);
-            frame->widthStep = (int)frameWidth*3;
-            Camera.Frame.ImageBufferSize = frameSize;
-            Camera.Frame.ImageBuffer = grayframe->imageData;
-        }
-        else if (strcmp(pixelFormat, "Bgr24")==0) {
-            frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
-            frame->widthStep = (int)frameWidth*3;
-            Camera.Frame.ImageBufferSize = frameSize;
-            Camera.Frame.ImageBuffer = frame->imageData;
-        }
-        else
-            return false;
-        // Start the camera
-        PvCaptureStart(Camera.Handle);
-        // Set the camera to capture continuously
-        if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
-        {
-            fprintf(stderr,"Could not set Prosilica Acquisition Mode\n");
-            return false;
-        }
-        if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
-        {
-            fprintf(stderr,"Could not start Prosilica acquisition\n");
-            return false;
-        }
-        if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
-        {
-            fprintf(stderr,"Error setting Prosilica trigger to \"Freerun\"");
-            return false;
-        }
-        return true;
-    }
-    fprintf(stderr,"Error cannot open camera\n");
-    return false;
+    // Initialization parameters [500 x 10 ms = 5000 ms timeout]
+    int initializeTimeOut = 500;
+    // Disregard any errors, since this might be called several times and only needs to be called once or it will return an error
+    // Important when wanting to use more than 1 AVT camera at the same time
+    PvInitialize();
+    while((!PvCameraCount()) && (initializeTimeOut--))
+        Sleep(10);
+    if (!initializeTimeOut){
+        fprintf(stderr,"ERROR: camera initialisation timeout [5000 ms].\n");
+        return false;
+    }
+    unsigned int numCameras = PvCameraList(cameraList, MAX_CAMERAS, NULL);
+    // If no cameras are found
+    if(!numCameras)
+    {
+        fprintf(stderr, "ERROR: No cameras found.\n");
+        return false;
+    }
+    // Try opening the cameras in the list, one-by-one until a camera that is not used is found
+    unsigned int findNewCamera;
+    for(findNewCamera=0; findNewCamera<numCameras; findNewCamera++)
+    {
+        Camera.UID = cameraList[findNewCamera].UniqueId;
+        if(PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
+            break;
+    }
+    if(findNewCamera == numCameras)
+    {
+        fprintf(stderr, "Could not find a new camera to connect to.\n");
+        return false;
+    }
+    if(PvCameraIpSettingsGet(Camera.UID,&ipSettings)==ePvErrNotFound)
+    {
+        fprintf(stderr, "The specified camera UID %lu could not be found, PvCameraIpSettingsGet().\n", Camera.UID);
+        return false;
+    }
+    if(PvCameraInfo(Camera.UID,&camInfo)==ePvErrNotFound)
+    {
+        fprintf(stderr, "The specified camera UID %lu could not be found, PvCameraInfo().\n", Camera.UID);
+        return false;
+    }
+    tPvUint32 frameWidth, frameHeight, frameSize;
+    char pixelFormat[256];
+    PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
+    PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
+    PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);
+    PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
+    // Start the camera
+    PvCaptureStart(Camera.Handle);
+    // Set the camera explicitly to capture data frames continuously
+    if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
+    {
+        fprintf(stderr,"Could not set Acquisition Mode\n");
+        return false;
+    }
+    if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
+    {
+        fprintf(stderr,"Could not start acquisition\n");
+        return false;
+    }
+    if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
+    {
+        fprintf(stderr,"Error setting trigger to \"Freerun\"");
+        return false;
+    }
+    // Settings depending on the pixelformat
+    // This works for all AVT camera models that use the PvAPI interface
+    if (strcmp(pixelFormat, "Mono8")==0) {
+        monocrome = true;
+        grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 1);
+        grayframe->widthStep = (int)frameWidth;
+        Camera.Frame.ImageBufferSize = frameSize;
+        Camera.Frame.ImageBuffer = grayframe->imageData;
+    }
+    else if (strcmp(pixelFormat, "Mono16")==0) {
+        monocrome = true;
+        grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 1);
+        grayframe->widthStep = (int)frameWidth*2;
+        Camera.Frame.ImageBufferSize = frameSize;
+        Camera.Frame.ImageBuffer = grayframe->imageData;
+    }
+    else if (strcmp(pixelFormat, "Bgr24")==0) {
+        monocrome = false;
+        frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
+        frame->widthStep = (int)frameWidth*3;
+        Camera.Frame.ImageBufferSize = frameSize;
+        Camera.Frame.ImageBuffer = frame->imageData;
+    }
+    else{
+        fprintf(stderr, "Pixel format %s not supported; only Mono8, Mono16 and Bgr24 are currently supported.\n", pixelFormat);
+        return false;
+    }
+    return true;
 }
 bool CvCaptureCAM_PvAPI::grabFrame()
 {
-    //if(Camera.Frame.Status != ePvErrUnplugged && Camera.Frame.Status != ePvErrCancelled)
     return PvCaptureQueueFrame(Camera.Handle, &(Camera.Frame), NULL) == ePvErrSuccess;
 }
@@ -266,7 +267,6 @@ IplImage* CvCaptureCAM_PvAPI::retrieveFrame(int)
     if (PvCaptureWaitForFrameDone(Camera.Handle, &(Camera.Frame), 1000) == ePvErrSuccess) {
         if (!monocrome) {
-            cvMerge(grayframe,grayframe,grayframe,NULL,frame);
             return frame;
         }
         return grayframe;
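With the cvMerge call gone, retrieveFrame() hands Mono8/Mono16 frames back as single-channel images instead of expanding them into the 3-channel buffer. Below is a minimal caller-side sketch, not part of this commit, of converting such a frame when downstream code expects BGR data; the window name and wait behaviour are just illustrative assumptions:

#include "opencv2/opencv.hpp"

int main()
{
    cv::VideoCapture camera(0 + CV_CAP_PVAPI);   // PvAPI backend, connects to the next free AVT camera
    cv::Mat grabbed, bgr;
    camera >> grabbed;                           // Mono8/Mono16 cameras deliver a single channel here
    if (grabbed.empty())
        return -1;                               // no frame arrived
    if (grabbed.channels() == 1)
        cv::cvtColor(grabbed, bgr, CV_GRAY2BGR); // expand to 3 channels only when actually needed
    else
        bgr = grabbed;                           // Bgr24 cameras already deliver 3 channels
    cv::imshow("converted frame", bgr);
    cv::waitKey(0);
    return 0;
}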
@@ -287,26 +287,26 @@ double CvCaptureCAM_PvAPI::getProperty( int property_id )
         PvAttrUint32Get(Camera.Handle, "Height", &nTemp);
         return (double)nTemp;
     case CV_CAP_PROP_EXPOSURE:
         PvAttrUint32Get(Camera.Handle,"ExposureValue",&nTemp);
         return (double)nTemp;
     case CV_CAP_PROP_FPS:
         tPvFloat32 nfTemp;
         PvAttrFloat32Get(Camera.Handle, "StatFrameRate", &nfTemp);
         return (double)nfTemp;
     case CV_CAP_PROP_PVAPI_MULTICASTIP:
         char mEnable[2];
         char mIp[11];
         PvAttrEnumGet(Camera.Handle,"MulticastEnable",mEnable,sizeof(mEnable),NULL);
         if (strcmp(mEnable, "Off") == 0) {
             return -1;
         }
         else {
             long int ip;
             int a,b,c,d;
             PvAttrStringGet(Camera.Handle, "MulticastIPAddress",mIp,sizeof(mIp),NULL);
             sscanf(mIp, "%d.%d.%d.%d", &a, &b, &c, &d); ip = ((a*256 + b)*256 + c)*256 + d;
             return (double)ip;
         }
     }
     return -1.0;
 }
...
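For the CV_CAP_PROP_PVAPI_MULTICASTIP property above, getProperty() packs the dotted address a.b.c.d into a single number, ((a*256 + b)*256 + c)*256 + d, and returns -1 when multicast is switched off. A small sketch, not part of the commit, of how a caller might unpack that value again:

#include <cstdio>
#include "opencv2/opencv.hpp"

int main()
{
    cv::VideoCapture camera(0 + CV_CAP_PVAPI);
    double packed = camera.get(CV_CAP_PROP_PVAPI_MULTICASTIP);
    if (packed < 0)
    {
        std::printf("Multicast is disabled on this camera.\n");
    }
    else
    {
        long ip = (long)packed;                  // ((a*256 + b)*256 + c)*256 + d
        std::printf("Multicast IP: %ld.%ld.%ld.%ld\n",
                    (ip >> 24) & 0xFF, (ip >> 16) & 0xFF,
                    (ip >> 8) & 0xFF, ip & 0xFF);
    }
    return 0;
}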
//////////////////////////////////////////////////////
// Sample illustrating the use of the VideoCapture //
// interface in combination with PvAPI interface //
// //
// Successfully tested on Prosilica and Manta series //
//////////////////////////////////////////////////////
#include <iostream>
#include "opencv2/opencv.hpp"
using namespace std;
using namespace cv;
int main()
{
    // Capturing multiple AVT cameras can be done by simply instantiating
    // two VideoCaptures one after the other; the PvAPI interface connects
    // each new capture to the next camera that is not yet in use.
    VideoCapture camera1(0 + CV_CAP_PVAPI);
    VideoCapture camera2(0 + CV_CAP_PVAPI);
    Mat frame1, frame2;

    for(;;){
        camera1 >> frame1;
        camera2 >> frame2;

        imshow("camera 1 frame", frame1);
        imshow("camera 2 frame", frame2);

        int key = waitKey(10);
        if(key == 27){
            break;
        }
    }

    return 0;
}
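For completeness, here is a slightly more defensive variant of the same loop; it is not part of the commit, but it checks that both captures actually opened and skips empty frames rather than passing them to imshow (useful when a camera drops out mid-run):

#include <iostream>
#include "opencv2/opencv.hpp"

using namespace std;
using namespace cv;

int main()
{
    VideoCapture camera1(0 + CV_CAP_PVAPI);
    VideoCapture camera2(0 + CV_CAP_PVAPI);
    if (!camera1.isOpened() || !camera2.isOpened())
    {
        cerr << "Could not open two AVT cameras over PvAPI." << endl;
        return -1;
    }
    Mat frame1, frame2;
    for(;;)
    {
        camera1 >> frame1;
        camera2 >> frame2;
        // Skip this iteration if either grab failed instead of showing an empty image
        if (frame1.empty() || frame2.empty())
            continue;
        imshow("camera 1 frame", frame1);
        imshow("camera 2 frame", frame2);
        if (waitKey(10) == 27)
            break;
    }
    return 0;
}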