Commit e05a0bc6 authored by Vadim Pisarevsky

Merge pull request #3322 from StevenPuttemans:fix_pvpapi_interface

parents 3da1144c 7dce2d0b
@@ -139,124 +139,125 @@ void CvCaptureCAM_PvAPI::close()
 }

 // Initialize camera input
-bool CvCaptureCAM_PvAPI::open( int index )
+bool CvCaptureCAM_PvAPI::open( int )
 {
     tPvCameraInfo cameraList[MAX_CAMERAS];
     tPvCameraInfo camInfo;
     tPvIpSettings ipSettings;

-    if (PvInitialize()) {
-    }
-    //return false;
-    Sleep(1000);
-    //close();
+    // Initialization parameters [500 x 10 ms = 5000 ms timeout]
+    int initializeTimeOut = 500;
+
+    // Disregard any errors, since this might be called several times and only needs to be called once or it will return an error
+    // Important when wanting to use more than 1 AVT camera at the same time
+    PvInitialize();
+    while((!PvCameraCount()) && (initializeTimeOut--))
+        Sleep(10);
+
+    if (!initializeTimeOut){
+        fprintf(stderr,"ERROR: camera initialisation timeout [5000ms].\n");
+        return false;
+    }

-    int numCameras=PvCameraList(cameraList, MAX_CAMERAS, NULL);
-    if (numCameras <= 0 || index >= numCameras)
+    unsigned int numCameras = PvCameraList(cameraList, MAX_CAMERAS, NULL);
+
+    // If no cameras are found
+    if(!numCameras)
+    {
+        fprintf(stderr, "ERROR: No cameras found.\n");
         return false;
+    }

-    Camera.UID = cameraList[index].UniqueId;
-
-    if (!PvCameraInfo(Camera.UID,&camInfo) && !PvCameraIpSettingsGet(Camera.UID,&ipSettings)) {
-        /*
-        struct in_addr addr;
-        addr.s_addr = ipSettings.CurrentIpAddress;
-        printf("Current address:\t%s\n",inet_ntoa(addr));
-        addr.s_addr = ipSettings.CurrentIpSubnet;
-        printf("Current subnet:\t\t%s\n",inet_ntoa(addr));
-        addr.s_addr = ipSettings.CurrentIpGateway;
-        printf("Current gateway:\t%s\n",inet_ntoa(addr));
-        */
-    }
-    else {
-        fprintf(stderr,"ERROR: could not retrieve camera IP settings.\n");
-        return false;
-    }
+    // Try opening the cameras in the list, one-by-one until a camera that is not used is found
+    unsigned int findNewCamera;
+    for(findNewCamera=0; findNewCamera<numCameras; findNewCamera++)
+    {
+        Camera.UID = cameraList[findNewCamera].UniqueId;
+        if(PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
+            break;
+    }
+
+    if(findNewCamera == numCameras)
+    {
+        fprintf(stderr, "Could not find a new camera to connect to.\n");
+        return false;
+    }

-    if (PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
-    {
-        //Set Pixel Format to BRG24 to follow conventions
-        /*Errcode = PvAttrEnumSet(Camera.Handle, "PixelFormat", "Bgr24");
-        if (Errcode != ePvErrSuccess)
-        {
-            fprintf(stderr, "PvAPI: couldn't set PixelFormat to Bgr24\n");
-            return NULL;
-        }
-        */
-        tPvUint32 frameWidth, frameHeight, frameSize;
-        unsigned long maxSize;
-        char pixelFormat[256];
-        PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
-        PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
-        PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);
-        PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
-        maxSize = 8228;
-        //PvAttrUint32Get(Camera.Handle,"PacketSize",&maxSize);
-        if (PvCaptureAdjustPacketSize(Camera.Handle,maxSize)!=ePvErrSuccess)
-            return false;
+    if(PvCameraIpSettingsGet(Camera.UID,&ipSettings)==ePvErrNotFound)
+    {
+        fprintf(stderr, "The specified camera UID %lu could not be found, PvCameraIpSettingsGet().\n", Camera.UID);
+        return false;
+    }
+
+    if(PvCameraInfo(Camera.UID,&camInfo)==ePvErrNotFound)
+    {
+        fprintf(stderr, "The specified camera UID %lu could not be found, PvCameraInfo().\n", Camera.UID);
+        return false;
+    }
+
+    tPvUint32 frameWidth, frameHeight, frameSize;
+    char pixelFormat[256];
+    PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);
+    PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
+    PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);
+    PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
+
+    // Start the camera
+    PvCaptureStart(Camera.Handle);
+
+    // Set the camera explicitly to capture data frames continuously
+    if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
+    {
+        fprintf(stderr,"Could not set Acquisition Mode\n");
+        return false;
+    }
+    if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
+    {
+        fprintf(stderr,"Could not start acquisition\n");
+        return false;
+    }
+    if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
+    {
+        fprintf(stderr,"Error setting trigger to \"Freerun\"");
+        return false;
+    }

-        if (strcmp(pixelFormat, "Mono8")==0) {
-            monocrome = true;
-            grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 1);
-            grayframe->widthStep = (int)frameWidth;
-            frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
-            frame->widthStep = (int)frameWidth*3;
-            Camera.Frame.ImageBufferSize = frameSize;
-            Camera.Frame.ImageBuffer = grayframe->imageData;
-        }
-        else if (strcmp(pixelFormat, "Mono16")==0) {
-            monocrome = true;
-            grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 1);
-            grayframe->widthStep = (int)frameWidth;
-            frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 3);
-            frame->widthStep = (int)frameWidth*3;
-            Camera.Frame.ImageBufferSize = frameSize;
-            Camera.Frame.ImageBuffer = grayframe->imageData;
-        }
-        else if (strcmp(pixelFormat, "Bgr24")==0) {
-            monocrome = false;
-            frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
-            frame->widthStep = (int)frameWidth*3;
-            Camera.Frame.ImageBufferSize = frameSize;
-            Camera.Frame.ImageBuffer = frame->imageData;
-        }
-        else
-            return false;
-
-        // Start the camera
-        PvCaptureStart(Camera.Handle);
-
-        // Set the camera to capture continuously
-        if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
-        {
-            fprintf(stderr,"Could not set Prosilica Acquisition Mode\n");
-            return false;
-        }
-        if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
-        {
-            fprintf(stderr,"Could not start Prosilica acquisition\n");
-            return false;
-        }
-        if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
-        {
-            fprintf(stderr,"Error setting Prosilica trigger to \"Freerun\"");
-            return false;
-        }
-        return true;
-    }
-    fprintf(stderr,"Error cannot open camera\n");
-    return false;
+    // Settings depending on the pixel format
+    // This works for all AVT camera models that use the PvAPI interface
+    if (strcmp(pixelFormat, "Mono8")==0) {
+        grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 1);
+        grayframe->widthStep = (int)frameWidth;
+        Camera.Frame.ImageBufferSize = frameSize;
+        Camera.Frame.ImageBuffer = grayframe->imageData;
+    }
+    else if (strcmp(pixelFormat, "Mono16")==0) {
+        grayframe = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_16U, 1);
+        grayframe->widthStep = (int)frameWidth*2;
+        Camera.Frame.ImageBufferSize = frameSize;
+        Camera.Frame.ImageBuffer = grayframe->imageData;
+    }
+    else if (strcmp(pixelFormat, "Bgr24")==0) {
+        frame = cvCreateImage(cvSize((int)frameWidth, (int)frameHeight), IPL_DEPTH_8U, 3);
+        frame->widthStep = (int)frameWidth*3;
+        Camera.Frame.ImageBufferSize = frameSize;
+        Camera.Frame.ImageBuffer = frame->imageData;
+    }
+    else{
+        fprintf(stderr, "Pixel format %s not supported; only Mono8, Mono16 and Bgr24 are currently supported.\n", pixelFormat);
+        return false;
+    }
+    return true;
 }

 bool CvCaptureCAM_PvAPI::grabFrame()
 {
-    //if(Camera.Frame.Status != ePvErrUnplugged && Camera.Frame.Status != ePvErrCancelled)
     return PvCaptureQueueFrame(Camera.Handle, &(Camera.Frame), NULL) == ePvErrSuccess;
 }
@@ -266,7 +267,6 @@ IplImage* CvCaptureCAM_PvAPI::retrieveFrame(int)
     if (PvCaptureWaitForFrameDone(Camera.Handle, &(Camera.Frame), 1000) == ePvErrSuccess) {
         if (!monocrome) {
-            cvMerge(grayframe,grayframe,grayframe,NULL,frame);
             return frame;
         }
         return grayframe;
///////////////////////////////////////////////////////
// Sample illustrating the use of the VideoCapture   //
// interface in combination with PvAPI interface     //
//                                                    //
// Successfully tested on Prosilica and Manta series //
///////////////////////////////////////////////////////
#include <iostream>
#include "opencv2/opencv.hpp"
using namespace std;
using namespace cv;
int main()
{
// Capturing multiple AVT cameras can be done by simply creating
// two VideoCaptures one after the other.
VideoCapture camera1(0 + CV_CAP_PVAPI);
VideoCapture camera2(0 + CV_CAP_PVAPI);
Mat frame1, frame2;
for(;;){
camera1 >> frame1;
camera2 >> frame2;
imshow("camera 1 frame", frame1);
imshow("camera 2 frame", frame2);
int key = waitKey(10);
if(key == 27){
break;
}
}
return 0;
}
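The sample above assumes that both cameras open correctly and keep delivering frames. A minimal, hypothetical variation (not part of this commit) adds the usual guards with VideoCapture::isOpened() and Mat::empty(); everything else mirrors the sample and the same CV_CAP_PVAPI backend constant.

#include <iostream>
#include "opencv2/opencv.hpp"
using namespace std;
using namespace cv;
int main()
{
    // Each VideoCapture grabs the next unused AVT camera through the PvAPI backend
    VideoCapture camera1(0 + CV_CAP_PVAPI);
    VideoCapture camera2(0 + CV_CAP_PVAPI);
    if(!camera1.isOpened() || !camera2.isOpened()){
        cerr << "Could not open two PvAPI cameras." << endl;
        return -1;
    }
    Mat frame1, frame2;
    for(;;){
        camera1 >> frame1;
        camera2 >> frame2;
        // Stop when either camera stops delivering frames
        if(frame1.empty() || frame2.empty()){
            break;
        }
        imshow("camera 1 frame", frame1);
        imshow("camera 2 frame", frame2);
        if(waitKey(10) == 27){
            break;
        }
    }
    return 0;
}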