Commit 6d0407b6 authored by Piotr Dobrowolski

VideoCapture with digital camera and gPhoto2 library

parent 298c98ea
......@@ -205,6 +205,7 @@ OCV_OPTION(WITH_DIRECTX "Include DirectX support" ON
OCV_OPTION(WITH_INTELPERC "Include Intel Perceptual Computing support" OFF IF (WIN32 AND NOT WINRT) )
OCV_OPTION(WITH_IPP_A "Include Intel IPP_A support" OFF IF (MSVC OR X86 OR X86_64) )
OCV_OPTION(WITH_GDAL "Include GDAL Support" OFF IF (NOT ANDROID AND NOT IOS AND NOT WINRT) )
OCV_OPTION(WITH_GPHOTO2 "Include gPhoto2 library support" ON IF (UNIX AND NOT ANDROID) )
# OpenCV build components
# ===================================================
......@@ -1032,6 +1033,10 @@ if(DEFINED WITH_INTELPERC)
status(" Intel PerC:" HAVE_INTELPERC THEN "YES" ELSE NO)
endif(DEFINED WITH_INTELPERC)
if(DEFINED WITH_GPHOTO2)
status(" gPhoto2:" HAVE_GPHOTO2 THEN "YES" ELSE NO)
endif(DEFINED WITH_GPHOTO2)
# ========================== Other third-party libraries ==========================
status("")
......
......@@ -311,3 +311,9 @@ endif()
if(WITH_INTELPERC)
include("${OpenCV_SOURCE_DIR}/cmake/OpenCVFindIntelPerCSDK.cmake")
endif(WITH_INTELPERC)
# --- gPhoto2 ---
ocv_clear_vars(HAVE_GPHOTO2)
if(WITH_GPHOTO2)
CHECK_MODULE(libgphoto2 HAVE_GPHOTO2)
endif(WITH_GPHOTO2)
......@@ -178,3 +178,6 @@
/* Define if your processor stores words with the most significant byte
first (like Motorola and SPARC, unlike Intel and VAX). */
#cmakedefine WORDS_BIGENDIAN
/* gPhoto2 library */
#cmakedefine HAVE_GPHOTO2
......@@ -186,6 +186,10 @@ if(HAVE_INTELPERC)
list(APPEND VIDEOIO_LIBRARIES ${INTELPERC_LIBRARIES})
endif(HAVE_INTELPERC)
if(HAVE_GPHOTO2)
list(APPEND videoio_srcs ${CMAKE_CURRENT_LIST_DIR}/src/cap_gphoto2.cpp)
endif(HAVE_GPHOTO2)
if(IOS)
add_definitions(-DHAVE_IOS=1)
list(APPEND videoio_srcs
......
......@@ -89,7 +89,8 @@ enum { CAP_ANY = 0, // autodetect
CAP_WINRT = 1410, // Microsoft Windows Runtime using Media Foundation
CAP_INTELPERC = 1500, // Intel Perceptual Computing SDK
CAP_OPENNI2 = 1600, // OpenNI2 (for Kinect)
CAP_OPENNI2_ASUS = 1610 // OpenNI2 (for Asus Xtion and Occipital Structure sensors)
CAP_OPENNI2_ASUS = 1610, // OpenNI2 (for Asus Xtion and Occipital Structure sensors)
CAP_GPHOTO2 = 1700 // gPhoto2 connection
};
// generic properties (based on DC1394 properties)
......@@ -382,6 +383,23 @@ enum { VIDEOWRITER_PROP_QUALITY = 1, // Quality (0..100%) of the videostream
VIDEOWRITER_PROP_FRAMEBYTES = 2, // (Read-only): Size of just encoded video frame
};
// gPhoto2 properties. If propertyId is less than 0, the call operates on the widget whose
// camera setting ID is the additive inverse of propertyId.
// Get the IDs by reading CAP_PROP_GPHOTO2_WIDGET_ENUMERATE.
// @see DigitalCameraCapture in cap_gphoto2.cpp for more info
enum { CAP_PROP_GPHOTO2_PREVIEW = 17001, // Capture only preview from liveview mode.
CAP_PROP_GPHOTO2_WIDGET_ENUMERATE = 17002, // Readonly, returns (const char *).
CAP_PROP_GPHOTO2_RELOAD_CONFIG = 17003, // Trigger, only by set. Reload camera settings.
CAP_PROP_GPHOTO2_RELOAD_ON_CHANGE = 17004, // Reload all settings on set.
CAP_PROP_GPHOTO2_COLLECT_MSGS = 17005, // Collect messages with details.
CAP_PROP_GPHOTO2_FLUSH_MSGS = 17006, // Readonly, returns (const char *).
CAP_PROP_SPEED = 17007, // Exposure speed. Can be readonly, depends on camera program.
CAP_PROP_APERTURE = 17008, // Aperture. Can be readonly, depends on camera program.
CAP_PROP_EXPOSUREPROGRAM = 17009, // Camera exposure program.
CAP_PROP_VIEWFINDER = 17010 // Enter liveview mode.
};
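// A minimal usage sketch for these properties (illustrative only, not part of this change;
// it assumes an OpenCV build with gPhoto2 support, a connected camera, and that x is a
// camera setting ID taken from the CAP_PROP_GPHOTO2_WIDGET_ENUMERATE list):
//
//     VideoCapture cap(CAP_GPHOTO2);                      // camera 0 in the gPhoto2 domain
//     cap.set(CAP_PROP_GPHOTO2_PREVIEW, true);            // grab liveview-sized frames
//     const char * widgets = (const char *) (intptr_t)
//             cap.get(CAP_PROP_GPHOTO2_WIDGET_ENUMERATE); // CSV list of camera settings
//     double choice = cap.get(-x);                        // read widget with camera setting ID x
//     cap.set(-x, choice);                                // write it back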
//enum {
class IVideoCapture;
/** @brief Class for video capturing from video files, image sequences or cameras. The class provides C++ API
......
......@@ -110,7 +110,9 @@ enum
CV_CAP_INTELPERC = 1500, // Intel Perceptual Computing
CV_CAP_OPENNI2 = 1600 // OpenNI2 (for Kinect)
CV_CAP_OPENNI2 = 1600, // OpenNI2 (for Kinect)
CV_CAP_GPHOTO2 = 1700
};
/* start capturing frames from camera: index = camera_index + domain_offset (CV_CAP_*) */
......@@ -391,6 +393,23 @@ enum
CV_CAP_INTELPERC_IMAGE = 3
};
// gPhoto2 properties. If propertyId is less than 0, the call operates on the widget whose
// camera setting ID is the additive inverse of propertyId.
// Get the IDs by reading CV_CAP_PROP_GPHOTO2_WIDGET_ENUMERATE.
// @see DigitalCameraCapture in cap_gphoto2.cpp for more info
enum
{
CV_CAP_PROP_GPHOTO2_PREVIEW = 17001, // Capture only preview from liveview mode.
CV_CAP_PROP_GPHOTO2_WIDGET_ENUMERATE = 17002, // Readonly, returns (const char *).
CV_CAP_PROP_GPHOTO2_RELOAD_CONFIG = 17003, // Trigger, only by set. Reload camera settings.
CV_CAP_PROP_GPHOTO2_RELOAD_ON_CHANGE = 17004, // Reload all settings on set.
CV_CAP_PROP_GPHOTO2_COLLECT_MSGS = 17005, // Collect messages with details.
CV_CAP_PROP_GPHOTO2_FLUSH_MSGS = 17006, // Readonly, returns (const char *).
CV_CAP_PROP_SPEED = 17007, // Exposure speed. Can be readonly, depends on camera program.
CV_CAP_PROP_APERTURE = 17008, // Aperture. Can be readonly, depends on camera program.
CV_CAP_PROP_EXPOSUREPROGRAM = 17009, // Camera exposure program.
CV_CAP_PROP_VIEWFINDER = 17010 // Enter liveview mode.
};
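/* A minimal usage sketch of the C API (illustrative only, not part of this change;
 * it assumes a build with gPhoto2 support and a connected camera):
 *
 *     CvCapture * capture = cvCreateCameraCapture(CV_CAP_GPHOTO2);
 *     cvSetCaptureProperty(capture, CV_CAP_PROP_GPHOTO2_PREVIEW, 1);
 *     IplImage * frame = cvQueryFrame(capture);
 *     double aperture = cvGetCaptureProperty(capture, CV_CAP_PROP_APERTURE);
 *     cvReleaseCapture(&capture);
 */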
/* retrieve or set capture properties */
CVAPI(double) cvGetCaptureProperty( CvCapture* capture, int property_id );
CVAPI(int) cvSetCaptureProperty( CvCapture* capture, int property_id, double value );
......
......@@ -518,6 +518,9 @@ static Ptr<IVideoCapture> IVideoCapture_create(int index)
#endif
#ifdef WINRT_VIDEO
CAP_WINRT,
#endif
#ifdef HAVE_GPHOTO2
CV_CAP_GPHOTO2,
#endif
-1, -1
};
......@@ -537,6 +540,7 @@ static Ptr<IVideoCapture> IVideoCapture_create(int index)
#if defined(HAVE_DSHOW) || \
defined(HAVE_INTELPERC) || \
defined(WINRT_VIDEO) || \
defined(HAVE_GPHOTO2) || \
(0)
Ptr<IVideoCapture> capture;
......@@ -558,6 +562,11 @@ static Ptr<IVideoCapture> IVideoCapture_create(int index)
if (capture)
return capture;
break; // CAP_WINRT
#endif
#ifdef HAVE_GPHOTO2
case CV_CAP_GPHOTO2:
capture = createGPhoto2Capture(index);
break;
#endif
}
if (capture && capture->isOpened())
......@@ -572,14 +581,37 @@ static Ptr<IVideoCapture> IVideoCapture_create(int index)
static Ptr<IVideoCapture> IVideoCapture_create(const String& filename)
{
int domains[] =
{
CV_CAP_ANY,
#ifdef HAVE_GPHOTO2
CV_CAP_GPHOTO2,
#endif
-1, -1
};
// try every possibly installed camera API
for (int i = 0; domains[i] >= 0; i++)
{
Ptr<IVideoCapture> capture;
switch (domains[i])
{
case CV_CAP_ANY:
capture = createMotionJpegCapture(filename);
break;
#ifdef HAVE_GPHOTO2
case CV_CAP_GPHOTO2:
capture = createGPhoto2Capture(filename);
break;
#endif
}
if (capture && capture->isOpened())
{
return capture;
}
}
// failed open a camera
return Ptr<IVideoCapture>();
}
......
/*
* Copyright (c) 2015, Piotr Dobrowolski dobrypd[at]gmail[dot]com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "precomp.hpp"
#ifdef HAVE_GPHOTO2
#include <gphoto2/gphoto2.h>
#include <algorithm>
#include <clocale>
#include <cstdio>
#include <cstring>
#include <ctime>
#include <deque>
#include <exception>
#include <map>
#include <ostream>
#include <string>
namespace cv
{
namespace gphoto2 {
/**
* \brief Map gPhoto2 return code into this exception.
*/
class GPhoto2Exception: public std::exception
{
private:
int result;
const char * method;
public:
/**
* @param methodStr libgphoto2 method name
* @param gPhoto2Result libgphoto2 method result, should be less than GP_OK
*/
GPhoto2Exception(const char * methodStr, int gPhoto2Result)
{
result = gPhoto2Result;
method = methodStr;
}
virtual const char * what() const throw ()
{
return gp_result_as_string(result);
}
friend std::ostream & operator<<(std::ostream & ostream,
GPhoto2Exception & e)
{
return ostream << e.method << ": " << e.what();
}
};
/**
* \brief Capture using your camera device via digital camera library - gPhoto2.
*
* For library description and list of supported cameras, go to
* @url http://gphoto.sourceforge.net/
*
 * Because gPhoto2 configuration is based on widgets,
 * while OpenCV CvCapture property settings are double typed,
 * some assumptions and tricks have to be made.
 * 1. Device properties can be changed by ID: use @method setProperty(int, double)
 * and @method getProperty(int) with the __additive inverse__ of the
 * camera setting ID as propertyId. (If you want to get the camera setting
 * with ID == x, call #getProperty(-x).)
 * 2. Digital camera setting IDs are device dependent.
 * 3. You can list them by getting the property CAP_PROP_GPHOTO2_WIDGET_ENUMERATE.
 * 3.1. Instead of a double, the return value is a pointer to a char array
 * with the listed settings in CSV form.
 * 4. There are several types of widgets (camera settings).
 * 4.1. For "menu" and "radio" you can get/set the choice number.
 * 4.2. For "toggle" you can get/set an int.
 * 4.3. For "range" you can get/set a float.
 * 4.4. For any other type, a pointer is fetched/set.
 * 5. You can fetch camera messages by using CAP_PROP_GPHOTO2_COLLECT_MSGS
 * and CAP_PROP_GPHOTO2_FLUSH_MSGS (the latter returns a pointer to a char array).
 * 6. Camera settings are fetched from the device as lazily as possible.
 * This creates a problem when a change of one setting
 * affects another setting. Use CV_CAP_PROP_GPHOTO2_RELOAD_ON_CHANGE
 * or CV_CAP_PROP_GPHOTO2_RELOAD_CONFIG to make sure that the property you are
 * planning to get is up to date.
 * (A short usage sketch follows the class definition below.)
 *
 * Capture can work in 2 main modes: preview and final,
 * where preview is the output of the digital camera's "liveview".
 * Change modes with the CAP_PROP_GPHOTO2_PREVIEW property.
 *
 * Moreover, some generic properties are mapped to widgets or implemented directly:
* * CV_CAP_PROP_SPEED,
 * * CV_CAP_PROP_APERTURE,
* * CV_CAP_PROP_EXPOSUREPROGRAM,
* * CV_CAP_PROP_VIEWFINDER,
* * CV_CAP_PROP_POS_MSEC,
* * CV_CAP_PROP_POS_FRAMES,
* * CV_CAP_PROP_FRAME_WIDTH,
* * CV_CAP_PROP_FRAME_HEIGHT,
* * CV_CAP_PROP_FPS,
* * CV_CAP_PROP_FRAME_COUNT
* * CV_CAP_PROP_FORMAT,
* * CV_CAP_PROP_EXPOSURE,
* * CV_CAP_PROP_TRIGGER_DELAY,
* * CV_CAP_PROP_ZOOM,
* * CV_CAP_PROP_FOCUS,
* * CV_CAP_PROP_ISO_SPEED.
*/
class DigitalCameraCapture: public IVideoCapture
{
public:
static const char * separator;
static const char * lineDelimiter;
DigitalCameraCapture();
DigitalCameraCapture(int index);
DigitalCameraCapture(const String &deviceName);
virtual ~DigitalCameraCapture();
virtual bool isOpened() const;
virtual double getProperty(int) const;
virtual bool setProperty(int, double);
virtual bool grabFrame();
virtual bool retrieveFrame(int, OutputArray);
virtual int getCaptureDomain()
{
return CV_CAP_GPHOTO2;
} // Return the type of the capture object: CV_CAP_VFW, etc...
bool open(int index);
void close();
bool deviceExist(int index) const;
int findDevice(const char * deviceName) const;
protected:
// Known widget names
static const char * PROP_EXPOSURE_COMPENSACTION;
static const char * PROP_SELF_TIMER_DELAY;
static const char * PROP_MANUALFOCUS;
static const char * PROP_AUTOFOCUS;
static const char * PROP_ISO;
static const char * PROP_SPEED;
static const char * PROP_APERTURE_NIKON;
static const char * PROP_APERTURE_CANON;
static const char * PROP_EXPOSURE_PROGRAM;
static const char * PROP_VIEWFINDER;
// Instance
GPContext * context = NULL;
int numDevices;
void initContext();
// Selected device
bool opened;
Camera * camera = NULL;
Mat frame;
// Properties
CameraWidget * rootWidget = NULL;
CameraWidget * getGenericProperty(int propertyId, double & output) const;
CameraWidget * setGenericProperty(int propertyId, double value,
bool & output) const;
// Widgets
void reloadConfig() throw (GPhoto2Exception);
CameraWidget * getWidget(int widgetId) const;
CameraWidget * findWidgetByName(const char * name) const;
// Loading
void readFrameFromFile(CameraFile * file, OutputArray outputFrame) throw (GPhoto2Exception);
// Context feedback
friend void ctxErrorFunc(GPContext *, const char *, void *);
friend void ctxStatusFunc(GPContext *, const char *, void *);
friend void ctxMessageFunc(GPContext *, const char *, void *);
// Messages / debug
enum MsgType
{
ERROR = (int) 'E',
WARNING = (int) 'W',
STATUS = (int) 'S',
OTHER = (int) 'O'
};
template<typename OsstreamPrintable>
void message(MsgType msgType, const char * msg,
OsstreamPrintable & arg) const;
private:
// Instance
CameraAbilitiesList * abilitiesList = NULL;
GPPortInfoList * capablePorts = NULL;
CameraList * allDevices = NULL;
// Selected device
CameraAbilities cameraAbilities;
std::deque<CameraFile *> grabbedFrames;
// Properties
bool preview; // CV_CAP_PROP_GPHOTO2_PREVIEW
std::string widgetInfo; // CV_CAP_PROP_GPHOTO2_WIDGET_ENUMERATE
std::map<int, CameraWidget *> widgets;
bool reloadOnChange; // CV_CAP_PROP_GPHOTO2_RELOAD_ON_CHANGE
time_t firstCapturedFrameTime;
unsigned long int capturedFrames;
DigitalCameraCapture(const DigitalCameraCapture&); // Disable copying
DigitalCameraCapture& operator=(DigitalCameraCapture const&); // Disable assigning
// Widgets
int noOfWidgets;
int widgetDescription(std::ostream &os, CameraWidget * widget) const
throw (GPhoto2Exception);
int collectWidgets(std::ostream &os, CameraWidget * widget)
throw (GPhoto2Exception);
// Messages / debug
mutable std::ostringstream msgsBuffer; // CV_CAP_PROP_GPHOTO2_FLUSH_MSGS
mutable std::string lastFlush; // CV_CAP_PROP_GPHOTO2_FLUSH_MSGS
bool collectMsgs; // CV_CAP_PROP_GPHOTO2_COLLECT_MSGS
};
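/*
 * Usage sketch for the widget access scheme described above (illustrative only,
 * not part of this change; x stands for a camera setting ID parsed from the
 * CSV list, whose columns are "id,label,name,info,readonly,type,value,"):
 *
 *     VideoCapture cap("Nikon");                           // open by model-name substring
 *     const char * csv = (const char *) (intptr_t)
 *             cap.get(CAP_PROP_GPHOTO2_WIDGET_ENUMERATE);  // enumerate camera settings
 *     double current = cap.get(-x);                        // e.g. choice index of a radio widget
 *     cap.set(-x, current + 1);                            // select the next choice
 *     cap.set(CAP_PROP_GPHOTO2_RELOAD_CONFIG, true);       // re-read settings if needed
 */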
/**
* \brief Check if gPhoto2 function ends successfully. If not, throw an exception.
*/
#define CR(GPHOTO2_FUN) do {\
int r_0629c47b758;\
if ((r_0629c47b758 = (GPHOTO2_FUN)) < GP_OK) {\
throw GPhoto2Exception(#GPHOTO2_FUN, r_0629c47b758);\
};\
} while(0)
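// Usage example (a sketch; any gPhoto2 call returning an int status code fits here):
//     CR(gp_camera_init(camera, context)); // throws GPhoto2Exception if the result is < GP_OK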
/**
* \brief gPhoto2 context error feedback function.
 * @param thatGPhotoCap is required to be a pointer to a DigitalCameraCapture object.
*/
void ctxErrorFunc(GPContext *, const char * str, void * thatGPhotoCap)
{
const DigitalCameraCapture * self =
(const DigitalCameraCapture *) thatGPhotoCap;
self->message(self->ERROR, "context feedback", str);
}
/**
* \brief gPhoto2 context status feedback function.
 * @param thatGPhotoCap is required to be a pointer to a DigitalCameraCapture object.
*/
void ctxStatusFunc(GPContext *, const char * str, void * thatGPhotoCap)
{
const DigitalCameraCapture * self =
(const DigitalCameraCapture *) thatGPhotoCap;
self->message(self->STATUS, "context feedback", str);
}
/**
* \brief gPhoto2 context message feedback function.
 * @param thatGPhotoCap is required to be a pointer to a DigitalCameraCapture object.
*/
void ctxMessageFunc(GPContext *, const char * str, void * thatGPhotoCap)
{
const DigitalCameraCapture * self =
(const DigitalCameraCapture *) thatGPhotoCap;
self->message(self->OTHER, "context feedback", str);
}
/**
* \brief Separator used while creating CSV.
*/
const char * DigitalCameraCapture::separator = ",";
/**
* \brief Line delimiter used while creating any readable output.
*/
const char * DigitalCameraCapture::lineDelimiter = "\n";
/**
 * \brief Some known widget names.
 *
 * These are actually substrings of the widget name,
 * e.g. for VIEWFINDER, Nikon uses "viewfinder", while Canon can use "eosviewfinder".
*/
const char * DigitalCameraCapture::PROP_EXPOSURE_COMPENSACTION =
"exposurecompensation";
const char * DigitalCameraCapture::PROP_SELF_TIMER_DELAY = "selftimerdelay";
const char * DigitalCameraCapture::PROP_MANUALFOCUS = "manualfocusdrive";
const char * DigitalCameraCapture::PROP_AUTOFOCUS = "autofocusdrive";
const char * DigitalCameraCapture::PROP_ISO = "iso";
const char * DigitalCameraCapture::PROP_SPEED = "shutterspeed";
const char * DigitalCameraCapture::PROP_APERTURE_NIKON = "f-number";
const char * DigitalCameraCapture::PROP_APERTURE_CANON = "aperture";
const char * DigitalCameraCapture::PROP_EXPOSURE_PROGRAM = "expprogram";
const char * DigitalCameraCapture::PROP_VIEWFINDER = "viewfinder";
/**
* Initialize gPhoto2 context, search for all available devices.
*/
void DigitalCameraCapture::initContext()
{
capturedFrames = noOfWidgets = numDevices = 0;
opened = preview = reloadOnChange = false;
firstCapturedFrameTime = 0;
context = gp_context_new();
gp_context_set_error_func(context, ctxErrorFunc, (void*) this);
gp_context_set_status_func(context, ctxStatusFunc, (void*) this);
gp_context_set_message_func(context, ctxMessageFunc, (void*) this);
try
{
// Load abilities
CR(gp_abilities_list_new(&abilitiesList));
CR(gp_abilities_list_load(abilitiesList, context));
// Load ports
CR(gp_port_info_list_new(&capablePorts));
CR(gp_port_info_list_load(capablePorts));
// Auto-detect devices
CR(gp_list_new(&allDevices));
CR(gp_camera_autodetect(allDevices, context));
CR(numDevices = gp_list_count(allDevices));
}
catch (GPhoto2Exception & e)
{
numDevices = 0;
}
}
/**
* Search for all devices while constructing.
*/
DigitalCameraCapture::DigitalCameraCapture()
{
initContext();
}
/**
* @see open(int)
*/
DigitalCameraCapture::DigitalCameraCapture(int index)
{
initContext();
if (deviceExist(index))
open(index);
}
/**
* @see findDevice(const char*)
* @see open(int)
*/
DigitalCameraCapture::DigitalCameraCapture(const String & deviceName)
{
initContext();
int index = findDevice(deviceName.c_str());
if (deviceExist(index))
open(index);
}
/**
* Always close connection to the device.
*/
DigitalCameraCapture::~DigitalCameraCapture()
{
close();
try
{
CR(gp_abilities_list_free(abilitiesList));
abilitiesList = NULL;
CR(gp_port_info_list_free(capablePorts));
capablePorts = NULL;
CR(gp_list_unref(allDevices));
allDevices = NULL;
gp_context_unref(context);
context = NULL;
}
catch (GPhoto2Exception & e)
{
message(ERROR, "destruction error", e);
}
}
/**
* Connects to selected device.
*/
bool DigitalCameraCapture::open(int index)
{
const char * model = 0, *path = 0;
int m, p;
GPPortInfo portInfo;
if (isOpened()) {
close();
}
try
{
CR(gp_camera_new(&camera));
CR(gp_list_get_name(allDevices, index, &model));
CR(gp_list_get_value(allDevices, index, &path));
// Set model abilities.
CR(m = gp_abilities_list_lookup_model(abilitiesList, model));
CR(gp_abilities_list_get_abilities(abilitiesList, m, &cameraAbilities));
CR(gp_camera_set_abilities(camera, cameraAbilities));
// Set port
CR(p = gp_port_info_list_lookup_path(capablePorts, path));
CR(gp_port_info_list_get_info(capablePorts, p, &portInfo));
CR(gp_camera_set_port_info(camera, portInfo));
// Initialize connection to the camera.
CR(gp_camera_init(camera, context));
message(STATUS, "connected camera", model);
message(STATUS, "connected using", path);
// State initialization
firstCapturedFrameTime = 0;
capturedFrames = 0;
preview = false;
reloadOnChange = false;
collectMsgs = false;
reloadConfig();
opened = true;
return true;
}
catch (GPhoto2Exception & e)
{
message(WARNING, "opening device failed", e);
return false;
}
}
/**
*
*/
bool DigitalCameraCapture::isOpened() const
{
return opened;
}
/**
* Close connection to the camera. Remove all unread frames/files.
*/
void DigitalCameraCapture::close()
{
try
{
if (!frame.empty())
{
frame.release();
}
if (camera)
{
CR(gp_camera_exit(camera, context));
CR(gp_camera_unref(camera));
camera = NULL;
}
opened = false;
// Release every unread frame. Taking size() separately avoids the precedence bug of
// "int frames = grabbedFrames.size() > 0", which would release at most one frame.
int frames = (int) grabbedFrames.size();
while (frames--)
{
CameraFile * file = grabbedFrames.front();
grabbedFrames.pop_front();
CR(gp_file_unref(file));
}
if (rootWidget)
{
widgetInfo.clear();
CR(gp_widget_unref(rootWidget));
rootWidget = NULL;
}
}
catch (GPhoto2Exception & e)
{
message(ERROR, "cannot close device properly", e);
}
}
/**
 * @param output will be changed if possible,
 * @return the widget, or NULL if the output value was found (and saved in the argument).
*/
CameraWidget * DigitalCameraCapture::getGenericProperty(int propertyId,
double & output) const
{
switch (propertyId)
{
case CV_CAP_PROP_POS_MSEC:
{
// Only seconds level precision, FUTURE: cross-platform milliseconds
output = (time(0) - firstCapturedFrameTime) * 1e2;
return NULL;
}
case CV_CAP_PROP_POS_FRAMES:
{
output = capturedFrames;
return NULL;
}
case CV_CAP_PROP_FRAME_WIDTH:
{
if (!frame.empty())
{
output = frame.cols;
}
return NULL;
}
case CV_CAP_PROP_FRAME_HEIGHT:
{
if (!frame.empty())
{
output = frame.rows;
}
return NULL;
}
case CV_CAP_PROP_FORMAT:
{
if (!frame.empty())
{
output = frame.type();
}
return NULL;
}
case CV_CAP_PROP_FPS: // returns the average fps since the beginning
{
double wholeProcessTime = 0;
getGenericProperty(CV_CAP_PROP_POS_MSEC, wholeProcessTime);
wholeProcessTime /= 1e2;
output = capturedFrames / wholeProcessTime;
return NULL;
}
case CV_CAP_PROP_FRAME_COUNT:
{
output = capturedFrames;
return NULL;
}
case CV_CAP_PROP_EXPOSURE:
return findWidgetByName(PROP_EXPOSURE_COMPENSACTION);
case CV_CAP_PROP_TRIGGER_DELAY:
return findWidgetByName(PROP_SELF_TIMER_DELAY);
case CV_CAP_PROP_ZOOM:
return findWidgetByName(PROP_MANUALFOCUS);
case CV_CAP_PROP_FOCUS:
return findWidgetByName(PROP_AUTOFOCUS);
case CV_CAP_PROP_ISO_SPEED:
return findWidgetByName(PROP_ISO);
case CV_CAP_PROP_SPEED:
return findWidgetByName(PROP_SPEED);
case CV_CAP_PROP_APERTURE:
{
CameraWidget * widget = findWidgetByName(PROP_APERTURE_NIKON);
return (widget == 0) ? findWidgetByName(PROP_APERTURE_CANON) : widget;
}
case CV_CAP_PROP_EXPOSUREPROGRAM:
return findWidgetByName(PROP_EXPOSURE_PROGRAM);
case CV_CAP_PROP_VIEWFINDER:
return findWidgetByName(PROP_VIEWFINDER);
}
return NULL;
}
/**
* Get property.
* @see DigitalCameraCapture for more information about returned double type.
*/
double DigitalCameraCapture::getProperty(int propertyId) const
{
CameraWidget * widget = NULL;
double output = 0;
if (propertyId < 0)
{
widget = getWidget(-propertyId);
}
else
{
switch (propertyId)
{
// gphoto2 cap featured
case CV_CAP_PROP_GPHOTO2_PREVIEW:
return preview;
case CV_CAP_PROP_GPHOTO2_WIDGET_ENUMERATE:
if (rootWidget == NULL)
return 0;
return (intptr_t) widgetInfo.c_str();
case CV_CAP_PROP_GPHOTO2_RELOAD_CONFIG:
return 0; // Trigger, only by set
case CV_CAP_PROP_GPHOTO2_RELOAD_ON_CHANGE:
return reloadOnChange;
case CV_CAP_PROP_GPHOTO2_COLLECT_MSGS:
return collectMsgs;
case CV_CAP_PROP_GPHOTO2_FLUSH_MSGS:
lastFlush = msgsBuffer.str();
msgsBuffer.str("");
msgsBuffer.clear();
return (intptr_t) lastFlush.c_str();
default:
widget = getGenericProperty(propertyId, output);
/* no break */
}
}
if (widget == NULL)
return output;
try
{
CameraWidgetType type;
CR(gp_widget_get_type(widget, &type));
switch (type)
{
case GP_WIDGET_MENU:
case GP_WIDGET_RADIO:
{
int cnt = 0, i;
const char * current;
CR(gp_widget_get_value(widget, &current));
CR(cnt = gp_widget_count_choices(widget));
for (i = 0; i < cnt; i++)
{
const char *choice;
CR(gp_widget_get_choice(widget, i, &choice));
if (std::strcmp(choice, current) == 0)
{
return i;
}
}
return -1;
}
case GP_WIDGET_TOGGLE:
{
int value;
CR(gp_widget_get_value(widget, &value));
return value;
}
case GP_WIDGET_RANGE:
{
float value;
CR(gp_widget_get_value(widget, &value));
return value;
}
default:
{
char* value;
CR(gp_widget_get_value(widget, &value));
return (intptr_t) value;
}
}
}
catch (GPhoto2Exception & e)
{
char buf[128] = "";
sprintf(buf, "cannot get property: %d", propertyId);
message(WARNING, (const char *) buf, e);
return 0;
}
}
/**
 * @param output will be changed if possible,
 * @return the widget, or NULL if the output value was found (and saved in the argument).
*/
CameraWidget * DigitalCameraCapture::setGenericProperty(int propertyId,
double /*FUTURE: value*/, bool & output) const
{
switch (propertyId)
{
case CV_CAP_PROP_POS_MSEC:
case CV_CAP_PROP_POS_FRAMES:
case CV_CAP_PROP_FRAME_WIDTH:
case CV_CAP_PROP_FRAME_HEIGHT:
case CV_CAP_PROP_FPS:
case CV_CAP_PROP_FRAME_COUNT:
case CV_CAP_PROP_FORMAT:
output = false;
return NULL;
case CV_CAP_PROP_EXPOSURE:
return findWidgetByName(PROP_EXPOSURE_COMPENSACTION);
case CV_CAP_PROP_TRIGGER_DELAY:
return findWidgetByName(PROP_SELF_TIMER_DELAY);
case CV_CAP_PROP_ZOOM:
return findWidgetByName(PROP_MANUALFOCUS);
case CV_CAP_PROP_FOCUS:
return findWidgetByName(PROP_AUTOFOCUS);
case CV_CAP_PROP_ISO_SPEED:
return findWidgetByName(PROP_ISO);
case CV_CAP_PROP_SPEED:
return findWidgetByName(PROP_SPEED);
case CV_CAP_PROP_APERTURE:
{
CameraWidget * widget = findWidgetByName(PROP_APERTURE_NIKON);
return (widget == NULL) ? findWidgetByName(PROP_APERTURE_CANON) : widget;
}
case CV_CAP_PROP_EXPOSUREPROGRAM:
return findWidgetByName(PROP_EXPOSURE_PROGRAM);
case CV_CAP_PROP_VIEWFINDER:
return findWidgetByName(PROP_VIEWFINDER);
}
return NULL;
}
/**
* Set property.
 * @see DigitalCameraCapture for more information about the double-typed value argument.
*/
bool DigitalCameraCapture::setProperty(int propertyId, double value)
{
CameraWidget * widget = NULL;
bool output = false;
if (propertyId < 0)
{
widget = getWidget(-propertyId);
}
else
{
switch (propertyId)
{
// gphoto2 cap featured
case CV_CAP_PROP_GPHOTO2_PREVIEW:
preview = value != 0;
return true;
case CV_CAP_PROP_GPHOTO2_WIDGET_ENUMERATE:
return false;
case CV_CAP_PROP_GPHOTO2_RELOAD_CONFIG:
reloadConfig();
return true;
case CV_CAP_PROP_GPHOTO2_RELOAD_ON_CHANGE:
reloadOnChange = value != 0;
return true;
case CV_CAP_PROP_GPHOTO2_COLLECT_MSGS:
collectMsgs = value != 0;
return true;
case CV_CAP_PROP_GPHOTO2_FLUSH_MSGS:
return false;
default:
widget = setGenericProperty(propertyId, value, output);
/* no break */
}
}
if (widget == NULL)
return output;
try
{
CameraWidgetType type;
CR(gp_widget_get_type(widget, &type));
switch (type)
{
case GP_WIDGET_RADIO:
case GP_WIDGET_MENU:
{
int i = static_cast<int>(value);
char *choice;
CR(gp_widget_get_choice(widget, i, (const char**)&choice));
CR(gp_widget_set_value(widget, choice));
break;
}
case GP_WIDGET_TOGGLE:
{
int i = static_cast<int>(value);
CR(gp_widget_set_value(widget, &i));
break;
}
case GP_WIDGET_RANGE:
{
float v = static_cast<float>(value);
CR(gp_widget_set_value(widget, &v));
break;
}
default:
{
CR(gp_widget_set_value(widget, (void* )(intptr_t )&value));
break;
}
}
if (!reloadOnChange)
{
// force widget change
CR(gp_widget_set_changed(widget, 1));
}
// Use the same locale setting as while getting rootWidget.
char * localeTmp = setlocale(LC_ALL, "C");
CR(gp_camera_set_config(camera, rootWidget, context));
setlocale(LC_ALL, localeTmp);
if (reloadOnChange)
{
reloadConfig();
} else {
CR(gp_widget_set_changed(widget, 0));
}
}
catch (GPhoto2Exception & e)
{
char buf[128] = "";
sprintf(buf, "cannot set property: %d to %f", propertyId, value);
message(WARNING, (const char *) buf, e);
return false;
}
return true;
}
/**
 * Capture an image and store the file in @field grabbedFrames.
 * The file is not read here; it is deleted from the camera automatically.
*/
bool DigitalCameraCapture::grabFrame()
{
CameraFilePath filePath;
CameraFile * file = NULL;
try
{
CR(gp_file_new(&file));
if (preview)
{
CR(gp_camera_capture_preview(camera, file, context));
}
else
{
// Capture an image
CR(gp_camera_capture(camera, GP_CAPTURE_IMAGE, &filePath, context));
CR(gp_camera_file_get(camera, filePath.folder, filePath.name, GP_FILE_TYPE_NORMAL,
file, context));
CR(gp_camera_file_delete(camera, filePath.folder, filePath.name, context));
}
// State update
if (firstCapturedFrameTime == 0)
{
firstCapturedFrameTime = time(0);
}
capturedFrames++;
grabbedFrames.push_back(file);
}
catch (GPhoto2Exception & e)
{
if (file)
gp_file_unref(file);
message(WARNING, "cannot grab new frame", e);
return false;
}
return true;
}
/**
 * Read a stored image file.
*/
bool DigitalCameraCapture::retrieveFrame(int, OutputArray outputFrame)
{
if (grabbedFrames.size() > 0)
{
CameraFile * file = grabbedFrames.front();
grabbedFrames.pop_front();
try
{
readFrameFromFile(file, outputFrame);
CR(gp_file_unref(file));
}
catch (GPhoto2Exception & e)
{
message(WARNING, "cannot read file grabbed from device", e);
return false;
}
}
else
{
return false;
}
return true;
}
/**
* @return true if device exists
*/
bool DigitalCameraCapture::deviceExist(int index) const
{
return (index >= 0) && (index < numDevices);
}
/**
* @return device index if exists, otherwise -1
*/
int DigitalCameraCapture::findDevice(const char * deviceName) const
{
const char * model = 0;
try
{
if (deviceName != 0)
{
for (int i = 0; i < numDevices; ++i)
{
CR(gp_list_get_name(allDevices, i, &model));
if (model != 0 && strstr(model, deviceName))
{
return i;
}
}
}
}
catch (GPhoto2Exception & e)
{
; // pass
}
return -1;
}
/**
* Load device settings.
*/
void DigitalCameraCapture::reloadConfig() throw (GPhoto2Exception)
{
std::ostringstream widgetInfoListStream;
if (rootWidget != NULL)
{
widgetInfo.clear();
CR(gp_widget_unref(rootWidget));
rootWidget = NULL;
widgets.clear();
}
// Make sure that all config reads (getting settings) use the same locale setting.
char * localeTmp = setlocale(LC_ALL, "C");
CR(gp_camera_get_config(camera, &rootWidget, context));
setlocale(LC_ALL, localeTmp);
widgetInfoListStream << "id,label,name,info,readonly,type,value,"
<< lineDelimiter;
noOfWidgets = collectWidgets(widgetInfoListStream, rootWidget) + 1;
widgetInfo = widgetInfoListStream.str();
}
/**
 * Get the widget that was fetched during the last call to @reloadConfig().
*/
CameraWidget * DigitalCameraCapture::getWidget(int widgetId) const
{
CameraWidget * widget;
std::map<int, CameraWidget *>::const_iterator it = widgets.find(widgetId);
if (it == widgets.end())
return 0;
widget = it->second;
return widget;
}
/**
 * Search for a widget whose name contains the @param subName substring.
*/
CameraWidget * DigitalCameraCapture::findWidgetByName(
const char * subName) const
{
if (subName != NULL)
{
try
{
const char * name;
typedef std::map<int, CameraWidget *>::const_iterator it_t;
it_t it = widgets.begin(), end = widgets.end();
while (it != end)
{
CR(gp_widget_get_name(it->second, &name));
if (strstr(name, subName))
break;
it++;
}
return (it != end) ? it->second : NULL;
}
catch (GPhoto2Exception & e)
{
message(WARNING, "error while searching for widget", e);
}
}
return 0;
}
/**
* Image file reader.
*
* @FUTURE: RAW format reader.
*/
void DigitalCameraCapture::readFrameFromFile(CameraFile * file, OutputArray outputFrame)
throw (GPhoto2Exception)
{
// FUTURE: OpenCV cannot read RAW files right now.
const char * data;
unsigned long int size;
CR(gp_file_get_data_and_size(file, &data, &size));
if (size > 0)
{
Mat buf = Mat(1, size, CV_8UC1, (void *) data);
if(!buf.empty())
{
frame = imdecode(buf, CV_LOAD_IMAGE_UNCHANGED);
}
frame.copyTo(outputFrame);
}
}
/**
 * Print the widget description to @param os.
 * @return the real widget ID (if the config was reloaded a couple of times,
 * the IDs won't be the same)
*/
int DigitalCameraCapture::widgetDescription(std::ostream &os,
CameraWidget * widget) const throw (GPhoto2Exception)
{
const char * label, *name, *info;
int id, readonly;
CameraWidgetType type;
CR(gp_widget_get_id(widget, &id));
CR(gp_widget_get_label(widget, &label));
CR(gp_widget_get_name(widget, &name));
CR(gp_widget_get_info(widget, &info));
CR(gp_widget_get_type(widget, &type));
CR(gp_widget_get_readonly(widget, &readonly));
if ((type == GP_WIDGET_WINDOW) || (type == GP_WIDGET_SECTION)
|| (type == GP_WIDGET_BUTTON))
{
readonly = 1;
}
os << (id - noOfWidgets) << separator << label << separator << name
<< separator << info << separator << readonly << separator;
switch (type)
{
case GP_WIDGET_WINDOW:
{
os << "window" << separator /* no value */<< separator;
break;
}
case GP_WIDGET_SECTION:
{
os << "section" << separator /* no value */<< separator;
break;
}
case GP_WIDGET_TEXT:
{
os << "text" << separator;
char *txt;
CR(gp_widget_get_value(widget, &txt));
os << txt << separator;
break;
}
case GP_WIDGET_RANGE:
{
os << "range" << separator;
float f, t, b, s;
CR(gp_widget_get_range(widget, &b, &t, &s));
CR(gp_widget_get_value(widget, &f));
os << "(" << b << ":" << t << ":" << s << "):" << f << separator;
break;
}
case GP_WIDGET_TOGGLE:
{
os << "toggle" << separator;
int t;
CR(gp_widget_get_value(widget, &t));
os << t << separator;
break;
}
case GP_WIDGET_RADIO:
case GP_WIDGET_MENU:
{
if (type == GP_WIDGET_RADIO)
{
os << "radio" << separator;
}
else
{
os << "menu" << separator;
}
int cnt = 0, i;
char *current;
CR(gp_widget_get_value(widget, &current));
CR(cnt = gp_widget_count_choices(widget));
os << "(";
for (i = 0; i < cnt; i++)
{
const char *choice;
CR(gp_widget_get_choice(widget, i, &choice));
os << i << ":" << choice;
if (i + 1 < cnt)
{
os << ";";
}
}
os << "):" << current << separator;
break;
}
case GP_WIDGET_BUTTON:
{
os << "button" << separator /* no value */<< separator;
break;
}
case GP_WIDGET_DATE:
{
os << "date" << separator;
int t;
time_t xtime;
struct tm *xtm;
char timebuf[200];
CR(gp_widget_get_value(widget, &t));
xtime = t;
xtm = localtime(&xtime);
strftime(timebuf, sizeof(timebuf), "%c", xtm);
os << t << ":" << timebuf << separator;
break;
}
}
return id;
}
/**
* Write all widget descriptions to @param os.
 * @return the maximum widget ID
*/
int DigitalCameraCapture::collectWidgets(std::ostream & os,
CameraWidget * widget) throw (GPhoto2Exception)
{
int id = widgetDescription(os, widget);
os << lineDelimiter;
widgets[id - noOfWidgets] = widget;
CameraWidget * child;
CameraWidgetType type;
CR(gp_widget_get_type(widget, &type));
if ((type == GP_WIDGET_WINDOW) || (type == GP_WIDGET_SECTION))
{
for (int x = 0; x < gp_widget_count_children(widget); x++)
{
CR(gp_widget_get_child(widget, x, &child));
id = std::max(id, collectWidgets(os, child));
}
}
return id;
}
/**
 * Write the message to @field msgsBuffer if the user wants to store messages
 * (@field collectMsgs).
 * Print debug information to the screen.
*/
template<typename OsstreamPrintable>
void DigitalCameraCapture::message(MsgType msgType, const char * msg,
OsstreamPrintable & arg) const
{
#if defined(NDEBUG)
if (collectMsgs)
{
#endif
std::ostringstream msgCreator;
std::string out;
char type = (char) msgType;
msgCreator << "[gPhoto2][" << type << "]: " << msg << ": " << arg
<< lineDelimiter;
out = msgCreator.str();
#if !defined(NDEBUG)
if (collectMsgs)
{
#endif
msgsBuffer << out;
}
#if !defined(NDEBUG)
#if defined(WIN32) || defined(_WIN32)
::OutputDebugString(out.c_str());
#else
fputs(out.c_str(), stderr);
#endif
#endif
}
} // namespace gphoto2
/**
 * \brief IVideoCapture creator from device index.
*/
Ptr<IVideoCapture> createGPhoto2Capture(int index)
{
Ptr<IVideoCapture> capture = makePtr<gphoto2::DigitalCameraCapture>(index);
if (capture->isOpened())
return capture;
return Ptr<gphoto2::DigitalCameraCapture>();
}
/**
* IVideoCapture creator, from device name.
*
 * @param deviceName is a substring of the digital camera model name.
*/
Ptr<IVideoCapture> createGPhoto2Capture(const String & deviceName)
{
Ptr<IVideoCapture> capture = makePtr<gphoto2::DigitalCameraCapture>(deviceName);
if (capture->isOpened())
return capture;
return Ptr<gphoto2::DigitalCameraCapture>();
}
} // namespace cv
#endif
......@@ -186,6 +186,9 @@ namespace cv
Ptr<IVideoCapture> createMotionJpegCapture(const String& filename);
Ptr<IVideoWriter> createMotionJpegWriter( const String& filename, double fps, Size frameSize, bool iscolor );
Ptr<IVideoCapture> createGPhoto2Capture(int index);
Ptr<IVideoCapture> createGPhoto2Capture(const String& deviceName);
};
#endif /* __VIDEOIO_H_ */
......@@ -37,6 +37,7 @@
defined(HAVE_AVFOUNDATION) || \
defined(HAVE_GIGE_API) || \
defined(HAVE_INTELPERC) || \
defined(HAVE_GPHOTO2) || \
(0)
//defined(HAVE_ANDROID_NATIVE_CAMERA) || - enable after #1193
# define BUILD_WITH_CAMERA_SUPPORT 1
......
/*
* Copyright (c) 2015, Piotr Dobrowolski dobrypd[at]gmail[dot]com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include <cstdlib>
#include <cstdio>
#include <iostream>
#include <algorithm>
#include <opencv2/opencv.hpp>
using namespace std;
using namespace cv;
const char * windowOriginal = "Captured preview";
const int FOCUS_STEP = 1024;
const int MAX_FOCUS_STEP = 32767;
const int FOCUS_DIRECTION_INFTY = 1;
const int DEFAULT_BREAK_LIMIT = 5;
const int DEFAULT_OUTPUT_FPS = 20;
const double epsylon = 0.0005; // compression, noise, etc.
struct Args_t
{
const char * deviceName;
const char * output;
unsigned int fps;
unsigned int minimumFocusStep;
unsigned int breakLimit;
bool measure;
bool verbose;
} GlobalArgs;
struct FocusState
{
int step;
int direction;
int minFocusStep;
int lastDirectionChange;
int stepToLastMax;
double rate;
double rateMax;
};
static ostream & operator<<(ostream & os, FocusState & state)
{
return os << "RATE=" << state.rate << "\tSTEP="
<< state.step * state.direction << "\tLast change="
<< state.lastDirectionChange << "\tstepToLastMax="
<< state.stepToLastMax;
}
static FocusState createInitialState()
{
FocusState state;
state.step = FOCUS_STEP;
state.direction = FOCUS_DIRECTION_INFTY;
state.minFocusStep = 0;
state.lastDirectionChange = 0;
state.stepToLastMax = 0;
state.rate = 0;
state.rateMax = 0;
return state;
}
static void focusDriveEnd(VideoCapture & cap, int direction)
{
while (cap.set(CAP_PROP_ZOOM, (double) MAX_FOCUS_STEP * direction))
;
}
/**
 * The minimal focus step depends on the lens,
 * and I don't want to make any assumptions about it.
*/
static int findMinFocusStep(VideoCapture & cap, unsigned int startWith,
int direction)
{
int lStep, rStep;
lStep = 0;
rStep = startWith;
focusDriveEnd(cap, direction * FOCUS_DIRECTION_INFTY);
while (lStep < rStep)
{
int mStep = (lStep + rStep) / 2;
cap.set(CAP_PROP_ZOOM, direction * FOCUS_DIRECTION_INFTY * FOCUS_STEP);
if (cap.set(CAP_PROP_ZOOM, -direction * mStep))
{
rStep = mStep;
}
else
{
lStep = mStep + 1;
}
}
cap.set(CAP_PROP_ZOOM, direction * FOCUS_DIRECTION_INFTY * MAX_FOCUS_STEP);
if (GlobalArgs.verbose)
{
cout << "Found minimal focus step = " << lStep << endl;
}
return lStep;
}
/**
 * Rate the frame from 0 (blurry) to 1 (sharp).
*/
static double rateFrame(Mat & frame)
{
unsigned long int sum = 0;
unsigned long int size = frame.cols * frame.rows;
Mat edges;
cvtColor(frame, edges, CV_BGR2GRAY);
GaussianBlur(edges, edges, Size(7, 7), 1.5, 1.5);
Canny(edges, edges, 0, 30, 3);
MatIterator_<uchar> it, end;
for (it = edges.begin<uchar>(), end = edges.end<uchar>(); it != end; ++it)
{
sum += *it != 0;
}
return (double) sum / (double) size;
}
static int correctFocus(bool lastSucceeded, FocusState & state, double rate)
{
if (GlobalArgs.verbose)
{
cout << "RATE=" << rate << endl;
}
state.lastDirectionChange++;
double rateDelta = rate - state.rate;
if (rate >= state.rateMax + epsylon)
{
// Update Max
state.stepToLastMax = 0;
state.rateMax = rate;
// My local minimum is now on the other direction, that's why:
state.lastDirectionChange = 0;
}
if (!lastSucceeded)
{
// Focus at limit or other problem, change the direction.
state.direction *= -1;
state.lastDirectionChange = 0;
state.step /= 2;
}
else
{
if (rate < epsylon)
{ // It's hard to say anything
state.step = FOCUS_STEP;
}
else if (rateDelta < -epsylon)
{ // Wrong direction ?
state.direction *= -1;
state.step = static_cast<int>(static_cast<double>(state.step) * 0.75);
state.lastDirectionChange = 0;
}
else if ((rate + epsylon < state.rateMax)
&& ((state.lastDirectionChange > 3)
|| ((state.step < (state.minFocusStep * 1.5))
&& state.stepToLastMax > state.step)))
{ // I've done 3 steps (or I'm finishing) without improvement, go back to max.
state.direction = state.stepToLastMax >= 0 ? 1 : -1;
state.step = static_cast<int>(static_cast<double>(state.step) * 0.75);
int stepToMax = abs(state.stepToLastMax);
state.stepToLastMax = 0;
state.lastDirectionChange = 0; // Like reset.
state.rate = rate;
return stepToMax;
}
}
// Update state.
state.rate = rate;
state.stepToLastMax -= state.direction * state.step;
return state.step;
}
static void showHelp(const char * pName, bool welcomeMsg)
{
cout << "This program demonstrates usage of gPhoto2 VideoCapture.\n\n"
"With OpenCV build without gPhoto2 library support it will "
"do nothing special, just capture.\n\n"
"Simple implementation of autofocus is based on edges detection.\n"
"It was tested (this example) only with Nikon DSLR (Nikon D90).\n"
"But shall work on all Nikon DSLRs, and with little effort with other devices.\n"
"Visit http://www.gphoto.org/proj/libgphoto2/support.php\n"
"to find supported devices (need Image Capture at least).\n"
"Before run, set your camera autofocus ON.\n\n";
if (!welcomeMsg)
{
cout << "usage " << pName << ": [OPTIONS] DEVICE_NAME\n\n"
"OPTIONS:\n"
"\t-h\t\treturns this help message,\n"
"\t-o FILENAME\tsave output video in file (MJPEG only),\n"
"\t-f FPS\t\tframes per second in output video,\n"
"\t-m\t\tmeasure exposition\n"
"\t\t\t(returns rates from closest focus to INTY\n"
"\t\t\tfor every minimum step),\n"
"\t-d INT\t\tset minimum focus step,\n"
"\t-v\t\tverbose mode.\n\n\n"
"DEVICE_NAME\t\tis your digital camera model substring.\n\n\n"
"On runtime you can use keys to control:\n";
}
else
{
cout << "Actions:\n";
}
cout << "\tk:\t- focus out,\n"
"\tj:\t- focus in,\n"
"\t,:\t- focus to the closest point,\n"
"\t.:\t- focus to infinity,\n"
"\tr:\t- reset autofocus state,\n"
"\tf:\t- switch autofocus on/off,\n"
"\tq:\t- quit.\n";
}
static bool parseArguments(int argc, char ** argv)
{
int index;
GlobalArgs.deviceName = "Nikon";
GlobalArgs.output = NULL;
GlobalArgs.fps = DEFAULT_OUTPUT_FPS;
GlobalArgs.minimumFocusStep = 0;
GlobalArgs.breakLimit = DEFAULT_BREAK_LIMIT;
GlobalArgs.measure = false;
GlobalArgs.verbose = false;
for (index = 1; index < argc; index++)
{
const char * arg = argv[index];
if (strcmp(arg, "-h") == 0)
{
return false;
}
else if (strcmp(arg, "-o") == 0)
{
GlobalArgs.output = argv[++index];
}
else if (strcmp(arg, "-f") == 0)
{
if (sscanf(argv[++index], "%u", &GlobalArgs.fps) != 1
|| GlobalArgs.fps <= 0)
{
cerr << "Invalid fps argument." << endl;
return false;
}
}
else if (strcmp(arg, "-m") == 0)
{
GlobalArgs.measure = true;
}
else if (strcmp(arg, "-v") == 0)
{
GlobalArgs.verbose = true;
}
else if (strcmp(arg, "-d") == 0)
{
if (sscanf(argv[++index], "%u", &GlobalArgs.minimumFocusStep) != 1
|| GlobalArgs.minimumFocusStep <= 0)
{
cerr << "Invalid minimum focus step argument." << endl;
return false;
}
}
else if (arg[0] != '-')
{
GlobalArgs.deviceName = arg;
}
else
{
cerr << "Unknown option " << arg << endl;
}
}
return true;
}
int main(int argc, char ** argv)
{
if (!parseArguments(argc, argv))
{
showHelp(argv[0], false);
return -1;
}
VideoCapture cap(GlobalArgs.deviceName);
if (!cap.isOpened())
{
cout << "Cannot find device " << GlobalArgs.deviceName << endl;
showHelp(argv[0], false);
return -1;
}
VideoWriter videoWriter;
Mat frame;
FocusState state = createInitialState();
bool focus = true;
bool lastSucceeded = true;
namedWindow(windowOriginal, 1);
// Get settings:
if (GlobalArgs.verbose)
{
if ((cap.get(CAP_PROP_GPHOTO2_WIDGET_ENUMERATE) == 0)
|| (cap.get(CAP_PROP_GPHOTO2_WIDGET_ENUMERATE) == -1))
{
// Some VideoCapture implementations can return -1, 0.
cout << "This is not GPHOTO2 device." << endl;
return -2;
}
cout << "List of camera settings: " << endl
<< (const char *) (intptr_t) cap.get(CAP_PROP_GPHOTO2_WIDGET_ENUMERATE)
<< endl;
cap.set(CAP_PROP_GPHOTO2_COLLECT_MSGS, true);
}
cap.set(CAP_PROP_GPHOTO2_PREVIEW, true);
cap.set(CAP_PROP_VIEWFINDER, true);
cap >> frame; // To check PREVIEW output Size.
if (GlobalArgs.output != NULL)
{
Size S = Size((int) cap.get(CAP_PROP_FRAME_WIDTH), (int) cap.get(CAP_PROP_FRAME_HEIGHT));
int fourCC = CV_FOURCC('M', 'J', 'P', 'G');
videoWriter.open(GlobalArgs.output, fourCC, GlobalArgs.fps, S, true);
if (!videoWriter.isOpened())
{
cerr << "Cannot open output file " << GlobalArgs.output << endl;
showHelp(argv[0], false);
return -1;
}
}
showHelp(argv[0], true); // welcome msg
if (GlobalArgs.minimumFocusStep == 0)
{
state.minFocusStep = findMinFocusStep(cap, FOCUS_STEP / 16, -FOCUS_DIRECTION_INFTY);
}
else
{
state.minFocusStep = GlobalArgs.minimumFocusStep;
}
focusDriveEnd(cap, -FOCUS_DIRECTION_INFTY); // Start with closest
char key = 0;
while (key != 'q' && key != 27 /*ESC*/)
{
cap >> frame;
if (frame.empty())
{
break;
}
if (GlobalArgs.output != NULL)
{
videoWriter << frame;
}
if (focus && !GlobalArgs.measure)
{
int stepToCorrect = correctFocus(lastSucceeded, state, rateFrame(frame));
lastSucceeded = cap.set(CAP_PROP_ZOOM,
max(stepToCorrect, state.minFocusStep) * state.direction);
if ((!lastSucceeded) || (stepToCorrect < state.minFocusStep))
{
if (--GlobalArgs.breakLimit <= 0)
{
focus = false;
state.step = state.minFocusStep * 4;
cout << "In focus, you can press 'f' to improve with small step, "
"or 'r' to reset." << endl;
}
}
else
{
GlobalArgs.breakLimit = DEFAULT_BREAK_LIMIT;
}
}
else if (GlobalArgs.measure)
{
double rate = rateFrame(frame);
if (!cap.set(CAP_PROP_ZOOM, state.minFocusStep))
{
if (--GlobalArgs.breakLimit <= 0)
{
break;
}
}
else
{
cout << rate << endl;
}
}
if ((focus || GlobalArgs.measure) && GlobalArgs.verbose)
{
cout << "STATE\t" << state << endl;
cout << "Output from camera: " << endl
<< (const char *) (intptr_t) cap.get(CAP_PROP_GPHOTO2_FLUSH_MSGS) << endl;
}
imshow(windowOriginal, frame);
switch (key = static_cast<char>(waitKey(30)))
{
case 'k': // focus out
cap.set(CAP_PROP_ZOOM, 100);
break;
case 'j': // focus in
cap.set(CAP_PROP_ZOOM, -100);
break;
case ',': // Drive to closest
focusDriveEnd(cap, -FOCUS_DIRECTION_INFTY);
break;
case '.': // Drive to infinity
focusDriveEnd(cap, FOCUS_DIRECTION_INFTY);
break;
case 'r': // reset focus state
focus = true;
state = createInitialState();
break;
case 'f': // focus switch on/off
focus ^= true;
break;
}
}
if (GlobalArgs.verbose)
{
cout << "Captured " << (int) cap.get(CAP_PROP_FRAME_COUNT) << " frames"
<< endl << "in " << (int) (cap.get(CAP_PROP_POS_MSEC) / 1e2)
<< " seconds," << endl << "at avg speed "
<< (cap.get(CAP_PROP_FPS)) << " fps." << endl;
}
return 0;
}