Commit 846266fd authored by Alexander Smorkalov

Native camera fix for some devices with a Qualcomm SoC, such as the Samsung Galaxy S4.

parent a1e5bd71
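
The patch applies one pattern throughout camera_wrapper.cpp: non-public framework objects (CameraParameters, SurfaceTexture, BufferQueue, the consumer listener) are allocated with operator new plus a MAGIC_TAIL of spare bytes and constructed with placement new, so that a vendor build whose private layout makes the object larger than the AOSP headers suggest still writes into memory owned by the wrapper. Below is a minimal standalone sketch of that allocate/tear-down pattern; the names VendorParams and buffer are illustrative only and are not part of the patch.

// Minimal sketch (not from the patch): over-allocate an object whose real
// sizeof may differ between Android builds, the way camera_wrapper.cpp now
// handles CameraParameters, SurfaceTexture and BufferQueue.
#include <new>       // operator new/delete, placement new
#include <cstddef>   // std::size_t

static const std::size_t MAGIC_TAIL = 4096;   // same spare-tail size as the patch

struct VendorParams            // stand-in for a non-public framework class
{
    int someField;
    VendorParams() : someField(0) {}
    ~VendorParams() {}
};

int main()
{
    // Reserve the compile-time size plus a tail, so a vendor layout that is
    // larger than sizeof(VendorParams) here still stays inside our buffer.
    void* buffer = operator new(sizeof(VendorParams) + MAGIC_TAIL);
    VendorParams* params = new(buffer) VendorParams();

    params->someField = 42;    // use it like any normally allocated object

    // Teardown mirrors ~CameraHandler(): explicit destructor call, raw delete.
    params->~VendorParams();
    operator delete(buffer);
    return 0;
}
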
@@ -58,7 +58,7 @@ SET_TARGET_PROPERTIES(${the_target} PROPERTIES
                       RUNTIME_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH}
                       )

-if (NOT (CMAKE_BUILD_TYPE MATCHES "debug"))
+if (NOT (CMAKE_BUILD_TYPE MATCHES "Debug"))
     ADD_CUSTOM_COMMAND( TARGET ${the_target} POST_BUILD COMMAND ${CMAKE_STRIP} --strip-unneeded "${LIBRARY_OUTPUT_PATH}/lib${the_target}.so" )
 endif()
...
@@ -61,6 +61,12 @@
 using namespace android;

+// non-public camera related classes are not binary compatible
+// objects of these classes have different sizeof on different platforms
+// additional memory tail to all system objects to overcome sizeof issue
+#define MAGIC_TAIL 4096
+
 void debugShowFPS();

 #if defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0)
@@ -90,6 +96,7 @@ public:
 };
 #endif
+
 std::string getProcessName()
 {
     std::string result;
@@ -142,7 +149,14 @@ class CameraHandler: public CameraListener
 protected:
     int cameraId;
     sp<Camera> camera;
-    CameraParameters params;
+#if defined(ANDROID_r3_0_1) || defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
+    sp<SurfaceTexture> surface;
+#endif
+#if defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0) || defined(ANDROID_r4_4_0)
+    sp<BufferQueue> queue;
+    sp<ConsumerListenerStub> listener;
+#endif
+    CameraParameters* params;
     CameraCallback cameraCallback;
     void* userData;
@@ -258,7 +272,7 @@ protected:
     int is_supported(const char* supp_modes_key, const char* mode)
     {
-        const char* supported_modes = params.get(supp_modes_key);
+        const char* supported_modes = params->get(supp_modes_key);
         return (supported_modes && mode && (strstr(supported_modes, mode) > 0));
     }
@@ -268,7 +282,7 @@ protected:
         if (focus_distance_type >= 0 && focus_distance_type < 3)
         {
             float focus_distances[3];
-            const char* output = params.get(CameraParameters::KEY_FOCUS_DISTANCES);
+            const char* output = params->get(CameraParameters::KEY_FOCUS_DISTANCES);
             int val_num = CameraHandler::split_float(output, focus_distances, ',', 3);
             if(val_num == 3)
             {
@@ -300,10 +314,15 @@ public:
         emptyCameraCallbackReported(0)
     {
         LOGD("Instantiated new CameraHandler (%p, %p)", callback, _userData);
+        void* params_buffer = operator new(sizeof(CameraParameters) + MAGIC_TAIL);
+        params = new(params_buffer) CameraParameters();
     }

     virtual ~CameraHandler()
     {
+        if (params)
+            params->~CameraParameters();
+        operator delete(params);
         LOGD("CameraHandler destructor is called");
     }
@@ -534,39 +553,39 @@ CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback,
     {
         LOGI("initCameraConnect: Setting paramers from previous camera handler");
         camera->setParameters(prevCameraParameters->flatten());
-        handler->params.unflatten(prevCameraParameters->flatten());
+        handler->params->unflatten(prevCameraParameters->flatten());
     }
     else
     {
         android::String8 params_str = camera->getParameters();
         LOGI("initCameraConnect: [%s]", params_str.string());
-        handler->params.unflatten(params_str);
+        handler->params->unflatten(params_str);

-        LOGD("Supported Cameras: %s", handler->params.get("camera-indexes"));
-        LOGD("Supported Picture Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES));
-        LOGD("Supported Picture Formats: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
-        LOGD("Supported Preview Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
-        LOGD("Supported Preview Formats: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS));
-        LOGD("Supported Preview Frame Rates: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
-        LOGD("Supported Thumbnail Sizes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES));
-        LOGD("Supported Whitebalance Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE));
-        LOGD("Supported Effects: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_EFFECTS));
-        LOGD("Supported Scene Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES));
-        LOGD("Supported Focus Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
-        LOGD("Supported Antibanding Options: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING));
-        LOGD("Supported Flash Modes: %s", handler->params.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES));
+        LOGD("Supported Cameras: %s", handler->params->get("camera-indexes"));
+        LOGD("Supported Picture Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES));
+        LOGD("Supported Picture Formats: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
+        LOGD("Supported Preview Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES));
+        LOGD("Supported Preview Formats: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS));
+        LOGD("Supported Preview Frame Rates: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
+        LOGD("Supported Thumbnail Sizes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES));
+        LOGD("Supported Whitebalance Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE));
+        LOGD("Supported Effects: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_EFFECTS));
+        LOGD("Supported Scene Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_SCENE_MODES));
+        LOGD("Supported Focus Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
+        LOGD("Supported Antibanding Options: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_ANTIBANDING));
+        LOGD("Supported Flash Modes: %s", handler->params->get(CameraParameters::KEY_SUPPORTED_FLASH_MODES));

 #if !defined(ANDROID_r2_2_0)
         // Set focus mode to continuous-video if supported
-        const char* available_focus_modes = handler->params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+        const char* available_focus_modes = handler->params->get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
         if (available_focus_modes != 0)
         {
             if (strstr(available_focus_modes, "continuous-video") != NULL)
             {
-                handler->params.set(CameraParameters::KEY_FOCUS_MODE, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);
-                status_t resParams = handler->camera->setParameters(handler->params.flatten());
+                handler->params->set(CameraParameters::KEY_FOCUS_MODE, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);
+                status_t resParams = handler->camera->setParameters(handler->params->flatten());

                 if (resParams != 0)
                 {
@@ -581,7 +600,7 @@ CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback,
 #endif

         //check if yuv420sp format available. Set this format as preview format.
-        const char* available_formats = handler->params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
+        const char* available_formats = handler->params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
         if (available_formats != 0)
         {
             const char* format_to_set = 0;
@@ -607,9 +626,9 @@ CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback,
             if (0 != format_to_set)
             {
-                handler->params.setPreviewFormat(format_to_set);
+                handler->params->setPreviewFormat(format_to_set);

-                status_t resParams = handler->camera->setParameters(handler->params.flatten());
+                status_t resParams = handler->camera->setParameters(handler->params->flatten());

                 if (resParams != 0)
                     LOGE("initCameraConnect: failed to set preview format to %s", format_to_set);
@@ -617,6 +636,13 @@ CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback,
                     LOGD("initCameraConnect: preview format is set to %s", format_to_set);
             }
         }
+
+        handler->params->setPreviewSize(640, 480);
+        status_t resParams = handler->camera->setParameters(handler->params->flatten());
+        if (resParams != 0)
+            LOGE("initCameraConnect: failed to set preview resolution to 640x480");
+        else
+            LOGD("initCameraConnect: preview format is set to 640x480");
     }

     status_t bufferStatus;
@@ -627,22 +653,27 @@ CameraHandler* CameraHandler::initCameraConnect(const CameraCallback& callback,
 #elif defined(ANDROID_r2_3_3)
     /* Do nothing in case of 2.3 for now */
 #elif defined(ANDROID_r3_0_1) || defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
-    sp<SurfaceTexture> surfaceTexture = new SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
-    bufferStatus = camera->setPreviewTexture(surfaceTexture);
+    void* surface_texture_obj = operator new(sizeof(SurfaceTexture) + MAGIC_TAIL);
+    handler->surface = new(surface_texture_obj) SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
+    bufferStatus = camera->setPreviewTexture(handler->surface);
     if (bufferStatus != 0)
         LOGE("initCameraConnect: failed setPreviewTexture call (status %d); camera might not work correctly", bufferStatus);
 #elif defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0)
-    sp<BufferQueue> bufferQueue = new BufferQueue();
-    sp<BufferQueue::ConsumerListener> queueListener = new ConsumerListenerStub();
-    bufferQueue->consumerConnect(queueListener);
-    bufferStatus = camera->setPreviewTexture(bufferQueue);
+    void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
+    handler->queue = new(buffer_queue_obj) BufferQueue();
+    void* consumer_listener_obj = operator new(sizeof(ConsumerListenerStub) + MAGIC_TAIL);
+    handler->listener = new(consumer_listener_obj) ConsumerListenerStub();
+    handler->queue->consumerConnect(handler->listener);
+    bufferStatus = camera->setPreviewTexture(handler->queue);
     if (bufferStatus != 0)
         LOGE("initCameraConnect: failed setPreviewTexture call; camera might not work correctly");
 # elif defined(ANDROID_r4_4_0)
-    sp<BufferQueue> bufferQueue = new BufferQueue();
-    sp<IConsumerListener> queueListener = new ConsumerListenerStub();
-    bufferQueue->consumerConnect(queueListener, true);
-    bufferStatus = handler->camera->setPreviewTarget(bufferQueue);
+    void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
+    handler->queue = new(buffer_queue_obj) BufferQueue();
+    void* consumer_listener_obj = operator new(sizeof(ConsumerListenerStub) + MAGIC_TAIL);
+    handler->listener = new(consumer_listener_obj) ConsumerListenerStub();
+    handler->queue->consumerConnect(handler->listener, true);
+    bufferStatus = handler->camera->setPreviewTarget(handler->queue);
     if (bufferStatus != 0)
         LOGE("applyProperties: failed setPreviewTexture call; camera might not work correctly");
 # endif
@@ -723,18 +754,18 @@ double CameraHandler::getProperty(int propIdx)
     case ANDROID_CAMERA_PROPERTY_FRAMEWIDTH:
     {
         int w,h;
-        params.getPreviewSize(&w, &h);
+        params->getPreviewSize(&w, &h);
         return w;
     }
     case ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT:
     {
         int w,h;
-        params.getPreviewSize(&w, &h);
+        params->getPreviewSize(&w, &h);
         return h;
     }
     case ANDROID_CAMERA_PROPERTY_SUPPORTED_PREVIEW_SIZES_STRING:
     {
-        cameraPropertySupportedPreviewSizesString = params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
+        cameraPropertySupportedPreviewSizesString = params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
         union {const char* str;double res;} u;
         memset(&u.res, 0, sizeof(u.res));
         u.str = cameraPropertySupportedPreviewSizesString.c_str();
@@ -742,7 +773,7 @@ double CameraHandler::getProperty(int propIdx)
     }
     case ANDROID_CAMERA_PROPERTY_PREVIEW_FORMAT_STRING:
     {
-        const char* fmt = params.get(CameraParameters::KEY_PREVIEW_FORMAT);
+        const char* fmt = params->get(CameraParameters::KEY_PREVIEW_FORMAT);
         if (fmt == CameraParameters::PIXEL_FORMAT_YUV422SP)
             fmt = "yuv422sp";
         else if (fmt == CameraParameters::PIXEL_FORMAT_YUV420SP)
@@ -762,44 +793,44 @@ double CameraHandler::getProperty(int propIdx)
     }
     case ANDROID_CAMERA_PROPERTY_EXPOSURE:
     {
-        int exposure = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
+        int exposure = params->getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
         return exposure;
     }
     case ANDROID_CAMERA_PROPERTY_FPS:
     {
-        return params.getPreviewFrameRate();
+        return params->getPreviewFrameRate();
     }
     case ANDROID_CAMERA_PROPERTY_FLASH_MODE:
     {
         int flash_mode = getModeNum(CameraHandler::flashModesNames,
                                     ANDROID_CAMERA_FLASH_MODES_NUM,
-                                    params.get(CameraParameters::KEY_FLASH_MODE));
+                                    params->get(CameraParameters::KEY_FLASH_MODE));
         return flash_mode;
     }
     case ANDROID_CAMERA_PROPERTY_FOCUS_MODE:
     {
         int focus_mode = getModeNum(CameraHandler::focusModesNames,
                                     ANDROID_CAMERA_FOCUS_MODES_NUM,
-                                    params.get(CameraParameters::KEY_FOCUS_MODE));
+                                    params->get(CameraParameters::KEY_FOCUS_MODE));
         return focus_mode;
     }
     case ANDROID_CAMERA_PROPERTY_WHITE_BALANCE:
     {
         int white_balance = getModeNum(CameraHandler::whiteBalanceModesNames,
                                        ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM,
-                                       params.get(CameraParameters::KEY_WHITE_BALANCE));
+                                       params->get(CameraParameters::KEY_WHITE_BALANCE));
         return white_balance;
     }
     case ANDROID_CAMERA_PROPERTY_ANTIBANDING:
     {
         int antibanding = getModeNum(CameraHandler::antibandingModesNames,
                                      ANDROID_CAMERA_ANTIBANDING_MODES_NUM,
-                                     params.get(CameraParameters::KEY_ANTIBANDING));
+                                     params->get(CameraParameters::KEY_ANTIBANDING));
         return antibanding;
     }
     case ANDROID_CAMERA_PROPERTY_FOCAL_LENGTH:
     {
-        float focal_length = params.getFloat(CameraParameters::KEY_FOCAL_LENGTH);
+        float focal_length = params->getFloat(CameraParameters::KEY_FOCAL_LENGTH);
         return focal_length;
     }
     case ANDROID_CAMERA_PROPERTY_FOCUS_DISTANCE_NEAR:
@@ -829,27 +860,27 @@ void CameraHandler::setProperty(int propIdx, double value)
     case ANDROID_CAMERA_PROPERTY_FRAMEWIDTH:
     {
         int w,h;
-        params.getPreviewSize(&w, &h);
+        params->getPreviewSize(&w, &h);
         w = (int)value;
-        params.setPreviewSize(w, h);
+        params->setPreviewSize(w, h);
     }
     break;
     case ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT:
     {
         int w,h;
-        params.getPreviewSize(&w, &h);
+        params->getPreviewSize(&w, &h);
         h = (int)value;
-        params.setPreviewSize(w, h);
+        params->setPreviewSize(w, h);
     }
     break;
     case ANDROID_CAMERA_PROPERTY_EXPOSURE:
     {
-        int max_exposure = params.getInt("max-exposure-compensation");
-        int min_exposure = params.getInt("min-exposure-compensation");
+        int max_exposure = params->getInt("max-exposure-compensation");
+        int min_exposure = params->getInt("min-exposure-compensation");
         if(max_exposure && min_exposure){
             int exposure = (int)value;
             if(exposure >= min_exposure && exposure <= max_exposure){
-                params.set("exposure-compensation", exposure);
+                params->set("exposure-compensation", exposure);
             } else {
                 LOGE("Exposure compensation not in valid range (%i,%i).", min_exposure, max_exposure);
             }
@@ -864,7 +895,7 @@ void CameraHandler::setProperty(int propIdx, double value)
         if(new_val >= 0 && new_val < ANDROID_CAMERA_FLASH_MODES_NUM){
             const char* mode_name = flashModesNames[new_val];
             if(is_supported(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mode_name))
-                params.set(CameraParameters::KEY_FLASH_MODE, mode_name);
+                params->set(CameraParameters::KEY_FLASH_MODE, mode_name);
             else
                 LOGE("Flash mode %s is not supported.", mode_name);
         } else {
@@ -878,7 +909,7 @@ void CameraHandler::setProperty(int propIdx, double value)
         if(new_val >= 0 && new_val < ANDROID_CAMERA_FOCUS_MODES_NUM){
             const char* mode_name = focusModesNames[new_val];
             if(is_supported(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mode_name))
-                params.set(CameraParameters::KEY_FOCUS_MODE, mode_name);
+                params->set(CameraParameters::KEY_FOCUS_MODE, mode_name);
             else
                 LOGE("Focus mode %s is not supported.", mode_name);
         } else {
@@ -892,7 +923,7 @@ void CameraHandler::setProperty(int propIdx, double value)
         if(new_val >= 0 && new_val < ANDROID_CAMERA_WHITE_BALANCE_MODES_NUM){
             const char* mode_name = whiteBalanceModesNames[new_val];
             if(is_supported(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mode_name))
-                params.set(CameraParameters::KEY_WHITE_BALANCE, mode_name);
+                params->set(CameraParameters::KEY_WHITE_BALANCE, mode_name);
             else
                 LOGE("White balance mode %s is not supported.", mode_name);
         } else {
@@ -906,7 +937,7 @@ void CameraHandler::setProperty(int propIdx, double value)
         if(new_val >= 0 && new_val < ANDROID_CAMERA_ANTIBANDING_MODES_NUM){
             const char* mode_name = antibandingModesNames[new_val];
             if(is_supported(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mode_name))
-                params.set(CameraParameters::KEY_ANTIBANDING, mode_name);
+                params->set(CameraParameters::KEY_ANTIBANDING, mode_name);
             else
                 LOGE("Antibanding mode %s is not supported.", mode_name);
         } else {
@@ -935,8 +966,6 @@ void CameraHandler::applyProperties(CameraHandler** ppcameraHandler)
         return;
     }

-    CameraParameters curCameraParameters((*ppcameraHandler)->params.flatten());
-
 #if defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3) || defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) \
     || defined(ANDROID_r4_3_0) || defined(ANDROID_r4_4_0)
     CameraHandler* handler=*ppcameraHandler;
@@ -951,27 +980,27 @@ void CameraHandler::applyProperties(CameraHandler** ppcameraHandler)
         return;
     }

-    handler->camera->setParameters(curCameraParameters.flatten());
-    handler->params.unflatten(curCameraParameters.flatten());
+    handler->camera->setParameters((*ppcameraHandler)->params->flatten());

     status_t bufferStatus;
 # if defined(ANDROID_r4_0_0) || defined(ANDROID_r4_0_3)
-    sp<SurfaceTexture> surfaceTexture = new SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
-    bufferStatus = handler->camera->setPreviewTexture(surfaceTexture);
+    void* surface_texture_obj = operator new(sizeof(SurfaceTexture) + MAGIC_TAIL);
+    handler->surface = new(surface_texture_obj) SurfaceTexture(MAGIC_OPENCV_TEXTURE_ID);
+    bufferStatus = handler->camera->setPreviewTexture(handler->surface);
     if (bufferStatus != 0)
         LOGE("applyProperties: failed setPreviewTexture call (status %d); camera might not work correctly", bufferStatus);
 # elif defined(ANDROID_r4_1_1) || defined(ANDROID_r4_2_0) || defined(ANDROID_r4_3_0)
-    sp<BufferQueue> bufferQueue = new BufferQueue();
-    sp<BufferQueue::ConsumerListener> queueListener = new ConsumerListenerStub();
-    bufferQueue->consumerConnect(queueListener);
-    bufferStatus = handler->camera->setPreviewTexture(bufferQueue);
+    void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
+    handler->queue = new(buffer_queue_obj) BufferQueue();
+    handler->queue->consumerConnect(handler->listener);
+    bufferStatus = handler->camera->setPreviewTexture(handler->queue);
     if (bufferStatus != 0)
         LOGE("applyProperties: failed setPreviewTexture call; camera might not work correctly");
 # elif defined(ANDROID_r4_4_0)
-    sp<BufferQueue> bufferQueue = new BufferQueue();
-    sp<IConsumerListener> queueListener = new ConsumerListenerStub();
-    bufferQueue->consumerConnect(queueListener, true);
-    bufferStatus = handler->camera->setPreviewTarget(bufferQueue);
+    void* buffer_queue_obj = operator new(sizeof(BufferQueue) + MAGIC_TAIL);
+    handler->queue = new(buffer_queue_obj) BufferQueue();
+    handler->queue->consumerConnect(handler->listener, true);
+    bufferStatus = handler->camera->setPreviewTarget(handler->queue);
     if (bufferStatus != 0)
         LOGE("applyProperties: failed setPreviewTexture call; camera might not work correctly");
 # endif
@@ -1002,7 +1031,7 @@ void CameraHandler::applyProperties(CameraHandler** ppcameraHandler)
     LOGD("CameraHandler::applyProperties(): after previousCameraHandler->closeCameraConnect");

     LOGD("CameraHandler::applyProperties(): before initCameraConnect");
-    CameraHandler* handler=initCameraConnect(cameraCallback, cameraId, userData, &curCameraParameters);
+    CameraHandler* handler=initCameraConnect(cameraCallback, cameraId, userData, (*ppcameraHandler)->params);
     LOGD("CameraHandler::applyProperties(): after initCameraConnect, handler=0x%x", (int)handler);
     if (handler == NULL) {
         LOGE("ERROR in applyProperties --- cannot reinit camera");
...
@@ -175,7 +175,6 @@ public class NativeCameraView extends CameraBridgeViewBase {
                 }

                 deliverAndDrawFrame(mFrame);
             } while (!mStopThread);
         }
     }
...