author    jiyu.yang <jiyu.yang@amlogic.com>    2013-01-15 07:04:39 (GMT)
committer jiyu.yang <jiyu.yang@amlogic.com>    2013-01-15 11:23:16 (GMT)
commit    a76bcec2663788c6dd89dbe64f51ca65295192b3 (patch)
tree      93d99d03fe95ea0e81f6381cccbe759d90f07fa4
parent    40d1c7c015f81aea418763d824a4d189b9d95a5a (diff)
add virtual camera support in CameraHal
This should be improved later, so that only a specified APK can see the virtual camera.
Diffstat
-rwxr-xr-x  Android.mk                                  19
-rwxr-xr-x  CameraHal.cpp                                5
-rwxr-xr-x  CameraHalCommon.cpp                          6
-rwxr-xr-x  CameraHal_Module.cpp                       255
-rwxr-xr-x  CameraParameters.cpp                         3
-rwxr-xr-x  V4LCameraAdapter/V4LCameraAdapter.cpp       63
-rwxr-xr-x  inc/CameraProperties.h                      11
-rwxr-xr-x  inc/V4LCameraAdapter/V4LCameraAdapter.h      9
-rwxr-xr-x  utils/util.cpp                               3
-rwxr-xr-x  vircam/Android.mk                          118
-rwxr-xr-x  vircam/AppCbNotifier.cpp                  1886
-rwxr-xr-x  vircam/V4LCamAdpt.cpp                     2981
-rwxr-xr-x  vircam/VirtualCamHal.cpp                  3760
-rwxr-xr-x  vircam/inc/V4LCamAdpt.h                    257
-rwxr-xr-x  vircam/inc/VirtualCamHal.h                 666
15 files changed, 10022 insertions, 20 deletions
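
The bulk of the CameraHal_Module.cpp changes below repeat one dispatch pattern for every camera_device_ops_t entry point. A minimal sketch of that pattern, using the names from the diff (camera_do_something / doSomething are placeholders, not part of the commit):

    // Devices opened for the virtual camera are tagged with type == 1 and routed
    // to the single VirtualCamHal instance; every other id goes through the
    // per-camera CameraHal array, exactly as before the change.
    int camera_do_something(struct camera_device *device)
    {
        aml_camera_device_t *aml_dev = (aml_camera_device_t *)device;
    #ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
        if (1 == aml_dev->type)
            return gVCameraHals->doSomething();   // hypothetical VirtualCamHal method
    #endif
        return gCameraHals[aml_dev->cameraid]->doSomething();
    }
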
diff --git a/Android.mk b/Android.mk
index d153d4d..308e26e 100755
--- a/Android.mk
+++ b/Android.mk
@@ -27,6 +27,11 @@ CAMERA_UTILS_SRC:= \
utils/Semaphore.cpp \
utils/util.cpp
+CAMERA_HAL_VERTURAL_CAMERA_SRC:= \
+ vircam/VirtualCamHal.cpp \
+ vircam/AppCbNotifier.cpp \
+ vircam/V4LCamAdpt.cpp
+
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
@@ -47,6 +52,9 @@ LOCAL_C_INCLUDES += \
hardware/libhardware/modules/gralloc/ \
frameworks/native/include/media/hardware
+LOCAL_C_INCLUDES_VIRCAM := \
+ $(LOCAL_PATH)/vircam/inc
+
LOCAL_SHARED_LIBRARIES:= \
libui \
@@ -109,7 +117,18 @@ ifeq ($(BOARD_ENABLE_VIDEO_SNAPSHOT),true)
LOCAL_CFLAGS += -DAMLOGIC_ENABLE_VIDEO_SNAPSHOT
endif
+ifeq ($(BOARD_HAVE_VIRTUAL_CAMERA),true)
+ LOCAL_CFLAGS += -DAMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ ifneq ($(IS_VIRTUAL_CAMERA_NONBLOCK),false)
+ LOCAL_CFLAGS += -DAMLOGIC_VCAM_NONBLOCK_SUPPORT
+ endif
+
+ LOCAL_SRC_FILES+= \
+ $(CAMERA_HAL_VERTURAL_CAMERA_SRC)
+ LOCAL_C_INCLUDES += \
+ $(LOCAL_C_INCLUDES_VIRCAM)
+endif
LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
LOCAL_MODULE:= camera.amlogic
diff --git a/CameraHal.cpp b/CameraHal.cpp
index 0f501bc..0e8613d 100755
--- a/CameraHal.cpp
+++ b/CameraHal.cpp
@@ -2232,7 +2232,6 @@ void CameraHal::stopRecording()
// reset internal recording hint in case camera adapter needs to make some
// decisions....(will only be sent to camera adapter if camera restart is required)
mParameters.remove(ExCameraParameters::KEY_RECORDING_HINT);
- mParameters.remove(CameraProperties::RELOAD_WHEN_OPEN);
LOG_FUNCTION_NAME_EXIT;
}
@@ -2768,6 +2767,10 @@ LOGD("getParameters, 2 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraP
// do not send internal parameters to upper layers
mParams.remove(ExCameraParameters::KEY_RECORDING_HINT);
mParams.remove(ExCameraParameters::KEY_AUTO_FOCUS_LOCK);
+ mParameters.remove(CameraProperties::RELOAD_WHEN_OPEN);
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ mParams.remove(CameraProperties::DEVICE_NAME);
+#endif
params_str8 = mParams.flatten();
diff --git a/CameraHalCommon.cpp b/CameraHalCommon.cpp
index ba80d40..a8eca74 100755
--- a/CameraHalCommon.cpp
+++ b/CameraHalCommon.cpp
@@ -15,6 +15,9 @@
*/
#include "CameraHal.h"
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+#include "VirtualCamHal.h"
+#endif
namespace android {
@@ -23,6 +26,9 @@ const char CameraHal::PARAMS_DELIMITER []= ",";
#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
struct timeval CameraHal::ppm_start;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+struct timeval VirtualCamHal::ppm_start;
+#endif
#endif
diff --git a/CameraHal_Module.cpp b/CameraHal_Module.cpp
index 5914da3..fa13460 100755
--- a/CameraHal_Module.cpp
+++ b/CameraHal_Module.cpp
@@ -20,13 +20,23 @@
#include <utils/threads.h>
#include "CameraHal.h"
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+#include "VirtualCamHal.h"
+#endif
+
#include "CameraProperties.h"
#include "ExCameraParameters.h"
static android::CameraProperties gCameraProperties;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+static android::CameraHal* gCameraHals[MAX_CAM_NUM_ADD_VCAM-1];
+static android::VirtualCamHal* gVCameraHals;
+#else
static android::CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED];
+#endif
static unsigned int gCamerasOpen = 0;
+static unsigned int gCamerasSupported = 0;
static android::Mutex gCameraHalDeviceLock;
static int camera_device_open(const hw_module_t* module, const char* name,
@@ -58,6 +68,9 @@ camera_module_t HAL_MODULE_INFO_SYM = {
typedef struct aml_camera_device {
camera_device_t base;
int cameraid;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ int type;
+#endif
} aml_camera_device_t;
#define LOGV ALOGV
@@ -81,6 +94,12 @@ int camera_set_preview_window(struct camera_device * device,
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->setPreviewWindow(window);
+ }
+#endif
+
rv = gCameraHals[aml_dev->cameraid]->setPreviewWindow(window);
return rv;
@@ -102,6 +121,12 @@ void camera_set_callbacks(struct camera_device * device,
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ gVCameraHals->setCallbacks(notify_cb, data_cb, data_cb_timestamp, get_memory, user);
+ return;
+ }
+#endif
gCameraHals[aml_dev->cameraid]->setCallbacks(notify_cb, data_cb, data_cb_timestamp, get_memory, user);
}
@@ -116,6 +141,12 @@ void camera_enable_msg_type(struct camera_device * device, int32_t msg_type)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ gVCameraHals->enableMsgType(msg_type);
+ return ;
+ }
+#endif
gCameraHals[aml_dev->cameraid]->enableMsgType(msg_type);
}
@@ -130,6 +161,12 @@ void camera_disable_msg_type(struct camera_device * device, int32_t msg_type)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ gVCameraHals->disableMsgType(msg_type);
+ return;
+ }
+#endif
gCameraHals[aml_dev->cameraid]->disableMsgType(msg_type);
}
@@ -159,6 +196,11 @@ int camera_start_preview(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->startPreview();
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->startPreview();
return rv;
@@ -175,6 +217,12 @@ void camera_stop_preview(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ gVCameraHals->stopPreview();
+ return ;
+ }
+#endif
gCameraHals[aml_dev->cameraid]->stopPreview();
}
@@ -190,6 +238,11 @@ int camera_preview_enabled(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->previewEnabled();
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->previewEnabled();
return rv;
}
@@ -207,6 +260,11 @@ int camera_store_meta_data_in_buffers(struct camera_device * device, int enable)
aml_dev = (aml_camera_device_t*) device;
// TODO: meta data buffer not currently supported
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->storeMetaDataInBuffers(enable);
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->storeMetaDataInBuffers(enable);
return rv;
//return enable ? android::INVALID_OPERATION: android::OK;
@@ -224,6 +282,11 @@ int camera_start_recording(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->startRecording();
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->startRecording();
return rv;
}
@@ -239,6 +302,12 @@ void camera_stop_recording(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ gVCameraHals->stopRecording();
+ return;
+ }
+#endif
gCameraHals[aml_dev->cameraid]->stopRecording();
}
@@ -254,6 +323,11 @@ int camera_recording_enabled(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->recordingEnabled();
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->recordingEnabled();
return rv;
}
@@ -270,6 +344,12 @@ void camera_release_recording_frame(struct camera_device * device,
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ gVCameraHals->releaseRecordingFrame(opaque);
+ return;
+ }
+#endif
gCameraHals[aml_dev->cameraid]->releaseRecordingFrame(opaque);
}
@@ -285,6 +365,11 @@ int camera_auto_focus(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->autoFocus();
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->autoFocus();
return rv;
}
@@ -301,6 +386,11 @@ int camera_cancel_auto_focus(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->cancelAutoFocus();
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->cancelAutoFocus();
return rv;
}
@@ -317,6 +407,11 @@ int camera_take_picture(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->takePicture();
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->takePicture();
return rv;
}
@@ -333,6 +428,11 @@ int camera_cancel_picture(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->cancelPicture();
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->cancelPicture();
return rv;
}
@@ -349,6 +449,11 @@ int camera_set_parameters(struct camera_device * device, const char *params)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->setParameters(params);
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->setParameters(params);
return rv;
}
@@ -365,6 +470,11 @@ char* camera_get_parameters(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->getParameters();
+ }
+#endif
param = gCameraHals[aml_dev->cameraid]->getParameters();
return param;
@@ -381,6 +491,12 @@ static void camera_put_parameters(struct camera_device *device, char *parms)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ gVCameraHals->putParameters(parms);
+ return ;
+ }
+#endif
gCameraHals[aml_dev->cameraid]->putParameters(parms);
}
@@ -397,6 +513,11 @@ int camera_send_command(struct camera_device * device,
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->sendCommand(cmd, arg1, arg2);
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->sendCommand(cmd, arg1, arg2);
return rv;
}
@@ -412,6 +533,12 @@ void camera_release(struct camera_device * device)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ gVCameraHals->release();
+ return ;
+ }
+#endif
gCameraHals[aml_dev->cameraid]->release();
}
@@ -425,6 +552,11 @@ int camera_dump(struct camera_device * device, int fd)
aml_dev = (aml_camera_device_t*) device;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ return gVCameraHals->dump(fd);
+ }
+#endif
rv = gCameraHals[aml_dev->cameraid]->dump(fd);
return rv;
}
@@ -448,11 +580,27 @@ int camera_device_close(hw_device_t* device)
aml_dev = (aml_camera_device_t*) device;
if (aml_dev) {
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( 1 == aml_dev->type ){
+ if (gVCameraHals) {
+ delete gVCameraHals;
+ gVCameraHals = NULL;
+ gCamerasOpen--;
+ }
+ }else{
if (gCameraHals[aml_dev->cameraid]) {
delete gCameraHals[aml_dev->cameraid];
gCameraHals[aml_dev->cameraid] = NULL;
gCamerasOpen--;
}
+ }
+#else
+ if (gCameraHals[aml_dev->cameraid]) {
+ delete gCameraHals[aml_dev->cameraid];
+ gCameraHals[aml_dev->cameraid] = NULL;
+ gCamerasOpen--;
+ }
+#endif
if (aml_dev->base.ops) {
free(aml_dev->base.ops);
@@ -504,12 +652,113 @@ int camera_device_open(const hw_module_t* module, const char* name,
goto fail;
}
+#ifndef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
if(gCamerasOpen >= MAX_SIMUL_CAMERAS_SUPPORTED)
{
LOGE("maximum number of cameras already open");
rv = -ENOMEM;
goto fail;
}
+#else
+ if((gCamerasOpen >= MAX_SIMUL_CAMERAS_SUPPORTED) &&
+ (!gVCameraHals) )
+ {
+ LOGE("maximum number of cameras already open");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ CAMHAL_LOGDB("cameraid=%d, num_cameras-1=%d\n", cameraid, num_cameras-1);
+ CAMHAL_LOGDB("max_add-1=%d\n", gCamerasSupported-1);
+
+ if( cameraid == (gCamerasSupported-1) )
+ {
+ camera_device = (aml_camera_device_t*)malloc(sizeof(*camera_device));
+ if(!camera_device)
+ {
+ LOGE("camera_device allocation fail");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
+ if(!camera_ops)
+ {
+ LOGE("camera_ops allocation fail");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ memset(camera_device, 0, sizeof(*camera_device));
+ memset(camera_ops, 0, sizeof(*camera_ops));
+
+ camera_device->base.common.tag = HARDWARE_DEVICE_TAG;
+ camera_device->base.common.version = 0;
+ camera_device->base.common.module = (hw_module_t *)(module);
+ camera_device->base.common.close = camera_device_close;
+ camera_device->base.ops = camera_ops;
+
+ camera_ops->set_preview_window = camera_set_preview_window;
+ camera_ops->set_callbacks = camera_set_callbacks;
+ camera_ops->enable_msg_type = camera_enable_msg_type;
+ camera_ops->disable_msg_type = camera_disable_msg_type;
+ camera_ops->msg_type_enabled = camera_msg_type_enabled;
+ camera_ops->start_preview = camera_start_preview;
+ camera_ops->stop_preview = camera_stop_preview;
+ camera_ops->preview_enabled = camera_preview_enabled;
+ camera_ops->store_meta_data_in_buffers = camera_store_meta_data_in_buffers;
+ camera_ops->start_recording = camera_start_recording;
+ camera_ops->stop_recording = camera_stop_recording;
+ camera_ops->recording_enabled = camera_recording_enabled;
+ camera_ops->release_recording_frame = camera_release_recording_frame;
+ camera_ops->auto_focus = camera_auto_focus;
+ camera_ops->cancel_auto_focus = camera_cancel_auto_focus;
+ camera_ops->take_picture = camera_take_picture;
+ camera_ops->cancel_picture = camera_cancel_picture;
+ camera_ops->set_parameters = camera_set_parameters;
+ camera_ops->get_parameters = camera_get_parameters;
+ camera_ops->put_parameters = camera_put_parameters;
+ camera_ops->send_command = camera_send_command;
+ camera_ops->release = camera_release;
+ camera_ops->dump = camera_dump;
+
+ *device = &camera_device->base.common;
+
+ // -------- vendor specific stuff --------
+
+ LOGD("virtual num_cameras=%d cameraid=%d", num_cameras, cameraid);
+ camera_device->cameraid = cameraid;
+ camera_device->type = 1;
+
+ if(gCameraProperties.getProperties(cameraid, &properties) < 0)
+ {
+ LOGE("Couldn't get virtual camera properties");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ gVCameraHals = new android::VirtualCamHal(cameraid);
+ CAMHAL_LOGDA("Virtual CameraHal\n");
+
+ if(!gVCameraHals)
+ {
+ LOGE("Couldn't create instance of VirtualCameraHal class");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ if(properties && (gVCameraHals->initialize(properties) != android::NO_ERROR))
+ {
+ LOGE("Couldn't initialize virtual camera instance");
+ rv = -ENODEV;
+ goto fail;
+ }
+
+ gCamerasOpen++;
+
+ return rv;
+ }
+#endif
camera_device = (aml_camera_device_t*)malloc(sizeof(*camera_device));
if(!camera_device)
@@ -566,6 +815,9 @@ int camera_device_open(const hw_module_t* module, const char* name,
LOGD("num_cameras=%d cameraid=%d", num_cameras, cameraid);
camera_device->cameraid = cameraid;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ camera_device->type = 0;
+#endif
if(gCameraProperties.getProperties(cameraid, &properties) < 0)
{
@@ -617,6 +869,9 @@ extern "C" int CameraAdapter_CameraNum();
int camera_get_number_of_cameras(void)
{
int num_cameras = CameraAdapter_CameraNum();
+ gCamerasSupported = num_cameras;
+ CAMHAL_LOGDB("gCamerasSupported=%d,num_cameras=%d\n",
+ gCamerasSupported, num_cameras);
#ifdef HAVE_VERSION_INFO
CAMHAL_LOGDB("\n--------------------------------\n"
diff --git a/CameraParameters.cpp b/CameraParameters.cpp
index a71c019..8137b8c 100755
--- a/CameraParameters.cpp
+++ b/CameraParameters.cpp
@@ -108,6 +108,9 @@ const char CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-pre
const char CameraProperties::PIXEL_FORMAT_RGB24[] = "rgb24";
const char CameraProperties::RELOAD_WHEN_OPEN[]="prop-reload-key";
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+const char CameraProperties::DEVICE_NAME[] = "device_name";
+#endif
const char CameraProperties::DEFAULT_VALUE[] = "";
diff --git a/V4LCameraAdapter/V4LCameraAdapter.cpp b/V4LCameraAdapter/V4LCameraAdapter.cpp
index b0825fd..af89e4f 100755
--- a/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -28,6 +28,9 @@
#include "DebugUtils.h"
#include "V4LCameraAdapter.h"
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+#include "V4LCamAdpt.h"
+#endif
#include "CameraHal.h"
#include "ExCameraParameters.h"
#include <signal.h>
@@ -54,6 +57,7 @@
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
static int mDebugFps = 0;
+static int iCamerasNum = -1;
#define Q16_OFFSET 16
@@ -63,14 +67,13 @@ static int mDebugFps = 0;
#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
#endif
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
-const char *SENSOR_PATH[]={"/dev/video0",
+
+const char *SENSOR_PATH[]={
+ "/dev/video0",
"/dev/video1",
"/dev/video2",
"/dev/video3",
"/dev/video4",
- "/dev/video5",
- "/dev/video6",
- "/dev/video7",
};
#define DEVICE_PATH(_sensor_index) (SENSOR_PATH[_sensor_index])
#else
@@ -92,14 +95,6 @@ namespace android {
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-#define V4L2_ROTATE_ID 0x980922 //V4L2_CID_ROTATE
-
-#define V4L2_CID_AUTO_FOCUS_STATUS (V4L2_CID_CAMERA_CLASS_BASE+30)
-#define V4L2_AUTO_FOCUS_STATUS_IDLE (0 << 0)
-#define V4L2_AUTO_FOCUS_STATUS_BUSY (1 << 0)
-#define V4L2_AUTO_FOCUS_STATUS_REACHED (1 << 1)
-#define V4L2_AUTO_FOCUS_STATUS_FAILED (1 << 2)
-
Mutex gAdapterLock;
extern "C" int set_night_mode(int camera_fd,const char *snm);
@@ -281,7 +276,7 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
ret = get_framerate(mCameraHandle, &fps, &fps_num);
if((fpsrange != NULL)&&(NO_ERROR == ret) && ( 0 !=fps_num )){
mPreviewFrameRate = fps/fps_num;
- sprintf(fpsrange,"%s%d","5,",fps/fps_num);
+ sprintf(fpsrange,"%s%d","10,",fps/fps_num);
CAMHAL_LOGDB("supported preview rates is %s\n", fpsrange);
mParams.set(CameraParameters::KEY_PREVIEW_FRAME_RATE,fps/fps_num);
@@ -2066,7 +2061,18 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
LOG_FUNCTION_NAME;
- adapter = new V4LCameraAdapter(sensor_index);
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+
+ if( sensor_index == (size_t)(iCamerasNum)){
+ //MAX_CAM_NUM_ADD_VCAM-1) ){
+ adapter = new V4LCamAdpt(sensor_index);
+ }else{
+#endif
+ adapter = new V4LCameraAdapter(sensor_index);
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ }
+#endif
+
if ( adapter ) {
CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d", sensor_index);
} else {
@@ -2105,13 +2111,16 @@ extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properti
return num_cameras_supported;
}
-static int iCamerasNum = -1;
extern "C" int CameraAdapter_CameraNum()
{
#if defined(AMLOGIC_FRONT_CAMERA_SUPPORT) || defined(AMLOGIC_BACK_CAMERA_SUPPORT)
LOGD("CameraAdapter_CameraNum %d",MAX_CAMERAS_SUPPORTED);
+ #ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ return MAX_CAM_NUM_ADD_VCAM;
+ #else
return MAX_CAMERAS_SUPPORTED;
-#elif defined AMLOGIC_USB_CAMERA_SUPPORT
+ #endif
+#elif defined ( AMLOGIC_VIRTUAL_CAMERA_SUPPORT)
iCamerasNum = 0;
for( int i = 0; i < (int)ARRAY_SIZE(SENSOR_PATH); i++ )
{
@@ -2121,6 +2130,17 @@ extern "C" int CameraAdapter_CameraNum()
}
}
+ LOGD("GetCameraNums %d\n", iCamerasNum+1);
+ return iCamerasNum+1;
+#elif defined (AMLOGIC_USB_CAMERA_SUPPORT)
+ iCamerasNum = 0;
+ for( int i = 0; i < (int)ARRAY_SIZE(SENSOR_PATH); i++ )
+ {
+ if( access(DEVICE_PATH(i), 0) == 0 )
+ {
+ iCamerasNum++;
+ }
+ }
iCamerasNum = iCamerasNum > MAX_CAMERAS_SUPPORTED?
MAX_CAMERAS_SUPPORTED :iCamerasNum;
return iCamerasNum;
@@ -2558,6 +2578,9 @@ static bool getCameraFocusArea(int camera_fd, char* max_num_focus_area,
return true;
}
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+extern "C" void newloadCaps(int camera_id, CameraProperties::Properties* params);
+#endif
//TODO move
extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
const char DEFAULT_BRIGHTNESS[] = "50";
@@ -2592,6 +2615,14 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
const char DEFAULT_VIDEO_SIZE[] = "640x480";
const char DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "640x480";
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ if( camera_id == iCamerasNum){
+ //(MAX_CAM_NUM_ADD_VCAM-1)){
+ newloadCaps(camera_id, params);
+ CAMHAL_LOGDA("return from newloadCaps\n");
+ return ;
+ }
+#endif
bool bFrontCam = false;
int camera_fd = -1;
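
On the adapter side, the same convention appears three times in the hunks above: CameraAdapter_CameraNum() reports the number of accessible /dev/videoN nodes plus one extra slot, CameraAdapter_Factory() builds a V4LCamAdpt for that extra index and a V4LCameraAdapter otherwise, and loadCaps() defers to newloadCaps() for it. Summarized as comments (assuming iCamerasNum has already been filled in by CameraAdapter_CameraNum()):

    // id 0 .. iCamerasNum-1 : real V4L2 nodes -> V4LCameraAdapter, loadCaps()
    // id iCamerasNum        : virtual camera  -> V4LCamAdpt,       newloadCaps()
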
diff --git a/inc/CameraProperties.h b/inc/CameraProperties.h
index abcb2e4..049d907 100755
--- a/inc/CameraProperties.h
+++ b/inc/CameraProperties.h
@@ -36,13 +36,17 @@ namespace android {
#define MAX_CAMERAS_SUPPORTED 2
#elif defined(AMLOGIC_FRONT_CAMERA_SUPPORT) || defined(AMLOGIC_BACK_CAMERA_SUPPORT) ||defined(AMLOGIC_USB_CAMERA_SUPPORT)
#define MAX_CAMERAS_SUPPORTED 1
+#elif defined (AMLOGIC_VIRTUAL_CAMERA_SUPPORT)
+ #define MAX_CAMERAS_SUPPORTED 0
#else
//if didn't define AMLOGIC_FRONT_CAMERA_SUPPORT nor AMLOGIC_BACK_CAMERA_SUPPORT,
//we set the MAX_CAMERAS_SUPPORTED to the max nums we may support ,and
//will detect the camera number in function CameraAdapter_CameraNum();
#define MAX_CAMERAS_SUPPORTED 2
#endif
-
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+#define MAX_CAM_NUM_ADD_VCAM (MAX_CAMERAS_SUPPORTED+1)
+#endif
#define MAX_SIMUL_CAMERAS_SUPPORTED 1
#define MAX_PROP_NAME_LENGTH 50
@@ -152,6 +156,7 @@ public:
static const char PIXEL_FORMAT_RGB24[];
static const char RELOAD_WHEN_OPEN[];
+ static const char DEVICE_NAME[];
CameraProperties();
~CameraProperties();
@@ -201,7 +206,11 @@ private:
int mInitialized;
mutable Mutex mLock;
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ Properties mCameraProps[MAX_CAM_NUM_ADD_VCAM];
+#else
Properties mCameraProps[MAX_CAMERAS_SUPPORTED];
+#endif
};
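
The sizing relationships introduced here are easy to misread, so a short recap of what the module code relies on (comments only, not part of the commit):

    // With AMLOGIC_VIRTUAL_CAMERA_SUPPORT defined:
    //   MAX_CAM_NUM_ADD_VCAM == MAX_CAMERAS_SUPPORTED + 1   (real cameras + one virtual)
    //   mCameraProps[]  holds MAX_CAM_NUM_ADD_VCAM entries, so the virtual camera
    //                   gets its own property set;
    //   gCameraHals[]   in CameraHal_Module.cpp holds MAX_CAM_NUM_ADD_VCAM - 1 entries,
    //                   because the virtual camera is served by gVCameraHals instead.
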
diff --git a/inc/V4LCameraAdapter/V4LCameraAdapter.h b/inc/V4LCameraAdapter/V4LCameraAdapter.h
index a9073ad..e267dfa 100755
--- a/inc/V4LCameraAdapter/V4LCameraAdapter.h
+++ b/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -208,6 +208,15 @@ typedef enum camera_focus_mode_e {
CAM_FOCUS_MODE_CONTI_PIC,
}camera_focus_mode_t;
+#define V4L2_ROTATE_ID 0x980922 //V4L2_CID_ROTATE
+
+#define V4L2_CID_AUTO_FOCUS_STATUS (V4L2_CID_CAMERA_CLASS_BASE+30)
+#define V4L2_AUTO_FOCUS_STATUS_IDLE (0 << 0)
+#define V4L2_AUTO_FOCUS_STATUS_BUSY (1 << 0)
+#define V4L2_AUTO_FOCUS_STATUS_REACHED (1 << 1)
+#define V4L2_AUTO_FOCUS_STATUS_FAILED (1 << 2)
+
+
#define IOCTL_MASK_HFLIP (1<<0)
#define IOCTL_MASK_ZOOM (1<<1)
#define IOCTL_MASK_FLASH (1<<2)
diff --git a/utils/util.cpp b/utils/util.cpp
index 6f06b37..652c463 100755
--- a/utils/util.cpp
+++ b/utils/util.cpp
@@ -321,7 +321,7 @@ void yuyv_to_yv12(unsigned char *src, unsigned char *dst, int width, int height)
src += width*4;
}
}
-#else
+#endif
void yv12_adjust_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
//width should be an even number.
@@ -338,4 +338,3 @@ void yv12_adjust_memcpy(unsigned char *dst, unsigned char *src, int width, int h
dst+=stride;
}
}
-#endif
diff --git a/vircam/Android.mk b/vircam/Android.mk
new file mode 100755
index 0000000..13ce672
--- /dev/null
+++ b/vircam/Android.mk
@@ -0,0 +1,118 @@
+ifneq ( true, true)
+ifneq ($(strip $(USE_CAMERA_STUB)),true)
+
+LOCAL_PATH:= $(call my-dir)
+
+CAMERA_HAL_SRC := \
+ CameraHal_Module.cpp \
+ CameraHal.cpp \
+ CameraHalUtilClasses.cpp \
+ AppCallbackNotifier.cpp \
+ ANativeWindowDisplayAdapter.cpp \
+ CameraProperties.cpp \
+ MemoryManager.cpp \
+ Encoder_libjpeg.cpp \
+ SensorListener.cpp \
+ NV12_resize.c
+
+CAMERA_COMMON_SRC:= \
+ CameraParameters.cpp \
+ ExCameraParameters.cpp \
+ CameraHalCommon.cpp
+
+CAMERA_V4L_SRC:= \
+ BaseCameraAdapter.cpp \
+ V4LCameraAdapter/V4LCameraAdapter.cpp
+
+CAMERA_UTILS_SRC:= \
+ utils/ErrorUtils.cpp \
+ utils/MessageQueue.cpp \
+ utils/Semaphore.cpp \
+ utils/util.cpp
+
+CAMERA_HAL_VERTURAL_CAMERA_SRC:= \
+ VirtualCamHal.cpp \
+ AppCbNotifier.cpp \
+ V4LCamAdpt.cpp
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ $(CAMERA_HAL_SRC) \
+ $(CAMERA_V4L_SRC) \
+ $(CAMERA_COMMON_SRC) \
+ $(CAMERA_UTILS_SRC)
+
+LOCAL_C_INCLUDES += \
+ $(LOCAL_PATH)/inc/ \
+ $(LOCAL_PATH)/utils \
+ $(LOCAL_PATH)/inc/V4LCameraAdapter \
+ frameworks/native/include/ui \
+ frameworks/native/include/utils \
+ frameworks/base/include/media/stagefright \
+ external/jhead/ \
+ external/jpeg/ \
+ hardware/libhardware/modules/gralloc/ \
+ frameworks/native/include/media/hardware
+
+
+LOCAL_SHARED_LIBRARIES:= \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libcamera_client \
+ libexif \
+ libjpeg \
+ libgui
+
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER
+
+ifeq ($(BOARD_HAVE_FRONT_CAM),true)
+ LOCAL_CFLAGS += -DAMLOGIC_FRONT_CAMERA_SUPPORT
+endif
+
+ifeq ($(BOARD_HAVE_BACK_CAM),true)
+ LOCAL_CFLAGS += -DAMLOGIC_BACK_CAMERA_SUPPORT
+endif
+
+ifeq ($(IS_CAM_NONBLOCK),true)
+LOCAL_CFLAGS += -DAMLOGIC_CAMERA_NONBLOCK_SUPPORT
+endif
+
+ifeq ($(BOARD_USE_USB_CAMERA),true)
+ LOCAL_CFLAGS += -DAMLOGIC_USB_CAMERA_SUPPORT
+#decrease the number of camera capture frames, and let Skype run smoothly
+ifeq ($(BOARD_USB_CAMREA_DECREASE_FRAMES), true)
+ LOCAL_CFLAGS += -DAMLOGIC_USB_CAMERA_DECREASE_FRAMES
+endif
+ifeq ($(BOARD_USBCAM_IS_TWOCH),true)
+ LOCAL_CFLAGS += -DAMLOGIC_TWO_CH_UVC
+endif
+else
+ ifeq ($(BOARD_HAVE_MULTI_CAMERAS),true)
+ LOCAL_CFLAGS += -DAMLOGIC_MULTI_CAMERA_SUPPORT
+ endif
+ ifeq ($(BOARD_HAVE_FLASHLIGHT),true)
+ LOCAL_CFLAGS += -DAMLOGIC_FLASHLIGHT_SUPPORT
+ endif
+endif
+
+ifeq ($(BOARD_ENABLE_VIDEO_SNAPSHOT),true)
+ LOCAL_CFLAGS += -DAMLOGIC_ENABLE_VIDEO_SNAPSHOT
+endif
+
+ifeq ($(BOARD_HAVE_VIRTUAL_CAMERA),true)
+ LOCAL_CFLAGS += -DAMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ LOCAL_SRC_FILES+= \
+ $(CAMERA_HAL_VERTURAL_CAMERA_SRC)
+endif
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+LOCAL_MODULE:= camera.amlogic
+LOCAL_MODULE_TAGS:= optional
+
+#include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
+include $(BUILD_SHARED_LIBRARY)
+endif
+endif
diff --git a/vircam/AppCbNotifier.cpp b/vircam/AppCbNotifier.cpp
new file mode 100755
index 0000000..3c9728b
--- /dev/null
+++ b/vircam/AppCbNotifier.cpp
@@ -0,0 +1,1886 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#define LOG_TAG "AppCbNotif"
+
+
+#include "VirtualCamHal.h"
+#include "VideoMetadata.h"
+#include "Encoder_libjpeg.h"
+#include <MetadataBufferType.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+#include "NV12_resize.h"
+
+#include <gralloc_priv.h>
+#ifndef ALIGN
+#define ALIGN(b,w) (((b)+((w)-1))/(w)*(w))
+#endif
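+// ALIGN(b, w) rounds b up to the next multiple of w; copy2Dto1D() below uses it
+// to pad the YV12 chroma row stride to a 16-byte boundary.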
+
+namespace android {
+
+const int AppCbNotifier::NOTIFIER_TIMEOUT = -1;
+KeyedVector<void*, sp<Encoder_libjpeg> > gVEncoderQueue;
+
+void AppCbNotifierEncoderCallback(void* main_jpeg,
+ void* thumb_jpeg,
+ CameraFrame::FrameType type,
+ void* cookie1,
+ void* cookie2,
+ void* cookie3)
+{
+ if (cookie1) {
+ AppCbNotifier* cb = (AppCbNotifier*) cookie1;
+ cb->EncoderDoneCb(main_jpeg, thumb_jpeg, type, cookie2, cookie3);
+ }
+}
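+// Trampoline: cookie1 is the AppCbNotifier instance; cookie2/cookie3 (the encoded
+// camera_memory_t and the ExifElementsTable) are forwarded to EncoderDoneCb()
+// as its cookie1/cookie2 arguments.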
+
+/*--------------------NotificationHandler Class STARTS here-----------------------------*/
+
+void AppCbNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, CameraFrame::FrameType type, void* cookie1, void* cookie2)
+{
+ camera_memory_t* encoded_mem = NULL;
+ Encoder_libjpeg::params *main_param = NULL, *thumb_param = NULL;
+ size_t jpeg_size;
+ uint8_t* src = NULL;
+ sp<Encoder_libjpeg> encoder = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ camera_memory_t* picture = NULL;
+
+ {
+ Mutex::Autolock lock(mLock);
+
+ if (!main_jpeg) {
+ goto exit;
+ }
+
+ encoded_mem = (camera_memory_t*) cookie1;
+ main_param = (Encoder_libjpeg::params *) main_jpeg;
+ jpeg_size = main_param->jpeg_size;
+ src = main_param->src;
+
+ if(encoded_mem && encoded_mem->data && (jpeg_size > 0)) {
+ if (cookie2) {
+ ExifElementsTable* exif = (ExifElementsTable*) cookie2;
+ Section_t* exif_section = NULL;
+
+ exif->insertExifToJpeg((unsigned char*) encoded_mem->data, jpeg_size);
+
+ if(thumb_jpeg) {
+ thumb_param = (Encoder_libjpeg::params *) thumb_jpeg;
+ if((thumb_param->in_width>0)&&(thumb_param->in_height>0)&&(thumb_param->out_width>0)&&(thumb_param->out_height>0))
+ exif->insertExifThumbnailImage((const char*)thumb_param->dst,(int)thumb_param->jpeg_size);
+ }
+
+ exif_section = FindSection(M_EXIF);
+
+ if (exif_section) {
+ picture = mRequestMemory(-1, jpeg_size + exif_section->Size, 1, NULL);
+ if (picture && picture->data) {
+ exif->saveJpeg((unsigned char*) picture->data, jpeg_size + exif_section->Size);
+ }
+ }
+ delete exif;
+ cookie2 = NULL;
+ } else {
+ picture = mRequestMemory(-1, jpeg_size, 1, NULL);
+ if (picture && picture->data) {
+ memcpy(picture->data, encoded_mem->data, jpeg_size);
+ }
+ }
+ }
+ } // scope for mutex lock
+
+ if (!mRawAvailable) {
+ dummyRaw();
+ } else {
+ mRawAvailable = false;
+ }
+
+ if (mNotifierState == AppCbNotifier::NOTIFIER_STARTED) {
+ mFrameProvider->returnFrame(src, type);
+ }
+
+ // Send the callback to the application only if the notifier is started and the message is enabled
+ if(picture && (mNotifierState==AppCbNotifier::NOTIFIER_STARTED) &&
+ (mCameraHal->msgTypeEnabled(CAMERA_MSG_COMPRESSED_IMAGE)))
+ {
+ Mutex::Autolock lock(mBurstLock);
+#if 0 //TODO: enable burst mode later
+ if ( mBurst )
+ {
+ `(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie);
+ }
+ else
+#endif
+ {
+ mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, picture, 0, NULL, mCallbackCookie);
+ }
+ }
+
+ exit:
+
+ if (main_jpeg) {
+ free(main_jpeg);
+ }
+
+ if (thumb_jpeg) {
+ if (((Encoder_libjpeg::params *) thumb_jpeg)->dst) {
+ free(((Encoder_libjpeg::params *) thumb_jpeg)->dst);
+ }
+ free(thumb_jpeg);
+ }
+
+ if (encoded_mem) {
+ encoded_mem->release(encoded_mem);
+ }
+
+ if (picture) {
+ picture->release(picture);
+ }
+
+ if (cookie2) {
+ delete (ExifElementsTable*) cookie2;
+ }
+
+ if (mNotifierState == AppCbNotifier::NOTIFIER_STARTED) {
+ encoder = gVEncoderQueue.valueFor(src);
+ if (encoder.get()) {
+ gVEncoderQueue.removeItem(src);
+ encoder.clear();
+ }
+ //mFrameProvider->returnFrame(src, type);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ * NotificationHandler class
+ */
+
+///Initialization function for AppCbNotifier
+status_t AppCbNotifier::initialize()
+{
+ LOG_FUNCTION_NAME;
+
+ mMeasurementEnabled = false;
+
+ ///Create the app notifier thread
+ mNotificationThread = new NotificationThread(this);
+ if(!mNotificationThread.get())
+ {
+ CAMHAL_LOGEA("Couldn't create Notification thread");
+ return NO_MEMORY;
+ }
+
+ ///Start the notification thread
+ status_t ret = mNotificationThread->run("NotificationThread", PRIORITY_URGENT_DISPLAY);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEA("Couldn't run NotificationThread");
+ mNotificationThread.clear();
+ return ret;
+ }
+
+ mUseMetaDataBufferMode = false;
+ mUseVideoBuffers = false;
+ mRawAvailable = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void AppCbNotifier::setCallbacks(VirtualCamHal* cameraHal,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ mCameraHal = cameraHal;
+ mNotifyCb = notify_cb;
+ mDataCb = data_cb;
+ mDataCbTimestamp = data_cb_timestamp;
+ mRequestMemory = get_memory;
+ mCallbackCookie = user;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCbNotifier::setMeasurements(bool enable)
+{
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ mMeasurementEnabled = enable;
+
+ if ( enable )
+ {
+ mFrameProvider->enableFrameNotification(CameraFrame::FRAME_DATA_SYNC);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+//All sub-components of Camera HAL call this whenever any error happens
+void AppCbNotifier::errorNotify(int error)
+{
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGEB("AppCbNotifier received error %d", error);
+
+ // If it is a fatal error abort here!
+ if((error == CAMERA_ERROR_FATAL) || (error == CAMERA_ERROR_HARD)) {
+ //We kill media server if we encounter these errors as there is
+ //no point continuing and apps also don't handle errors other
+ //than media server death always.
+ abort();
+ return;
+ }
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ERROR) ) )
+ {
+ CAMHAL_LOGEB("AppCbNotifier mNotifyCb %d", error);
+ mNotifyCb(CAMERA_MSG_ERROR, CAMERA_ERROR_UNKNOWN, 0, mCallbackCookie);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+bool AppCbNotifier::notificationThread()
+{
+ bool shouldLive = true;
+ status_t ret;
+
+ LOG_FUNCTION_NAME;
+
+ //CAMHAL_LOGDA("Notification Thread waiting for message");
+ ret = MSGUTILS::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
+ &mEventQ,
+ &mFrameQ,
+ AppCbNotifier::NOTIFIER_TIMEOUT);
+
+ //CAMHAL_LOGDA("Notification Thread received message");
+
+ if (mNotificationThread->msgQ().hasMsg()) {
+ ///Received a message from CameraHal, process it
+ CAMHAL_LOGDA("Notification Thread received message from Camera HAL");
+ shouldLive = processMessage();
+ if(!shouldLive) {
+ CAMHAL_LOGDA("Notification Thread exiting.");
+ }
+ }
+
+ if(mEventQ.hasMsg()) {
+ ///Received an event from one of the event providers
+ CAMHAL_LOGDA("Notification Thread received an event from event provider (CameraAdapter)");
+ notifyEvent();
+ }
+
+ if(mFrameQ.hasMsg()) {
+ ///Received a frame from one of the frame providers
+ //CAMHAL_LOGDA("Notification Thread received a frame from frame provider (CameraAdapter)");
+ notifyFrame();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return shouldLive;
+}
+
+void AppCbNotifier::notifyEvent()
+{
+ ///Receive and send the event notifications to app
+ MSGUTILS::Message msg;
+ LOG_FUNCTION_NAME;
+ {
+ Mutex::Autolock lock(mLock);
+ mEventQ.get(&msg);
+ }
+ bool ret = true;
+ CameraHalEvent *evt = NULL;
+ CameraHalEvent::FocusEventData *focusEvtData;
+ CameraHalEvent::ZoomEventData *zoomEvtData;
+ CameraHalEvent::FaceEventData faceEvtData;
+ CameraHalEvent::FocusMoveEventData *focusMoveEvtData;
+
+ if(mNotifierState != AppCbNotifier::NOTIFIER_STARTED)
+ {
+ return;
+ }
+
+ switch(msg.command)
+ {
+ case AppCbNotifier::NOTIFIER_CMD_PROCESS_EVENT:
+
+ evt = ( CameraHalEvent * ) msg.arg1;
+
+ if ( NULL == evt )
+ {
+ CAMHAL_LOGEA("Invalid CameraHalEvent");
+ return;
+ }
+
+ switch(evt->mEventType)
+ {
+ case CameraHalEvent::EVENT_SHUTTER:
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_SHUTTER) ) )
+ {
+ mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);
+ }
+ mRawAvailable = false;
+
+ break;
+
+ case CameraHalEvent::EVENT_FOCUS_LOCKED:
+ case CameraHalEvent::EVENT_FOCUS_ERROR:
+
+ focusEvtData = &evt->mEventData->focusEvent;
+ if ( ( focusEvtData->focusLocked ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) )
+ {
+ mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
+ mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
+ }
+ else if ( focusEvtData->focusError &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) )
+ {
+ mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie);
+ mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
+ }
+
+ break;
+
+ case CameraHalEvent::EVENT_FOCUS_MOVE:
+
+ focusMoveEvtData = &evt->mEventData->focusMoveEvent;
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ))
+ {
+ mNotifyCb(CAMERA_MSG_FOCUS_MOVE, focusMoveEvtData->focusStart, 0, mCallbackCookie);
+ }
+
+ break;
+
+ case CameraHalEvent::EVENT_ZOOM_INDEX_REACHED:
+
+ zoomEvtData = &evt->mEventData->zoomEvent;
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ZOOM) ) )
+ {
+ mNotifyCb(CAMERA_MSG_ZOOM, zoomEvtData->currentZoomIndex, zoomEvtData->targetZoomIndexReached, mCallbackCookie);
+ }
+
+ break;
+
+ case CameraHalEvent::EVENT_FACE:
+
+ faceEvtData = evt->mEventData->faceEvent;
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) ) )
+ {
+ // WA for an issue inside CameraService
+ camera_memory_t *tmpBuffer = mRequestMemory(-1, 1, 1, NULL);
+
+ mDataCb(CAMERA_MSG_PREVIEW_METADATA,
+ tmpBuffer,
+ 0,
+ faceEvtData->getFaceResult(),
+ mCallbackCookie);
+
+ faceEvtData.clear();
+
+ if ( NULL != tmpBuffer ) {
+ tmpBuffer->release(tmpBuffer);
+ }
+
+ }
+
+ break;
+
+ case CameraHalEvent::ALL_EVENTS:
+ break;
+ default:
+ break;
+ }
+
+ break;
+ }
+
+ if ( NULL != evt )
+ {
+ delete evt;
+ }
+
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
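+// copy2Dto1D(): repacks a stride-padded source frame into the tightly packed
+// buffer handed to the application callback. For a YUV420SP destination with an
+// NV21 source the interleaved chroma plane is copied as-is; for a YV12 destination
+// the chroma is either copied with a 16-byte-aligned row stride (YV12 source) or
+// de-interleaved into separate U/V planes (NV21 source, NEON loop below). Other
+// formats fall through to a plain row-by-row copy.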
+static void copy2Dto1D(void *dst,
+ void *src,
+ int width,
+ int height,
+ unsigned int srcpixelfmtflag,
+ size_t stride,
+ uint32_t offset,
+ unsigned int bytesPerPixel,
+ size_t length,
+ const char *pixelFormat)
+{
+ unsigned int alignedRow, row;
+ unsigned char *bufferDst, *bufferSrc;
+ unsigned char *bufferDstEnd, *bufferSrcEnd;
+ uint16_t *bufferSrc_UV;
+
+ unsigned int *y_uv = (unsigned int *)src;
+
+ CAMHAL_LOGDB("copy2Dto1D() y= 0x%x ; uv=0x%x.",y_uv[0], y_uv[1]);
+ CAMHAL_LOGDB("pixelFormat= %s; offset=%d; length=%d;width=%d,%d;stride=%d;",
+ pixelFormat,offset,length,width,height,stride);
+
+ if (pixelFormat!=NULL) {
+ if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ bytesPerPixel = 2;
+ } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ bytesPerPixel = 1;
+ bufferDst = ( unsigned char * ) dst;
+ bufferDstEnd = ( unsigned char * ) dst + width*height*bytesPerPixel;
+ bufferSrc = ( unsigned char * ) y_uv[0] + offset;
+ bufferSrcEnd = ( unsigned char * ) ( ( size_t ) y_uv[0] + length + offset);
+ row = width*bytesPerPixel;
+ alignedRow = stride-width;
+ int stride_bytes = stride / 8;
+ uint32_t xOff = offset % stride;
+ uint32_t yOff = offset / stride;
+
+ // going to convert from NV12 here and return
+ // Step 1: Y plane: iterate through each row and copy
+ for ( int i = 0 ; i < height ; i++) {
+ memcpy(bufferDst, bufferSrc, row);
+ bufferSrc += stride;
+ bufferDst += row;
+ if ( ( bufferSrc > bufferSrcEnd ) || ( bufferDst > bufferDstEnd ) ) {
+ break;
+ }
+ }
+
+ //bufferSrc_UV = ( uint16_t * ) ((uint8_t*)y_uv[1] + (stride/2)*yOff + xOff);
+ bufferSrc_UV =( uint16_t * ) ( y_uv[0]+stride*height+ (stride/2)*yOff + xOff) ;
+ if ((strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ && (CameraFrame::PIXEL_FMT_NV21 == srcpixelfmtflag) ){
+ uint16_t *bufferDst_UV;
+ bufferDst_UV = (uint16_t *) (((uint8_t*)dst)+row*height);
+ memcpy(bufferDst_UV, bufferSrc_UV, stride*height/2);
+#if 0
+ // Step 2: UV plane: convert NV12 to NV21 by swapping U & V
+ for (int i = 0 ; i < height/2 ; i++, bufferSrc_UV += alignedRow/2) {
+ int n = width;
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #32 \n\t"
+ " blt 1f \n\t"
+ "0: @ 32 byte swap \n\t"
+ " sub %[n], %[n], #32 \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! \n\t"
+ " vswp q0, q1 \n\t"
+ " cmp %[n], #32 \n\t"
+ " vst2.8 {q0,q1},[%[dst]]! \n\t"
+ " bge 0b \n\t"
+ "1: @ Is there enough data? \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 3f \n\t"
+ "2: @ 16 byte swap \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " vswp d0, d1 \n\t"
+ " cmp %[n], #16 \n\t"
+ " vst2.8 {d0,d1},[%[dst]]! \n\t"
+ " bge 2b \n\t"
+ "3: @ Is there enough data? \n\t"
+ " cmp %[n], #8 \n\t"
+ " blt 5f \n\t"
+ "4: @ 8 byte swap \n\t"
+ " sub %[n], %[n], #8 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " vswp d0, d1 \n\t"
+ " cmp %[n], #8 \n\t"
+ " vst2.8 {d0[0],d1[0]},[%[dst]]! \n\t"
+ " bge 4b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst] "+r" (bufferDst_UV), [src] "+r" (bufferSrc_UV), [n] "+r" (n)
+ : [src_stride] "r" (stride_bytes)
+ : "cc", "memory", "q0", "q1"
+ );
+ }
+#endif
+ } else if( (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
+ && (CameraFrame::PIXEL_FMT_YV12 == srcpixelfmtflag) ){
+ bufferSrc =(unsigned char *) bufferSrc_UV;
+ bufferDst = (unsigned char *)(((unsigned char*)dst)+row*height);
+ row = ALIGN(stride/2, 16);
+
+ memcpy(bufferDst, bufferSrc, row*height);
+ } else if ( (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
+ && ( CameraFrame::PIXEL_FMT_NV21 == srcpixelfmtflag) ){
+ uint16_t *bufferDst_U;
+ uint16_t *bufferDst_V;
+
+ // Step 2: UV plane: convert NV12 to YV12 by de-interleaving U & V
+ // TODO(XXX): This version of CameraHal assumes NV12 format it set at
+ // camera adapter to support YV12. Need to address for
+ // USBCamera
+
+ bufferDst_U = (uint16_t *) (((uint8_t*)dst)+row*height);
+ bufferDst_V = (uint16_t *) (((uint8_t*)dst)+row*height+row*height/4);
+
+ for (int i = 0 ; i < height/2 ; i++, bufferSrc_UV += alignedRow/2) {
+ int n = width;
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #32 \n\t"
+ " blt 1f \n\t"
+ "0: @ 32 byte swap \n\t"
+ " sub %[n], %[n], #32 \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! \n\t"
+ " cmp %[n], #32 \n\t"
+ " vst1.8 {q1},[%[dst_v]]! \n\t"
+ " vst1.8 {q0},[%[dst_u]]! \n\t"
+ " bge 0b \n\t"
+ "1: @ Is there enough data? \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 3f \n\t"
+ "2: @ 16 byte swap \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " cmp %[n], #16 \n\t"
+ " vst1.8 {d1},[%[dst_v]]! \n\t"
+ " vst1.8 {d0},[%[dst_u]]! \n\t"
+ " bge 2b \n\t"
+ "3: @ Is there enough data? \n\t"
+ " cmp %[n], #8 \n\t"
+ " blt 5f \n\t"
+ "4: @ 8 byte swap \n\t"
+ " sub %[n], %[n], #8 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " cmp %[n], #8 \n\t"
+ " vst1.8 {d1[0]},[%[dst_v]]! \n\t"
+ " vst1.8 {d0[0]},[%[dst_u]]! \n\t"
+ " bge 4b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst_u] "+r" (bufferDst_U), [dst_v] "+r" (bufferDst_V),
+ [src] "+r" (bufferSrc_UV), [n] "+r" (n)
+ : [src_stride] "r" (stride_bytes)
+ : "cc", "memory", "q0", "q1"
+ );
+ }
+ }
+ return ;
+
+ } else if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ bytesPerPixel = 2;
+ }
+ }
+
+ bufferDst = ( unsigned char * ) dst;
+ bufferSrc = ( unsigned char * ) y_uv[0];
+ row = width*bytesPerPixel;
+ alignedRow = ( row + ( stride -1 ) ) & ( ~ ( stride -1 ) );
+
+ //iterate through each row
+ for ( int i = 0 ; i < height ; i++, bufferSrc += alignedRow, bufferDst += row) {
+ memcpy(bufferDst, bufferSrc, row);
+ }
+}
+
+void AppCbNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType)
+{
+ camera_memory_t* picture = NULL;
+ void *dest = NULL, *src = NULL;
+
+ // scope for lock
+ {
+ Mutex::Autolock lock(mLock);
+
+ if(mNotifierState != AppCbNotifier::NOTIFIER_STARTED) {
+ goto exit;
+ }
+
+ picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+
+ if (NULL != picture) {
+ dest = picture->data;
+ if (NULL != dest) {
+ src = (void *) ((unsigned int) frame->mBuffer + frame->mOffset);
+ memcpy(dest, src, frame->mLength);
+ }
+ }
+ }
+
+ exit:
+ mFrameProvider->returnFrame(frame->mBuffer, (CameraFrame::FrameType) frame->mFrameType);
+
+ if(picture) {
+ if((mNotifierState == AppCbNotifier::NOTIFIER_STARTED) &&
+ mCameraHal->msgTypeEnabled(msgType)) {
+ mDataCb(msgType, picture, 0, NULL, mCallbackCookie);
+ }
+ picture->release(picture);
+ }
+}
+
+void AppCbNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType)
+{
+ camera_memory_t* picture = NULL;
+ void* dest = NULL;
+ uint8_t* src = NULL;
+
+ // scope for lock
+ {
+ Mutex::Autolock lock(mLock);
+
+ if(mNotifierState != AppCbNotifier::NOTIFIER_STARTED) {
+ goto exit;
+ }
+
+ if (!mPreviewMemory || !frame->mBuffer) {
+ CAMHAL_LOGDA("Error! One of the buffer is NULL");
+ goto exit;
+ }
+
+#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
+ camera_memory_t* VideoCameraBufferMemoryBase = (camera_memory_t*)frame->mBuffer;
+ src = (uint8_t*)VideoCameraBufferMemoryBase->data;
+#else
+ private_handle_t* gralloc_hnd = (private_handle_t*)frame->mBuffer;
+ src = (uint8_t*)gralloc_hnd->base;
+#endif
+ if (!src) {
+ CAMHAL_LOGDA("Error! Src Data buffer is NULL");
+ goto exit;
+ }
+
+ dest = (void*) mPreviewBufs[mPreviewBufCount];
+
+ CAMHAL_LOGVB("%d:copy2Dto1D(%p, %p, %d, %d, %d, %d, %d, %d,%s)",
+ __LINE__,
+ NULL, //buf,
+ frame->mBuffer,
+ frame->mWidth,
+ frame->mHeight,
+ frame->mPixelFmt,
+ frame->mAlignment,
+ 2,
+ frame->mLength,
+ mPreviewPixelFormat);
+
+ if ( NULL != dest ) {
+ // data sync frames don't need conversion
+ if (CameraFrame::FRAME_DATA_SYNC == frame->mFrameType) {
+ if ( (mPreviewMemory->size / MAX_BUFFERS) >= frame->mLength ) {
+ memcpy(dest, (void*) src, frame->mLength);
+ } else {
+ memset(dest, 0, (mPreviewMemory->size / MAX_BUFFERS));
+ }
+ } else {
+ if ((NULL == (void*)frame->mYuv[0]) || (NULL == (void*)frame->mYuv[1])){
+ CAMHAL_LOGEA("Error! One of the YUV Pointer is NULL");
+ goto exit;
+ }
+ else{
+ copy2Dto1D(dest,
+ frame->mYuv,
+ frame->mWidth,
+ frame->mHeight,
+ frame->mPixelFmt,
+ frame->mAlignment,
+ frame->mOffset,
+ 2,
+ frame->mLength,
+ mPreviewPixelFormat);
+ }
+ }
+ }
+ }
+
+ exit:
+ mFrameProvider->returnFrame(frame->mBuffer, (CameraFrame::FrameType) frame->mFrameType);
+
+ if((mNotifierState == AppCbNotifier::NOTIFIER_STARTED) &&
+ mCameraHal->msgTypeEnabled(msgType) &&
+ (dest != NULL)) {
+ mDataCb(msgType, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
+ }
+
+ // increment for next buffer
+ mPreviewBufCount = (mPreviewBufCount + 1) % AppCbNotifier::MAX_BUFFERS;
+}
+
+status_t AppCbNotifier::dummyRaw()
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == mRequestMemory ) {
+ CAMHAL_LOGEA("Can't allocate memory for dummy raw callback!");
+ return NO_INIT;
+ }
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( NULL != mNotifyCb ) ){
+
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE) ) {
+ camera_memory_t *dummyRaw = mRequestMemory(-1, 1, 1, NULL);
+
+ if ( NULL == dummyRaw ) {
+ CAMHAL_LOGEA("Dummy raw buffer allocation failed!");
+ return NO_MEMORY;
+ }
+
+ mDataCb(CAMERA_MSG_RAW_IMAGE, dummyRaw, 0, NULL, mCallbackCookie);
+
+ dummyRaw->release(dummyRaw);
+ } else if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY) ) {
+ mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+void AppCbNotifier::notifyFrame()
+{
+ ///Receive and send the frame notifications to app
+ MSGUTILS::Message msg;
+ CameraFrame *frame;
+ MemoryHeapBase *heap;
+ MemoryBase *buffer = NULL;
+ sp<MemoryBase> memBase;
+ void *buf = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ {
+ Mutex::Autolock lock(mLock);
+ if(!mFrameQ.isEmpty()) {
+ mFrameQ.get(&msg);
+ } else {
+ return;
+ }
+ }
+
+ bool ret = true;
+
+ frame = NULL;
+ switch(msg.command)
+ {
+ case AppCbNotifier::NOTIFIER_CMD_PROCESS_FRAME:
+
+ frame = (CameraFrame *) msg.arg1;
+ if(!frame)
+ {
+ break;
+ }
+
+ if ( (CameraFrame::RAW_FRAME == frame->mFrameType )&&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( NULL != mNotifyCb ) )
+ {
+
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE) )
+ {
+#ifdef COPY_IMAGE_BUFFER
+ copyAndSendPictureFrame(frame, CAMERA_MSG_RAW_IMAGE);
+#else
+ //TODO: Find a way to map a Tiler buffer to a MemoryHeapBase
+#endif
+ }
+ else
+ {
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY) ) {
+ mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie);
+ }
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+ mRawAvailable = true;
+ }
+ else if ( (CameraFrame::IMAGE_FRAME == frame->mFrameType) &&
+ (NULL != mCameraHal) &&
+ (NULL != mDataCb) &&
+ ((CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG & frame->mQuirks) ||
+ (CameraFrame::ENCODE_RAW_RGB24_TO_JPEG & frame->mQuirks)||
+ (CameraFrame::ENCODE_RAW_YUV420SP_TO_JPEG & frame->mQuirks)))
+ {
+
+ LOGD("IMAGE_FRAME ENCODE_RAW.. %d", __LINE__);
+ int encode_quality = 100, tn_quality = 100;
+ int tn_width, tn_height;
+ unsigned int current_snapshot = 0;
+ Encoder_libjpeg::params *main_jpeg = NULL, *tn_jpeg = NULL;
+ void* exif_data = NULL;
+ camera_memory_t* raw_picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+
+ if(raw_picture) {
+ buf = raw_picture->data;
+ }else{
+ CAMHAL_LOGEA("Error! Main Jpeg encoder request memory fail!");
+ break;
+ }
+
+ CameraParameters parameters;
+ char *params = mCameraHal->getParameters();
+ const String8 strParams(params);
+ parameters.unflatten(strParams);
+
+ encode_quality = parameters.getInt(CameraParameters::KEY_JPEG_QUALITY);
+ if (encode_quality < 0 || encode_quality > 100) {
+ encode_quality = 100;
+ }
+
+ tn_quality = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ if (tn_quality < 0 || tn_quality > 100) {
+ tn_quality = 100;
+ }
+
+ if (CameraFrame::HAS_EXIF_DATA & frame->mQuirks) {
+ exif_data = frame->mCookie2;
+ }
+
+ main_jpeg = (Encoder_libjpeg::params*)
+ malloc(sizeof(Encoder_libjpeg::params));
+ if (main_jpeg) {
+ main_jpeg->src = (uint8_t*) frame->mBuffer;
+ main_jpeg->src_size = frame->mLength;
+ main_jpeg->dst = (uint8_t*) buf;
+ main_jpeg->dst_size = frame->mLength;
+ main_jpeg->quality = encode_quality;
+ main_jpeg->in_width = frame->mWidth;
+ main_jpeg->in_height = frame->mHeight;
+ main_jpeg->out_width = frame->mWidth;
+ main_jpeg->out_height = frame->mHeight;
+ if ((CameraFrame::ENCODE_RAW_RGB24_TO_JPEG & frame->mQuirks))
+ main_jpeg->format = CameraProperties::PIXEL_FORMAT_RGB24;
+ else if ((CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG & frame->mQuirks))
+ main_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV422I;
+ else if ((CameraFrame::ENCODE_RAW_YUV420SP_TO_JPEG & frame->mQuirks))
+ main_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+
+// disable thumbnail for now. preview was stopped and mPreviewBufs was
+// cleared, so this won't work.
+ tn_width = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ tn_height = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+
+ if(frame->mHeight>frame->mWidth){
+ int temp = tn_width;
+ tn_width = tn_height;
+ tn_height = temp;
+ }
+
+ if ((tn_width > 0) && (tn_height > 0)) {
+ tn_jpeg = (Encoder_libjpeg::params*)
+ malloc(sizeof(Encoder_libjpeg::params));
+ // if malloc fails just keep going and encode main jpeg
+ if (!tn_jpeg) {
+ tn_jpeg = NULL;
+ }
+ }
+
+ if (tn_jpeg) {
+ tn_jpeg->dst = (uint8_t*) malloc(tn_width*tn_height*3);
+ if(tn_jpeg->dst){
+ tn_jpeg->src = (uint8_t*) frame->mBuffer;
+ tn_jpeg->src_size = frame->mLength;
+ tn_jpeg->dst_size = tn_width*tn_height*3;
+ tn_jpeg->quality = tn_quality;
+ tn_jpeg->in_width = frame->mWidth;
+ tn_jpeg->in_height = frame->mHeight;
+ tn_jpeg->out_width = tn_width;
+ tn_jpeg->out_height = tn_height;
+ if ((CameraFrame::ENCODE_RAW_RGB24_TO_JPEG & frame->mQuirks))
+ tn_jpeg->format = CameraProperties::PIXEL_FORMAT_RGB24;
+ else if ((CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG & frame->mQuirks))
+ tn_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV422I;
+ else if ((CameraFrame::ENCODE_RAW_YUV420SP_TO_JPEG & frame->mQuirks))
+ tn_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }else{
+ free(tn_jpeg);
+ tn_jpeg = NULL;
+ CAMHAL_LOGEA("Error! Thumbnail Jpeg encoder malloc memory fail!");
+ }
+ }
+
+ LOGD("IMAGE_FRAME ENCODE_RAW.. %d", __LINE__);
+ sp<Encoder_libjpeg> encoder = new Encoder_libjpeg(main_jpeg,
+ tn_jpeg,
+ AppCbNotifierEncoderCallback,
+ (CameraFrame::FrameType)frame->mFrameType,
+ this,
+ raw_picture,
+ exif_data);
+ encoder->run();
+ gVEncoderQueue.add(frame->mBuffer, encoder);
+ encoder.clear();
+ if (params != NULL)
+ {
+ mCameraHal->putParameters(params);
+ }
+ }
+ else if ( ( CameraFrame::IMAGE_FRAME == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) )
+ {
+
+ // CTS, MTS requirements: Every 'takePicture()' call
+ // who registers a raw callback should receive one
+ // as well. This is not always the case with
+ // CameraAdapters though.
+ if (!mRawAvailable) {
+ dummyRaw();
+ } else {
+ mRawAvailable = false;
+ }
+
+#ifdef COPY_IMAGE_BUFFER
+ {
+ Mutex::Autolock lock(mBurstLock);
+#if 0 //TODO: enable burst mode later
+ if ( mBurst )
+ {
+ `(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie);
+ }
+ else
+#endif
+ {
+ copyAndSendPictureFrame(frame, CAMERA_MSG_COMPRESSED_IMAGE);
+ }
+ }
+#else
+ //TODO: Find a way to map a Tiler buffer to a MemoryHeapBase
+#endif
+ }
+ else if ( ( CameraFrame::VIDEO_FRAME_SYNC == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME) ) )
+ {
+ mRecordingLock.lock();
+ if(mRecording)
+ {
+ if(mUseMetaDataBufferMode)
+ {
+ camera_memory_t *videoMedatadaBufferMemory =
+ (camera_memory_t *) mVideoMetadataBufferMemoryMap.valueFor((uint32_t) frame->mBuffer);
+ video_metadata_t *videoMetadataBuffer = (video_metadata_t *) videoMedatadaBufferMemory->data;
+
+ if( (NULL == videoMedatadaBufferMemory) || (NULL == videoMetadataBuffer) || (NULL == frame->mBuffer) )
+ {
+ CAMHAL_LOGEA("Error! One of the video buffers is NULL");
+ break;
+ }
+
+ if ( mUseVideoBuffers )
+ {
+ int vBuf = mVideoMap.valueFor((uint32_t) frame->mBuffer);
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ Rect bounds;
+ bounds.left = 0;
+ bounds.top = 0;
+ bounds.right = mVideoWidth;
+ bounds.bottom = mVideoHeight;
+
+ void *y_uv[2];
+ mapper.lock((buffer_handle_t)vBuf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+
+ structConvImage input = {frame->mWidth,
+ frame->mHeight,
+ 4096,
+ IC_FORMAT_YCbCr420_lp,
+ (mmByte *)frame->mYuv[0],
+ (mmByte *)frame->mYuv[1],
+ frame->mOffset};
+
+ structConvImage output = {mVideoWidth,
+ mVideoHeight,
+ 4096,
+ IC_FORMAT_YCbCr420_lp,
+ (mmByte *)y_uv[0],
+ (mmByte *)y_uv[1],
+ 0};
+
+ VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0);
+ mapper.unlock((buffer_handle_t)vBuf);
+ videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
+ videoMetadataBuffer->handle = (void *)vBuf;
+ videoMetadataBuffer->offset = 0;
+ }
+ else
+ {
+ videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
+ videoMetadataBuffer->handle = frame->mBuffer;
+ videoMetadataBuffer->offset = frame->mOffset;
+ }
+
+ CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x",
+ frame->mBuffer, videoMetadataBuffer, videoMedatadaBufferMemory);
+
+ mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME,
+ videoMedatadaBufferMemory, 0, mCallbackCookie);
+ }
+ else
+ {
+ //TODO: Need to revisit this, should ideally be mapping the TILER buffer using mRequestMemory
+ if( NULL == frame->mBuffer)
+ {
+ CAMHAL_LOGEA("Error! frame->mBuffer is NULL");
+ break;
+ }
+#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
+ camera_memory_t* VideoCameraBufferMemoryBase = (camera_memory_t*)frame->mBuffer;
+ if((NULL == VideoCameraBufferMemoryBase)||(NULL == VideoCameraBufferMemoryBase->data))
+ {
+ CAMHAL_LOGEA("Error! one of video buffer is NULL");
+ break;
+ }
+ mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME, VideoCameraBufferMemoryBase, 0, mCallbackCookie);
+#else
+ camera_memory_t* VideoCameraBufferMemoryBase = (camera_memory_t*)mVideoHeaps.valueFor((uint32_t)frame->mBuffer);
+ private_handle_t* gralloc_hnd = (private_handle_t*)frame->mBuffer;
+ if((!VideoCameraBufferMemoryBase) ||(!gralloc_hnd->base))
+ {
+ CAMHAL_LOGEA("Error! one of video buffer is NULL");
+ break;
+ }
+ uint8_t* src = (uint8_t*)gralloc_hnd->base;
+ uint8_t* dest = (uint8_t*)VideoCameraBufferMemoryBase->data;
+ memcpy(dest,src,frame->mLength);
+ mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME, VideoCameraBufferMemoryBase, 0, mCallbackCookie);
+#endif
+ }
+ }
+ mRecordingLock.unlock();
+ }
+ else if(( CameraFrame::SNAPSHOT_FRAME == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( NULL != mNotifyCb)) {
+ //When enabled, measurement data is sent instead of video data
+ if ( !mMeasurementEnabled ) {
+ copyAndSendPreviewFrame(frame, CAMERA_MSG_POSTVIEW_FRAME);
+ } else {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+ }
+ else if ( ( CameraFrame::PREVIEW_FRAME_SYNC== frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) ) {
+ //When enabled, measurement data is sent instead of video data
+ if ( !mMeasurementEnabled ) {
+ copyAndSendPreviewFrame(frame, CAMERA_MSG_PREVIEW_FRAME);
+ } else {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+ }
+ else if ( ( CameraFrame::FRAME_DATA_SYNC == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) ) {
+ copyAndSendPreviewFrame(frame, CAMERA_MSG_PREVIEW_FRAME);
+ } else {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ ( CameraFrame::FrameType ) frame->mFrameType);
+ CAMHAL_LOGDB("Frame type 0x%x is still unsupported!", frame->mFrameType);
+ }
+
+ break;
+
+ default:
+
+ break;
+
+ };
+
+exit:
+
+ if ( NULL != frame )
+ {
+ delete frame;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCbNotifier::frameCallbackRelay(CameraFrame* caFrame)
+{
+ LOG_FUNCTION_NAME;
+ AppCbNotifier *appcbn = (AppCbNotifier*) (caFrame->mCookie);
+ appcbn->frameCallback(caFrame);
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCbNotifier::frameCallback(CameraFrame* caFrame)
+{
+ ///Post the event to the event queue of AppCbNotifier
+ MSGUTILS::Message msg;
+ CameraFrame *frame;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != caFrame )
+ {
+ frame = new CameraFrame(*caFrame);
+ if ( NULL != frame )
+ {
+ msg.command = AppCbNotifier::NOTIFIER_CMD_PROCESS_FRAME;
+ msg.arg1 = frame;
+ mFrameQ.put(&msg);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Not enough resources to allocate CameraFrame");
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCbNotifier::flushAndReturnFrames()
+{
+ MSGUTILS::Message msg;
+ CameraFrame *frame;
+
+ Mutex::Autolock lock(mLock);
+ while (!mFrameQ.isEmpty()) {
+ mFrameQ.get(&msg);
+ frame = (CameraFrame*) msg.arg1;
+ if (frame) {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCbNotifier::eventCallbackRelay(CameraHalEvent* chEvt)
+{
+ LOG_FUNCTION_NAME;
+ AppCbNotifier *appcbn = (AppCbNotifier*) (chEvt->mCookie);
+ appcbn->eventCallback(chEvt);
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCbNotifier::eventCallback(CameraHalEvent* chEvt)
+{
+
+ ///Post the event to the event queue of AppCbNotifier
+ MSGUTILS::Message msg;
+ CameraHalEvent *event;
+
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != chEvt )
+ {
+
+ event = new CameraHalEvent(*chEvt);
+ if ( NULL != event )
+ {
+ msg.command = AppCbNotifier::NOTIFIER_CMD_PROCESS_EVENT;
+ msg.arg1 = event;
+ {
+ Mutex::Autolock lock(mLock);
+ mEventQ.put(&msg);
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEA("Not enough resources to allocate CameraHalEvent");
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+void AppCbNotifier::flushEventQueue()
+{
+
+ {
+ Mutex::Autolock lock(mLock);
+ mEventQ.clear();
+ }
+}
+
+
+bool AppCbNotifier::processMessage()
+{
+ ///Retrieve the command from the command queue and process it
+ MSGUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDA("+Msg get...");
+ mNotificationThread->msgQ().get(&msg);
+ CAMHAL_LOGDA("-Msg get...");
+ bool ret = true;
+
+ switch(msg.command)
+ {
+ case NotificationThread::NOTIFIER_EXIT:
+ {
+ CAMHAL_LOGEA("Received NOTIFIER_EXIT command from Camera HAL");
+ mNotifierState = AppCbNotifier::NOTIFIER_EXITED;
+ ret = false;
+ break;
+ }
+ default:
+ {
+            CAMHAL_LOGEA("Error: unknown command received in processMessage()");
+ break;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+
+}
+
+AppCbNotifier::~AppCbNotifier()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Stop app callback notifier if not already stopped
+ stop();
+
+ ///Unregister with the frame provider
+ if ( NULL != mFrameProvider )
+ {
+ mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+ }
+
+ //unregister with the event provider
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+ }
+
+ MSGUTILS::Message msg = {0,0,0,0,0,0};
+ msg.command = NotificationThread::NOTIFIER_EXIT;
+
+    ///Post the message to the notification thread
+ mNotificationThread->msgQ().put(&msg);
+
+ //Exit and cleanup the thread
+ mNotificationThread->requestExit();
+ mNotificationThread->join();
+
+    //Delete the notification thread
+ mNotificationThread.clear();
+
+
+ ///Free the event and frame providers
+ if ( NULL != mEventProvider )
+ {
+ ///Deleting the event provider
+ CAMHAL_LOGDA("Stopping Event Provider");
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ if ( NULL != mFrameProvider )
+ {
+ ///Deleting the frame provider
+ CAMHAL_LOGDA("Stopping Frame Provider");
+ delete mFrameProvider;
+ mFrameProvider = NULL;
+ }
+
+ releaseSharedVideoBuffers();
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+//Free all video heaps and buffers
+void AppCbNotifier::releaseSharedVideoBuffers()
+{
+ LOG_FUNCTION_NAME;
+
+ if(mUseMetaDataBufferMode)
+ {
+ camera_memory_t* videoMedatadaBufferMemory;
+ for (unsigned int i = 0; i < mVideoMetadataBufferMemoryMap.size(); i++)
+ {
+ videoMedatadaBufferMemory = (camera_memory_t*) mVideoMetadataBufferMemoryMap.valueAt(i);
+ if(NULL != videoMedatadaBufferMemory)
+ {
+ videoMedatadaBufferMemory->release(videoMedatadaBufferMemory);
+ CAMHAL_LOGDB("Released videoMedatadaBufferMemory=0x%x", (uint32_t)videoMedatadaBufferMemory);
+ }
+ }
+
+ mVideoMetadataBufferMemoryMap.clear();
+ mVideoMetadataBufferReverseMap.clear();
+ if (mUseVideoBuffers)
+ {
+ mVideoMap.clear();
+ }
+ }
+ else
+ {
+#ifndef AMLOGIC_CAMERA_OVERLAY_SUPPORT
+ camera_memory_t* VideoCameraBufferMemoryBase = NULL;
+ for (unsigned int i = 0; i < mVideoHeaps.size(); i++)
+ {
+ VideoCameraBufferMemoryBase = (camera_memory_t*) mVideoHeaps.valueAt(i);
+ if(NULL != VideoCameraBufferMemoryBase)
+ {
+ VideoCameraBufferMemoryBase->release(VideoCameraBufferMemoryBase);
+ CAMHAL_LOGDB("Released VideoCameraBufferMemoryBase=0x%x", (uint32_t)VideoCameraBufferMemoryBase);
+ }
+ }
+#endif
+ mVideoMap.clear();
+ mVideoHeaps.clear();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCbNotifier::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier)
+{
+
+ LOG_FUNCTION_NAME;
+ ///@remarks There is no NULL check here. We will check
+ ///for NULL when we get start command from CameraHal
+ ///@Remarks Currently only one event provider (CameraAdapter) is supported
+ ///@todo Have an array of event providers for each event bitmask
+ mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay);
+ if ( NULL == mEventProvider )
+ {
+ CAMHAL_LOGEA("Error in creating EventProvider");
+ }
+ else
+ {
+ mEventProvider->enableEventNotification(eventMask);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCbNotifier::setFrameProvider(FrameNotifier *frameNotifier)
+{
+ LOG_FUNCTION_NAME;
+ ///@remarks There is no NULL check here. We will check
+ ///for NULL when we get the start command from CameraAdapter
+ mFrameProvider = new FrameProvider(frameNotifier, this, frameCallbackRelay);
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Error in creating FrameProvider");
+ }
+ else
+ {
+ //Register only for captured images and RAW for now
+ //TODO: Register for and handle all types of frames
+ mFrameProvider->enableFrameNotification(CameraFrame::IMAGE_FRAME);
+ mFrameProvider->enableFrameNotification(CameraFrame::RAW_FRAME);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t AppCbNotifier::startPreviewCallbacks(CameraParameters &params, void *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
+{
+ sp<MemoryHeapBase> heap;
+ sp<MemoryBase> buffer;
+ unsigned int *bufArr;
+ size_t size = 0;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ if ( NULL == mFrameProvider )
+ {
+        CAMHAL_LOGEA("Trying to start preview callbacks without FrameProvider");
+ return -EINVAL;
+ }
+
+ if ( mPreviewing )
+ {
+ CAMHAL_LOGDA("+Already previewing");
+ return NO_INIT;
+ }
+
+ int w,h;
+ ///Get preview size
+ params.getPreviewSize(&w, &h);
+
+ //Get the preview pixel format
+ mPreviewPixelFormat = params.getPreviewFormat();
+
+ if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ size = w*h*2;
+ mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+ else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 )
+ {
+ size = (w*h*3)/2;
+ mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+ else if( strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
+ {
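+            // YV12: one luma plane plus two chroma planes whose stride is
+            // aligned to 16 bytes, so the buffer is larger than w*h*3/2.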
+ int y_size,c_size,c_stride;
+ w = ALIGN(w,2);
+ y_size = w*h;
+ c_stride = ALIGN(w/2, 16);
+ c_size = c_stride * h/2;
+ size = y_size + c_size*2;
+
+ mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV420P;
+ }
+ else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ size = w*h*2;
+ mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_RGB565;
+ }
+
+ mPreviewMemory = mRequestMemory(-1, size, AppCbNotifier::MAX_BUFFERS, NULL);
+ if (!mPreviewMemory) {
+ return NO_MEMORY;
+ }
+
+ for (int i=0; i < AppCbNotifier::MAX_BUFFERS; i++) {
+ mPreviewBufs[i] = (unsigned char*) mPreviewMemory->data + (i*size);
+ }
+
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME ) ) {
+ mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+
+ mPreviewBufCount = 0;
+
+ mPreviewing = true;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+void AppCbNotifier::setBurst(bool burst)
+{
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mBurstLock);
+
+ mBurst = burst;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCbNotifier::useVideoBuffers(bool useVideoBuffers)
+{
+ LOG_FUNCTION_NAME;
+#ifndef AMLOGIC_CAMERA_OVERLAY_SUPPORT
+ mUseVideoBuffers = useVideoBuffers;
+ CAMHAL_LOGDB("Set mUseVideoBuffers as %d",(uint32_t)useVideoBuffers);
+#endif
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+bool AppCbNotifier::getUseVideoBuffers()
+{
+ return mUseVideoBuffers;
+}
+
+void AppCbNotifier::setVideoRes(int width, int height)
+{
+ LOG_FUNCTION_NAME;
+
+ mVideoWidth = width;
+ mVideoHeight = height;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t AppCbNotifier::stopPreviewCallbacks()
+{
+ sp<MemoryHeapBase> heap;
+ sp<MemoryBase> buffer;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to stop preview callbacks without FrameProvider");
+ return -EINVAL;
+ }
+
+ if ( !mPreviewing )
+ {
+ return NO_INIT;
+ }
+
+ mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+
+ {
+ Mutex::Autolock lock(mLock);
+ mPreviewMemory->release(mPreviewMemory);
+ }
+
+ mPreviewing = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+
+}
+
+status_t AppCbNotifier::useMetaDataBufferMode(bool enable)
+{
+ mUseMetaDataBufferMode = enable;
+ CAMHAL_LOGDB("Set mUseMetaDataBufferMode as %d",(uint32_t)enable);
+ return NO_ERROR;
+}
+
+
+status_t AppCbNotifier::startRecording()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mRecordingLock);
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to start video recording without FrameProvider");
+ ret = -1;
+ }
+
+ if(mRecording)
+ {
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mFrameProvider->enableFrameNotification(CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+ mRecording = true;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+//Allocate metadata buffers for video recording
+status_t AppCbNotifier::initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, size_t length, size_t count, void *vidBufs)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
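+    // Two modes: in metadata mode a small video_metadata_t wrapper is allocated per
+    // camera buffer; otherwise a full NV21-sized heap is allocated (or, with overlay
+    // support, the camera buffer itself is reused). The maps built here are used by
+    // releaseRecordingFrame() to translate buffers back to adapter frames.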
+ if(mUseMetaDataBufferMode)
+ {
+ uint32_t *bufArr = NULL;
+ camera_memory_t* videoMedatadaBufferMemory = NULL;
+
+ if(NULL == buffers)
+ {
+ CAMHAL_LOGEA("Error! Video buffers are NULL");
+ return BAD_VALUE;
+ }
+ bufArr = (uint32_t *) buffers;
+
+ for (uint32_t i = 0; i < count; i++)
+ {
+ videoMedatadaBufferMemory = mRequestMemory(-1, sizeof(video_metadata_t), 1, NULL);
+ if((NULL == videoMedatadaBufferMemory) || (NULL == videoMedatadaBufferMemory->data))
+ {
+ CAMHAL_LOGEA("Error! Could not allocate memory for Video Metadata Buffers");
+ return NO_MEMORY;
+ }
+
+ mVideoMetadataBufferMemoryMap.add(bufArr[i], (uint32_t)(videoMedatadaBufferMemory));
+ mVideoMetadataBufferReverseMap.add((uint32_t)(videoMedatadaBufferMemory->data), bufArr[i]);
+ CAMHAL_LOGDB("bufArr[%d]=0x%x, videoMedatadaBufferMemory=0x%x, videoMedatadaBufferMemory->data=0x%x",
+ i, bufArr[i], (uint32_t)videoMedatadaBufferMemory, (uint32_t)videoMedatadaBufferMemory->data);
+
+ if (vidBufs != NULL)
+ {
+ uint32_t *vBufArr = (uint32_t *) vidBufs;
+ mVideoMap.add(bufArr[i], vBufArr[i]);
+ CAMHAL_LOGVB("bufArr[%d]=0x%x, vBuffArr[%d]=0x%x", i, bufArr[i], i, vBufArr[i]);
+ }
+ }
+ }
+ else
+ {
+ uint32_t *bufArr = NULL;
+ camera_memory_t* VideoCameraBufferMemoryBase = NULL;
+
+ if(NULL == buffers)
+ {
+ CAMHAL_LOGEA("Error! Video buffers are NULL");
+ return BAD_VALUE;
+ }
+ bufArr = (uint32_t *) buffers;
+
+ for (uint32_t i = 0; i < count; i++)
+ {
+ #ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
+ VideoCameraBufferMemoryBase = (camera_memory_t*)bufArr[i];
+ #else
+            VideoCameraBufferMemoryBase = mRequestMemory(-1, mVideoWidth*mVideoHeight*3/2, 1, NULL); // only NV21 or NV12 are supported
+ #endif
+ if((NULL == VideoCameraBufferMemoryBase) || (NULL == VideoCameraBufferMemoryBase->data))
+ {
+                CAMHAL_LOGEA("Error! Could not allocate memory for video buffers");
+ return NO_MEMORY;
+ }
+ mVideoHeaps.add(bufArr[i], (uint32_t)(VideoCameraBufferMemoryBase));
+ mVideoMap.add((uint32_t)(VideoCameraBufferMemoryBase->data),bufArr[i]);
+ CAMHAL_LOGDB("bufArr[%d]=0x%x, VideoCameraBufferMemoryBase=0x%x, VideoCameraBufferMemoryBase->data=0x%x",
+ i, bufArr[i], (uint32_t)VideoCameraBufferMemoryBase, (uint32_t)VideoCameraBufferMemoryBase->data);
+ }
+ }
+exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t AppCbNotifier::stopRecording()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mRecordingLock);
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to stop video recording without FrameProvider");
+ ret = -1;
+ }
+
+ if(!mRecording)
+ {
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mFrameProvider->disableFrameNotification(CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+ ///Release the shared video buffers
+ releaseSharedVideoBuffers();
+
+ mRecording = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
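+// Translate the buffer handed back by the recording client (a metadata wrapper or a
+// copied video heap) back to the adapter's original frame and return it to the
+// frame provider.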
+status_t AppCbNotifier::releaseRecordingFrame(const void* mem)
+{
+ status_t ret = NO_ERROR;
+ void *frame = NULL;
+
+ LOG_FUNCTION_NAME;
+ if ( NULL == mFrameProvider )
+ {
+        CAMHAL_LOGEA("Trying to release a recording frame without FrameProvider");
+ ret = -1;
+ }
+
+ if ( NULL == mem )
+ {
+ CAMHAL_LOGEA("Video Frame released is invalid");
+ ret = -1;
+ }
+
+ if( NO_ERROR != ret )
+ {
+ return ret;
+ }
+
+ if(mUseMetaDataBufferMode)
+ {
+ video_metadata_t *videoMetadataBuffer = (video_metadata_t *) mem ;
+ frame = (void*) mVideoMetadataBufferReverseMap.valueFor((uint32_t) videoMetadataBuffer);
+ CAMHAL_LOGVB("Releasing frame with videoMetadataBuffer=0x%x, videoMetadataBuffer->handle=0x%x & frame handle=0x%x\n",
+ videoMetadataBuffer, videoMetadataBuffer->handle, frame);
+ }
+ else
+ {
+ frame = (void *)mVideoMap.valueFor((uint32_t)mem);
+ //CAMHAL_LOGDB("release recording mem.0x%x, frame:0x%x",(uint32_t)mem,(uint32_t)frame);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mFrameProvider->returnFrame(frame, CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t AppCbNotifier::enableMsgType(int32_t msgType)
+{
+ if( msgType & (CAMERA_MSG_POSTVIEW_FRAME | CAMERA_MSG_PREVIEW_FRAME) ) {
+ //if( msgType & (CAMERA_MSG_PREVIEW_FRAME) ) {
+ mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+ return NO_ERROR;
+}
+
+status_t AppCbNotifier::disableMsgType(int32_t msgType)
+{
+ //if(!mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_POSTVIEW_FRAME)) {
+ if(!(msgType & (CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_POSTVIEW_FRAME))){
+ mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+ return NO_ERROR;
+}
+
+status_t AppCbNotifier::start()
+{
+ LOG_FUNCTION_NAME;
+ if(mNotifierState==AppCbNotifier::NOTIFIER_STARTED)
+ {
+ CAMHAL_LOGDA("AppCbNotifier already running");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+
+ ///Check whether initial conditions are met for us to start
+ ///A frame provider should be available, if not return error
+ if(!mFrameProvider)
+ {
+ ///AppCbNotifier not properly initialized
+ CAMHAL_LOGEA("AppCbNotifier not properly initialized - Frame provider is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ ///At least one event notifier should be available, if not return error
+ ///@todo Modify here when there is an array of event providers
+ if(!mEventProvider)
+ {
+ CAMHAL_LOGEA("AppCbNotifier not properly initialized - Event provider is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ ///AppCbNotifier not properly initialized
+ return NO_INIT;
+ }
+
+ mNotifierState = AppCbNotifier::NOTIFIER_STARTED;
+ CAMHAL_LOGDA(" --> AppCbNotifier NOTIFIER_STARTED \n");
+
+ gVEncoderQueue.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+
+}
+
+status_t AppCbNotifier::stop()
+{
+ LOG_FUNCTION_NAME;
+
+ if(mNotifierState!=AppCbNotifier::NOTIFIER_STARTED)
+ {
+ CAMHAL_LOGDA("AppCbNotifier already in stopped state");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+
+ {
+ Mutex::Autolock lock(mLock);
+
+ mNotifierState = AppCbNotifier::NOTIFIER_STOPPED;
+ CAMHAL_LOGDA(" --> AppCbNotifier NOTIFIER_STOPPED \n");
+ }
+
+ while(!gVEncoderQueue.isEmpty()) {
+ sp<Encoder_libjpeg> encoder = gVEncoderQueue.valueAt(0);
+ if(encoder.get()) {
+ encoder->cancel();
+ encoder->join();
+ encoder.clear();
+ }
+ gVEncoderQueue.removeItemsAt(0);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+
+/*--------------------NotificationHandler Class ENDS here-----------------------------*/
+
+
+
+};
diff --git a/vircam/V4LCamAdpt.cpp b/vircam/V4LCamAdpt.cpp
new file mode 100755
index 0000000..c818784
--- a/dev/null
+++ b/vircam/V4LCamAdpt.cpp
@@ -0,0 +1,2981 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file V4LCamAdpt.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "V4LCamAdpt"
+//reinclude because of a bug with the log macros
+#include <utils/Log.h>
+#include "DebugUtils.h"
+
+#include "V4LCamAdpt.h"
+#include "CameraHal.h"
+#include "ExCameraParameters.h"
+#include <signal.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/select.h>
+#include <linux/videodev.h>
+#include <sys/time.h>
+
+#include <cutils/properties.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include "CameraHal.h"
+
+
+//for private_handle_t TODO move out of private header
+#include <gralloc_priv.h>
+
+#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
+static int mDebugFps = 0;
+
+#define Q16_OFFSET 16
+
+#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
+
+#ifndef ARRAY_SIZE
+#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
+#endif
+
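+// Note: both branches below currently resolve to the same node, /dev/video11
+// (the virtual camera device); the per-index distinction is presumably a
+// placeholder to be differentiated later.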
+#define VIRTUAL_DEVICE_PATH(_sensor_index) \
+ (_sensor_index == (MAX_CAM_NUM_ADD_VCAM-1) ? "/dev/video11" : "/dev/video11")
+
+namespace android {
+
+#undef LOG_TAG
+///Maintain a separate tag for V4LCamAdpt logs to isolate issues
+#define LOG_TAG "V4LCamAdpt"
+
+//redefine because of a bug with the log macros
+#undef LOG_FUNCTION_NAME
+#undef LOG_FUNCTION_NAME_EXIT
+#define LOG_FUNCTION_NAME LOGV("%d: %s() ENTER", __LINE__, __FUNCTION__);
+#define LOG_FUNCTION_NAME_EXIT LOGV("%d: %s() EXIT", __LINE__, __FUNCTION__);
+
+//frames skipped before recalculating the framerate
+#define FPS_PERIOD 30
+
+#if 0
+#define V4L2_ROTATE_ID 0x980922 //V4L2_CID_ROTATE
+
+#define V4L2_CID_AUTO_FOCUS_STATUS (V4L2_CID_CAMERA_CLASS_BASE+30)
+#define V4L2_AUTO_FOCUS_STATUS_IDLE (0 << 0)
+#define V4L2_AUTO_FOCUS_STATUS_BUSY (1 << 0)
+#define V4L2_AUTO_FOCUS_STATUS_REACHED (1 << 1)
+#define V4L2_AUTO_FOCUS_STATUS_FAILED (1 << 2)
+#endif
+
+/*--------------------junk STARTS here-----------------------------*/
+#define SYSFILE_CAMERA_SET_PARA "/sys/class/vm/attr2"
+#define SYSFILE_CAMERA_SET_MIRROR "/sys/class/vm/mirror"
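+// Write a string to a sysfs node one character at a time; failures are only
+// logged and the return value is always 1.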
+static int writefile(char* path,char* content)
+{
+ FILE* fp = fopen(path, "w+");
+
+ LOGD("Write file %s(%p) content %s", path, fp, content);
+
+ if (fp) {
+ while( ((*content) != '\0') ) {
+ if (EOF == fputc(*content,fp))
+ LOGD("write char fail");
+ content++;
+ }
+
+ fclose(fp);
+ }
+ else
+ LOGD("open file fail\n");
+ return 1;
+}
+/*--------------------Camera Adapter Class STARTS here-----------------------------*/
+
+status_t V4LCamAdpt::initialize(CameraProperties::Properties* caps)
+{
+ LOG_FUNCTION_NAME;
+
+ char value[PROPERTY_VALUE_MAX];
+ char fileflag[8];
+ char const*filename = NULL;
+ property_get("debug.camera.showfps", value, "0");
+ mDebugFps = atoi(value);
+
+ int ret = NO_ERROR;
+
+ // Allocate memory for video info structure
+ mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo));
+ if(!mVideoInfo)
+ {
+ return NO_MEMORY;
+ }
+
+
+ filename = caps->get(CameraProperties::DEVICE_NAME);
+ if(filename == NULL){
+        CAMHAL_LOGEB("Failed to get device name for camera index %d", mSensorIndex);
+ return -EINVAL;
+ }
+ if ((mCameraHandle = open( filename, O_RDWR)) == -1)
+ {
+ CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
+ return -EINVAL;
+ }
+
+
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
+ if (ret < 0)
+ {
+ CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
+ return -EINVAL;
+ }
+
+ if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
+ {
+ CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
+ return -EINVAL;
+ }
+
+ if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING))
+ {
+ CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
+ return -EINVAL;
+ }
+
+ if (strcmp(caps->get(CameraProperties::FACING_INDEX), (const char *) android::ExCameraParameters::FACING_FRONT) == 0)
+ mbFrontCamera = true;
+ else
+ mbFrontCamera = false;
+ LOGD("mbFrontCamera=%d",mbFrontCamera);
+
+ // Initialize flags
+ mPreviewing = false;
+ mVideoInfo->isStreaming = false;
+ mRecording = false;
+ mZoomlevel = -1;
+ mEnableContiFocus = false;
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_RELEASE;
+ mFlashMode = FLASHLIGHT_OFF;
+ mPixelFormat = 0;
+
+ mPreviewWidth = 0 ;
+ mPreviewHeight = 0;
+ mCaptureWidth = 0;
+ mCaptureHeight = 0;
+
+ IoctlStateProbe();
+
+#ifdef AMLOGIC_VCAM_NONBLOCK_SUPPORT
+ int fps=0, fps_num=0;
+ char *fpsrange=(char *)calloc(32,sizeof(char));
+
+ ret = get_framerate(mCameraHandle, &fps, &fps_num);
+ if((fpsrange != NULL)&&(NO_ERROR == ret) && ( 0 !=fps_num )){
+ mPreviewFrameRate = fps/fps_num;
+ sprintf(fpsrange,"%s%d","10,",fps/fps_num);
+ CAMHAL_LOGDB("supported preview rates is %s\n", fpsrange);
+
+ mParams.set(CameraParameters::KEY_PREVIEW_FRAME_RATE,fps/fps_num);
+ mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,fpsrange);
+
+ mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,fpsrange);
+ mParams.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,fpsrange);
+ }else{
+ mPreviewFrameRate = 15;
+ sprintf(fpsrange,"%s%d","10,",mPreviewFrameRate);
+ CAMHAL_LOGDB("default preview rates is %s\n", fpsrange);
+
+ mParams.set(CameraParameters::KEY_PREVIEW_FRAME_RATE, mPreviewFrameRate);
+ mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,fpsrange);
+
+ mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,fpsrange);
+ mParams.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,fpsrange);
+    }
+    free(fpsrange);
+#endif
+
+ writefile((char*)SYSFILE_CAMERA_SET_PARA, (char*)"1");
+    //setting the mirror here will not take effect.
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
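+// Probe which V4L2 controls the device exposes (hflip, zoom, rotate, exposure,
+// white balance, flash, colour effect, banding, focus, focus-move status) and
+// record the result as bits in mIoctlSupport; default/min/max values for
+// exposure, white balance and banding are cached as well.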
+status_t V4LCamAdpt::IoctlStateProbe(void)
+{
+ struct v4l2_queryctrl qc;
+ int ret = 0;
+
+ LOG_FUNCTION_NAME;
+
+ mIoctlSupport = 0;
+
+ if(get_hflip_mode(mCameraHandle)==0){
+ mIoctlSupport |= IOCTL_MASK_HFLIP;
+ }else{
+ mIoctlSupport &= ~IOCTL_MASK_HFLIP;
+ }
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = V4L2_CID_ZOOM_ABSOLUTE;
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
+ || (qc.type != V4L2_CTRL_TYPE_INTEGER)){
+ mIoctlSupport &= ~IOCTL_MASK_ZOOM;
+ }else{
+ mIoctlSupport |= IOCTL_MASK_ZOOM;
+ }
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = V4L2_ROTATE_ID;
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
+ || (qc.type != V4L2_CTRL_TYPE_INTEGER)){
+ mIoctlSupport &= ~IOCTL_MASK_ROTATE;
+ }else{
+ mIoctlSupport |= IOCTL_MASK_ROTATE;
+ }
+
+ if(mIoctlSupport & IOCTL_MASK_ROTATE)
+        CAMHAL_LOGDB("camera %d supports capture rotation",mSensorIndex);
+ mRotateValue = 0;
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = V4L2_CID_EXPOSURE;
+
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0) ){
+ mIoctlSupport &= ~IOCTL_MASK_EXPOSURE;
+ mEVdef = 4;
+ mEVmin = 0;
+ mEVmax = 8;
+ }else{
+ mIoctlSupport |= IOCTL_MASK_EXPOSURE;
+ mEVdef = qc.default_value;
+ mEVmin = qc.minimum;
+ mEVmax = qc.maximum;
+ }
+ mEV = mEVdef;
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = V4L2_CID_DO_WHITE_BALANCE;
+
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0) ){
+ mIoctlSupport &= ~IOCTL_MASK_WB;
+ }else{
+ mIoctlSupport |= IOCTL_MASK_WB;
+ }
+
+ mWhiteBalance = qc.default_value;
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = V4L2_CID_BACKLIGHT_COMPENSATION;
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
+ || (qc.type != V4L2_CTRL_TYPE_MENU)){
+ mIoctlSupport &= ~IOCTL_MASK_FLASH;
+ }else{
+ mIoctlSupport |= IOCTL_MASK_FLASH;
+ }
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = V4L2_CID_COLORFX;
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
+ || (qc.type != V4L2_CTRL_TYPE_MENU)){
+ mIoctlSupport &= ~IOCTL_MASK_EFFECT;
+ }else{
+ mIoctlSupport |= IOCTL_MASK_EFFECT;
+ }
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
+ || (qc.type != V4L2_CTRL_TYPE_MENU)){
+ mIoctlSupport &= ~IOCTL_MASK_BANDING;
+ }else{
+ mIoctlSupport |= IOCTL_MASK_BANDING;
+ }
+ mAntiBanding = qc.default_value;
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = V4L2_CID_FOCUS_AUTO;
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
+ || (qc.type != V4L2_CTRL_TYPE_MENU)){
+ mIoctlSupport &= ~IOCTL_MASK_FOCUS;
+ }else{
+ mIoctlSupport |= IOCTL_MASK_FOCUS;
+ }
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = V4L2_CID_AUTO_FOCUS_STATUS;
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)){
+ mIoctlSupport &= ~IOCTL_MASK_FOCUS_MOVE;
+ }else{
+ mIoctlSupport |= IOCTL_MASK_FOCUS_MOVE;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
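+// Return a consumed buffer to the adapter: image buffers go back through the
+// release callback, while preview buffers are re-queued to the V4L2 driver
+// with VIDIOC_QBUF.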
+status_t V4LCamAdpt::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+{
+
+ status_t ret = NO_ERROR;
+ v4l2_buffer hbuf_query;
+ memset(&hbuf_query,0,sizeof(v4l2_buffer));
+
+ //LOGD("fillThisBuffer frameType=%d", frameType);
+ if (CameraFrame::IMAGE_FRAME == frameType)
+ {
+ //if (NULL != mEndImageCaptureCallback)
+ //mEndImageCaptureCallback(mEndCaptureData);
+ if (NULL != mReleaseImageBuffersCallback)
+ mReleaseImageBuffersCallback(mReleaseData);
+ return NO_ERROR;
+ }
+ if ( !mVideoInfo->isStreaming || !mPreviewing)
+ {
+ return NO_ERROR;
+ }
+
+ int i = mPreviewBufs.valueFor(( unsigned int )frameBuf);
+ if(i<0)
+ {
+ return BAD_VALUE;
+ }
+ if(nQueued>=mPreviewBufferCount)
+ {
+        CAMHAL_LOGEB("fill buffer error: queued count %d has reached the max preview buffer count %d",
+                nQueued,mPreviewBufferCount);
+ return BAD_VALUE;
+ }
+
+ hbuf_query.index = i;
+ hbuf_query.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ hbuf_query.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl(mCameraHandle, VIDIOC_QBUF, &hbuf_query);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Init: VIDIOC_QBUF %d Failed, errno=%d\n",i, errno);
+ return -1;
+ }
+ //CAMHAL_LOGEB("fillThis Buffer %d",i);
+ nQueued++;
+ return ret;
+
+}
+
+status_t V4LCamAdpt::setParameters(const CameraParameters &params)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t rtn = NO_ERROR;
+
+ // Update the current parameter set
+ mParams = params;
+
+ //check zoom value
+ int zoom = mParams.getInt(CameraParameters::KEY_ZOOM);
+ int maxzoom = mParams.getInt(CameraParameters::KEY_MAX_ZOOM);
+ char *p = (char *)mParams.get(CameraParameters::KEY_ZOOM_RATIOS);
+
+ if(zoom > maxzoom){
+ rtn = INVALID_OPERATION;
+        CAMHAL_LOGEB("Zoom parameter out of range (above max): zoom level %d, max level %d",zoom,maxzoom);
+ zoom = maxzoom;
+ mParams.set((const char*)CameraParameters::KEY_ZOOM, maxzoom);
+ }else if(zoom <0) {
+        rtn = INVALID_OPERATION;
+        CAMHAL_LOGEB("Zoom parameter out of range (below 0): zoom level %d, max level %d",zoom,maxzoom);
+        zoom = 0;
+ mParams.set((const char*)CameraParameters::KEY_ZOOM, zoom);
+ }
+
+ if ((p) && (zoom >= 0)&&(zoom!=mZoomlevel)) {
+ int z = (int)strtol(p, &p, 10);
+ int i = 0;
+ while (i < zoom) {
+ if (*p != ',') break;
+ z = (int)strtol(p+1, &p, 10);
+ i++;
+ }
+ CAMHAL_LOGDB("Change the zoom level---old:%d,new:%d",mZoomlevel,zoom);
+ mZoomlevel = zoom;
+ if(mIoctlSupport & IOCTL_MASK_ZOOM)
+ set_zoom_level(mCameraHandle,z);
+ notifyZoomSubscribers((mZoomlevel<0)?0:mZoomlevel,true);
+ }
+
+ int min_fps,max_fps;
+ const char *white_balance=NULL;
+ const char *exposure=NULL;
+ const char *effect=NULL;
+ //const char *night_mode=NULL;
+ const char *qulity=NULL;
+ const char *banding=NULL;
+ const char *flashmode=NULL;
+ const char *focusmode=NULL;
+ const char *supportfocusmode=NULL;
+
+ qulity=mParams.get(CameraParameters::KEY_JPEG_QUALITY);
+
+ flashmode = mParams.get(CameraParameters::KEY_FLASH_MODE);
+ if((mIoctlSupport & IOCTL_MASK_FLASH) && flashmode){
+ if(strcasecmp(flashmode, "torch")==0){
+ set_flash_mode(mCameraHandle, flashmode);
+ mFlashMode = FLASHLIGHT_TORCH;
+ }else if(strcasecmp(flashmode, "on")==0){
+ if( FLASHLIGHT_TORCH == mFlashMode){
+ set_flash_mode(mCameraHandle, "off");
+ }
+ mFlashMode = FLASHLIGHT_ON;
+ }else if(strcasecmp(flashmode, "off")==0){
+ set_flash_mode(mCameraHandle, flashmode);
+ mFlashMode = FLASHLIGHT_OFF;
+ }
+ }
+
+ exposure=mParams.get(CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ if( (mIoctlSupport & IOCTL_MASK_EXPOSURE) && exposure){
+ SetExposure(mCameraHandle,exposure);
+ }
+
+ white_balance=mParams.get(CameraParameters::KEY_WHITE_BALANCE);
+ if((mIoctlSupport & IOCTL_MASK_WB) && white_balance){
+ set_white_balance(mCameraHandle,white_balance);
+ }
+
+ effect=mParams.get(CameraParameters::KEY_EFFECT);
+ if( (mIoctlSupport & IOCTL_MASK_EFFECT) && effect){
+ set_effect(mCameraHandle,effect);
+ }
+
+ banding=mParams.get(CameraParameters::KEY_ANTIBANDING);
+ if((mIoctlSupport & IOCTL_MASK_BANDING) && banding){
+ set_banding(mCameraHandle,banding);
+ }
+
+ focusmode = mParams.get(CameraParameters::KEY_FOCUS_MODE);
+ if(focusmode) {
+ if(strcasecmp(focusmode,"fixed")==0)
+ cur_focus_mode = CAM_FOCUS_MODE_FIXED;
+ else if(strcasecmp(focusmode,"auto")==0)
+ cur_focus_mode = CAM_FOCUS_MODE_AUTO;
+ else if(strcasecmp(focusmode,"infinity")==0)
+ cur_focus_mode = CAM_FOCUS_MODE_INFINITY;
+ else if(strcasecmp(focusmode,"macro")==0)
+ cur_focus_mode = CAM_FOCUS_MODE_MACRO;
+ else if(strcasecmp(focusmode,"edof")==0)
+ cur_focus_mode = CAM_FOCUS_MODE_EDOF;
+ else if(strcasecmp(focusmode,"continuous-video")==0)
+ cur_focus_mode = CAM_FOCUS_MODE_CONTI_VID;
+ else if(strcasecmp(focusmode,"continuous-picture")==0)
+ cur_focus_mode = CAM_FOCUS_MODE_CONTI_PIC;
+ else
+ cur_focus_mode = CAM_FOCUS_MODE_FIXED;
+ }
+ supportfocusmode = mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+    if( (NULL != supportfocusmode) && (NULL != strstr(supportfocusmode, "continuous"))){
+ if(CAM_FOCUS_MODE_AUTO != cur_focus_mode_for_conti){
+ struct v4l2_control ctl;
+ if( (CAM_FOCUS_MODE_CONTI_VID != cur_focus_mode_for_conti ) &&
+ ( (CAM_FOCUS_MODE_AUTO == cur_focus_mode )
+ ||( CAM_FOCUS_MODE_CONTI_PIC == cur_focus_mode )
+ ||( CAM_FOCUS_MODE_CONTI_VID == cur_focus_mode ) )){
+ mEnableContiFocus = true;
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_CONTI_VID;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_CONTI_VID!\n");
+ }
+ mFocusWaitCount = FOCUS_PROCESS_FRAMES;
+ bFocusMoveState = true;
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_CONTI_VID;
+ }else if( (CAM_FOCUS_MODE_CONTI_VID != cur_focus_mode_for_conti)&&
+ (CAM_FOCUS_MODE_AUTO != cur_focus_mode) &&
+ ( CAM_FOCUS_MODE_CONTI_PIC != cur_focus_mode )&&
+ ( CAM_FOCUS_MODE_CONTI_VID != cur_focus_mode )){
+ mEnableContiFocus = false;
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_RELEASE;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_RELEASE!\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_RELEASE;
+ }else if( (CAM_FOCUS_MODE_INFINITY != cur_focus_mode_for_conti)&&
+ (CAM_FOCUS_MODE_INFINITY == cur_focus_mode) ){
+ mEnableContiFocus = false;
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_INFINITY;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_INFINITY!\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_INFINITY;
+ }
+ }
+ }else{
+ mEnableContiFocus = false;
+        CAMHAL_LOGDA("continuous focus mode is not supported!\n");
+ }
+
+ mParams.getPreviewFpsRange(&min_fps, &max_fps);
+ if((min_fps<0)||(max_fps<0)||(max_fps<min_fps))
+ {
+ rtn = INVALID_OPERATION;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return rtn;
+}
+
+
+void V4LCamAdpt::getParameters(CameraParameters& params)
+{
+ LOG_FUNCTION_NAME;
+
+ // Return the current parameter set
+ //params = mParams;
+ //that won't work. we might wipe out the existing params
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+///API to give the buffers to Adapter
+status_t V4LCamAdpt::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ switch(mode)
+ {
+ case CAMERA_PREVIEW:
+ ret = UseBuffersPreview(bufArr, num);
+ //maxQueueable = queueable;
+ break;
+ case CAMERA_IMAGE_CAPTURE:
+ ret = UseBuffersCapture(bufArr, num);
+ break;
+ case CAMERA_VIDEO:
+ //@warn Video capture is not fully supported yet
+ ret = UseBuffersPreview(bufArr, num);
+ //maxQueueable = queueable;
+ break;
+ default:
+ break;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCamAdpt::setBuffersFormat(int width, int height, int pixelformat)
+{
+ int ret = NO_ERROR;
+ CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, pixelformat);
+
+ mVideoInfo->width = width;
+ mVideoInfo->height = height;
+ mVideoInfo->framesizeIn = (width * height << 1);
+ mVideoInfo->formatIn = pixelformat;
+
+ mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->format.fmt.pix.width = width;
+ mVideoInfo->format.fmt.pix.height = height;
+ mVideoInfo->format.fmt.pix.pixelformat = pixelformat;
+
+ ret = ioctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Open: VIDIOC_S_FMT Failed: %s", strerror(errno));
+ LOGD("ret=%d", ret);
+ return ret;
+ }
+
+ return ret;
+}
+
+status_t V4LCamAdpt::getBuffersFormat(int &width, int &height, int &pixelformat)
+{
+ int ret = NO_ERROR;
+ struct v4l2_format format;
+
+ memset(&format, 0,sizeof(struct v4l2_format));
+
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ ret = ioctl(mCameraHandle, VIDIOC_G_FMT, &format);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Open: VIDIOC_G_FMT Failed: %s", strerror(errno));
+ LOGD("ret=%d", ret);
+ return ret;
+ }
+ width = format.fmt.pix.width;
+ height = format.fmt.pix.height;
+ pixelformat = format.fmt.pix.pixelformat;
+ CAMHAL_LOGDB("Get BufferFormat Width * Height %d x %d format 0x%x",
+ width, height, pixelformat);
+ return ret;
+}
+
+status_t V4LCamAdpt::UseBuffersPreview(void* bufArr, int num)
+{
+ int ret = NO_ERROR;
+
+ if(NULL == bufArr)
+ {
+ return BAD_VALUE;
+ }
+
+ int width, height;
+ mParams.getPreviewSize(&width, &height);
+
+ mPreviewWidth = width;
+ mPreviewHeight = height;
+
+ const char *pixfmtchar;
+ int pixfmt = V4L2_PIX_FMT_NV21;
+
+ pixfmtchar = mParams.getPreviewFormat();
+ if(strcasecmp( pixfmtchar, "yuv420p")==0){
+ pixfmt = V4L2_PIX_FMT_YVU420;
+ mPixelFormat =CameraFrame::PIXEL_FMT_YV12;
+ }else if(strcasecmp( pixfmtchar, "yuv420sp")==0){
+ pixfmt = V4L2_PIX_FMT_NV21;
+ mPixelFormat = CameraFrame::PIXEL_FMT_NV21;
+ }else if(strcasecmp( pixfmtchar, "yuv422")==0){
+ pixfmt = V4L2_PIX_FMT_YUYV;
+ mPixelFormat = CameraFrame::PIXEL_FMT_YUYV;
+ }
+
+ setBuffersFormat(width, height, pixfmt);
+    //First allocate adapter internal buffers at V4L level for the virtual camera
+ //These are the buffers from which we will copy the data into overlay buffers
+ /* Check if camera can handle NB_BUFFER buffers */
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = num;
+
+ ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ return ret;
+ }
+
+ for (int i = 0; i < num; i++) {
+
+ memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->mem[i] = mmap (0,
+ mVideoInfo->buf.length,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ mCameraHandle,
+ mVideoInfo->buf.m.offset);
+
+ if (mVideoInfo->mem[i] == MAP_FAILED) {
+ CAMHAL_LOGEB("Unable to map buffer (%s)", strerror(errno));
+ return -1;
+ }
+
+ uint32_t *ptr = (uint32_t*) bufArr;
+
+ //Associate each Camera internal buffer with the one from Overlay
+ LOGD("mPreviewBufs.add %#x, %d", ptr[i], i);
+ mPreviewBufs.add((int)ptr[i], i);
+
+ }
+
+ for(int i = 0;i < num; i++)
+ {
+ mPreviewIdxs.add(mPreviewBufs.valueAt(i),i);
+ }
+
+ // Update the preview buffer count
+ mPreviewBufferCount = num;
+
+ return ret;
+}
+
+status_t V4LCamAdpt::UseBuffersCapture(void* bufArr, int num)
+{
+ int ret = NO_ERROR;
+
+ if(NULL == bufArr)
+ {
+ return BAD_VALUE;
+ }
+
+ if (num != 1)
+ {
+ LOGD("----------------- UseBuffersCapture num=%d", num);
+ }
+
+ /* This will only be called right before taking a picture, so
+ * stop preview now so that we can set buffer format here.
+ */
+ LOGD("UseBuffersCapture stopPreview..");
+ this->stopPreview();
+
+ LOGD("UseBuffersCapture setBuffersFormat..");
+ int width, height;
+ mParams.getPictureSize(&width, &height);
+ mCaptureWidth = width;
+ mCaptureHeight = height;
+
+ if(mIoctlSupport & IOCTL_MASK_ROTATE){
+ int temp = 0;
+ mRotateValue = mParams.getInt(CameraParameters::KEY_ROTATION);
+ if((mRotateValue!=0)&&(mRotateValue!=90)&&(mRotateValue!=180)&&(mRotateValue!=270))
+ mRotateValue = 0;
+ if((mRotateValue==90)||(mRotateValue==270)){
+ temp = width;
+ width = height;
+ height = temp;
+ }
+ }
+ setBuffersFormat(width, height, DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT);
+
+ //First allocate adapter internal buffers at V4L level for Cam
+ //These are the buffers from which we will copy the data into display buffers
+ /* Check if camera can handle NB_BUFFER buffers */
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = num;
+
+ ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ return ret;
+ }
+
+ for (int i = 0; i < num; i++) {
+
+ memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->mem[i] = mmap (0,
+ mVideoInfo->buf.length,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ mCameraHandle,
+ mVideoInfo->buf.m.offset);
+
+ if (mVideoInfo->mem[i] == MAP_FAILED) {
+ CAMHAL_LOGEB("Unable to map buffer (%s)", strerror(errno));
+ return -1;
+ }
+
+ uint32_t *ptr = (uint32_t*) bufArr;
+ LOGV("UseBuffersCapture %#x", ptr[0]);
+ mCaptureBuf = (camera_memory_t*)ptr[0];
+ }
+
+ return ret;
+}
+
+status_t V4LCamAdpt::takePicture()
+{
+ LOG_FUNCTION_NAME;
+ if (createThread(beginPictureThread, this) == false)
+ return -1;
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+
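+// Auto-focus worker: trigger a one-shot focus, poll the control for up to
+// 1.5s (50 * 30ms) until the driver reports completion (or a non-EBUSY error),
+// then notify focus subscribers with the result.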
+int V4LCamAdpt::beginAutoFocusThread(void *cookie)
+{
+ V4LCamAdpt *c = (V4LCamAdpt *)cookie;
+ struct v4l2_control ctl;
+ int ret = -1;
+
+ if( c->mIoctlSupport & IOCTL_MASK_FOCUS){
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_AUTO;//c->cur_focus_mode;
+ ret = ioctl(c->mCameraHandle, VIDIOC_S_CTRL, &ctl);
+ for(int j=0; j<50; j++){
+            usleep(30000);//50 iterations * 30ms = 1.5s max
+ ret = ioctl(c->mCameraHandle, VIDIOC_G_CTRL, &ctl);
+ if( (0==ret) ||
+ ((ret < 0)&&(EBUSY != errno)) ){
+ break;
+ }
+ }
+ }
+
+ c->setState(CAMERA_CANCEL_AUTOFOCUS);
+ c->commitState();
+
+ if( (c->mIoctlSupport & IOCTL_MASK_FLASH)
+ &&(FLASHLIGHT_ON == c->mFlashMode)){
+ c->set_flash_mode( c->mCameraHandle, "off");
+ }
+ if(ret < 0) {
+ if( c->mIoctlSupport & IOCTL_MASK_FOCUS)
+ CAMHAL_LOGEA("AUTO FOCUS Failed");
+ c->notifyFocusSubscribers(false);
+ } else {
+ c->notifyFocusSubscribers(true);
+ }
+ // may need release auto focus mode at here.
+ return ret;
+}
+
+status_t V4LCamAdpt::autoFocus()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if( (mIoctlSupport & IOCTL_MASK_FLASH)
+ &&(FLASHLIGHT_ON == mFlashMode)){
+ set_flash_mode( mCameraHandle, "on");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_AUTO;
+ if (createThread(beginAutoFocusThread, this) == false)
+ {
+ ret = UNKNOWN_ERROR;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+status_t V4LCamAdpt::cancelAutoFocus()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+ struct v4l2_control ctl;
+
+ if( (mIoctlSupport & IOCTL_MASK_FOCUS) == 0x00 ){
+ return 0;
+ }
+
+ if ( !mEnableContiFocus){
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_RELEASE;
+ ret = ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl);
+ if(ret < 0) {
+ CAMHAL_LOGEA("AUTO FOCUS Failed");
+ }
+ }else if( CAM_FOCUS_MODE_AUTO == cur_focus_mode_for_conti){
+ if(CAM_FOCUS_MODE_INFINITY != cur_focus_mode){
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_CONTI_VID;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_CONTI_VID\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_CONTI_VID;
+ }else{
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_INFINITY;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_INFINITY\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_INFINITY;
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
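+// Queue every preview buffer that is not still referenced by a consumer,
+// start streaming (VIDIOC_STREAMON), re-enable continuous focus if needed,
+// and spawn the preview thread.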
+status_t V4LCamAdpt::startPreview()
+{
+ status_t ret = NO_ERROR;
+ int frame_count = 0,ret_c = 0;
+ void *frame_buf = NULL;
+ Mutex::Autolock lock(mPreviewBufsLock);
+
+ if(mPreviewing)
+ {
+ return BAD_VALUE;
+ }
+
+ setMirrorEffect();
+
+ if(mIoctlSupport & IOCTL_MASK_ROTATE){
+ set_rotate_value(mCameraHandle,0);
+ mRotateValue = 0;
+ }
+
+ nQueued = 0;
+ for (int i = 0; i < mPreviewBufferCount; i++)
+ {
+ frame_count = -1;
+ frame_buf = (void *)mPreviewBufs.keyAt(i);
+
+ if((ret_c = getFrameRefCount(frame_buf,CameraFrame::PREVIEW_FRAME_SYNC))>=0)
+ frame_count = ret_c;
+
+ //if((ret_c = getFrameRefCount(frame_buf,CameraFrame::VIDEO_FRAME_SYNC))>=0)
+ // frame_count += ret_c;
+
+ CAMHAL_LOGDB("startPreview--buffer address:0x%x, refcount:%d",
+ (uint32_t)frame_buf,frame_count);
+ if(frame_count>0)
+ continue;
+ //mVideoInfo->buf.index = i;
+ mVideoInfo->buf.index = mPreviewBufs.valueFor((uint32_t)frame_buf);
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ return -EINVAL;
+ }
+ CAMHAL_LOGDB("startPreview --length=%d, index:%d",
+ mVideoInfo->buf.length,mVideoInfo->buf.index);
+ nQueued++;
+ }
+
+ enum v4l2_buf_type bufType;
+ if (!mVideoInfo->isStreaming)
+ {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+#ifdef AMLOGIC_VCAM_NONBLOCK_SUPPORT
+ gettimeofday( &previewTime1, NULL);
+#endif
+ ret = ioctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->isStreaming = true;
+ }
+
+ if( mEnableContiFocus &&
+ (CAM_FOCUS_MODE_AUTO != cur_focus_mode_for_conti) &&
+ (CAM_FOCUS_MODE_INFINITY != cur_focus_mode_for_conti)){
+ struct v4l2_control ctl;
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_CONTI_VID;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_CONTI_VID!\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_CONTI_VID;
+ }
+ // Create and start preview thread for receiving buffers from V4L Camera
+ mPreviewThread = new PreviewThread(this);
+ CAMHAL_LOGDA("Created preview thread");
+ //Update the flag to indicate we are previewing
+ mPreviewing = true;
+ return ret;
+}
+
+status_t V4LCamAdpt::stopPreview()
+{
+ enum v4l2_buf_type bufType;
+ int ret = NO_ERROR;
+
+ Mutex::Autolock lock(mPreviewBufsLock);
+ if(!mPreviewing)
+ {
+ return NO_INIT;
+ }
+
+ mPreviewing = false;
+ mFocusMoveEnabled = false;
+ mPreviewThread->requestExitAndWait();
+ mPreviewThread.clear();
+
+
+ CAMHAL_LOGDA("stopPreview streamoff..\n");
+ if (mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->isStreaming = false;
+ }
+
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ nQueued = 0;
+ nDequeued = 0;
+
+ if( mEnableContiFocus &&
+ (CAM_FOCUS_MODE_AUTO != cur_focus_mode_for_conti) &&
+ (CAM_FOCUS_MODE_INFINITY != cur_focus_mode_for_conti)){
+ struct v4l2_control ctl;
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_RELEASE;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_RELEASE!\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_RELEASE;
+ }
+
+ LOGD("stopPreview unmap..");
+ /* Unmap buffers */
+ for (int i = 0; i < mPreviewBufferCount; i++){
+ if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0)
+ CAMHAL_LOGEA("Unmap failed");
+
+ }
+
+ LOGD("stopPreview clearexit..");
+ mPreviewBufs.clear();
+ mPreviewIdxs.clear();
+ return ret;
+
+}
+
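+// Dequeue one filled capture buffer with VIDIOC_DQBUF; returns the mmap'ed
+// buffer address and reports its index, or returns NULL on failure (index is
+// set to -1 when the driver returns EAGAIN in non-blocking mode).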
+char * V4LCamAdpt::GetFrame(int &index)
+{
+ int ret;
+
+ if(nQueued<=0){
+ CAMHAL_LOGEA("GetFrame: No buff for Dequeue");
+ return NULL;
+ }
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ /* DQ */
+ ret = ioctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ if(EAGAIN == errno){
+ index = -1;
+ }else{
+ CAMHAL_LOGEB("GetFrame: VIDIOC_DQBUF Failed,errno=%d\n",errno);
+ }
+ return NULL;
+ }
+ nDequeued++;
+ nQueued--;
+ index = mVideoInfo->buf.index;
+
+ return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
+}
+
+//API to get the frame size required to be allocated. This size is used to override the size passed
+//by camera service when VSTAB/VNF is turned ON for example
+status_t V4LCamAdpt::getFrameSize(size_t &width, size_t &height)
+{
+ status_t ret = NO_ERROR;
+
+ // Just return the current preview size, nothing more to do here.
+ mParams.getPreviewSize(( int * ) &width,
+ ( int * ) &height);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCamAdpt::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
+{
+ // We don't support meta data, so simply return
+ return NO_ERROR;
+}
+
+status_t V4LCamAdpt::getPictureBufferSize(size_t &length, size_t bufferCount)
+{
+ int width, height;
+ mParams.getPictureSize(&width, &height);
+ if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_RGB24){ // rgb24
+ length = width * height * 3;
+ }else if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ // 422I
+ length = width * height * 2;
+ }else if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_NV21){
+ length = width * height * 3/2;
+ }else{
+ length = width * height * 3;
+ }
+ return NO_ERROR;
+}
+
+static void debugShowFPS()
+{
+ static int mFrameCount = 0;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ mFrameCount++;
+ if (!(mFrameCount & 0x1F)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ LOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
+ // XXX: mFPS has the value we want
+}
+
+status_t V4LCamAdpt::recalculateFPS()
+{
+ float currentFPS;
+
+ mFrameCount++;
+
+ if ( ( mFrameCount % FPS_PERIOD ) == 0 )
+ {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFPSTime;
+ currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFPSTime = now;
+ mLastFrameCount = mFrameCount;
+
+ if ( 1 == mIter )
+ {
+ mFPS = currentFPS;
+ }
+ else
+ {
+ //cumulative moving average
+ mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter;
+ }
+
+ mLastFPS = mFPS;
+ mIter++;
+ }
+
+ return NO_ERROR;
+}
+
+void V4LCamAdpt::onOrientationEvent(uint32_t orientation, uint32_t tilt)
+{
+ //LOG_FUNCTION_NAME;
+
+ //LOG_FUNCTION_NAME_EXIT;
+}
+
+
+V4LCamAdpt::V4LCamAdpt(size_t sensor_index)
+{
+ LOG_FUNCTION_NAME;
+
+ mbDisableMirror = false;
+ mSensorIndex = sensor_index;
+ mCameraHandle = -1;
+
+#ifdef AMLOGIC_TWO_CH_UVC
+ mCamEncodeHandle = -1;
+#endif
+ CAMHAL_LOGDB("mVideoInfo=%p\n", mVideoInfo);
+ mVideoInfo = NULL;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+V4LCamAdpt::~V4LCamAdpt()
+{
+ LOG_FUNCTION_NAME;
+
+ // Close the camera handle and free the video info structure
+ close(mCameraHandle);
+#ifdef AMLOGIC_TWO_CH_UVC
+ if(mCamEncodeHandle > 0){
+ close(mCamEncodeHandle);
+ }
+#endif
+
+ if (mVideoInfo)
+ {
+ free(mVideoInfo);
+ mVideoInfo = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/* Preview Thread */
+// ---------------------------------------------------------------------------
+
+int V4LCamAdpt::previewThread()
+{
+ status_t ret = NO_ERROR;
+ int width, height;
+ CameraFrame frame;
+ unsigned delay;
+ unsigned uFrameInvals;
+
+ if (mPreviewing)
+ {
+ int index = 0;
+
+#ifdef AMLOGIC_VCAM_NONBLOCK_SUPPORT
+ uFrameInvals = (unsigned)(1000000.0f / float(mPreviewFrameRate));
+ delay = uFrameInvals >>2;
+#else
+ int previewFrameRate = mParams.getPreviewFrameRate();
+ delay = (unsigned)(1000000.0f / float(previewFrameRate));
+#endif
+
+#ifdef AMLOGIC_USB_CAMERA_DECREASE_FRAMES
+ usleep(delay*5);
+#else
+ usleep(delay);
+#endif
+
+ char *fp = this->GetFrame(index);
+#ifdef AMLOGIC_VCAM_NONBLOCK_SUPPORT
+ if((-1==index)||!fp)
+ {
+ return 0;
+ }
+#else
+ if(!fp){
+ int previewFrameRate = mParams.getPreviewFrameRate();
+ delay = (unsigned)(1000000.0f / float(previewFrameRate)) >> 1;
+            CAMHAL_LOGEB("Preview thread failed to get a frame, sleeping %d us",delay);
+ usleep(delay);
+ return BAD_VALUE;
+ }
+#endif
+
+ uint8_t* ptr = (uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(index));
+
+ if (!ptr)
+ {
+ CAMHAL_LOGEA("Preview thread mPreviewBufs error!");
+ return BAD_VALUE;
+ }
+
+#ifdef AMLOGIC_VCAM_NONBLOCK_SUPPORT
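+        // Non-blocking pacing: if this frame arrived more than ~10ms earlier than
+        // the nominal frame interval, requeue it and skip delivery so the preview
+        // rate stays close to the sensor rate.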
+ gettimeofday( &previewTime2, NULL);
+ unsigned bwFrames = previewTime2.tv_sec - previewTime1.tv_sec;
+ bwFrames = bwFrames*1000000 + previewTime2.tv_usec -previewTime1.tv_usec;
+ if( bwFrames + 10000 < uFrameInvals ) {
+            //CTS (Android 4.1) leaves 20ms of slack; we leave only 10ms, since Android may cut into that 20ms
+ CAMHAL_LOGDB("bwFrames=%d, uFrameInvals=%d\n", bwFrames, uFrameInvals);
+ fillThisBuffer( ptr, CameraFrame::PREVIEW_FRAME_SYNC);
+ return 0;
+ }else{
+ memcpy( &previewTime1, &previewTime2, sizeof( struct timeval));
+ }
+#endif
+
+ uint8_t* dest = NULL;
+#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
+ camera_memory_t* VideoCameraBufferMemoryBase = (camera_memory_t*)ptr;
+ dest = (uint8_t*)VideoCameraBufferMemoryBase->data; //ptr;
+#else
+ private_handle_t* gralloc_hnd = (private_handle_t*)ptr;
+ dest = (uint8_t*)gralloc_hnd->base; //ptr;
+#endif
+ uint8_t* src = (uint8_t*) fp;
+ if((mPreviewWidth <= 0)||(mPreviewHeight <= 0)){
+ mParams.getPreviewSize(&width, &height);
+ }else{
+ width = mPreviewWidth;
+ height = mPreviewHeight;
+ }
+
+ if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ // 422I
+ frame.mLength = width*height*2;
+ memcpy(dest,src,frame.mLength);
+ }else if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_NV21){ //420sp
+
+ frame.mLength = width*height*3/2;
+ if ( CameraFrame::PIXEL_FMT_NV21 == mPixelFormat){
+ memcpy(dest,src,frame.mLength);
+ }else{
+ yv12_adjust_memcpy(dest,src,width,height);
+ }
+
+ }else{ //default case
+ frame.mLength = width*height*3/2;
+ memcpy(dest,src,frame.mLength);
+ }
+
+ frame.mFrameMask |= CameraFrame::PREVIEW_FRAME_SYNC;
+
+ if(mRecording){
+ frame.mFrameMask |= CameraFrame::VIDEO_FRAME_SYNC;
+ }
+ frame.mBuffer = ptr; //dest
+ frame.mAlignment = width;
+ frame.mOffset = 0;
+ frame.mYuv[0] = 0;
+ frame.mYuv[1] = 0;
+ frame.mWidth = width;
+ frame.mHeight = height;
+ frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+ frame.mPixelFmt = mPixelFormat;
+ ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret)
+ LOGE("setInitFrameRefCount err=%d", ret);
+ else
+ ret = sendFrameToSubscribers(&frame);
+ //LOGD("previewThread /sendFrameToSubscribers ret=%d", ret);
+ }
+ if( (mIoctlSupport & IOCTL_MASK_FOCUS_MOVE) && mFocusMoveEnabled ){
+ getFocusMoveStatus();
+ }
+
+ return ret;
+}
+
+/* Image Capture Thread */
+// ---------------------------------------------------------------------------
+int V4LCamAdpt::GenExif(ExifElementsTable* exiftable)
+{
+ char exifcontent[256];
+
+ //Make
+ exiftable->insertElement("Make",
+ (const char*)mParams.get(ExCameraParameters::KEY_EXIF_MAKE));
+
+ //Model
+ exiftable->insertElement("Model",
+ (const char*)mParams.get(ExCameraParameters::KEY_EXIF_MODEL));
+
+ //Image orientation
+ int orientation = mParams.getInt(CameraParameters::KEY_ROTATION);
+    //convert 0/90/180/270 to the EXIF orientation values 1/6/3/8
+    LOGE("get orientation %d",orientation);
+ if(orientation == 0)
+ orientation = 1;
+ else if(orientation == 90)
+ orientation = 6;
+ else if(orientation == 180)
+ orientation = 3;
+ else if(orientation == 270)
+ orientation = 8;
+
+
+ //Image width,height
+ int width,height;
+ if((mCaptureWidth <= 0)||(mCaptureHeight <= 0)){
+ mParams.getPictureSize(&width, &height);
+ }else{
+ width = mCaptureWidth;
+ height = mCaptureHeight;
+ }
+
+ if(mIoctlSupport & IOCTL_MASK_ROTATE){
+ orientation = 1;
+ if((mRotateValue==90)||(mRotateValue==270)){
+ int temp = width;
+ width = height;
+ height = temp;
+ }
+ }
+
+ sprintf(exifcontent,"%d",orientation);
+ //LOGD("exifcontent %s",exifcontent);
+ exiftable->insertElement("Orientation",(const char*)exifcontent);
+
+ sprintf(exifcontent,"%d",width);
+ exiftable->insertElement("ImageWidth",(const char*)exifcontent);
+ sprintf(exifcontent,"%d",height);
+ exiftable->insertElement("ImageLength",(const char*)exifcontent);
+
+ //focal length RATIONAL
+ float focallen = mParams.getFloat(CameraParameters::KEY_FOCAL_LENGTH);
+ if(focallen >= 0)
+ {
+ int focalNum = focallen*1000;
+ int focalDen = 1000;
+ sprintf(exifcontent,"%d/%d",focalNum,focalDen);
+ exiftable->insertElement("FocalLength",(const char*)exifcontent);
+ }
+
+ //datetime of photo
+ time_t times;
+ {
+ time(&times);
+ struct tm tmstruct;
+ tmstruct = *(localtime(&times)); //convert to local time
+
+ //date&time
+ strftime(exifcontent, 30, "%Y:%m:%d %H:%M:%S", &tmstruct);
+ exiftable->insertElement("DateTime",(const char*)exifcontent);
+ }
+
+ //gps date stamp & time stamp
+ times = mParams.getInt(CameraParameters::KEY_GPS_TIMESTAMP);
+ if(times != -1)
+ {
+ struct tm tmstruct;
+ tmstruct = *(gmtime(&times));//convert to standard time
+ //date
+ strftime(exifcontent, 20, "%Y:%m:%d", &tmstruct);
+ exiftable->insertElement("GPSDateStamp",(const char*)exifcontent);
+ //time
+ sprintf(exifcontent,"%d/%d,%d/%d,%d/%d",
+ tmstruct.tm_hour,1,tmstruct.tm_min,1,tmstruct.tm_sec,1);
+ exiftable->insertElement("GPSTimeStamp",(const char*)exifcontent);
+ }
+
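+	// GPS coordinates are written as EXIF rationals in degrees/minutes/seconds;
+	// the sign is folded into the matching Ref tag ("N"/"S" for latitude, "E"/"W" for longitude).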
+ //gps latitude info
+ char* latitudestr = (char*)mParams.get(CameraParameters::KEY_GPS_LATITUDE);
+ if(latitudestr!=NULL)
+ {
+ int offset = 0;
+ float latitude = mParams.getFloat(CameraParameters::KEY_GPS_LATITUDE);
+ if(latitude < 0.0)
+ {
+ offset = 1;
+ latitude*= (float)(-1);
+ }
+
+ int latitudedegree = latitude;
+ float latitudeminuts = (latitude-(float)latitudedegree)*60;
+ int latitudeminuts_int = latitudeminuts;
+ float latituseconds = (latitudeminuts-(float)latitudeminuts_int)*60+0.5;
+ int latituseconds_int = latituseconds;
+ sprintf(exifcontent,"%d/%d,%d/%d,%d/%d",
+ latitudedegree,1,latitudeminuts_int,1,latituseconds_int,1);
+ exiftable->insertElement("GPSLatitude",(const char*)exifcontent);
+
+ exiftable->insertElement("GPSLatitudeRef",(offset==1)?"S":"N");
+ }
+
+ //gps Longitude info
+ char* longitudestr = (char*)mParams.get(CameraParameters::KEY_GPS_LONGITUDE);
+ if(longitudestr!=NULL)
+ {
+ int offset = 0;
+ float longitude = mParams.getFloat(CameraParameters::KEY_GPS_LONGITUDE);
+ if(longitude < 0.0)
+ {
+ offset = 1;
+ longitude*= (float)(-1);
+ }
+
+ int longitudedegree = longitude;
+ float longitudeminuts = (longitude-(float)longitudedegree)*60;
+ int longitudeminuts_int = longitudeminuts;
+ float longitudeseconds = (longitudeminuts-(float)longitudeminuts_int)*60+0.5;
+ int longitudeseconds_int = longitudeseconds;
+ sprintf(exifcontent,"%d/%d,%d/%d,%d/%d",
+ longitudedegree,1,longitudeminuts_int,1,longitudeseconds_int,1);
+ exiftable->insertElement("GPSLongitude",(const char*)exifcontent);
+
+		exiftable->insertElement("GPSLongitudeRef",(offset==1)?"W":"E");
+ }
+
+ //gps Altitude info
+ char* altitudestr = (char*)mParams.get(CameraParameters::KEY_GPS_ALTITUDE);
+ if(altitudestr!=NULL)
+ {
+ int offset = 0;
+ float altitude = mParams.getFloat(CameraParameters::KEY_GPS_ALTITUDE);
+ if(altitude < 0.0)
+ {
+ offset = 1;
+ altitude*= (float)(-1);
+ }
+
+ int altitudenum = altitude*1000;
+ int altitudedec= 1000;
+ sprintf(exifcontent,"%d/%d",altitudenum,altitudedec);
+ exiftable->insertElement("GPSAltitude",(const char*)exifcontent);
+
+ sprintf(exifcontent,"%d",offset);
+ exiftable->insertElement("GPSAltitudeRef",(const char*)exifcontent);
+ }
+
+ //gps processing method
+ char* processmethod =
+ (char*)mParams.get(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+ if(processmethod!=NULL)
+ {
+ memset(exifcontent,0,sizeof(exifcontent));
+		char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };//"ASCII" prefix
+ memcpy(exifcontent,ExifAsciiPrefix,8);
+ memcpy(exifcontent+8,processmethod,strlen(processmethod));
+ exiftable->insertElement("GPSProcessingMethod",(const char*)exifcontent);
+ }
+ return 1;
+}
+
+/*static*/ int V4LCamAdpt::beginPictureThread(void *cookie)
+{
+ V4LCamAdpt *c = (V4LCamAdpt *)cookie;
+ return c->pictureThread();
+}
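+// One-shot capture path: queue the single capture buffer, start streaming, grab one
+// frame into mCaptureBuf, stop streaming, restart preview, and send the image frame
+// (with its EXIF table attached) to the subscribers.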
+
+int V4LCamAdpt::pictureThread()
+{
+ status_t ret = NO_ERROR;
+ int width, height;
+ CameraFrame frame;
+ int dqTryNum = 3;
+
+ setMirrorEffect();
+
+ if( (mIoctlSupport & IOCTL_MASK_FLASH)
+ &&(FLASHLIGHT_ON == mFlashMode)){
+ set_flash_mode( mCameraHandle, "on");
+ }
+ if (true)
+ {
+ mVideoInfo->buf.index = 0;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ if (ret < 0)
+ {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ return -EINVAL;
+ }
+ nQueued ++;
+
+ if(mIoctlSupport & IOCTL_MASK_ROTATE){
+ set_rotate_value(mCameraHandle,mRotateValue);
+ }
+
+ enum v4l2_buf_type bufType;
+ if (!mVideoInfo->isStreaming)
+ {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = ioctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s",
+ strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->isStreaming = true;
+ }
+
+ int index = 0;
+ char *fp = this->GetFrame(index);
+
+#ifdef AMLOGIC_VCAM_NONBLOCK_SUPPORT
+ while(!fp && (-1 == index) ){
+ usleep( 10000 );
+ fp = this->GetFrame(index);
+ }
+#else
+ if(!fp)
+ {
+ CAMHAL_LOGDA("GetFrame fail, this may stop preview\n");
+ return 0; //BAD_VALUE;
+ }
+#endif
+ if (!mCaptureBuf || !mCaptureBuf->data)
+ {
+ return 0; //BAD_VALUE;
+ }
+
+ int width, height;
+ uint8_t* dest = (uint8_t*)mCaptureBuf->data;
+ uint8_t* src = (uint8_t*) fp;
+ if((mCaptureWidth <= 0)||(mCaptureHeight <= 0)){
+ mParams.getPictureSize(&width, &height);
+ }else{
+ width = mCaptureWidth;
+ height = mCaptureHeight;
+ }
+
+ if((mRotateValue==90)||(mRotateValue==270)){
+ int temp = 0;
+ temp = width;
+ width = height;
+ height = temp;
+ }
+
+ LOGD("pictureThread mCaptureBuf=%#x dest=%#x fp=%#x width=%d height=%d",
+ (uint32_t)mCaptureBuf, (uint32_t)dest, (uint32_t)fp, width, height);
+ LOGD("length=%d bytesused=%d index=%d",
+ mVideoInfo->buf.length, mVideoInfo->buf.bytesused, index);
+
+ if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_RGB24){ // rgb24
+
+ frame.mLength = width*height*3;
+ frame.mQuirks = CameraFrame::ENCODE_RAW_RGB24_TO_JPEG
+ | CameraFrame::HAS_EXIF_DATA;
+ memcpy(dest,src,mVideoInfo->buf.length);
+
+ }else if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ // 422I
+
+ frame.mLength = width*height*2;
+ frame.mQuirks = CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG
+ | CameraFrame::HAS_EXIF_DATA;
+ memcpy(dest, src, mVideoInfo->buf.length);
+
+ }else if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_NV21){ // 420sp
+
+ frame.mLength = width*height*3/2;
+ frame.mQuirks = CameraFrame::ENCODE_RAW_YUV420SP_TO_JPEG
+ | CameraFrame::HAS_EXIF_DATA;
+ memcpy(dest,src,mVideoInfo->buf.length);
+
+ }else{ //default case
+
+ frame.mLength = width*height*3;
+ frame.mQuirks = CameraFrame::ENCODE_RAW_RGB24_TO_JPEG
+ | CameraFrame::HAS_EXIF_DATA;
+ memcpy(dest, src, mVideoInfo->buf.length);
+
+ }
+
+ notifyShutterSubscribers();
+	//TODO: is this the correct point to call this?
+ if (NULL != mEndImageCaptureCallback)
+ mEndImageCaptureCallback(mEndCaptureData);
+
+ //gen exif message
+ ExifElementsTable* exiftable = new ExifElementsTable();
+ GenExif(exiftable);
+
+ frame.mFrameMask = CameraFrame::IMAGE_FRAME;
+ frame.mFrameType = CameraFrame::IMAGE_FRAME;
+ frame.mBuffer = mCaptureBuf->data;
+ frame.mCookie2 = (void*)exiftable;
+ frame.mAlignment = width;
+ frame.mOffset = 0;
+ frame.mYuv[0] = 0;
+ frame.mYuv[1] = 0;
+ frame.mWidth = width;
+ frame.mHeight = height;
+ frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ if (mVideoInfo->isStreaming)
+ {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+ if (ret < 0)
+ {
+ CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s",
+ strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->isStreaming = false;
+ }
+
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ nQueued = 0;
+ nDequeued = 0;
+
+ /* Unmap buffers */
+ if (munmap(mVideoInfo->mem[0], mVideoInfo->buf.length) < 0)
+ CAMHAL_LOGEA("Unmap failed");
+
+
+ }
+
+ if( (mIoctlSupport & IOCTL_MASK_FLASH)
+ &&(FLASHLIGHT_ON == mFlashMode)){
+ set_flash_mode( mCameraHandle, "off");
+ }
+ if(mIoctlSupport & IOCTL_MASK_ROTATE){
+ set_rotate_value(mCameraHandle,0);
+ mRotateValue = 0;
+ }
+
+ // start preview thread again after stopping it in UseBuffersCapture
+ {
+ Mutex::Autolock lock(mPreviewBufferLock);
+ UseBuffersPreview(mPreviewBuffers, mPreviewBufferCount);
+ }
+ startPreview();
+
+ ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret)
+ LOGE("setInitFrameRefCount err=%d", ret);
+ else
+ ret = sendFrameToSubscribers(&frame);
+
+ return ret;
+}
+
+
+status_t V4LCamAdpt::disableMirror(bool bDisable) {
+ LOGD("disableMirror %d",bDisable);
+ mbDisableMirror = bDisable;
+ setMirrorEffect();
+ return NO_ERROR;
+}
+
+status_t V4LCamAdpt::setMirrorEffect() {
+
+ bool bEnable = mbFrontCamera&&(!mbDisableMirror);
+ LOGD("setmirror effect %d",bEnable);
+
+ if(mIoctlSupport & IOCTL_MASK_HFLIP){
+ if(set_hflip_mode(mCameraHandle,bEnable))
+ writefile((char *)SYSFILE_CAMERA_SET_MIRROR,(char*)(bEnable?"1":"0"));
+ }else{
+ writefile((char *)SYSFILE_CAMERA_SET_MIRROR,(char*)(bEnable?"1":"0"));
+ }
+ return NO_ERROR;
+}
+
+
+
+// ---------------------------------------------------------------------------
+
+
+bool V4LCamAdpt::isPreviewDevice(int camera_fd)
+{
+ int ret;
+ int index;
+ struct v4l2_fmtdesc fmtdesc;
+
+ for(index=0;;index++){
+ memset(&fmtdesc, 0, sizeof(struct v4l2_fmtdesc));
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ fmtdesc.index = index;
+ ret = ioctl( camera_fd, VIDIOC_ENUM_FMT, &fmtdesc);
+ if((V4L2_PIX_FMT_YUYV==fmtdesc.pixelformat) ||
+ (V4L2_PIX_FMT_NV21==fmtdesc.pixelformat)){
+ return true;
+ }
+ if(ret < 0)
+ break;
+ }
+
+ return false;
+}
+
+int V4LCamAdpt::getValidFrameSize( int pixel_format, char *framesize)
+{
+ struct v4l2_frmsizeenum frmsize;
+ int i=0;
+ char tempsize[12];
+ framesize[0] = '\0';
+
+ memset(&frmsize,0,sizeof(v4l2_frmsizeenum));
+ for(i=0;;i++){
+ frmsize.index = i;
+ frmsize.pixel_format = pixel_format;
+ if(ioctl(mCameraHandle, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0){
+ if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
+
+ snprintf(tempsize, sizeof(tempsize), "%dx%d,",
+ frmsize.discrete.width, frmsize.discrete.height);
+ strcat(framesize, tempsize);
+
+ }
+ else
+ break;
+ }
+ else
+ break;
+ }
+
+ if(framesize[0] == '\0')
+ return -1;
+ else
+ return 0;
+}
+
+int V4LCamAdpt::getCameraOrientation(bool frontcamera, char* property)
+{
+ int degree = -1;
+ if(frontcamera){
+ if (property_get("ro.camera.orientation.front", property, NULL) > 0){
+ degree = atoi(property);
+ }
+ }else{
+ if (property_get("ro.camera.orientation.back", property, NULL) > 0){
+ degree = atoi(property);
+ }
+ }
+ if((degree != 0)&&(degree != 90)
+ &&(degree != 180)&&(degree != 270))
+ degree = -1;
+ return degree;
+}
+
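+// Enumerate a V4L2 menu control: fills menu_items with a comma-separated list of
+// entries and def_menu_item with the default entry. Returns the number of entries,
+// 0 if the control is not a menu type (or its default cannot be queried), and -1 if
+// the control is unsupported.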
+static int enumCtrlMenu(int camera_fd, struct v4l2_queryctrl *qi,
+ char* menu_items, char*def_menu_item)
+{
+ struct v4l2_queryctrl qc;
+ struct v4l2_querymenu qm;
+ int ret;
+ int mode_count = -1;
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = qi->id;
+ ret = ioctl (camera_fd, VIDIOC_QUERYCTRL, &qc);
+ if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED) ){
+ CAMHAL_LOGDB("camera handle %d can't support this ctrl",camera_fd);
+ return mode_count;
+ }else if( qc.type != V4L2_CTRL_TYPE_MENU){
+ CAMHAL_LOGDB("this ctrl of camera handle %d can't support menu type",camera_fd);
+ return 0;
+ }else{
+ memset(&qm, 0, sizeof(qm));
+ qm.id = qi->id;
+ qm.index = qc.default_value;
+ if(ioctl (camera_fd, VIDIOC_QUERYMENU, &qm) < 0){
+ return 0;
+ } else {
+ strcpy(def_menu_item, (char*)qm.name);
+ }
+ int index = 0;
+ mode_count = 0;
+
+ for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
+ memset(&qm, 0, sizeof(struct v4l2_querymenu));
+ qm.id = qi->id;
+ qm.index = index;
+ if(ioctl (camera_fd, VIDIOC_QUERYMENU, &qm) < 0){
+ continue;
+ } else {
+ if(mode_count>0)
+ strcat(menu_items, ",");
+ strcat( menu_items, (char*)qm.name);
+ mode_count++;
+ }
+ }
+ }
+ return mode_count;
+}
+
+bool V4LCamAdpt::getCameraWhiteBalance( char* wb_modes, char*def_wb_mode)
+{
+ struct v4l2_queryctrl qc;
+ int item_count=0;
+
+ memset( &qc, 0, sizeof(qc));
+
+ qc.id = V4L2_CID_DO_WHITE_BALANCE;
+ item_count = enumCtrlMenu( mCameraHandle, &qc, wb_modes, def_wb_mode);
+
+ if(0 >= item_count){
+ strcpy( wb_modes, "auto,daylight,incandescent,fluorescent");
+ strcpy(def_wb_mode, "auto");
+ }
+ return true;
+}
+
+bool V4LCamAdpt::getCameraBanding(char* banding_modes, char*def_banding_mode)
+{
+ struct v4l2_queryctrl qc;
+ int item_count=0;
+ char *tmpbuf=NULL;
+
+ memset( &qc, 0, sizeof(qc));
+ qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
+
+ item_count = enumCtrlMenu( mCameraHandle, &qc, banding_modes, def_banding_mode);
+
+ if(0 >= item_count){
+ strcpy( banding_modes, "50hz,60hz");
+ strcpy( def_banding_mode, "50hz");
+ }
+ return true;
+}
+
+#define MAX_LEVEL_FOR_EXPOSURE 16
+#define MIN_LEVEL_FOR_EXPOSURE 3
+
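+// Map the driver's absolute exposure range onto an EV range centered on zero.
+// Falls back to [-4, 4] with step 1 when the control is missing or the number of
+// exposure levels is outside [MIN_LEVEL_FOR_EXPOSURE, MAX_LEVEL_FOR_EXPOSURE].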
+bool V4LCamAdpt::getCameraExposureValue(int &min, int &max,
+ int &step, int &def)
+{
+ struct v4l2_queryctrl qc;
+ int ret=0;
+ int level = 0;
+ int middle = 0;
+
+ memset( &qc, 0, sizeof(qc));
+
+ qc.id = V4L2_CID_EXPOSURE;
+ ret = ioctl( mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if(ret<0){
+ CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
+ min = -4;
+ max = 4;
+ def = 0;
+ step = 1;
+ return true;
+ }
+
+ if(0 < qc.step)
+ level = ( qc.maximum - qc.minimum + 1 )/qc.step;
+
+ if((level > MAX_LEVEL_FOR_EXPOSURE)
+ || (level < MIN_LEVEL_FOR_EXPOSURE)){
+ min = -4;
+ max = 4;
+ def = 0;
+ step = 1;
+ CAMHAL_LOGDB("not in[min,max], min=%d, max=%d, def=%d, step=%d\n",
+ min, max, def, step);
+ return true;
+ }
+
+ middle = (qc.minimum+qc.maximum)/2;
+ min = qc.minimum - middle;
+ max = qc.maximum - middle;
+ def = qc.default_value - middle;
+ step = qc.step;
+
+ return true;
+}
+
+bool V4LCamAdpt::getCameraAutoFocus( char* focus_mode_str, char*def_focus_mode)
+{
+ struct v4l2_queryctrl qc;
+ struct v4l2_querymenu qm;
+ bool auto_focus_enable = false;
+ int menu_num = 0;
+ int mode_count = 0;
+
+ if((!focus_mode_str)||(!def_focus_mode)){
+ CAMHAL_LOGEA("focus mode str buf error");
+ return auto_focus_enable;
+ }
+
+
+ memset(&qc, 0, sizeof(struct v4l2_queryctrl));
+ qc.id = V4L2_CID_FOCUS_AUTO;
+ menu_num = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED)
+ ||( menu_num < 0) || (qc.type != V4L2_CTRL_TYPE_MENU)){
+ auto_focus_enable = false;
+ CAMHAL_LOGDB("camera handle %d can't support auto focus",mCameraHandle);
+ }else {
+ memset(&qm, 0, sizeof(qm));
+ qm.id = V4L2_CID_FOCUS_AUTO;
+ qm.index = qc.default_value;
+ strcpy(def_focus_mode, "auto");
+
+ for (int index = qc.minimum; index <= qc.maximum; index+= qc.step) {
+ memset(&qm, 0, sizeof(struct v4l2_querymenu));
+ qm.id = V4L2_CID_FOCUS_AUTO;
+ qm.index = index;
+ if(ioctl (mCameraHandle, VIDIOC_QUERYMENU, &qm) < 0){
+ continue;
+ } else {
+ if(mode_count>0)
+ strcat(focus_mode_str, ",");
+ strcat(focus_mode_str, (char*)qm.name);
+ mode_count++;
+ }
+ }
+ if(mode_count>0)
+ auto_focus_enable = true;
+ }
+ return auto_focus_enable;
+}
+
+bool V4LCamAdpt::getCameraHandle()
+{
+ return mCameraHandle;
+}
+
+bool V4LCamAdpt::isVolatileCam()
+{
+
+ char *bus_info;
+ bool ret = true;
+ int size = 0;
+
+ size = sizeof(mVideoInfo->cap.bus_info);
+ bus_info = (char *)calloc( 1, size);
+ memset( bus_info, 0, size);
+
+ strncpy( bus_info, (char *)&mVideoInfo->cap.bus_info, size);
+ if( strstr( bus_info, "usb")){
+ ret = true;
+		CAMHAL_LOGDA("usb device\n");
+ }else{
+ ret = false;
+		CAMHAL_LOGDA("not usb device\n");
+ }
+ CAMHAL_LOGDB("bus_info=%s\n", bus_info);
+
+ if(bus_info){
+ free(bus_info);
+ bus_info = NULL;
+ }
+
+ return ret;
+
+}
+bool V4LCamAdpt::isFrontCam( int camera_id )
+{
+	bool bFrontCam = false;
+
+ if (camera_id == 0) {
+#ifdef AMLOGIC_BACK_CAMERA_SUPPORT
+ bFrontCam = false;
+#elif defined(AMLOGIC_FRONT_CAMERA_SUPPORT)
+ bFrontCam = true;
+#elif defined(AMLOGIC_USB_CAMERA_SUPPORT)
+ bFrontCam = true;
+#else//nothing defined, so assume camera 0 is the front camera and camera 1 is the back camera
+ bFrontCam = true;
+#endif
+ } else if (camera_id == 1) {
+#if defined(AMLOGIC_BACK_CAMERA_SUPPORT) && defined(AMLOGIC_FRONT_CAMERA_SUPPORT)
+ bFrontCam = true;
+#else//nothing defined, fall back to the default assumption
+ bFrontCam = false;
+#endif
+ }
+ return bFrontCam;
+ //return true;// virtual camera is a front camera.
+}
+
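+// Populate the CameraProperties table for the virtual camera: probe the device through
+// a temporary V4LCamAdpt instance and fall back to safe defaults for any capability the
+// driver does not report.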
+extern "C" void newloadCaps(int camera_id, CameraProperties::Properties* params) {
+ const char DEFAULT_BRIGHTNESS[] = "50";
+ const char DEFAULT_CONTRAST[] = "100";
+ const char DEFAULT_IPP[] = "ldc-nsf";
+ const char DEFAULT_GBCE[] = "disable";
+ const char DEFAULT_ISO_MODE[] = "auto";
+ const char DEFAULT_PICTURE_FORMAT[] = "jpeg";
+ const char DEFAULT_PICTURE_SIZE[] = "640x480";
+ const char PREVIEW_FORMAT_420SP[] = "yuv420sp";
+ const char PREVIEW_FORMAT_422I[] = "yuv422i-yuyv";
+ const char DEFAULT_PREVIEW_SIZE[] = "640x480";
+ const char DEFAULT_NUM_PREV_BUFS[] = "6";
+ const char DEFAULT_NUM_PIC_BUFS[] = "1";
+ const char DEFAULT_MAX_FOCUS_AREAS[] = "1";
+ const char DEFAULT_SATURATION[] = "100";
+ const char DEFAULT_SCENE_MODE[] = "auto";
+ const char DEFAULT_SHARPNESS[] = "100";
+ const char DEFAULT_VSTAB[] = "false";
+ const char DEFAULT_VSTAB_SUPPORTED[] = "true";
+ const char DEFAULT_MAX_FD_HW_FACES[] = "0";
+ const char DEFAULT_MAX_FD_SW_FACES[] = "0";
+ const char DEFAULT_FOCAL_LENGTH_PRIMARY[] = "4.31";
+ const char DEFAULT_FOCAL_LENGTH_SECONDARY[] = "1.95";
+ const char DEFAULT_HOR_ANGLE[] = "54.8";
+ const char DEFAULT_VER_ANGLE[] = "42.5";
+ const char DEFAULT_AE_LOCK[] = "false";
+ const char DEFAULT_AWB_LOCK[] = "false";
+ const char DEFAULT_MAX_NUM_METERING_AREAS[] = "0";
+ const char DEFAULT_LOCK_SUPPORTED[] = "true";
+ const char DEFAULT_LOCK_UNSUPPORTED[] = "false";
+ const char DEFAULT_VIDEO_SIZE[] = "640x480";
+ const char DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "640x480";
+
+ bool bFrontCam = false;
+ int tempid = camera_id;
+ int camera_fd = -1;
+ V4LCamAdpt v(camera_id);
+
+ const char *device_name = VIRTUAL_DEVICE_PATH(camera_id);
+ if(device_name){
+ params->set(CameraProperties::DEVICE_NAME, device_name);
+ }else{
+ CAMHAL_LOGDA("no virtual camera device node\n");
+ params->set(CameraProperties::DEVICE_NAME, "/dev/video11");
+ }
+
+ int iret = 0;
+ if(v.initialize( params ) != NO_ERROR){
+ CAMHAL_LOGEA("Unable to create or initialize V4LCamAdpt!!");
+ }
+
+#ifdef AMLOGIC_USB_CAMERA_SUPPORT
+ params->set(CameraProperties::RELOAD_WHEN_OPEN, "1");
+#else
+ params->set(CameraProperties::RELOAD_WHEN_OPEN, "0");
+#endif
+
+ bFrontCam = v.isFrontCam( camera_id );
+ CAMHAL_LOGDB("%s\n", bFrontCam?"front cam":"back cam");
+	//should be changed when the screen orientation changes.
+ int degree = -1;
+ char property[64];
+ memset(property,0,sizeof(property));
+ if(bFrontCam == true) {
+ params->set(CameraProperties::FACING_INDEX, ExCameraParameters::FACING_FRONT);
+ if(v.getCameraOrientation(bFrontCam,property)>=0){
+ params->set(CameraProperties::ORIENTATION_INDEX,property);
+ }else{
+#ifdef AMLOGIC_USB_CAMERA_SUPPORT
+ params->set(CameraProperties::ORIENTATION_INDEX,"0");
+#else
+ params->set(CameraProperties::ORIENTATION_INDEX,"270");
+#endif
+ }
+ } else {
+ params->set(CameraProperties::FACING_INDEX, ExCameraParameters::FACING_BACK);
+ if( v.getCameraOrientation(bFrontCam,property)>=0){
+ params->set(CameraProperties::ORIENTATION_INDEX,property);
+ }else{
+#ifdef AMLOGIC_USB_CAMERA_SUPPORT
+ params->set(CameraProperties::ORIENTATION_INDEX,"180");
+#else
+ params->set(CameraProperties::ORIENTATION_INDEX,"90");
+#endif
+ }
+ }
+
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS,"yuv420sp,yuv420p"); //yuv420p for cts
+ if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ // 422I
+ //params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS,PREVIEW_FORMAT_422I);
+ params->set(CameraProperties::PREVIEW_FORMAT,PREVIEW_FORMAT_422I);
+ }else if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_NV21){ //420sp
+ //params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS,PREVIEW_FORMAT_420SP);
+ params->set(CameraProperties::PREVIEW_FORMAT,PREVIEW_FORMAT_420SP);
+ }else{ //default case
+ //params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS,PREVIEW_FORMAT_420SP);
+ params->set(CameraProperties::PREVIEW_FORMAT,PREVIEW_FORMAT_420SP);
+ }
+
+#ifdef AMLOGIC_VCAM_NONBLOCK_SUPPORT
+ int fps=0, fps_num=0;
+ int ret;
+ char *fpsrange=(char *)calloc(32,sizeof(char));
+
+ ret = v.enumFramerate(&fps, &fps_num);
+ if((fpsrange != NULL)&&(NO_ERROR == ret) && ( 0 !=fps_num )){
+ sprintf(fpsrange,"%s%d","10,",fps/fps_num);
+		CAMHAL_LOGDA("previewThread will use O_NONBLOCK operation\n");
+
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, fpsrange);
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, fps/fps_num);
+
+ memset( fpsrange, 0, 32*sizeof(char));
+ sprintf(fpsrange,"%s%d","10000,",fps*1000/fps_num);
+ params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, fpsrange);
+ params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, fpsrange);
+
+ memset( fpsrange, 0, 32*sizeof(char));
+ sprintf(fpsrange,"(%s%d)","5000,",fps*1000/fps_num);
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, fpsrange);
+ memset( fpsrange, 0, 32*sizeof(char));
+ sprintf(fpsrange,"%s%d","5000,",fps*1000/fps_num);
+ params->set(CameraProperties::FRAMERATE_RANGE, fpsrange);
+ }else{
+ if(NO_ERROR != ret)
+			CAMHAL_LOGDA("sensor driver needs to implement VIDIOC_G_PARM!!!\n");
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, "10,15");
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, "15");
+
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, "(5000,26623)");
+ params->set(CameraProperties::FRAMERATE_RANGE, "5000,26623");
+ params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, "10000,15000");
+ params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, "10000,15000");
+ }
+#else
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, "10,15");
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, "15");
+
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, "(5000,26623)");
+ params->set(CameraProperties::FRAMERATE_RANGE, "5000,26623");
+ params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, "10000,15000");
+ params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, "10000,15000");
+#endif
+
+ //get preview size & set
+ char *sizes = (char *) calloc (1, 1024);
+ if(!sizes){
+ CAMHAL_LOGEA("Alloc string buff error!");
+ return;
+ }
+
+ memset(sizes,0,1024);
+ uint32_t preview_format = DEFAULT_PREVIEW_PIXEL_FORMAT;
+ if (!v.getValidFrameSize( preview_format, sizes)) {
+ int len = strlen(sizes);
+ unsigned int supported_w = 0, supported_h = 0,w = 0,h = 0;
+ if(len>1){
+ if(sizes[len-1] == ',')
+ sizes[len-1] = '\0';
+ }
+
+ char small_size[8] = "176x144"; //for cts
+ if(strstr(sizes,small_size)==NULL){
+ if((len+sizeof(small_size))<(1024-1)){
+ strcat(sizes,",");
+ strcat(sizes,small_size);
+ }
+ }
+
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, sizes);
+
+ char * b = (char *)sizes;
+ while(b != NULL){
+ if (sscanf(b, "%dx%d", &supported_w, &supported_h) != 2){
+ break;
+ }
+ if((supported_w*supported_h)>(w*h)){
+ w = supported_w;
+ h = supported_h;
+ }
+ b = strchr(b, ',');
+ if(b)
+ b++;
+ }
+ if((w>0)&&(h>0)){
+ memset(sizes, 0, 1024);
+ sprintf(sizes,"%dx%d",w,h);
+ }
+ //char * b = strrchr(sizes, ',');
+ //if (b)
+ // b++;
+ //else
+ // b = sizes;
+ params->set(CameraProperties::PREVIEW_SIZE, sizes);
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES,
+ "640x480,352x288,176x144");
+ params->set(CameraProperties::PREVIEW_SIZE,"640x480");
+ }
+
+ params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, DEFAULT_PICTURE_FORMAT);
+ params->set(CameraProperties::PICTURE_FORMAT,DEFAULT_PICTURE_FORMAT);
+ params->set(CameraProperties::JPEG_QUALITY, 90);
+
+	//must contain at least two sizes, one of which is "0x0"
+ params->set(CameraProperties::SUPPORTED_THUMBNAIL_SIZES, "180x160,0x0");
+ params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, "180x160");
+ params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, 90);
+
+ //get & set picture size
+ memset(sizes,0,1024);
+ uint32_t picture_format = DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT;
+ CAMHAL_LOGDB("default-picture-format=%d", DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT);
+ if (!v.getValidFrameSize( picture_format, sizes)) {
+ int len = strlen(sizes);
+ unsigned int supported_w = 0, supported_h = 0,w = 0,h = 0;
+ if(len>1){
+ if(sizes[len-1] == ',')
+ sizes[len-1] = '\0';
+ }
+
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, sizes);
+
+ char * b = (char *)sizes;
+ while(b != NULL){
+ if (sscanf(b, "%dx%d", &supported_w, &supported_h) != 2){
+ break;
+ }
+ if((supported_w*supported_h)>(w*h)){
+ w = supported_w;
+ h = supported_h;
+ }
+ b = strchr(b, ',');
+ if(b)
+ b++;
+ }
+ if((w>0)&&(h>0)){
+ memset(sizes, 0, 1024);
+ sprintf(sizes,"%dx%d",w,h);
+ }
+ //char * b = strrchr(sizes, ',');
+ //if (b)
+ // b++;
+ //else
+ // b = sizes;
+ params->set(CameraProperties::PICTURE_SIZE, sizes);
+ }
+ else
+ {
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, "640x480");
+ params->set(CameraProperties::PICTURE_SIZE,"640x480");
+ }
+ if(sizes){
+ free(sizes);
+ sizes = NULL;
+ }
+
+ char *focus_mode = (char *) calloc (1, 256);
+ char * def_focus_mode = (char *) calloc (1, 64);
+ if((focus_mode)&&(def_focus_mode)){
+ memset(focus_mode,0,256);
+ memset(def_focus_mode,0,64);
+ if(v.getCameraAutoFocus( focus_mode,def_focus_mode)) {
+ params->set(CameraProperties::SUPPORTED_FOCUS_MODES, focus_mode);
+ params->set(CameraProperties::FOCUS_MODE, def_focus_mode);
+ }else {
+ params->set(CameraProperties::SUPPORTED_FOCUS_MODES, "fixed");
+ params->set(CameraProperties::FOCUS_MODE, "fixed");
+ }
+ }else{
+ params->set(CameraProperties::SUPPORTED_FOCUS_MODES, "fixed");
+ params->set(CameraProperties::FOCUS_MODE, "fixed");
+ }
+ if(focus_mode){
+ free(focus_mode);
+ focus_mode = NULL;
+ }
+ if(def_focus_mode){
+ free(def_focus_mode);
+ def_focus_mode = NULL;
+ }
+
+ char *banding_mode = (char *) calloc (1, 256);
+ char *def_banding_mode = (char *) calloc (1, 64);
+ if((banding_mode)&&(def_banding_mode)){
+ memset(banding_mode,0,256);
+ memset(def_banding_mode,0,64);
+
+ v.getCameraBanding(banding_mode, def_banding_mode);
+ params->set(CameraProperties::SUPPORTED_ANTIBANDING, banding_mode);
+ params->set(CameraProperties::ANTIBANDING, def_banding_mode);
+ CAMHAL_LOGDB("def_banding=%s, banding=%s\n", def_banding_mode, banding_mode);
+ }else{
+ params->set(CameraProperties::SUPPORTED_ANTIBANDING, "50hz,60hz");
+ params->set(CameraProperties::ANTIBANDING, "50hz");
+ CAMHAL_LOGDA("banding default value\n");
+ }
+ if(banding_mode){
+ free(banding_mode);
+ banding_mode = NULL;
+ }
+ if(def_banding_mode){
+ free(def_banding_mode);
+ def_banding_mode = NULL;
+ }
+
+ params->set(CameraProperties::FOCAL_LENGTH, "4.31");
+
+ params->set(CameraProperties::HOR_ANGLE,"54.8");
+ params->set(CameraProperties::VER_ANGLE,"42.5");
+
+ char *wb_mode = (char *) calloc (1, 256);
+ char *def_wb_mode = (char *) calloc (1, 64);
+
+
+ if( wb_mode && def_wb_mode){
+ memset(wb_mode, 0, 256);
+ memset(def_wb_mode, 0, 64);
+ v.getCameraWhiteBalance( wb_mode, def_wb_mode);
+ params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, wb_mode);
+ params->set(CameraProperties::WHITEBALANCE, def_wb_mode);
+ }else{
+
+
+ params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, "auto");
+ params->set(CameraProperties::WHITEBALANCE, "auto");
+ }
+
+ if(wb_mode){
+ free(wb_mode);
+ wb_mode = NULL;
+ }
+ if(def_wb_mode){
+ free(def_wb_mode);
+ def_wb_mode = NULL;
+ }
+
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK, DEFAULT_AWB_LOCK);
+
+ params->set(CameraProperties::SUPPORTED_EFFECTS, "none,negative,sepia");
+ params->set(CameraProperties::EFFECT, "none");
+
+ char *flash_mode = (char *) calloc (1, 256);
+ char *def_flash_mode = (char *) calloc (1, 64);
+ if((flash_mode)&&(def_flash_mode)){
+ memset(flash_mode,0,256);
+ memset(def_flash_mode,0,64);
+ if (v.get_flash_mode( flash_mode,def_flash_mode)) {
+ params->set(CameraProperties::SUPPORTED_FLASH_MODES, flash_mode);
+ params->set(CameraProperties::FLASH_MODE, def_flash_mode);
+ CAMHAL_LOGDB("def_flash_mode=%s, flash_mode=%s\n",
+ def_flash_mode, flash_mode);
+ }
+ }
+ if (flash_mode) {
+ free(flash_mode);
+ flash_mode = NULL;
+ }
+ if (def_flash_mode) {
+ free(def_flash_mode);
+ def_flash_mode = NULL;
+ }
+
+ //params->set(CameraParameters::KEY_SUPPORTED_SCENE_MODES,"auto,night,snow");
+ //params->set(CameraParameters::KEY_SCENE_MODE,"auto");
+
+ params->set(CameraProperties::EXPOSURE_MODE, "auto");
+ params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, "auto");
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK, DEFAULT_AE_LOCK);
+
+ int min=0, max =0, def=0, step =0;
+ v.getCameraExposureValue( min, max, step, def);
+ params->set(CameraProperties::SUPPORTED_EV_MAX, max);
+ params->set(CameraProperties::SUPPORTED_EV_MIN, min);
+ params->set(CameraProperties::EV_COMPENSATION, def);
+ params->set(CameraProperties::SUPPORTED_EV_STEP, step);
+
+ //don't support digital zoom now
+
+ params->set(CameraProperties::ZOOM_SUPPORTED,"false");
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED,"false");
+ params->set(CameraProperties::SUPPORTED_ZOOM_RATIOS,"100");
+	params->set(CameraProperties::SUPPORTED_ZOOM_STAGES,0); //treat the zoom ratios as an array; the maximum zoom is the last index
+ params->set(CameraProperties::ZOOM, 0);//default should be 0
+
+ params->set(CameraProperties::SUPPORTED_ISO_VALUES, "auto");
+ params->set(CameraProperties::ISO_MODE, DEFAULT_ISO_MODE);
+
+ params->set(CameraProperties::SUPPORTED_IPP_MODES, DEFAULT_IPP);
+ params->set(CameraProperties::IPP, DEFAULT_IPP);
+
+ params->set(CameraProperties::SUPPORTED_SCENE_MODES, "auto");
+ params->set(CameraProperties::SCENE_MODE, DEFAULT_SCENE_MODE);
+
+ params->set(CameraProperties::BRIGHTNESS, DEFAULT_BRIGHTNESS);
+ params->set(CameraProperties::CONTRAST, DEFAULT_CONTRAST);
+ params->set(CameraProperties::GBCE, DEFAULT_GBCE);
+ params->set(CameraProperties::SATURATION, DEFAULT_SATURATION);
+ params->set(CameraProperties::SHARPNESS, DEFAULT_SHARPNESS);
+ params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
+ params->set(CameraProperties::VSTAB_SUPPORTED, DEFAULT_VSTAB_SUPPORTED);
+ params->set(CameraProperties::MAX_FD_HW_FACES, DEFAULT_MAX_FD_HW_FACES);
+ params->set(CameraProperties::MAX_FD_SW_FACES, DEFAULT_MAX_FD_SW_FACES);
+ params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
+ params->set(CameraProperties::REQUIRED_IMAGE_BUFS, DEFAULT_NUM_PIC_BUFS);
+#ifdef AMLOGIC_ENABLE_VIDEO_SNAPSHOT
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, "true");
+#else
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, "false");
+#endif
+
+ params->set(CameraProperties::VIDEO_SIZE, DEFAULT_VIDEO_SIZE);
+ params->set(CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO, DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO);
+
+
+ CAMHAL_LOGDA("newloadCaps end!\n");
+}
+
+#ifdef AMLOGIC_VCAM_NONBLOCK_SUPPORT
+/* Gets the frame rate reported by the video device (not necessarily the real rate -
+ * consider it a maximum value).
+ * args: camera_fd, fps (frame rate denominator out), fps_num (numerator out)
+ * returns: VIDIOC_G_PARM ioctl result value
+*/
+int V4LCamAdpt::get_framerate ( int camera_fd, int *fps, int *fps_num)
+{
+ int ret=0;
+
+ struct v4l2_streamparm streamparm;
+
+ streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ ret = ioctl( camera_fd,VIDIOC_G_PARM,&streamparm);
+ if (ret < 0)
+ {
+ CAMHAL_LOGDA("VIDIOC_G_PARM - Unable to get timeperframe");
+ }
+ else
+ {
+ if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
+			// it seems the numerator is always 1, but we don't assume that here :-)
+ *fps = streamparm.parm.capture.timeperframe.denominator;
+ *fps_num = streamparm.parm.capture.timeperframe.numerator;
+ }
+ }
+
+ return ret;
+}
+
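+// Probe the best available frame rate: for a small set of candidate pixel formats and
+// resolutions, enumerate the frame intervals via VIDIOC_ENUM_FRAMEINTERVALS (discrete,
+// continuous or stepwise) and keep the highest rate found. Returns -1 if no usable
+// rate (>1 fps) is reported.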
+int V4LCamAdpt::enumFramerate ( int *fps, int *fps_num)
+{
+ int ret=0;
+ int framerate=0;
+ int temp_rate=0;
+ struct v4l2_frmivalenum fival;
+ int i,j;
+
+ int pixelfmt_tbl[]={
+ V4L2_PIX_FMT_NV21,
+ V4L2_PIX_FMT_YVU420,
+ };
+ struct v4l2_frmsize_discrete resolution_tbl[]={
+ {640, 480},
+ {320, 240},
+ };
+
+ for( i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++){
+ for( j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++){
+
+ memset(&fival, 0, sizeof(fival));
+ fival.index = 0;
+ fival.pixel_format = pixelfmt_tbl[i];
+ fival.width = resolution_tbl[j].width;
+ fival.height = resolution_tbl[j].height;
+
+ while ((ret = ioctl( mCameraHandle,
+ VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0)
+ {
+ if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
+ {
+ temp_rate = fival.discrete.denominator/fival.discrete.numerator;
+ if(framerate < temp_rate){
+ framerate = temp_rate;
+ }
+ }
+ else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)
+ {
+ framerate = fival.stepwise.max.denominator
+ /fival.stepwise.max.numerator;
+ CAMHAL_LOGDB("pixelfmt=%d,resolution:%dx%d,"
+ "FRAME TYPE is continuous,step=%d/%d s\n",
+ pixelfmt_tbl[i],
+ resolution_tbl[j].width,
+ resolution_tbl[j].height,
+ fival.stepwise.max.numerator,
+ fival.stepwise.max.denominator);
+ break;
+ }
+ else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE)
+ {
+ CAMHAL_LOGDB("pixelfmt=%d,resolution:%dx%d,"
+ "FRAME TYPE is step wise,step=%d/%d s\n",
+ pixelfmt_tbl[i],
+ resolution_tbl[j].width,
+ resolution_tbl[j].height,
+ fival.stepwise.step.numerator,
+ fival.stepwise.step.denominator);
+ framerate = fival.stepwise.max.denominator
+ /fival.stepwise.max.numerator;
+ break;
+ }
+
+ fival.index++;
+ }
+ }
+ }
+
+ *fps = framerate;
+ *fps_num = 1;
+
+ CAMHAL_LOGDB("enum framerate=%d\n", framerate);
+ if( framerate <= 1){
+ return -1;
+ }
+
+ return 0;
+}
+#endif
+
+
+int V4LCamAdpt::set_white_balance(int camera_fd,const char *swb)
+{
+ int ret = 0;
+ struct v4l2_control ctl;
+ if(camera_fd<0)
+ return -1;
+
+ ctl.id = V4L2_CID_DO_WHITE_BALANCE;
+
+ if(strcasecmp(swb,"auto")==0)
+ ctl.value=CAM_WB_AUTO;
+ else if(strcasecmp(swb,"daylight")==0)
+ ctl.value=CAM_WB_DAYLIGHT;
+ else if(strcasecmp(swb,"incandescent")==0)
+ ctl.value=CAM_WB_INCANDESCENCE;
+ else if(strcasecmp(swb,"fluorescent")==0)
+ ctl.value=CAM_WB_FLUORESCENT;
+ else if(strcasecmp(swb,"cloudy-daylight")==0)
+ ctl.value=CAM_WB_CLOUD;
+ else if(strcasecmp(swb,"shade")==0)
+ ctl.value=CAM_WB_SHADE;
+ else if(strcasecmp(swb,"twilight")==0)
+ ctl.value=CAM_WB_TWILIGHT;
+ else if(strcasecmp(swb,"warm-fluorescent")==0)
+ ctl.value=CAM_WB_WARM_FLUORESCENT;
+
+ if(mWhiteBalance == ctl.value){
+ return 0;
+ }else{
+ mWhiteBalance = ctl.value;
+ }
+ ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
+ if(ret<0)
+ CAMHAL_LOGEB("AMLOGIC CAMERA Set white balance fail: %s. ret=%d", strerror(errno),ret);
+ return ret ;
+}
+
+status_t V4LCamAdpt::getFocusMoveStatus()
+{
+ struct v4l2_control ctl;
+ int ret;
+
+ if( (cur_focus_mode != CAM_FOCUS_MODE_CONTI_VID) &&
+ (cur_focus_mode != CAM_FOCUS_MODE_CONTI_PIC) &&
+ (cur_focus_mode != CAM_FOCUS_MODE_AUTO)){
+ mFocusMoveEnabled = false;
+ return 0;
+ }
+
+ mFocusWaitCount --;
+ if(mFocusWaitCount >= 0){
+ return 0;
+ }
+ mFocusWaitCount = 0;
+
+ memset( &ctl, 0, sizeof(ctl));
+ ctl.id =V4L2_CID_AUTO_FOCUS_STATUS;
+ ret = ioctl(mCameraHandle, VIDIOC_G_CTRL, &ctl);
+ if ( 0 > ret ){
+ CAMHAL_LOGDA("V4L2_CID_AUTO_FOCUS_STATUS failed\n");
+ return -EINVAL;
+ }
+
+ if( ctl.value == V4L2_AUTO_FOCUS_STATUS_BUSY ){
+ if(!bFocusMoveState){
+ bFocusMoveState = true;
+ notifyFocusMoveSubscribers(FOCUS_MOVE_START);
+ }
+ }else {
+ mFocusWaitCount = FOCUS_PROCESS_FRAMES;
+ if(bFocusMoveState){
+ bFocusMoveState = false;
+ notifyFocusMoveSubscribers(FOCUS_MOVE_STOP);
+ }
+ }
+
+ return ctl.value;
+}
+
+extern "C" int V4LCamAdpt::SetExposure(int camera_fd,const char *sbn)
+{
+ int ret = 0;
+ struct v4l2_control ctl;
+ int level;
+
+ if(camera_fd<0)
+ return -1;
+
+ level = atoi(sbn);
+ if(mEV == level){
+ return 0;
+ }else{
+ mEV = level;
+ }
+
+ memset(&ctl, 0, sizeof(ctl));
+
+ ctl.id = V4L2_CID_EXPOSURE;
+ ctl.value = level + (mEVmax - mEVmin)/2;
+
+ ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
+ if(ret<0)
+ CAMHAL_LOGEB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d",
+ strerror(errno),ret);
+
+ return ret ;
+}
+
+int V4LCamAdpt::set_effect(int camera_fd,const char *sef)
+{
+ int ret = 0;
+ struct v4l2_control ctl;
+ if(camera_fd<0)
+ return -1;
+
+ memset(&ctl, 0, sizeof(ctl));
+ ctl.id = V4L2_CID_COLORFX;
+
+ if(strcasecmp(sef,"none")==0)
+ ctl.value=CAM_EFFECT_ENC_NORMAL;
+ else if(strcasecmp(sef,"negative")==0)
+ ctl.value=CAM_EFFECT_ENC_COLORINV;
+ else if(strcasecmp(sef,"sepia")==0)
+ ctl.value=CAM_EFFECT_ENC_SEPIA;
+ ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
+ if(ret<0)
+ CAMHAL_LOGEB("AMLOGIC CAMERA Set effect fail: %s. ret=%d", strerror(errno),ret);
+ return ret ;
+}
+
+int V4LCamAdpt::set_night_mode(int camera_fd,const char *snm)
+{
+ int ret = 0;
+ struct v4l2_control ctl;
+ if(camera_fd<0)
+ return -1;
+
+ memset( &ctl, 0, sizeof(ctl));
+ if(strcasecmp(snm,"auto")==0)
+ ctl.value=CAM_NM_AUTO;
+ else if(strcasecmp(snm,"night")==0)
+ ctl.value=CAM_NM_ENABLE;
+
+ ctl.id = V4L2_CID_DO_WHITE_BALANCE;
+
+ ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
+ if(ret<0)
+ CAMHAL_LOGEB("AMLOGIC CAMERA Set night mode fail: %s. ret=%d",
+ strerror(errno),ret);
+ return ret ;
+}
+
+extern "C" int V4LCamAdpt::set_banding(int camera_fd,const char *snm)
+{
+ int ret = 0;
+ struct v4l2_control ctl;
+
+ if(camera_fd<0)
+ return -1;
+
+ memset( &ctl, 0, sizeof(ctl));
+ if(strcasecmp(snm,"50hz")==0)
+ ctl.value= CAM_ANTIBANDING_50HZ;
+ else if(strcasecmp(snm,"60hz")==0)
+ ctl.value= CAM_ANTIBANDING_60HZ;
+ else if(strcasecmp(snm,"auto")==0)
+ ctl.value= CAM_ANTIBANDING_AUTO;
+ else if(strcasecmp(snm,"off")==0)
+ ctl.value= CAM_ANTIBANDING_OFF;
+
+ ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;
+
+ if(mAntiBanding == ctl.value){
+ return 0;
+ }else{
+ mAntiBanding = ctl.value;
+ }
+ ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
+ if(ret<0){
+ CAMHAL_LOGEB("AMLOGIC CAMERA Set banding fail: %s. ret=%d",
+ strerror(errno),ret);
+ }
+ return ret ;
+}
+
+bool V4LCamAdpt::get_flash_mode(char *flash_status,
+ char *def_flash_status)
+{
+ struct v4l2_queryctrl qc;
+ struct v4l2_querymenu qm;
+ bool flash_enable = false;
+ int ret = NO_ERROR;
+ int status_count = 0;
+
+ if((!flash_status)||(!def_flash_status)){
+ CAMHAL_LOGEA("flash status str buf error\n");
+ return flash_enable;
+ }
+
+ memset(&qc, 0, sizeof(qc));
+ qc.id = V4L2_CID_BACKLIGHT_COMPENSATION;
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0) || (qc.type != V4L2_CTRL_TYPE_MENU)){
+ flash_enable = false;
+ CAMHAL_LOGDB("camera handle %d can't support flash\n",mCameraHandle);
+ }else {
+ memset(&qm, 0, sizeof(qm));
+ qm.id = V4L2_CID_BACKLIGHT_COMPENSATION;
+ qm.index = qc.default_value;
+ if(ioctl ( mCameraHandle, VIDIOC_QUERYMENU, &qm) < 0){
+ strcpy(def_flash_status, "off");
+ } else {
+ strcpy(def_flash_status, (char*)qm.name);
+ }
+ int index = 0;
+ for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
+ memset(&qm, 0, sizeof(struct v4l2_querymenu));
+ qm.id = V4L2_CID_BACKLIGHT_COMPENSATION;
+ qm.index = index;
+ if(ioctl (mCameraHandle, VIDIOC_QUERYMENU, &qm) < 0){
+ continue;
+ } else {
+ if(status_count>0)
+ strcat(flash_status, ",");
+ strcat(flash_status, (char*)qm.name);
+ status_count++;
+ }
+ }
+ if(status_count>0)
+ flash_enable = true;
+ }
+ return flash_enable;
+}
+
+int V4LCamAdpt::set_flash_mode(int camera_fd, const char *sfm)
+{
+ int ret = NO_ERROR;
+ struct v4l2_control ctl;
+
+ memset(&ctl, 0, sizeof(ctl));
+ if(strcasecmp(sfm,"auto")==0)
+ ctl.value=FLASHLIGHT_AUTO;
+ else if(strcasecmp(sfm,"on")==0)
+ ctl.value=FLASHLIGHT_ON;
+ else if(strcasecmp(sfm,"off")==0)
+ ctl.value=FLASHLIGHT_OFF;
+ else if(strcasecmp(sfm,"torch")==0)
+ ctl.value=FLASHLIGHT_TORCH;
+ else if(strcasecmp(sfm,"red-eye")==0)
+ ctl.value=FLASHLIGHT_RED_EYE;
+
+ ctl.id = V4L2_CID_BACKLIGHT_COMPENSATION;
+ ret = ioctl( camera_fd, VIDIOC_S_CTRL, &ctl);
+ if( ret < 0 ){
+ CAMHAL_LOGDB("BACKLIGHT_COMPENSATION failed, errno=%d\n", errno);
+ }
+
+ return ret;
+}
+
+int V4LCamAdpt::get_hflip_mode(int camera_fd)
+{
+ struct v4l2_queryctrl qc;
+ int ret = 0;
+
+ if(camera_fd<0){
+ CAMHAL_LOGEA("Get_hflip_mode --camera handle is invalid\n");
+ return -1;
+ }
+
+ memset(&qc, 0, sizeof(qc));
+ qc.id = V4L2_CID_HFLIP;
+ ret = ioctl (camera_fd, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)){
+ ret = -1;
+ CAMHAL_LOGDB("camera handle %d can't support HFlip!\n",camera_fd);
+ }else{
+ CAMHAL_LOGDB("camera handle %d supports HFlip!\n",camera_fd);
+ }
+ return ret;
+}
+
+
+int V4LCamAdpt::set_hflip_mode(int camera_fd, bool mode)
+{
+ int ret = 0;
+ struct v4l2_control ctl;
+ if(camera_fd<0)
+ return -1;
+
+ memset(&ctl, 0,sizeof(ctl));
+ ctl.value=mode?1:0;
+
+ ctl.id = V4L2_CID_HFLIP;
+
+ ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
+ if(ret<0)
+ CAMHAL_LOGEB("Set hflip mode fail: %s. ret=%d", strerror(errno),ret);
+ return ret ;
+}
+
+int V4LCamAdpt::get_supported_zoom(int camera_fd, char * zoom_str)
+{
+ int ret = 0;
+ struct v4l2_queryctrl qc;
+ char str_zoom_element[10];
+ if((camera_fd<0)||(!zoom_str))
+ return -1;
+
+ memset(&qc, 0, sizeof(qc));
+ qc.id = V4L2_CID_ZOOM_ABSOLUTE;
+ ret = ioctl (camera_fd, VIDIOC_QUERYCTRL, &qc);
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)){
+ ret = -1;
+ CAMHAL_LOGDB("camera handle %d can't get zoom level!\n",camera_fd);
+ }else{
+ int i = 0;
+ ret = (qc.maximum - qc.minimum)/qc.step;
+ for (i=qc.minimum; i<=qc.maximum; i+=qc.step) {
+ memset(str_zoom_element,0,sizeof(str_zoom_element));
+ sprintf(str_zoom_element,"%d,", i);
+ strcat(zoom_str,str_zoom_element);
+ }
+ }
+ return ret ;
+}
+
+int V4LCamAdpt::set_zoom_level(int camera_fd, int zoom)
+{
+ int ret = 0;
+ struct v4l2_control ctl;
+ if((camera_fd<0)||(zoom<0))
+ return -1;
+
+ memset( &ctl, 0, sizeof(ctl));
+ ctl.value=zoom;
+ ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
+ ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
+ if(ret<0)
+ CAMHAL_LOGEB("Set zoom level fail: %s. ret=%d", strerror(errno),ret);
+ return ret ;
+}
+
+int V4LCamAdpt::set_rotate_value(int camera_fd, int value)
+{
+ int ret = 0;
+ struct v4l2_control ctl;
+ if(camera_fd<0)
+ return -1;
+
+ if((value!=0)&&(value!=90)&&(value!=180)&&(value!=270)){
+ CAMHAL_LOGEB("Set rotate value invalid: %d.", value);
+ return -1;
+ }
+
+ memset( &ctl, 0, sizeof(ctl));
+
+ ctl.value=value;
+
+ ctl.id = V4L2_ROTATE_ID;
+
+ ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
+ if(ret<0)
+ CAMHAL_LOGEB("Set rotate value fail: %s. ret=%d", strerror(errno),ret);
+ return ret ;
+}
+
+};
+
+
+/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/vircam/VirtualCamHal.cpp b/vircam/VirtualCamHal.cpp
new file mode 100755
index 0000000..5ed52c6
--- /dev/null
+++ b/vircam/VirtualCamHal.cpp
@@ -0,0 +1,3760 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file VirtualCamHal.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "VirtCamHAL"
+
+#include "VirtualCamHal.h"
+#include "ANativeWindowDisplayAdapter.h"
+#include "ExCameraParameters.h"
+#include "CameraProperties.h"
+#include <cutils/properties.h>
+
+#include <poll.h>
+#include <math.h>
+
+namespace android {
+#define LOGD ALOGD
+#define LOGE ALOGE
+#define LOGV ALOGV
+#define LOGI ALOGI
+
+static void write_sys_int(const char *path, int val)
+{
+ char cmd[16];
+ int fd = open(path, O_RDWR);
+
+ if(fd >= 0) {
+ sprintf(cmd, "%d", val);
+ write(fd, cmd, strlen(cmd));
+ close(fd);
+ }
+}
+
+static void write_sys_string(const char *path, const char *s)
+{
+ int fd = open(path, O_RDWR);
+
+ if(fd >= 0) {
+ write(fd, s, strlen(s));
+ close(fd);
+ }
+}
+
+#define DISABLE_VIDEO "/sys/class/video/disable_video"
+#define ENABLE_AVSYNC "/sys/class/tsync/enable"
+#define TSYNC_EVENT "/sys/class/tsync/event"
+#define VIDEO_ZOOM "/sys/class/video/zoom"
+#define SCREEN_MODE "/sys/class/video/screen_mode"
+
+static int SYS_enable_nextvideo()
+{
+ write_sys_int(DISABLE_VIDEO, 2);
+ return 0;
+}
+
+static int SYS_close_video()
+{
+ write_sys_int(DISABLE_VIDEO, 1);
+ return 0;
+}
+
+static int SYS_open_video()
+{
+ write_sys_int(DISABLE_VIDEO, 0);
+ return 0;
+}
+
+static int SYS_disable_avsync()
+{
+ write_sys_int(ENABLE_AVSYNC, 0);
+ return 0;
+}
+
+static int SYS_disable_video_pause()
+{
+ write_sys_string(TSYNC_EVENT, "VIDEO_PAUSE:0x0");
+ return 0;
+}
+
+extern "C" CameraAdapter* CameraAdapter_Factory(size_t);
+
+/*****************************************************************************/
+
+////Constant definitions and declarations
+////@todo Have a CameraProperties class to store these parameters as constants for every camera
+//// Currently, they are hard-coded
+
+const int VirtualCamHal::NO_BUFFERS_PREVIEW = MAX_CAMERA_BUFFERS;
+const int VirtualCamHal::NO_BUFFERS_IMAGE_CAPTURE = 2;
+
+/******************************************************************************/
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+struct timeval VirtualCamHal::mStartPreview;
+struct timeval VirtualCamHal::mStartFocus;
+struct timeval VirtualCamHal::mStartCapture;
+
+#endif
+
+static void orientation_cb(uint32_t orientation, uint32_t tilt, void* cookie) {
+ VirtualCamHal *camera = NULL;
+
+ if (cookie) {
+ camera = (VirtualCamHal*) cookie;
+ camera->onOrientationEvent(orientation, tilt);
+ }
+
+}
+/*-------------Camera Hal Interface Method definitions STARTS here--------------------*/
+
+/**
+ Callback function to receive orientation events from SensorListener
+ */
+void VirtualCamHal::onOrientationEvent(uint32_t orientation, uint32_t tilt) {
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mCameraAdapter ) {
+ mCameraAdapter->onOrientationEvent(orientation, tilt);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Set the notification and data callbacks
+
+ @param[in] notify_cb Notify callback for notifying the app about events and errors
+ @param[in] data_cb Buffer callback for sending the preview/raw frames to the app
+ @param[in] data_cb_timestamp Buffer callback for sending the video frames w/ timestamp
+ @param[in] user Callback cookie
+ @return none
+
+ */
+void VirtualCamHal::setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mAppCbNotifier.get() )
+ {
+ mAppCbNotifier->setCallbacks(this,
+ notify_cb,
+ data_cb,
+ data_cb_timestamp,
+ get_memory,
+ user);
+ }
+
+ if ( NULL != mMemoryManager.get() )
+ {
+ mMemoryManager->setRequestMemoryCallback(get_memory);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Enable a message, or set of messages.
+
+ @param[in] msgtype Bitmask of the messages to enable (defined in include/ui/Camera.h)
+ @return none
+
+ */
+void VirtualCamHal::enableMsgType(int32_t msgType)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( ( msgType & CAMERA_MSG_SHUTTER ) && ( !mShutterEnabled ) )
+ {
+ msgType &= ~CAMERA_MSG_SHUTTER;
+ }
+
+ // ignoring enable focus message from camera service
+ // we will enable internally in autoFocus call
+ if (msgType & CAMERA_MSG_FOCUS) {
+ msgType &= ~CAMERA_MSG_FOCUS;
+ }
+ if (msgType & CAMERA_MSG_FOCUS_MOVE ) {
+ msgType &= ~CAMERA_MSG_FOCUS_MOVE;
+ }
+
+ {
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled |= msgType;
+ }
+
+ if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ if(mDisplayPaused)
+ {
+ CAMHAL_LOGDA("Preview currently paused...will enable preview callback when restarted");
+ msgType &= ~CAMERA_MSG_PREVIEW_FRAME;
+ }else
+ {
+ CAMHAL_LOGDA("Enabling Preview Callback");
+ }
+ }
+ else
+ {
+ CAMHAL_LOGDB("Preview callback not enabled %x", msgType);
+ }
+
+
+ ///Configure app callback notifier with the message callback required
+ mAppCbNotifier->enableMsgType (msgType);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Disable a message, or set of messages.
+
+ @param[in] msgtype Bitmask of the messages to disable (defined in include/ui/Camera.h)
+ @return none
+
+ */
+void VirtualCamHal::disableMsgType(int32_t msgType)
+{
+ LOG_FUNCTION_NAME;
+ int32_t CurMsg = 0;
+ {
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled &= ~msgType;
+ CurMsg = mMsgEnabled;
+ }
+
+ if( msgType & CAMERA_MSG_PREVIEW_FRAME){
+ CAMHAL_LOGDA("Disabling Preview Callback");
+ }
+
+ ///Configure app callback notifier
+ mAppCbNotifier->disableMsgType (CurMsg);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Query whether a message, or a set of messages, is enabled.
+
+   Note that this operates as an AND: if any of the queried messages is disabled,
+   this will return false.
+
+ @param[in] msgtype Bitmask of the messages to query (defined in include/ui/Camera.h)
+ @return true If all message types are enabled
+           false If any message type is disabled
+
+ */
+int VirtualCamHal::msgTypeEnabled(int32_t msgType)
+{
+ LOG_FUNCTION_NAME;
+ Mutex::Autolock lock(mLock);
+ LOG_FUNCTION_NAME_EXIT;
+ return (mMsgEnabled & msgType);
+}
+
+/**
+ @brief Set the camera parameters.
+
+ @param[in] params Camera parameters to configure the camera
+ @return NO_ERROR
+ @todo Define error codes
+
+ */
+int VirtualCamHal::setParameters(const char* parameters)
+{
+
+ LOG_FUNCTION_NAME;
+
+ CameraParameters params;
+
+ String8 str_params(parameters);
+ params.unflatten(str_params);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return setParameters(params);
+}
+
+/**
+ @brief Set the camera parameters.
+
+ @param[in] params Camera parameters to configure the camera
+ @return NO_ERROR
+ @todo Define error codes
+
+ */
+int VirtualCamHal::setParameters(const CameraParameters& params)
+{
+
+ LOG_FUNCTION_NAME;
+
+ int w, h;
+ int w_orig, h_orig;
+ int framerate,minframerate;
+ int maxFPS, minFPS;
+ int error;
+ int base;
+ const char *valstr = NULL;
+ const char *prevFormat;
+ char *af_coord;
+ MSGUTILS::Message msg;
+ status_t ret = NO_ERROR;
+ // Needed for KEY_RECORDING_HINT
+ bool restartPreviewRequired = false;
+ bool updateRequired = false;
+ CameraParameters oldParams(mParameters.flatten());
+ bool videoMode = false;
+ char range[MAX_PROP_VALUE_LENGTH];
+
+ {
+ Mutex::Autolock lock(mLock);
+
+ ///Ensure that preview is not enabled when the below parameters are changed.
+ if(!previewEnabled())
+ {
+
+ CAMHAL_LOGDB("PreviewFormat %s", params.getPreviewFormat());
+
+ if ((valstr = params.getPreviewFormat()) != NULL) {
+ if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS))) {
+ mParameters.setPreviewFormat(valstr);
+ } else {
+ CAMHAL_LOGEB("Invalid preview format.Supported: %s", mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ return -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(ExCameraParameters::KEY_VNF)) != NULL) {
+ if ( (params.getInt(ExCameraParameters::KEY_VNF)==0) || (params.getInt(ExCameraParameters::KEY_VNF)==1) ) {
+ CAMHAL_LOGDB("VNF set %s", params.get(ExCameraParameters::KEY_VNF));
+ mParameters.set(ExCameraParameters::KEY_VNF, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid VNF: %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) {
+ // make sure we support vstab...if we don't and application is trying to set
+ // vstab then return an error
+ if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
+ CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("VSTAB %s",
+ params.get(CameraParameters::KEY_VIDEO_STABILIZATION));
+ mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
+ params.get(CameraParameters::KEY_VIDEO_STABILIZATION));
+ } else if (strcmp(valstr, CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGEB("ERROR: Invalid VSTAB: %s", valstr);
+ ret = -EINVAL;
+ } else {
+ mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
+ CameraParameters::FALSE);
+ }
+ }
+
+ if( (valstr = params.get(ExCameraParameters::KEY_CAP_MODE)) != NULL)
+ {
+ CAMHAL_LOGDB("Capture mode set %s", params.get(ExCameraParameters::KEY_CAP_MODE));
+ mParameters.set(ExCameraParameters::KEY_CAP_MODE, valstr);
+ }
+
+ if ((valstr = params.get(ExCameraParameters::KEY_IPP)) != NULL) {
+ if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES))) {
+ CAMHAL_LOGDB("IPP mode set %s", params.get(ExCameraParameters::KEY_IPP));
+ mParameters.set(ExCameraParameters::KEY_IPP, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid IPP mode: %s. Supported: %s", valstr,
+ mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES));
+ ret = -EINVAL;
+ }
+ }
+
+ if((valstr = params.get(ExCameraParameters::KEY_S3D2D_PREVIEW)) != NULL)
+ {
+ CAMHAL_LOGDB("Stereo 3D->2D Preview mode is %s", params.get(ExCameraParameters::KEY_S3D2D_PREVIEW));
+ mParameters.set(ExCameraParameters::KEY_S3D2D_PREVIEW, valstr);
+ }
+
+ if((valstr = params.get(ExCameraParameters::KEY_AUTOCONVERGENCE)) != NULL)
+ {
+ CAMHAL_LOGDB("AutoConvergence mode is %s", params.get(ExCameraParameters::KEY_AUTOCONVERGENCE));
+ mParameters.set(ExCameraParameters::KEY_AUTOCONVERGENCE, valstr);
+ }
+
+ }
+
+ params.getPreviewSize(&w, &h);
+ if (w == -1 && h == -1) {
+ CAMHAL_LOGEA("Unable to get preview size");
+ return -EINVAL;
+ }
+
+ int oldWidth, oldHeight;
+ mParameters.getPreviewSize(&oldWidth, &oldHeight);
+
+ int orientation =0;
+ if((valstr = params.get(ExCameraParameters::KEY_SENSOR_ORIENTATION)) != NULL)
+ {
+ CAMHAL_LOGDB("Sensor Orientation is set to %s", params.get(ExCameraParameters::KEY_SENSOR_ORIENTATION));
+ mParameters.set(ExCameraParameters::KEY_SENSOR_ORIENTATION, valstr);
+ orientation = params.getInt(ExCameraParameters::KEY_SENSOR_ORIENTATION);
+ }
+
+ if(orientation ==90 || orientation ==270)
+ {
+ if ( !isResolutionValid(h,w, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
+ {
+ CAMHAL_LOGEB("Invalid preview resolution %d x %d. Supported: %s", w, h,
+ mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+ return -EINVAL;
+ }
+ else
+ {
+ mParameters.setPreviewSize(w, h);
+ mVideoWidth = w;
+ mVideoHeight = h;
+ }
+ }
+ else
+ {
+ if ( !isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
+ {
+ CAMHAL_LOGEB("Invalid preview resolution2 %d x %d. Supported: %s", w, h,
+ mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+ return -EINVAL;
+ }
+ else
+ {
+ mParameters.setPreviewSize(w, h);
+ }
+ }
+
+ if ( ( oldWidth != w ) || ( oldHeight != h ) )
+ {
+ restartPreviewRequired |= true;
+ }
+
+ CAMHAL_LOGDB("PreviewResolution by App %d x %d", w, h);
+
+ // Handle RECORDING_HINT to Set/Reset Video Mode Parameters
+ valstr = params.get(CameraParameters::KEY_RECORDING_HINT);
+ if(valstr != NULL)
+ {
+ if(strcmp(valstr, CameraParameters::TRUE) == 0)
+ {
+ CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
+ mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr);
+ videoMode = true;
+ int w, h;
+
+ params.getPreviewSize(&w, &h);
+ CAMHAL_LOGVB("%s Preview Width=%d Height=%d\n", __FUNCTION__, w, h);
+ //HACK FOR MMS
+ mVideoWidth = w;
+ mVideoHeight = h;
+ CAMHAL_LOGVB("%s Video Width=%d Height=%d\n", __FUNCTION__, mVideoWidth, mVideoHeight);
+
+ //setPreferredPreviewRes(w, h);
+ mParameters.getPreviewSize(&w, &h);
+ CAMHAL_LOGVB("%s Preview Width=%d Height=%d\n", __FUNCTION__, w, h);
+ //Avoid restarting preview for MMS HACK
+ if ((w != mVideoWidth) && (h != mVideoHeight))
+ {
+ restartPreviewRequired = false;
+ }
+
+ restartPreviewRequired |= setVideoModeParameters(params);
+ }
+ else if(strcmp(valstr, CameraParameters::FALSE) == 0)
+ {
+ CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
+ mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr);
+ restartPreviewRequired |= resetVideoModeParameters();
+ params.getPreviewSize(&mVideoWidth, &mVideoHeight);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Invalid RECORDING_HINT");
+ return -EINVAL;
+ }
+ }
+ else
+ {
+        // This check is required in the following case:
+        // if the VideoRecording activity sets KEY_RECORDING_HINT to TRUE and the
+        // ImageCapture activity does not set KEY_RECORDING_HINT to FALSE (i.e. leaves it NULL),
+        // then Video Mode parameters may remain in effect in the ImageCapture activity as well.
+ CAMHAL_LOGDA("Recording Hint is set to NULL");
+ mParameters.set(CameraParameters::KEY_RECORDING_HINT, "");
+ restartPreviewRequired |= resetVideoModeParameters();
+ params.getPreviewSize(&mVideoWidth, &mVideoHeight);
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES))) {
+ CAMHAL_LOGDB("Focus mode set %s", params.get(CameraParameters::KEY_FOCUS_MODE));
+
+ // we need to take a decision on the capture mode based on whether CAF picture or
+ // video is chosen so the behavior of each is consistent to the application
+ if(strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0){
+ restartPreviewRequired |= resetVideoModeParameters();
+ } else if (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0){
+ restartPreviewRequired |= setVideoModeParameters(params);
+ }
+
+ mParameters.set(CameraParameters::KEY_FOCUS_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid FOCUS mode = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ ///Below parameters can be changed when the preview is running
+ if ( (valstr = params.getPictureFormat()) != NULL ) {
+ if (isParameterValid(params.getPictureFormat(),mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS))) {
+ mParameters.setPictureFormat(valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid picture format: %s",valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ params.getPictureSize(&w, &h);
+ if ( isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES))) {
+ mParameters.setPictureSize(w, h);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid picture resolution %dx%d", w, h);
+ ret = -EINVAL;
+ }
+
+ CAMHAL_LOGDB("Picture Size by App %d x %d", w, h);
+
+ if ((valstr = params.get(ExCameraParameters::KEY_BURST)) != NULL) {
+ if (params.getInt(ExCameraParameters::KEY_BURST) >=0) {
+ CAMHAL_LOGDB("Burst set %s", params.get(ExCameraParameters::KEY_BURST));
+ mParameters.set(ExCameraParameters::KEY_BURST, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Burst value: %s",valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ framerate = params.getPreviewFrameRate();
+ valstr = params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE);
+ CAMHAL_LOGDB("FRAMERATE %d", framerate);
+
+ CAMHAL_LOGDB("Passed FRR: %s, Supported FRR %s", valstr
+ , mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+ CAMHAL_LOGDB("Passed FR: %d, Supported FR %s", framerate
+ , mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+
+
+ if (valstr == NULL)
+ valstr = "";
+ //Perform parameter validation
+ if(!isParameterValid(valstr
+ , mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED))
+ || !isParameterInRange(framerate,
+ mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES)))
+ {
+ CAMHAL_LOGEA("Invalid frame rate range or frame rate");
+ return -EINVAL;
+ }
+
+ // Variable framerate ranges have higher priority over
+ // deprecated constant FPS. "KEY_PREVIEW_FPS_RANGE" should
+ // be cleared by the client in order for constant FPS to get
+ // applied.
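+    // Illustrative example (assuming VFR_SCALE == 1000, the usual scale for
+    // KEY_PREVIEW_FPS_RANGE values): an application that passes
+    //     preview-fps-range=15000,30000
+    // ends up with minFPS = 15 and maxFPS = 30 after the VFR_SCALE division
+    // below, and the constant preview frame rate is clamped into [15, 30].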
+ if ( strcmp(valstr, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE)) != 0)
+ {
+ // APP wants to set FPS range
+ //Set framerate = MAXFPS
+ CAMHAL_LOGDA("APP IS CHANGING FRAME RATE RANGE");
+ params.getPreviewFpsRange(&minFPS, &maxFPS);
+
+ if ( ( 0 > minFPS ) || ( 0 > maxFPS ) )
+ {
+ CAMHAL_LOGEA("ERROR: FPS Range is negative!");
+ return -EINVAL;
+ }
+
+ framerate = maxFPS /VirtualCamHal::VFR_SCALE;
+
+ }
+ else
+ {
+ if ( framerate != atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)) )
+ {
+
+ selectFPSRange(framerate, &minFPS, &maxFPS);
+ CAMHAL_LOGDB("Select FPS Range %d %d", minFPS, maxFPS);
+ }
+ else
+ {
+ if (videoMode) {
+ valstr = mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_VIDEO);
+ CameraParameters temp;
+ temp.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+ temp.getPreviewFpsRange(&minFPS, &maxFPS);
+ }
+ else {
+ valstr = mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_IMAGE);
+ CameraParameters temp;
+ temp.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+ temp.getPreviewFpsRange(&minFPS, &maxFPS);
+ }
+
+ //framerate = maxFPS / VirtualCamHal::VFR_SCALE;
+ }
+
+ }
+
+ CAMHAL_LOGDB("FPS Range = %s", valstr);
+ CAMHAL_LOGDB("DEFAULT FPS Range = %s", mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
+
+ minFPS /= VirtualCamHal::VFR_SCALE;
+ maxFPS /= VirtualCamHal::VFR_SCALE;
+
+ if ( ( 0 == minFPS ) || ( 0 == maxFPS ) )
+ {
+ CAMHAL_LOGEA("ERROR: FPS Range is invalid!");
+ ret = -EINVAL;
+ }
+
+ if ( maxFPS < minFPS )
+ {
+ CAMHAL_LOGEA("ERROR: Max FPS is smaller than Min FPS!");
+ ret = -EINVAL;
+ }
+ if(framerate < minFPS)
+ framerate = minFPS;
+ if(framerate > maxFPS)
+ framerate = maxFPS;
+ CAMHAL_LOGDB("SET FRAMERATE %d", framerate);
+ mParameters.setPreviewFrameRate(framerate);
+ valstr = params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE);
+ if (!valstr) valstr = "";
+ mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+
+ CAMHAL_LOGDB("FPS Range [%d, %d]", minFPS, maxFPS);
+ mParameters.set(ExCameraParameters::KEY_MINFRAMERATE, minFPS);
+ mParameters.set(ExCameraParameters::KEY_MAXFRAMERATE, maxFPS);
+
+ if( ( valstr = params.get(ExCameraParameters::KEY_GBCE) ) != NULL )
+ {
+ CAMHAL_LOGDB("GBCE Value = %s", valstr);
+ mParameters.set(ExCameraParameters::KEY_GBCE, valstr);
+ }
+
+ if( ( valstr = params.get(ExCameraParameters::KEY_GLBCE) ) != NULL )
+ {
+ CAMHAL_LOGDB("GLBCE Value = %s", valstr);
+ mParameters.set(ExCameraParameters::KEY_GLBCE, valstr);
+ }
+
+ ///Update the current parameter set
+ if( (valstr = params.get(ExCameraParameters::KEY_AUTOCONVERGENCE)) != NULL)
+ {
+ CAMHAL_LOGDB("AutoConvergence Mode is set = %s", params.get(ExCameraParameters::KEY_AUTOCONVERGENCE));
+ mParameters.set(ExCameraParameters::KEY_AUTOCONVERGENCE, valstr);
+ }
+
+ if( (valstr = params.get(ExCameraParameters::KEY_MANUALCONVERGENCE_VALUES)) !=NULL )
+ {
+ CAMHAL_LOGDB("ManualConvergence Value = %s", params.get(ExCameraParameters::KEY_MANUALCONVERGENCE_VALUES));
+ mParameters.set(ExCameraParameters::KEY_MANUALCONVERGENCE_VALUES, valstr);
+ }
+
+ if ((valstr = params.get(ExCameraParameters::KEY_EXPOSURE_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES))) {
+ CAMHAL_LOGDB("Exposure set = %s", valstr);
+ mParameters.set(ExCameraParameters::KEY_EXPOSURE_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Exposure = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_WHITE_BALANCE)) != NULL) {
+ if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE))) {
+ CAMHAL_LOGDB("White balance set %s", valstr);
+ mParameters.set(CameraParameters::KEY_WHITE_BALANCE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid white balance = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(ExCameraParameters::KEY_CONTRAST)) != NULL) {
+ if (params.getInt(ExCameraParameters::KEY_CONTRAST) >= 0 ) {
+ CAMHAL_LOGDB("Contrast set %s", valstr);
+ mParameters.set(ExCameraParameters::KEY_CONTRAST, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Contrast = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr =params.get(ExCameraParameters::KEY_SHARPNESS)) != NULL) {
+ if (params.getInt(ExCameraParameters::KEY_SHARPNESS) >= 0 ) {
+ CAMHAL_LOGDB("Sharpness set %s", valstr);
+ mParameters.set(ExCameraParameters::KEY_SHARPNESS, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Sharpness = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(ExCameraParameters::KEY_SATURATION)) != NULL) {
+ if (params.getInt(ExCameraParameters::KEY_SATURATION) >= 0 ) {
+ CAMHAL_LOGDB("Saturation set %s", valstr);
+ mParameters.set(ExCameraParameters::KEY_SATURATION, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Saturation = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(ExCameraParameters::KEY_BRIGHTNESS)) != NULL) {
+ if (params.getInt(ExCameraParameters::KEY_BRIGHTNESS) >= 0 ) {
+ CAMHAL_LOGDB("Brightness set %s", valstr);
+ mParameters.set(ExCameraParameters::KEY_BRIGHTNESS, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Brightness = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_ANTIBANDING)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING))) {
+ CAMHAL_LOGDB("Antibanding set %s", valstr);
+ mParameters.set(CameraParameters::KEY_ANTIBANDING, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Antibanding = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(ExCameraParameters::KEY_ISO)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES))) {
+ CAMHAL_LOGDB("ISO set %s", valstr);
+ mParameters.set(ExCameraParameters::KEY_ISO, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid ISO = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_FOCUS_AREAS)) != NULL )
+ {
+ CAMHAL_LOGEB("Focus areas position set %s", params.get(CameraParameters::KEY_FOCUS_AREAS));
+ mParameters.set(CameraParameters::KEY_FOCUS_AREAS, valstr);
+ }
+
+ if( (valstr = params.get(ExCameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
+ {
+ CAMHAL_LOGDB("Measurements set to %s", params.get(ExCameraParameters::KEY_MEASUREMENT_ENABLE));
+ mParameters.set(ExCameraParameters::KEY_MEASUREMENT_ENABLE, valstr);
+
+ if (strcmp(valstr, (const char *) ExCameraParameters::MEASUREMENT_ENABLE) == 0)
+ {
+ mMeasurementEnabled = true;
+ }
+ else if (strcmp(valstr, (const char *) ExCameraParameters::MEASUREMENT_DISABLE) == 0)
+ {
+ mMeasurementEnabled = false;
+ }
+ else
+ {
+ mMeasurementEnabled = false;
+ }
+
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL)
+ {
+ CAMHAL_LOGDB("Exposure compensation set %s", params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION));
+ mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr);
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_SCENE_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES))) {
+ CAMHAL_LOGDB("Scene mode set %s", valstr);
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(CameraParameters::KEY_SCENE_MODE),
+ updateRequired);
+ mParameters.set(CameraParameters::KEY_SCENE_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Scene mode = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_FLASH_MODE)) != NULL) {
+ const char* supportlist = mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES);
+ if (supportlist != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES))) {
+ CAMHAL_LOGDB("Flash mode set %s", valstr);
+ mParameters.set(CameraParameters::KEY_FLASH_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Flash mode = %s", valstr);
+ ret = -EINVAL;
+ }
+ } else {
+
+            CAMHAL_LOGDA("WARNING: flash is not supported, skipping the parameter");
+
+ }
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_EFFECT)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS))) {
+ CAMHAL_LOGDB("Effect set %s", valstr);
+ mParameters.set(CameraParameters::KEY_EFFECT, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Effect = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_ROTATION)) != NULL)
+ && (params.getInt(CameraParameters::KEY_ROTATION) >=0))
+ {
+ CAMHAL_LOGDB("Rotation set %s", params.get(CameraParameters::KEY_ROTATION));
+ mParameters.set(CameraParameters::KEY_ROTATION, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_QUALITY)) != NULL)
+ && (params.getInt(CameraParameters::KEY_JPEG_QUALITY) >=0))
+ {
+ CAMHAL_LOGDB("Jpeg quality set %s", params.get(CameraParameters::KEY_JPEG_QUALITY));
+ mParameters.set(CameraParameters::KEY_JPEG_QUALITY, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH)) != NULL)
+ && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH) >=0))
+ {
+ CAMHAL_LOGDB("Thumbnail width set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH));
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT)) != NULL)
+ && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT) >=0))
+ {
+        CAMHAL_LOGDB("Thumbnail height set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT));
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY)) != NULL )
+ && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) >=0))
+ {
+ CAMHAL_LOGDB("Thumbnail quality set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY));
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, valstr);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_LATITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS latitude set %s", params.get(CameraParameters::KEY_GPS_LATITUDE));
+ mParameters.set(CameraParameters::KEY_GPS_LATITUDE, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_LATITUDE);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS longitude set %s", params.get(CameraParameters::KEY_GPS_LONGITUDE));
+ mParameters.set(CameraParameters::KEY_GPS_LONGITUDE, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_LONGITUDE);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS altitude set %s", params.get(CameraParameters::KEY_GPS_ALTITUDE));
+ mParameters.set(CameraParameters::KEY_GPS_ALTITUDE, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_ALTITUDE);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS timestamp set %s", params.get(CameraParameters::KEY_GPS_TIMESTAMP));
+ mParameters.set(CameraParameters::KEY_GPS_TIMESTAMP, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_TIMESTAMP);
+ }
+
+ if( (valstr = params.get(ExCameraParameters::KEY_GPS_DATESTAMP)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS datestamp set %s", params.get(ExCameraParameters::KEY_GPS_DATESTAMP));
+ mParameters.set(ExCameraParameters::KEY_GPS_DATESTAMP, valstr);
+ }else{
+ mParameters.remove(ExCameraParameters::KEY_GPS_DATESTAMP);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS processing method set %s", params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD));
+ mParameters.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+ }
+
+ if( (valstr = params.get(ExCameraParameters::KEY_GPS_MAPDATUM )) != NULL )
+ {
+ CAMHAL_LOGDB("GPS MAPDATUM set %s", params.get(ExCameraParameters::KEY_GPS_MAPDATUM));
+ mParameters.set(ExCameraParameters::KEY_GPS_MAPDATUM, valstr);
+ }else{
+ mParameters.remove(ExCameraParameters::KEY_GPS_MAPDATUM);
+ }
+
+ if( (valstr = params.get(ExCameraParameters::KEY_GPS_VERSION)) != NULL )
+ {
+        CAMHAL_LOGDB("GPS version set %s", params.get(ExCameraParameters::KEY_GPS_VERSION));
+ mParameters.set(ExCameraParameters::KEY_GPS_VERSION, valstr);
+ }else{
+ mParameters.remove(ExCameraParameters::KEY_GPS_VERSION);
+ }
+
+ if( (valstr = params.get(ExCameraParameters::KEY_EXIF_MODEL)) != NULL )
+ {
+ CAMHAL_LOGDB("EXIF Model set %s", params.get(ExCameraParameters::KEY_EXIF_MODEL));
+ mParameters.set(ExCameraParameters::KEY_EXIF_MODEL, valstr);
+ }
+
+ if( (valstr = params.get(ExCameraParameters::KEY_EXIF_MAKE)) != NULL )
+ {
+ CAMHAL_LOGDB("EXIF Make set %s", params.get(ExCameraParameters::KEY_EXIF_MAKE));
+ mParameters.set(ExCameraParameters::KEY_EXIF_MAKE, valstr);
+ }
+
+ if( (valstr = params.get(ExCameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL )
+ {
+ CAMHAL_LOGDB("Exposure Bracketing set %s", params.get(ExCameraParameters::KEY_EXP_BRACKETING_RANGE));
+ mParameters.set(ExCameraParameters::KEY_EXP_BRACKETING_RANGE, valstr);
+ }
+ else
+ {
+ mParameters.remove(ExCameraParameters::KEY_EXP_BRACKETING_RANGE);
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_ZOOM)) != NULL ) {
+ if ((params.getInt(CameraParameters::KEY_ZOOM) >= 0 ) &&
+ (params.getInt(CameraParameters::KEY_ZOOM) <= mMaxZoomSupported )) {
+ CAMHAL_LOGDB("Zoom set %s", valstr);
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(CameraParameters::KEY_ZOOM),
+ updateRequired);
+ mParameters.set(CameraParameters::KEY_ZOOM, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Zoom: %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL )
+ {
+ CAMHAL_LOGDB("Auto Exposure Lock set %s", params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK));
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK),
+ updateRequired);
+ mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL )
+ {
+ CAMHAL_LOGDB("Auto WhiteBalance Lock set %s", params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK));
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK),
+ updateRequired);
+ mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr);
+ }
+ if( (valstr = params.get(CameraParameters::KEY_METERING_AREAS)) != NULL )
+ {
+ CAMHAL_LOGEB("Metering areas position set %s", params.get(CameraParameters::KEY_METERING_AREAS));
+ mParameters.set(CameraParameters::KEY_METERING_AREAS, valstr);
+ }
+
+LOGD("setParameters, 1 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraParameters::KEY_PICTURE_SIZE));
+ CameraParameters adapterParams = mParameters;
+
+ // Only send parameters to adapter if preview is already
+ // enabled or doesSetParameterNeedUpdate says so. Initial setParameters to camera adapter,
+ // will be called in startPreview()
+ // TODO(XXX): Need to identify other parameters that need update from camera adapter
+LOGD("setParameters mCameraAdapter=%#x mPreviewEnabled=%d updateRequired=%d", (uint32_t)mCameraAdapter, (uint32_t)mPreviewEnabled, (uint32_t)updateRequired);
+ if ( (NULL != mCameraAdapter) && (mPreviewEnabled || updateRequired) ) {
+ ret |= mCameraAdapter->setParameters(adapterParams);
+ }
+LOGD("setParameters, 2 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraParameters::KEY_PICTURE_SIZE));
+
+ if( NULL != params.get(ExCameraParameters::KEY_TEMP_BRACKETING_RANGE_POS) )
+ {
+ int posBracketRange = params.getInt(ExCameraParameters::KEY_TEMP_BRACKETING_RANGE_POS);
+ if ( 0 < posBracketRange )
+ {
+ mBracketRangePositive = posBracketRange;
+ }
+ }
+ CAMHAL_LOGDB("Positive bracketing range %d", mBracketRangePositive);
+
+
+ if( NULL != params.get(ExCameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG) )
+ {
+ int negBracketRange = params.getInt(ExCameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG);
+ if ( 0 < negBracketRange )
+ {
+ mBracketRangeNegative = negBracketRange;
+ }
+ }
+ CAMHAL_LOGDB("Negative bracketing range %d", mBracketRangeNegative);
+
+ if( ( (valstr = params.get(ExCameraParameters::KEY_TEMP_BRACKETING)) != NULL) &&
+ ( strcmp(valstr, ExCameraParameters::BRACKET_ENABLE) == 0 ))
+ {
+ if ( !mBracketingEnabled )
+ {
+ CAMHAL_LOGDA("Enabling bracketing");
+ mBracketingEnabled = true;
+
+ //Wait for AF events to enable bracketing
+ if ( NULL != mCameraAdapter )
+ {
+ setEventProvider( CameraHalEvent::ALL_EVENTS, mCameraAdapter );
+ }
+ }
+ else
+ {
+ CAMHAL_LOGDA("Bracketing already enabled");
+ }
+ }
+ else if ( ( (valstr = params.get(ExCameraParameters::KEY_TEMP_BRACKETING)) != NULL ) &&
+ ( strcmp(valstr, ExCameraParameters::BRACKET_DISABLE) == 0 ))
+ {
+ CAMHAL_LOGDA("Disabling bracketing");
+
+ mBracketingEnabled = false;
+ stopImageBracketing();
+
+ //Remove AF events subscription
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification( CameraHalEvent::ALL_EVENTS );
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ }
+
+ if( ( (valstr = params.get(ExCameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
+ ( strcmp(valstr, ExCameraParameters::SHUTTER_ENABLE) == 0 ))
+ {
+ CAMHAL_LOGDA("Enabling shutter sound");
+
+ mShutterEnabled = true;
+ mMsgEnabled |= CAMERA_MSG_SHUTTER;
+ mParameters.set(ExCameraParameters::KEY_SHUTTER_ENABLE, valstr);
+ }
+ else if ( ( (valstr = params.get(ExCameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
+ ( strcmp(valstr, ExCameraParameters::SHUTTER_DISABLE) == 0 ))
+ {
+ CAMHAL_LOGDA("Disabling shutter sound");
+
+ mShutterEnabled = false;
+ mMsgEnabled &= ~CAMERA_MSG_SHUTTER;
+ mParameters.set(ExCameraParameters::KEY_SHUTTER_ENABLE, valstr);
+ }
+
+ }
+
+ //On fail restore old parameters
+ if ( NO_ERROR != ret ) {
+ mParameters.unflatten(oldParams.flatten());
+ }
+
+    // Restart Preview if required by KEY_RECORDING_HINT, but only if preview is already running.
+    // If preview is not started yet, Video Mode parameters will take effect on the next startPreview()
+ if (restartPreviewRequired && previewEnabled() && !mRecordingEnabled) {
+ CAMHAL_LOGDA("Restarting Preview");
+ ret = restartPreview();
+ } else if (restartPreviewRequired && !previewEnabled() &&
+ mDisplayPaused && !mRecordingEnabled) {
+ CAMHAL_LOGDA("Stopping Preview");
+ forceStopPreview();
+ }
+
+ if (ret != NO_ERROR)
+ {
+ CAMHAL_LOGEA("Failed to restart Preview");
+ return ret;
+ }
+
+LOGD("setParameters, 3 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraParameters::KEY_PICTURE_SIZE));
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
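+
+/**
+   Illustrative example (not part of the original change): a parameter update
+   that exercises the video-mode handling above. Only the CameraParameters side
+   is shown; the exact setParameters() entry point follows the surrounding code.
+
+   @code
+   CameraParameters p;
+   p.setPreviewSize(1280, 720);
+   p.set(CameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE);
+   // Passing p to setParameters() enables videoMode, routes through
+   // setVideoModeParameters() and restarts the preview if CAPTURE_MODE or
+   // the VSTAB/VNF settings changed.
+   @endcode
+ */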
+
+status_t VirtualCamHal::allocPreviewBufs(int width, int height, const char* previewFormat,
+ unsigned int buffercount, unsigned int &max_queueable)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if(mDisplayAdapter.get() == NULL)
+ {
+ // Memory allocation of preview buffers is now placed in gralloc
+ // VirtualCamHal should not allocate preview buffers without DisplayAdapter
+ return NO_MEMORY;
+ }
+
+ if(!mPreviewBufs)
+ {
+ ///@todo Pluralise the name of this method to allocateBuffers
+ mPreviewLength = 0;
+#ifndef AMLOGIC_CAMERA_OVERLAY_SUPPORT
+ mPreviewBufs = (int32_t *) mDisplayAdapter->allocateBuffer(width, height,
+ previewFormat,
+ mPreviewLength,
+ buffercount);
+
+ LOGD("allocPreviewBufs buffercount=%d", buffercount);
+
+ if (NULL == mPreviewBufs ) {
+ CAMHAL_LOGEA("Couldn't allocate preview buffers");
+ return NO_MEMORY;
+ }
+
+ mPreviewOffsets = (uint32_t *) mDisplayAdapter->getOffsets();
+ if ( NULL == mPreviewOffsets ) {
+ CAMHAL_LOGEA("Buffer mapping failed");
+ return BAD_VALUE;
+ }
+
+ mPreviewFd = mDisplayAdapter->getFd();
+ /* mPreviewFd and desc.mFd seem to be unused.
+ if ( -1 == mPreviewFd ) {
+ CAMHAL_LOGEA("Invalid handle");
+ return BAD_VALUE;
+ }*/
+
+ mBufProvider = (BufferProvider*) mDisplayAdapter.get();
+
+ ret = mDisplayAdapter->maxQueueableBuffers(max_queueable);
+ if (ret != NO_ERROR) {
+ return ret;
+ }
+#else
+ int buf_size = 0;
+ if ( previewFormat != NULL ) {
+ if(strcmp(previewFormat,(const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ buf_size = width * height * 2;
+ }else if((strcmp(previewFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) ||
+ (strcmp(previewFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0)) {
+ buf_size = width * height * 3 / 2;
+ }else if(strcmp(previewFormat,(const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ buf_size = width * height * 2;
+ } else {
+ CAMHAL_LOGEA("Invalid format");
+ buf_size = 0;
+ }
+ } else {
+ CAMHAL_LOGEA("Preview format is NULL");
+ buf_size = 0;
+ }
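+        // Example: a 1280x720 preview in PIXEL_FORMAT_YUV420SP needs
+        // 1280 * 720 * 3 / 2 = 1382400 bytes per buffer, while YUV422I and
+        // RGB565 need 1280 * 720 * 2 = 1843200 bytes per buffer.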
+
+ //buf_size = ((buf_size+4095)/4096)*4096;
+ mPreviewBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, NULL, buf_size, buffercount);
+
+ LOGD("allocPreviewBufs buffercount=%d", buffercount);
+
+ if (NULL == mPreviewBufs ) {
+ CAMHAL_LOGEA("Couldn't allocate preview buffers");
+ return NO_MEMORY;
+ }
+
+ mPreviewLength = buf_size;
+
+ mPreviewOffsets = (uint32_t *) mMemoryManager->getOffsets();
+ //if ( NULL == mPreviewOffsets ) {
+ // CAMHAL_LOGEA("Buffer mapping failed");
+ // return BAD_VALUE;
+ //}
+
+ mPreviewFd = mMemoryManager->getFd();
+ /* mPreviewFd and desc.mFd seem to be unused.
+ if ( -1 == mPreviewFd ) {
+ CAMHAL_LOGEA("Invalid handle");
+ return BAD_VALUE;
+ }*/
+
+ mBufProvider = (BufferProvider*) mMemoryManager.get();
+ max_queueable = buffercount;
+ //ret = mDisplayAdapter->maxQueueableBuffers(max_queueable);
+ //if (ret != NO_ERROR) {
+ // return ret;
+ //}
+#endif
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+}
+
+status_t VirtualCamHal::freePreviewBufs()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDB("mPreviewBufs = 0x%x", (unsigned int)mPreviewBufs);
+ if(mPreviewBufs)
+ {
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mBufProvider->freeBuffer(mPreviewBufs);
+ mPreviewBufs = NULL;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+
+status_t VirtualCamHal::allocPreviewDataBufs(size_t size, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ int bytes;
+
+ LOG_FUNCTION_NAME;
+
+ bytes = size;
+
+ if ( NO_ERROR == ret )
+ {
+ if( NULL != mPreviewDataBufs )
+ {
+ ret = freePreviewDataBufs();
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
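+        // Round the requested size up to the next 4 KiB (4096-byte) boundary before allocating.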
+ bytes = ((bytes+4095)/4096)*4096;
+ mPreviewDataBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, NULL, bytes, bufferCount);
+
+ CAMHAL_LOGDB("Size of Preview data buffer = %d", bytes);
+ if( NULL == mPreviewDataBufs )
+ {
+            CAMHAL_LOGEA("Couldn't allocate preview data buffers using memory manager");
+ ret = -NO_MEMORY;
+ }
+ else
+ {
+ bytes = size;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mPreviewDataFd = mMemoryManager->getFd();
+ mPreviewDataLength = bytes;
+ mPreviewDataOffsets = mMemoryManager->getOffsets();
+ }
+ else
+ {
+ mPreviewDataFd = -1;
+ mPreviewDataLength = 0;
+ mPreviewDataOffsets = NULL;
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t VirtualCamHal::freePreviewDataBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+
+ if( NULL != mPreviewDataBufs )
+ {
+
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mMemoryManager->freeBuffer(mPreviewDataBufs);
+ mPreviewDataBufs = NULL;
+
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t VirtualCamHal::allocImageBufs(unsigned int width, unsigned int height, size_t size, const char* previewFormat, unsigned int bufferCount)
+{
+ status_t ret = NO_ERROR;
+ int bytes;
+
+ LOG_FUNCTION_NAME;
+
+ bytes = size;
+
+ // allocate image buffers only if not already allocated
+ if(NULL != mImageBufs) {
+ CAMHAL_LOGEB("mImageBufs is not null:0x%p",mImageBufs);
+ return NO_ERROR;
+ }
+
+ if ( NO_ERROR == ret )
+ {
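+        // Round the requested size up to the next 4 KiB (4096-byte) boundary before allocating.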
+ bytes = ((bytes+4095)/4096)*4096;
+ mImageBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, previewFormat, bytes, bufferCount);
+
+ CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes);
+ if( NULL == mImageBufs )
+ {
+ CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
+ ret = -NO_MEMORY;
+ }
+ else
+ {
+ bytes = size;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mImageFd = mMemoryManager->getFd();
+ mImageLength = bytes;
+ mImageOffsets = mMemoryManager->getOffsets();
+ }
+ else
+ {
+ mImageFd = -1;
+ mImageLength = 0;
+ mImageOffsets = NULL;
+ }
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t VirtualCamHal::allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ if( NULL != mVideoBufs ){
+ ret = freeVideoBufs(mVideoBufs);
+ mVideoBufs = NULL;
+ }
+
+ if ( NO_ERROR == ret ){
+ int32_t stride;
+ buffer_handle_t *bufsArr = new buffer_handle_t [bufferCount];
+
+ if (bufsArr != NULL){
+ for (uint32_t i = 0; i< bufferCount; i++){
+ GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get();
+ buffer_handle_t buf;
+ ret = GrallocAlloc.alloc(width, height, HAL_PIXEL_FORMAT_NV12, CAMHAL_GRALLOC_USAGE, &buf, &stride);
+ if (ret != NO_ERROR){
+ CAMHAL_LOGEA("Couldn't allocate video buffers using Gralloc");
+ ret = -NO_MEMORY;
+ for (uint32_t j=0; j< i; j++){
+ buf = (buffer_handle_t)bufsArr[j];
+ CAMHAL_LOGEB("Freeing Gralloc Buffer 0x%x", (uint32_t)buf);
+ GrallocAlloc.free(buf);
+ }
+ delete [] bufsArr;
+ goto exit;
+ }
+ bufsArr[i] = buf;
+ CAMHAL_LOGVB("*** Gralloc Handle =0x%x ***", (uint32_t)buf);
+ }
+
+ mVideoBufs = (int32_t *)bufsArr;
+ }
+ else{
+ CAMHAL_LOGEA("Couldn't allocate video buffers ");
+ ret = -NO_MEMORY;
+ }
+ }
+
+ exit:
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void endImageCapture( void *userData)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != userData )
+ {
+ VirtualCamHal *c = reinterpret_cast<VirtualCamHal *>(userData);
+ c->signalEndImageCapture();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void releaseImageBuffers(void *userData)
+{
+ LOG_FUNCTION_NAME;
+
+ if (NULL != userData) {
+ VirtualCamHal *c = reinterpret_cast<VirtualCamHal *>(userData);
+ c->freeImageBufs();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t VirtualCamHal::signalEndImageCapture()
+{
+ status_t ret = NO_ERROR;
+ int w,h;
+ CameraParameters adapterParams = mParameters;
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( mBracketingRunning ) {
+ stopImageBracketing();
+ } else {
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t VirtualCamHal::freeImageBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+
+ if( NULL != mImageBufs )
+ {
+
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mMemoryManager->freeBuffer(mImageBufs);
+ mImageBufs = NULL;
+
+ }
+ else
+ {
+ ret = -EINVAL;
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t VirtualCamHal::freeVideoBufs(void *bufs)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ buffer_handle_t *pBuf = (buffer_handle_t*)bufs;
+ int count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
+ if(pBuf == NULL)
+ {
+ CAMHAL_LOGEA("NULL pointer passed to freeVideoBuffer");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get();
+
+ for(int i = 0; i < count; i++){
+ buffer_handle_t ptr = *pBuf++;
+ CAMHAL_LOGVB("Free Video Gralloc Handle 0x%x", (uint32_t)ptr);
+ GrallocAlloc.free(ptr);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Start preview mode.
+
+ @param none
+ @return NO_ERROR Camera switched to VF mode
+ @todo Update function header with the different errors that are possible
+
+ */
+status_t VirtualCamHal::startPreview()
+{
+ status_t ret = NO_ERROR;
+ CameraAdapter::BuffersDescriptor desc;
+ CameraFrame frame;
+ const char *valstr = NULL;
+ unsigned int required_buffer_count;
+ unsigned int max_queueble_buffers;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ gettimeofday(&mStartPreview, NULL);
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if ( mPreviewEnabled ){
+ CAMHAL_LOGDA("Preview already running");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+
+ if ( NULL != mCameraAdapter ) {
+ ret = mCameraAdapter->setParameters(mParameters);
+ }
+
+ if ((mPreviewStartInProgress == false) && (mDisplayPaused == false)){
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW,( int ) &frame);
+ if ( NO_ERROR != ret ){
+ CAMHAL_LOGEB("Error: CAMERA_QUERY_RESOLUTION_PREVIEW %d", ret);
+ return ret;
+ }
+
+ ///Update the current preview width and height
+ mPreviewWidth = frame.mWidth;
+ mPreviewHeight = frame.mHeight;
+ //Update the padded width and height - required for VNF and VSTAB
+ mParameters.set(ExCameraParameters::KEY_PADDED_WIDTH, mPreviewWidth);
+ mParameters.set(ExCameraParameters::KEY_PADDED_HEIGHT, mPreviewHeight);
+
+ }
+
+    ///If the preview window has not been set yet, or there is no display adapter, just mark the preview start as pending
+ if(!mSetPreviewWindowCalled || (mDisplayAdapter.get() == NULL)){
+ CAMHAL_LOGEA("Preview not started. Preview in progress flag set");
+ mPreviewStartInProgress = true;
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SWITCH_TO_EXECUTING);
+ if ( NO_ERROR != ret ){
+ CAMHAL_LOGEB("Error: CAMERA_SWITCH_TO_EXECUTING %d", ret);
+ return ret;
+ }
+ return NO_ERROR;
+ }
+
+ if( (mDisplayAdapter.get() != NULL) && ( !mPreviewEnabled ) && ( mDisplayPaused ) )
+ {
+ CAMHAL_LOGDA("Preview is in paused state");
+
+ mDisplayPaused = false;
+ mPreviewEnabled = true;
+ if ( NO_ERROR == ret )
+ {
+ ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Display adapter resume failed %x", ret);
+ }
+ }
+ //restart preview callbacks
+ if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ mAppCbNotifier->enableMsgType (CAMERA_MSG_PREVIEW_FRAME);
+ }
+ return ret;
+ }
+ required_buffer_count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
+
+ ///Allocate the preview buffers
+ ret = allocPreviewBufs(mPreviewWidth, mPreviewHeight, mParameters.getPreviewFormat(), required_buffer_count, max_queueble_buffers);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEA("Couldn't allocate buffers for Preview");
+ goto error;
+ }
+
+ if ( mMeasurementEnabled )
+ {
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA,
+ ( int ) &frame,
+ required_buffer_count);
+ if ( NO_ERROR != ret )
+ {
+ return ret;
+ }
+
+ ///Allocate the preview data buffers
+ ret = allocPreviewDataBufs(frame.mLength, required_buffer_count);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEA("Couldn't allocate preview data buffers");
+ goto error;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ desc.mBuffers = mPreviewDataBufs;
+ desc.mOffsets = mPreviewDataOffsets;
+ desc.mFd = mPreviewDataFd;
+ desc.mLength = mPreviewDataLength;
+ desc.mCount = ( size_t ) required_buffer_count;
+ desc.mMaxQueueable = (size_t) required_buffer_count;
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA,
+ ( int ) &desc);
+ }
+ }
+
+ ///Pass the buffers to Camera Adapter
+ desc.mBuffers = mPreviewBufs;
+ desc.mOffsets = mPreviewOffsets;
+ desc.mFd = mPreviewFd;
+ desc.mLength = mPreviewLength;
+ desc.mCount = ( size_t ) required_buffer_count;
+ desc.mMaxQueueable = (size_t) max_queueble_buffers;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW,
+ ( int ) &desc);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Failed to register preview buffers: 0x%x", ret);
+ freePreviewBufs();
+ return ret;
+ }
+
+ mAppCbNotifier->startPreviewCallbacks(mParameters, mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, required_buffer_count);
+
+ ///Start the callback notifier
+ ret = mAppCbNotifier->start();
+
+ if( ALREADY_EXISTS == ret )
+ {
+ //Already running, do nothing
+ CAMHAL_LOGDA("AppCbNotifier already running");
+ ret = NO_ERROR;
+ }
+ else if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDA("Started AppCbNotifier..");
+ mAppCbNotifier->setMeasurements(mMeasurementEnabled);
+ }
+ else
+ {
+        CAMHAL_LOGEA("Couldn't start AppCallbackNotifier");
+ goto error;
+ }
+
+ ///Enable the display adapter if present, actual overlay enable happens when we post the buffer
+ if(mDisplayAdapter.get() != NULL)
+ {
+ CAMHAL_LOGDA("Enabling display");
+ bool isS3d = false;
+ DisplayAdapter::S3DParameters s3dParams;
+ int width, height;
+ mParameters.getPreviewSize(&width, &height);
+#if 0 //TODO: s3d is not part of bringup...will reenable
+ if ( (valstr = mParameters.get(ExCameraParameters::KEY_S3D_SUPPORTED)) != NULL) {
+ isS3d = (strcmp(valstr, "true") == 0);
+ }
+ if ( (valstr = mParameters.get(ExCameraParameters::KEY_S3D2D_PREVIEW)) != NULL) {
+ if (strcmp(valstr, "off") == 0)
+ {
+ CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS OFF");
+ //TODO: obtain the frame packing configuration from camera or user settings
+ //once side by side configuration is supported
+ s3dParams.mode = OVERLAY_S3D_MODE_ON;
+ s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER;
+ s3dParams.order = OVERLAY_S3D_ORDER_LF;
+ s3dParams.subSampling = OVERLAY_S3D_SS_NONE;
+ }
+ else
+ {
+ CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS ON");
+ s3dParams.mode = OVERLAY_S3D_MODE_OFF;
+ s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER;
+ s3dParams.order = OVERLAY_S3D_ORDER_LF;
+ s3dParams.subSampling = OVERLAY_S3D_SS_NONE;
+ }
+ }
+#endif //if 0
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ ret = mDisplayAdapter->enableDisplay(width, height, &mStartPreview, isS3d ? &s3dParams : NULL);
+#else
+ ret = mDisplayAdapter->enableDisplay(width, height, NULL, isS3d ? &s3dParams : NULL);
+#endif
+ if ( ret != NO_ERROR )
+ {
+ CAMHAL_LOGEA("Couldn't enable display");
+ goto error;
+ }
+ }
+
+ ///Send START_PREVIEW command to adapter
+ CAMHAL_LOGDA("Starting CameraAdapter preview mode");
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEA("Couldn't start preview w/ CameraAdapter");
+ goto error;
+ }
+ CAMHAL_LOGDA("Started preview");
+
+ mPreviewEnabled = true;
+ mPreviewStartInProgress = false;
+ return ret;
+
+error:
+ CAMHAL_LOGEA("Performing cleanup after error");
+ //Do all the cleanup
+ freePreviewBufs();
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);
+ if(mDisplayAdapter.get() != NULL)
+ {
+ mDisplayAdapter->disableDisplay(false);
+ }
+ mAppCbNotifier->stop();
+ mPreviewStartInProgress = false;
+ mPreviewEnabled = false;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
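+
+/**
+   Illustrative call sequence (not part of the original change; hal is a
+   hypothetical VirtualCamHal pointer, window a preview_stream_ops handle
+   obtained from CameraService):
+
+   @code
+   hal->setPreviewWindow(window);   // creates and configures the DisplayAdapter
+   hal->startPreview();             // allocates preview buffers, enables display
+   // If startPreview() is called before a window is set, only
+   // mPreviewStartInProgress is set and the preview is actually started
+   // later from setPreviewWindow().
+   @endcode
+ */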
+
+/**
+ @brief Sets ANativeWindow object.
+
+   Preview buffers are provided to VirtualCamHal via this object. The DisplayAdapter will interface with it
+   to render buffers to the display.
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t VirtualCamHal::setPreviewWindow(struct preview_stream_ops *window)
+{
+ status_t ret = NO_ERROR;
+ CameraAdapter::BuffersDescriptor desc;
+
+ LOG_FUNCTION_NAME;
+ mSetPreviewWindowCalled = true;
+
+ ///If the Camera service passes a null window, we destroy existing window and free the DisplayAdapter
+ if(!window)
+ {
+ if(mDisplayAdapter.get() != NULL)
+ {
+ ///NULL window passed, destroy the display adapter if present
+ CAMHAL_LOGEA("NULL window passed, destroying display adapter");
+ mDisplayAdapter.clear();
+ ///@remarks If there was a window previously existing, we usually expect another valid window to be passed by the client
+ ///@remarks so, we will wait until it passes a valid window to begin the preview again
+ mSetPreviewWindowCalled = false;
+ }
+ CAMHAL_LOGEA("NULL ANativeWindow passed to setPreviewWindow");
+ return NO_ERROR;
+ }else if(mDisplayAdapter.get() == NULL)
+ {
+ // Need to create the display adapter since it has not been created
+ // Create display adapter
+ mDisplayAdapter = new ANativeWindowDisplayAdapter();
+ ret = NO_ERROR;
+ if(!mDisplayAdapter.get() || ((ret=mDisplayAdapter->initialize())!=NO_ERROR))
+ {
+ if(ret!=NO_ERROR)
+ {
+ mDisplayAdapter.clear();
+ CAMHAL_LOGEA("DisplayAdapter initialize failed");
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ else
+ {
+ CAMHAL_LOGEA("Couldn't create DisplayAdapter");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_MEMORY;
+ }
+ }
+
+        // DisplayAdapter needs to know where to get the CameraFrames from in order to display them.
+        // Since CameraAdapter is the one that provides the frames, set it as the frame provider for DisplayAdapter
+ mDisplayAdapter->setFrameProvider(mCameraAdapter);
+
+        // Any dynamic errors that happen during the camera use case have to be propagated back to the application
+        // via CAMERA_MSG_ERROR. AppCallbackNotifier is the class that notifies such errors to the application
+ // Set it as the error handler for the DisplayAdapter
+ mDisplayAdapter->setErrorHandler(mAppCbNotifier.get());
+
+ // Update the display adapter with the new window that is passed from CameraService
+ ret = mDisplayAdapter->setPreviewWindow(window);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
+ }
+
+ if(mPreviewStartInProgress)
+ {
+            CAMHAL_LOGDA("setPreviewWindow called while a preview start was pending");
+ // Start the preview since the window is now available
+ ret = startPreview();
+ }
+ }else
+ {
+        /* If mDisplayAdapter is already created, there is no need to do anything here.
+         * We get a surface handle directly now, so we can reconfigure the surface
+         * itself in DisplayAdapter if the dimensions have changed
+ */
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+}
+
+
+/**
+ @brief Stop a previously started preview.
+
+ @param none
+ @return none
+
+ */
+void VirtualCamHal::stopPreview()
+{
+ LOG_FUNCTION_NAME;
+
+ if( (!previewEnabled() && !mDisplayPaused) || mRecordingEnabled)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return;
+ }
+
+ bool imageCaptureRunning = (mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE) &&
+ (mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE);
+ if(mDisplayPaused && !imageCaptureRunning)
+ {
+ // Display is paused, which essentially means there is no preview active.
+ // Note: this is done so that when stopPreview is called by client after
+ // an image capture, we do not de-initialize the camera adapter and
+ // restart over again.
+ return;
+ }
+
+ forceStopPreview();
+
+ // Reset Capture-Mode to default, so that when we switch from VideoRecording
+ // to ImageCapture, CAPTURE_MODE is not left to VIDEO_MODE.
+ CAMHAL_LOGDA("Resetting Capture-Mode to default");
+ mParameters.set(ExCameraParameters::KEY_CAP_MODE, "");
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Returns true if preview is enabled
+
+ @param none
+ @return true If preview is running currently
+ false If preview has been stopped
+
+ */
+bool VirtualCamHal::previewEnabled()
+{
+ LOG_FUNCTION_NAME;
+
+ return (mPreviewEnabled || mPreviewStartInProgress);
+}
+
+/**
+ @brief Start record mode.
+
+   When a recorded frame is available, a CAMERA_MSG_VIDEO_FRAME message is sent with
+   the corresponding frame. Every recorded frame must be released by calling
+   releaseRecordingFrame().
+
+ @param none
+ @return NO_ERROR If recording could be started without any issues
+ @todo Update the header with possible error values in failure scenarios
+
+ */
+status_t VirtualCamHal::startRecording( )
+{
+ int w, h;
+ const char *valstr = NULL;
+ bool restartPreviewRequired = false;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ gettimeofday(&mStartPreview, NULL);
+#endif
+
+ if(!previewEnabled())
+ {
+ return NO_INIT;
+ }
+
+ // set internal recording hint in case camera adapter needs to make some
+ // decisions....(will only be sent to camera adapter if camera restart is required)
+ mParameters.set(ExCameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE);
+
+ // if application starts recording in continuous focus picture mode...
+ // then we need to force default capture mode (as opposed to video mode)
+ if ( ((valstr = mParameters.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) &&
+ (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0) ){
+ restartPreviewRequired = resetVideoModeParameters();
+ }
+
+ // only need to check recording hint if preview restart is not already needed
+ valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ if ( !restartPreviewRequired &&
+ (!valstr || (valstr && (strcmp(valstr, CameraParameters::TRUE) != 0))) ) {
+ restartPreviewRequired = setVideoModeParameters(mParameters);
+ }
+
+ if (restartPreviewRequired) {
+ ret = restartPreview();
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ int count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
+ mParameters.getPreviewSize(&w, &h);
+ CAMHAL_LOGDB("%s Video Width=%d Height=%d", __FUNCTION__, mVideoWidth, mVideoHeight);
+
+ if ((w != mVideoWidth) && (h != mVideoHeight))
+ {
+ ret = allocVideoBufs(mVideoWidth, mVideoHeight, count);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("allocVideoBufs returned error 0x%x", ret);
+ mParameters.remove(ExCameraParameters::KEY_RECORDING_HINT);
+ return ret;
+ }
+
+ mAppCbNotifier->useVideoBuffers(true);
+ mAppCbNotifier->setVideoRes(mVideoWidth, mVideoHeight);
+ ret = mAppCbNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, count, mVideoBufs);
+ }
+ else
+ {
+ mAppCbNotifier->useVideoBuffers(false);
+ mAppCbNotifier->setVideoRes(mPreviewWidth, mPreviewHeight);
+ ret = mAppCbNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, count, NULL);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mAppCbNotifier->startRecording();
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ///Buffers for video capture (if different from preview) are expected to be allocated within CameraAdapter
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_VIDEO);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mRecordingEnabled = true;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
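+
+/**
+   Illustrative recording sequence (not part of the original change; hal is a
+   hypothetical VirtualCamHal pointer):
+
+   @code
+   hal->startPreview();
+   hal->startRecording();              // may restart preview to apply VIDEO_MODE
+   // ... for every CAMERA_MSG_VIDEO_FRAME delivered to the client:
+   hal->releaseRecordingFrame(mem);    // return the frame buffer to the HAL
+   hal->stopRecording();
+   @endcode
+ */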
+
+/**
+ @brief Set the camera parameters specific to Video Recording.
+
+ This function checks for the camera parameters which have to be set for recording.
+ Video Recording needs CAPTURE_MODE to be VIDEO_MODE. This function sets it.
+ This function also enables Video Recording specific functions like VSTAB & VNF.
+
+ @param none
+ @return true if preview needs to be restarted for VIDEO_MODE parameters to take effect.
+   @todo Modify the policies for enabling VSTAB & VNF to be use-case based later.
+
+ */
+bool VirtualCamHal::setVideoModeParameters(const CameraParameters& params)
+{
+ const char *valstr = NULL;
+ bool restartPreviewRequired = false;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // Set CAPTURE_MODE to VIDEO_MODE, if not set already and Restart Preview
+ valstr = mParameters.get(ExCameraParameters::KEY_CAP_MODE);
+ if ( (valstr == NULL) ||
+ ( (valstr != NULL) && (strcmp(valstr, (const char *) ExCameraParameters::VIDEO_MODE) != 0) ) )
+ {
+ CAMHAL_LOGDA("Set CAPTURE_MODE to VIDEO_MODE");
+ mParameters.set(ExCameraParameters::KEY_CAP_MODE, (const char *) ExCameraParameters::VIDEO_MODE);
+ restartPreviewRequired = true;
+ }
+
+ // Check if CAPTURE_MODE is VIDEO_MODE, since VSTAB & VNF work only in VIDEO_MODE.
+ valstr = mParameters.get(ExCameraParameters::KEY_CAP_MODE);
+ if (strcmp(valstr, (const char *) ExCameraParameters::VIDEO_MODE) == 0) {
+ // set VSTAB. restart is required if vstab value has changed
+ if (params.get(CameraParameters::KEY_VIDEO_STABILIZATION) != NULL) {
+ // make sure we support vstab
+ if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
+ CameraParameters::TRUE) == 0) {
+ valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION);
+ // vstab value has changed
+ if ((valstr != NULL) &&
+ strcmp(valstr, params.get(CameraParameters::KEY_VIDEO_STABILIZATION)) != 0) {
+ restartPreviewRequired = true;
+ }
+ mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
+ params.get(CameraParameters::KEY_VIDEO_STABILIZATION));
+ }
+ } else if (mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION)) {
+ // vstab was configured but now unset
+ restartPreviewRequired = true;
+ mParameters.remove(CameraParameters::KEY_VIDEO_STABILIZATION);
+ }
+
+ // Set VNF
+ if (params.get(ExCameraParameters::KEY_VNF) == NULL) {
+ CAMHAL_LOGDA("Enable VNF");
+ mParameters.set(ExCameraParameters::KEY_VNF, "1");
+ restartPreviewRequired = true;
+ } else {
+ valstr = mParameters.get(ExCameraParameters::KEY_VNF);
+ if (valstr && strcmp(valstr, params.get(ExCameraParameters::KEY_VNF)) != 0) {
+ restartPreviewRequired = true;
+ }
+ mParameters.set(ExCameraParameters::KEY_VNF, params.get(ExCameraParameters::KEY_VNF));
+ }
+
+        // For VSTAB alone at 1080p resolution, the padded width goes above 2048, which cannot be rendered by the GPU.
+        // In such a case, Ducati supports the combination of VSTAB & VNF, which requires a padded width < 2048.
+        // So we forcefully enable VNF if VSTAB is enabled for 1080p resolution.
+ valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && (strcmp(valstr, CameraParameters::TRUE) == 0) && (mPreviewWidth == 1920)) {
+ CAMHAL_LOGDA("Force Enable VNF for 1080p");
+ mParameters.set(ExCameraParameters::KEY_VNF, "1");
+ restartPreviewRequired = true;
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return restartPreviewRequired;
+}
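+
+// Note (illustrative summary): with the logic above, a preview restart is requested when
+// CAPTURE_MODE switches to VIDEO_MODE, when KEY_VIDEO_STABILIZATION is toggled on a
+// sensor that reports VSTAB_SUPPORTED, when the VNF setting changes, or when VNF is
+// force-enabled for 1080p VSTAB.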
+
+/**
+ @brief Reset the camera parameters specific to Video Recording.
+
+ This function resets CAPTURE_MODE and disables Recording specific functions like VSTAB & VNF.
+
+ @param none
+ @return true if preview needs to be restarted for VIDEO_MODE parameters to take effect.
+
+ */
+bool VirtualCamHal::resetVideoModeParameters()
+{
+ const char *valstr = NULL;
+ bool restartPreviewRequired = false;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // ignore this if we are already recording
+ if (mRecordingEnabled) {
+ return false;
+ }
+
+    // Reset CAPTURE_MODE to default if it is currently VIDEO_MODE, and restart preview
+ valstr = mParameters.get(ExCameraParameters::KEY_CAP_MODE);
+ if ((valstr != NULL) && (strcmp(valstr, ExCameraParameters::VIDEO_MODE) == 0)) {
+ CAMHAL_LOGDA("Reset Capture-Mode to default");
+ mParameters.set(ExCameraParameters::KEY_CAP_MODE, "");
+ restartPreviewRequired = true;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return restartPreviewRequired;
+}
+
+/**
+ @brief Restart the preview with setParameter.
+
+ This function restarts preview, for some VIDEO_MODE parameters to take effect.
+
+ @param none
+ @return NO_ERROR If recording parameters could be set without any issues
+
+ */
+status_t VirtualCamHal::restartPreview()
+{
+ const char *valstr = NULL;
+ char tmpvalstr[30];
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // Retain CAPTURE_MODE before calling stopPreview(), since it is reset in stopPreview().
+ tmpvalstr[0] = 0;
+ valstr = mParameters.get(ExCameraParameters::KEY_CAP_MODE);
+ if(valstr != NULL)
+ {
+ if(sizeof(tmpvalstr) < (strlen(valstr)+1))
+ {
+ return -EINVAL;
+ }
+
+ strncpy(tmpvalstr, valstr, sizeof(tmpvalstr));
+ tmpvalstr[sizeof(tmpvalstr)-1] = 0;
+ }
+
+ forceStopPreview();
+
+ {
+ Mutex::Autolock lock(mLock);
+ mParameters.set(ExCameraParameters::KEY_CAP_MODE, tmpvalstr);
+ mCameraAdapter->setParameters(mParameters);
+ }
+
+ ret = startPreview();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Stop a previously started recording.
+
+ @param none
+ @return none
+
+ */
+void VirtualCamHal::stopRecording()
+{
+ CameraAdapter::AdapterState currentState;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ if (!mRecordingEnabled )
+ {
+ return;
+ }
+
+ currentState = mCameraAdapter->getState();
+ if (currentState == CameraAdapter::VIDEO_CAPTURE_STATE) {
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
+ }
+
+ mAppCbNotifier->stopRecording();
+
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_VIDEO);
+
+ mRecordingEnabled = false;
+
+ if ( mAppCbNotifier->getUseVideoBuffers() ){
+ freeVideoBufs(mVideoBufs);
+ if (mVideoBufs){
+ CAMHAL_LOGVB(" FREEING mVideoBufs 0x%x", (uint32_t)mVideoBufs);
+ delete [] mVideoBufs;
+ }
+ mVideoBufs = NULL;
+ }
+
+ // reset internal recording hint in case camera adapter needs to make some
+ // decisions....(will only be sent to camera adapter if camera restart is required)
+ mParameters.remove(ExCameraParameters::KEY_RECORDING_HINT);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Returns true if recording is enabled.
+
+ @param none
+ @return true If recording is currently running
+ false If recording has been stopped
+
+ */
+int VirtualCamHal::recordingEnabled()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mRecordingEnabled;
+}
+
+/**
+ @brief Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+
+ @param[in] mem MemoryBase pointer to the frame being released. Must be one of the buffers
+ previously given by VirtualCamHal
+ @return none
+
+ */
+void VirtualCamHal::releaseRecordingFrame(const void* mem)
+{
+ LOG_FUNCTION_NAME;
+
+ //CAMHAL_LOGDB(" 0x%x", mem->pointer());
+
+ if ( ( mRecordingEnabled ) && mem != NULL)
+ {
+ mAppCbNotifier->releaseRecordingFrame(mem);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return;
+}
+
+/**
+ @brief Start auto focus
+
+   This call is asynchronous.
+   The notification callback routine is called with CAMERA_MSG_FOCUS once, when
+   focusing is complete. autoFocus() must be called again if another auto focus is
+   needed.
+
+ @param none
+ @return NO_ERROR
+ @todo Define the error codes if the focus is not locked
+
+ */
+status_t VirtualCamHal::autoFocus()
+{
+ status_t ret = NO_ERROR;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartFocus, NULL);
+
+#endif
+
+
+ LOG_FUNCTION_NAME;
+
+ {
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled |= CAMERA_MSG_FOCUS;
+ }
+
+
+ if ( NULL != mCameraAdapter )
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //pass the autoFocus timestamp along with the command to camera adapter
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_PERFORM_AUTOFOCUS, ( int ) &mStartFocus);
+
+#else
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_PERFORM_AUTOFOCUS);
+
+#endif
+
+ }
+ else
+ {
+ ret = -1;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
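+
+/**
+   Illustrative auto-focus flow (not part of the original change; hal is a
+   hypothetical VirtualCamHal pointer):
+
+   @code
+   hal->autoFocus();        // asynchronous; CAMERA_MSG_FOCUS is enabled here
+   // ... the client receives CAMERA_MSG_FOCUS once focusing completes ...
+   hal->cancelAutoFocus();  // optional: abort focusing and unlock the focus position
+   @endcode
+ */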
+
+/**
+ @brief Cancels auto-focus function.
+
+ If the auto-focus is still in progress, this function will cancel it.
+ Whether the auto-focus is in progress or not, this function will return the
+ focus position to the default. If the camera does not support auto-focus, this is a no-op.
+
+
+ @param none
+ @return NO_ERROR If the cancel succeeded
+   @todo Define error codes if the cancel didn't succeed
+
+ */
+status_t VirtualCamHal::cancelAutoFocus()
+{
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+ CameraParameters adapterParams = mParameters;
+ mMsgEnabled &= ~CAMERA_MSG_FOCUS;
+
+ if( NULL != mCameraAdapter )
+ {
+ adapterParams.set(ExCameraParameters::KEY_AUTO_FOCUS_LOCK, CameraParameters::FALSE);
+ mCameraAdapter->setParameters(adapterParams);
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_CANCEL_AUTOFOCUS);
+ mAppCbNotifier->flushEventQueue();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+void VirtualCamHal::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier)
+{
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay);
+ if ( NULL == mEventProvider )
+ {
+ CAMHAL_LOGEA("Error in creating EventProvider");
+ }
+ else
+ {
+ mEventProvider->enableEventNotification(eventMask);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void VirtualCamHal::eventCallbackRelay(CameraHalEvent* event)
+{
+ LOG_FUNCTION_NAME;
+
+ VirtualCamHal *appcbn = ( VirtualCamHal * ) (event->mCookie);
+ appcbn->eventCallback(event );
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void VirtualCamHal::eventCallback(CameraHalEvent* event)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != event )
+ {
+ switch( event->mEventType )
+ {
+ case CameraHalEvent::EVENT_FOCUS_LOCKED:
+ case CameraHalEvent::EVENT_FOCUS_ERROR:
+ {
+ if ( mBracketingEnabled )
+ {
+ startImageBracketing();
+ }
+ break;
+ }
+ default:
+ {
+ break;
+ }
+ };
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t VirtualCamHal::startImageBracketing()
+{
+ status_t ret = NO_ERROR;
+ CameraFrame frame;
+ CameraAdapter::BuffersDescriptor desc;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartCapture, NULL);
+
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if(!previewEnabled() && !mDisplayPaused)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ if ( !mBracketingEnabled )
+ {
+ return ret;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mBracketingRunning = true;
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
+ ( int ) &frame,
+ ( mBracketRangeNegative + 1 ));
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ if ( NULL != mAppCbNotifier.get() )
+ {
+ mAppCbNotifier->setBurst(true);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mParameters.getPictureSize(( int * ) &frame.mWidth,
+ ( int * ) &frame.mHeight);
+
+ ret = allocImageBufs(frame.mWidth,
+ frame.mHeight,
+ frame.mLength,
+ mParameters.getPictureFormat(),
+ ( mBracketRangeNegative + 1 ));
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ }
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+
+ desc.mBuffers = mImageBufs;
+ desc.mOffsets = mImageOffsets;
+ desc.mFd = mImageFd;
+ desc.mLength = mImageLength;
+ desc.mCount = ( size_t ) ( mBracketRangeNegative + 1 );
+ desc.mMaxQueueable = ( size_t ) ( mBracketRangeNegative + 1 );
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
+ ( int ) &desc);
+
+ if ( NO_ERROR == ret )
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //pass capture timestamp along with the camera adapter command
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_BRACKET_CAPTURE, ( mBracketRangePositive + 1 ), (int) &mStartCapture);
+
+#else
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_BRACKET_CAPTURE, ( mBracketRangePositive + 1 ));
+
+#endif
+
+ }
+ }
+
+ return ret;
+}
+
+status_t VirtualCamHal::stopImageBracketing()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if( !previewEnabled() )
+ {
+ return NO_INIT;
+ }
+
+ mBracketingRunning = false;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Take a picture.
+
+ @param none
+ @return NO_ERROR If able to switch to image capture
+ @todo Define error codes if unable to switch to image capture
+
+ */
+status_t VirtualCamHal::takePicture( )
+{
+ status_t ret = NO_ERROR;
+ CameraFrame frame;
+ CameraAdapter::BuffersDescriptor desc;
+ int burst;
+ const char *valstr = NULL;
+ unsigned int bufferCount = 1;
+
+ Mutex::Autolock lock(mLock);
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartCapture, NULL);
+
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if(!previewEnabled() && !mDisplayPaused)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ CAMHAL_LOGEA("Preview not started...");
+ return NO_INIT;
+ }
+
+ // return error if we are already capturing
+ if((mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE &&
+ mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) ||
+ (mCameraAdapter->getState() == CameraAdapter::VIDEO_CAPTURE_STATE &&
+ mCameraAdapter->getNextState() != CameraAdapter::VIDEO_STATE) ) {
+ CAMHAL_LOGEA("Already capturing an image...");
+ return NO_INIT;
+ }
+
+ // we only support video snapshot if we are in video mode (recording hint is set)
+ valstr = mParameters.get(ExCameraParameters::KEY_CAP_MODE);
+ if((mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE) &&
+ (valstr && strcmp(valstr, ExCameraParameters::VIDEO_MODE)) ) {
+ CAMHAL_LOGEA("Trying to capture while recording without recording hint set...");
+ return INVALID_OPERATION;
+ }
+
+ if ( !mBracketingRunning )
+ {
+ if ( NO_ERROR == ret )
+ {
+ burst = mParameters.getInt(ExCameraParameters::KEY_BURST);
+ }
+
+ //Allocate all buffers only in burst capture case
+ if ( burst > 1 )
+ {
+ bufferCount = VirtualCamHal::NO_BUFFERS_IMAGE_CAPTURE;
+ if ( NULL != mAppCbNotifier.get() )
+ {
+ mAppCbNotifier->setBurst(true);
+ }
+ }
+ else
+ {
+ if ( NULL != mAppCbNotifier.get() )
+ {
+ mAppCbNotifier->setBurst(false);
+ }
+ }
+
+ // pause preview during normal image capture
+ // do not pause preview if recording (video state)
+ if (NO_ERROR == ret &&
+ NULL != mDisplayAdapter.get() &&
+ burst < 1) {
+ if (mCameraAdapter->getState() != CameraAdapter::VIDEO_STATE) {
+ mDisplayPaused = true;
+ mPreviewEnabled = false;
+ ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
+ // since preview is paused we should stop sending preview frames too
+ if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+                    mAppCbNotifier->disableMsgType(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME);
+ CAMHAL_LOGDA("disable MSG_PREVIEW_FRAME");
+ }
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ CAMHAL_LOGDA("setSnapshotTimeRef!!\n");
+ mDisplayAdapter->setSnapshotTimeRef(&mStartCapture);
+#endif
+ }
+
+        // if we are taking a video snapshot...
+ if ((NO_ERROR == ret) && (mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE)) {
+ // enable post view frames if not already enabled so we can internally
+ // save snapshot frames for generating thumbnail
+ if((mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME) == 0) {
+ mAppCbNotifier->enableMsgType(CAMERA_MSG_POSTVIEW_FRAME);
+ }
+ }
+
+ if ( (NO_ERROR == ret) && (NULL != mCameraAdapter) )
+ {
+ if ( NO_ERROR == ret )
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
+ ( int ) &frame,
+ bufferCount);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x, count:%d", ret,bufferCount);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mParameters.getPictureSize(( int * ) &frame.mWidth,
+ ( int * ) &frame.mHeight);
+
+ ret = allocImageBufs(frame.mWidth,
+ frame.mHeight,
+ frame.mLength,
+ mParameters.getPictureFormat(),
+ bufferCount);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ }
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+ desc.mBuffers = mImageBufs;
+ desc.mOffsets = mImageOffsets;
+ desc.mFd = mImageFd;
+ desc.mLength = mImageLength;
+ desc.mCount = ( size_t ) bufferCount;
+ desc.mMaxQueueable = ( size_t ) bufferCount;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE, ( int ) &desc);
+ }
+ }
+
+ if ( ( NO_ERROR == ret ) && ( NULL != mCameraAdapter ) )
+ {
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ //pass capture timestamp along with the camera adapter command
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE, (int) &mStartCapture);
+#else
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE);
+#endif
+ }
+
+ return ret;
+}
+
+/**
+ @brief Cancel a picture that was started with takePicture.
+
+ Calling this method when no picture is being taken is a no-op.
+
+ @param none
+ @return NO_ERROR If cancel succeeded. Cancel can succeed if image callback is not sent
+ @todo Define error codes
+
+ */
+status_t VirtualCamHal::cancelPicture( )
+{
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
+
+ return NO_ERROR;
+}
+
+/**
+ @brief Return the camera parameters.
+
+ @param none
+ @return Currently configured camera parameters
+
+ */
+char* VirtualCamHal::getParameters()
+{
+ String8 params_str8;
+ char* params_string;
+ const char * valstr = NULL;
+
+ LOG_FUNCTION_NAME;
+
+LOGD("getParameters, 1 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraParameters::KEY_PICTURE_SIZE));
+ if( NULL != mCameraAdapter )
+ {
+ mCameraAdapter->getParameters(mParameters);
+ }
+LOGD("getParameters, 2 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraParameters::KEY_PICTURE_SIZE));
+
+ CameraParameters mParams = mParameters;
+
+ // Handle RECORDING_HINT to Set/Reset Video Mode Parameters
+ valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ if(valstr != NULL)
+ {
+ if(strcmp(valstr, CameraParameters::TRUE) == 0)
+ {
+ //HACK FOR MMS MODE
+ resetPreviewRes(&mParams, mVideoWidth, mVideoHeight);
+ }
+ }
+
+ // do not send internal parameters to upper layers
+ mParams.remove(ExCameraParameters::KEY_RECORDING_HINT);
+ mParams.remove(ExCameraParameters::KEY_AUTO_FOCUS_LOCK);
+ mParameters.remove(CameraProperties::RELOAD_WHEN_OPEN);
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ mParams.remove(CameraProperties::DEVICE_NAME);
+#endif
+
+ params_str8 = mParams.flatten();
+
+ // camera service frees this string...
+ params_string = (char*) malloc(sizeof(char) * (params_str8.length()+1));
+ strcpy(params_string, params_str8.string());
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ ///Return the current set of parameters
+
+ return params_string;
+}
+
+void VirtualCamHal::putParameters(char *parms)
+{
+ free(parms);
+}
+
+/**
+ @brief Send command to camera driver.
+
+   @param cmd The command to send, with optional arguments arg1 and arg2
+ @return NO_ERROR If the command succeeds
+ @todo Define the error codes that this function can return
+
+ */
+status_t VirtualCamHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+
+ if ( ( NO_ERROR == ret ) && ( NULL == mCameraAdapter ) )
+ {
+ CAMHAL_LOGEA("No CameraAdapter instance");
+ ret = -EINVAL;
+ }
+
+ if ( ( NO_ERROR == ret ) && ( !previewEnabled() ))
+ {
+ if( cmd == CAMERA_CMD_SET_DISPLAY_ORIENTATION) {
+ if(arg2 == 1) {//disable mirror
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_DISABLE_MIRROR, 1);
+ }
+ }
+ if( CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG != cmd){
+ CAMHAL_LOGEA("Preview is not running");
+ ret = -EINVAL;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ switch(cmd)
+ {
+ case CAMERA_CMD_SET_DISPLAY_ORIENTATION:
+
+ if(arg2 == 1) {//disable mirror
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_DISABLE_MIRROR, 1);
+ }
+
+ break;
+ case CAMERA_CMD_START_SMOOTH_ZOOM:
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_SMOOTH_ZOOM, arg1);
+
+ break;
+ case CAMERA_CMD_STOP_SMOOTH_ZOOM:
+
+                ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM);
+
+            break;
+
+ case CAMERA_CMD_START_FACE_DETECTION:
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_FD);
+
+ break;
+
+ case CAMERA_CMD_STOP_FACE_DETECTION:
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_FD);
+
+ break;
+
+ case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
+
+ mMsgEnabled |= CAMERA_MSG_FOCUS_MOVE;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_FOCUS_MOVE_MSG);
+
+ break;
+
+ default:
+ break;
+ };
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Release the hardware resources owned by this object.
+
+ Note that this is *not* done in the destructor.
+
+ @param none
+ @return none
+
+ */
+void VirtualCamHal::release()
+{
+ LOG_FUNCTION_NAME;
+    ///@todo Investigate how release is used by CameraService. It appears to be called
+    ///just before VirtualCamHal object destruction
+ deinitialize();
+
+ SYS_enable_nextvideo();
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+/**
+ @brief Dump state of the camera hardware
+
+   @param[in] fd File descriptor to write the dump to
+ @return NO_ERROR Dump succeeded
+ @todo Error codes for dump fail
+
+ */
+status_t VirtualCamHal::dump(int fd) const
+{
+ LOG_FUNCTION_NAME;
+ ///Implement this method when the h/w dump function is supported on Ducati side
+ return NO_ERROR;
+}
+
+/*-------------Camera Hal Interface Method definitions ENDS here--------------------*/
+
+
+
+
+/*-------------Camera Hal Internal Method definitions STARTS here--------------------*/
+
+/**
+ @brief Constructor of VirtualCamHal
+
+ Member variables are initialized here. No allocations should be done here as we
+ don't use c++ exceptions in the code.
+
+ */
+VirtualCamHal::VirtualCamHal(int cameraId)
+{
+ LOG_FUNCTION_NAME;
+
+ ///Initialize all the member variables to their defaults
+ mPreviewEnabled = false;
+ mPreviewBufs = NULL;
+ mImageBufs = NULL;
+ mBufProvider = NULL;
+ mPreviewStartInProgress = false;
+ mVideoBufs = NULL;
+ mVideoBufProvider = NULL;
+ mRecordingEnabled = false;
+ mDisplayPaused = false;
+ mSetPreviewWindowCalled = false;
+ mMsgEnabled = 0;
+ mAppCbNotifier = NULL;
+ mMemoryManager = NULL;
+ mCameraAdapter = NULL;
+ mBracketingEnabled = false;
+ mBracketingRunning = false;
+ mEventProvider = NULL;
+ mBracketRangePositive = 1;
+ mBracketRangeNegative = 1;
+ mMaxZoomSupported = 0;
+ mShutterEnabled = true;
+ mMeasurementEnabled = false;
+ mPreviewDataBufs = NULL;
+ mCameraProperties = NULL;
+ mCurrentTime = 0;
+ mFalsePreview = 0;
+ mImageOffsets = NULL;
+ mImageLength = 0;
+ mImageFd = -1;
+ mVideoOffsets = NULL;
+ mVideoFd = -1;
+ mVideoLength = 0;
+ mPreviewDataOffsets = NULL;
+ mPreviewDataFd = -1;
+ mPreviewDataLength = 0;
+ mPreviewFd = -1;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+ mPreviewLength = 0;
+ mPreviewOffsets = NULL;
+ mPreviewRunning = 0;
+ mPreviewStateOld = 0;
+ mRecordingEnabled = 0;
+ mRecordEnabled = 0;
+#ifdef ENABLE_SENSOR_LISTENER
+ mSensorListener = NULL;
+#endif
+ mVideoWidth = 0;
+ mVideoHeight = 0;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //Initialize the CameraHAL constructor timestamp, which is used in the
+ // PPM() method as time reference if the user does not supply one.
+ gettimeofday(&ppm_start, NULL);
+
+#endif
+
+ mCameraIndex = cameraId;
+
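+    // Video path setup for preview: AV sync and video pause are disabled; with overlay
+    // support the video layer is kept enabled, otherwise it is closed.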
+ SYS_disable_avsync();
+ SYS_disable_video_pause();
+#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
+ SYS_enable_nextvideo();
+#else
+ SYS_close_video();
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Destructor of VirtualCamHal
+
+    This function simply calls deinitialize() to free up memory allocated during the
+    construction phase
+ */
+VirtualCamHal::~VirtualCamHal()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Call de-initialize here once more - it is the last chance for us to relinquish all the h/w and s/w resources
+ deinitialize();
+
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ /// Free the callback notifier
+ mAppCbNotifier.clear();
+
+ /// Free the display adapter
+ mDisplayAdapter.clear();
+
+ if ( NULL != mCameraAdapter ) {
+ int strongCount = mCameraAdapter->getStrongCount();
+
+ mCameraAdapter->decStrong(mCameraAdapter);
+
+ mCameraAdapter = NULL;
+ }
+
+ freeImageBufs();
+
+ /// Free the memory manager
+ mMemoryManager.clear();
+
+ SYS_enable_nextvideo();
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Initialize the Camera HAL
+
+    Creates the CameraAdapter, AppCbNotifier and MemoryManager
+
+ @param None
+ @return NO_ERROR - On success
+ NO_MEMORY - On failure to allocate memory for any of the objects
+ @remarks Camera Hal internal function
+
+ */
+
+status_t VirtualCamHal::initialize(CameraProperties::Properties* properties)
+{
+ LOG_FUNCTION_NAME;
+
+ int sensor_index = 0;
+
+ ///Initialize the event mask used for registering an event provider for AppCallbackNotifier
+    ///Currently, all events are registered as coming from the CameraAdapter
+ int32_t eventMask = CameraHalEvent::ALL_EVENTS;
+
+ // Get my camera properties
+ mCameraProperties = properties;
+
+ if(!mCameraProperties)
+ {
+ goto fail_loop;
+ }
+
+ // Dump the properties of this Camera
+ // will only print if DEBUG macro is defined
+ mCameraProperties->dump();
+
+ if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX)) != 0 )
+ {
+ sensor_index = atoi(mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX));
+ }
+
+ CAMHAL_LOGDB("Sensor index %d", sensor_index);
+
+ mCameraAdapter = CameraAdapter_Factory(sensor_index);
+ if ( ( NULL == mCameraAdapter ) || (mCameraAdapter->initialize(properties)!=NO_ERROR))
+ {
+ CAMHAL_LOGEA("Unable to create or initialize CameraAdapter");
+ mCameraAdapter = NULL;
+ goto fail_loop;
+ }
+
+ mCameraAdapter->incStrong(mCameraAdapter);
+ mCameraAdapter->registerImageReleaseCallback(releaseImageBuffers, (void *) this);
+ mCameraAdapter->registerEndCaptureCallback(endImageCapture, (void *)this);
+
+ if(!mAppCbNotifier.get())
+ {
+ /// Create the callback notifier
+ mAppCbNotifier = new AppCbNotifier();
+ if( ( NULL == mAppCbNotifier.get() ) || ( mAppCbNotifier->initialize() != NO_ERROR))
+ {
+ CAMHAL_LOGEA("Unable to create or initialize AppCbNotifier");
+ goto fail_loop;
+ }
+ }
+
+ if(!mMemoryManager.get())
+ {
+ /// Create Memory Manager
+ mMemoryManager = new MemoryManager();
+ if( ( NULL == mMemoryManager.get() ) || ( mMemoryManager->initialize() != NO_ERROR))
+ {
+ CAMHAL_LOGEA("Unable to create or initialize MemoryManager");
+ goto fail_loop;
+ }
+ }
+
+ ///Setup the class dependencies...
+
+ ///AppCallbackNotifier has to know where to get the Camera frames and the events like auto focus lock etc from.
+ ///CameraAdapter is the one which provides those events
+ ///Set it as the frame and event providers for AppCallbackNotifier
+ ///@remarks setEventProvider API takes in a bit mask of events for registering a provider for the different events
+ /// That way, if events can come from DisplayAdapter in future, we will be able to add it as provider
+ /// for any event
+ mAppCbNotifier->setEventProvider(eventMask, mCameraAdapter);
+ mAppCbNotifier->setFrameProvider(mCameraAdapter);
+
+    ///Any dynamic errors that happen during the camera use case have to be propagated back to the application
+ ///via CAMERA_MSG_ERROR. AppCallbackNotifier is the class that notifies such errors to the application
+ ///Set it as the error handler for CameraAdapter
+ mCameraAdapter->setErrorHandler(mAppCbNotifier.get());
+
+ ///Start the callback notifier
+ if(mAppCbNotifier->start() != NO_ERROR)
+ {
+ CAMHAL_LOGEA("Couldn't start AppCallbackNotifier");
+ goto fail_loop;
+ }
+
+ CAMHAL_LOGDA("Started AppCallbackNotifier..");
+ mAppCbNotifier->setMeasurements(mMeasurementEnabled);
+
+ ///Initialize default parameters
+ initDefaultParameters();
+
+ if ( setParameters(mParameters) != NO_ERROR )
+ {
+ CAMHAL_LOGEA("Failed to set default parameters?!");
+ }
+
+#ifdef ENABLE_SENSOR_LISTENER
+ // register for sensor events
+ mSensorListener = new SensorListener();
+ if (mSensorListener.get()) {
+ if (mSensorListener->initialize() == NO_ERROR) {
+ mSensorListener->setCallbacks(orientation_cb, this);
+ mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION);
+ } else {
+ CAMHAL_LOGEA("Error initializing SensorListener. not fatal, continuing");
+ mSensorListener.clear();
+ mSensorListener = NULL;
+ }
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+
+fail_loop:
+
+ ///Free up the resources because we failed somewhere up
+ deinitialize();
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_MEMORY;
+
+}
+
+#if 1//ndef AMLOGIC_USB_CAMERA_SUPPORT
+//With the vm or mipi driver, the resolution only needs to be smaller than the max preview size (1920x1080).
+bool VirtualCamHal::isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions)
+{
+ bool ret = false;
+ status_t status = NO_ERROR;
+ char *pos = NULL;
+ unsigned int supported_w = 0, supported_h = 0;
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedResolutions )
+ {
+ CAMHAL_LOGEA("Invalid supported resolutions string");
+ ret = false;
+ goto exit;
+ }
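+    // supportedResolutions is a comma separated list of "WxH" entries; the request is
+    // considered valid as soon as one supported entry is at least as large in both dimensions.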
+ pos = (char *)supportedResolutions;
+ while(pos != NULL){
+ if (sscanf(pos, "%dx%d", &supported_w, &supported_h) != 2){
+ CAMHAL_LOGEB("Read supported resolutions string error!(%s)",pos);
+ ret = false;
+ break;
+ }
+ //CAMHAL_LOGVB("Read supported resolutions %dx%d",supported_w,supported_h);
+ if((width<=supported_w)&&(height<=supported_h)){
+ ret = true;
+ break;
+ }
+ pos = strchr(pos, ',');
+ if(pos)
+ pos++;
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+#else
+bool VirtualCamHal::isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions)
+{
+ bool ret = true;
+ status_t status = NO_ERROR;
+ char tmpBuffer[PARAM_BUFFER + 1];
+ char *pos = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedResolutions )
+ {
+ CAMHAL_LOGEA("Invalid supported resolutions string");
+ ret = false;
+ goto exit;
+ }
+
+ status = snprintf(tmpBuffer, PARAM_BUFFER, "%dx%d", width, height);
+ if ( 0 > status )
+ {
+ CAMHAL_LOGEA("Error encountered while generating validation string");
+ ret = false;
+ goto exit;
+ }
+
+ pos = strstr(supportedResolutions, tmpBuffer);
+ if ( NULL == pos )
+ {
+ ret = false;
+ }
+ else
+ {
+ ret = true;
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+#endif
+
+bool VirtualCamHal::isParameterValid(const char *param, const char *supportedParams)
+{
+ bool ret = true;
+ char *pos = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedParams )
+ {
+ CAMHAL_LOGEA("Invalid supported parameters string");
+ ret = false;
+ goto exit;
+ }
+
+ if ( NULL == param )
+ {
+ CAMHAL_LOGEA("Invalid parameter string");
+ ret = false;
+ goto exit;
+ }
+
+ pos = strstr(supportedParams, param);
+ if ( NULL == pos )
+ {
+ ret = false;
+ }
+ else
+ {
+ ret = true;
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+bool VirtualCamHal::isParameterValid(int param, const char *supportedParams)
+{
+ bool ret = true;
+ char *pos = NULL;
+ status_t status;
+ char tmpBuffer[PARAM_BUFFER + 1];
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedParams )
+ {
+ CAMHAL_LOGEA("Invalid supported parameters string");
+ ret = false;
+ goto exit;
+ }
+
+ status = snprintf(tmpBuffer, PARAM_BUFFER, "%d", param);
+ if ( 0 > status )
+ {
+ CAMHAL_LOGEA("Error encountered while generating validation string");
+ ret = false;
+ goto exit;
+ }
+
+ pos = strstr(supportedParams, tmpBuffer);
+ if ( NULL == pos )
+ {
+ ret = false;
+ }
+ else
+ {
+ ret = true;
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+bool VirtualCamHal::isParameterInRange(int param, const char *supportedParams)
+{
+ bool ret = true;
+ char *pos = NULL;
+ status_t status;
+ int min_range = 0, max_range = 0;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedParams )
+ {
+ CAMHAL_LOGEA("Invalid supported parameters string");
+ ret = false;
+ goto exit;
+ }
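+    // supportedParams is expected to hold the range as "min,max"; the two values are
+    // swapped if needed and the parameter must fall inside a non-empty range.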
+ if (sscanf(supportedParams, "%d,%d", &min_range, &max_range) != 2){
+ CAMHAL_LOGEA("Error encountered while get Parameter Range");
+ ret = false;
+ goto exit;
+ }
+ if(min_range==max_range){
+ CAMHAL_LOGEA("Parameter Range Invalid");
+ ret = false;
+ goto exit;
+ }
+
+ if(min_range>max_range){
+ int temp = max_range;
+ max_range = min_range;
+ min_range = temp;
+ }
+
+ if((min_range<=param)&&(param<=max_range))
+ ret = true;
+ else
+ ret = false;
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t VirtualCamHal::doesSetParameterNeedUpdate(const char* new_param, const char* old_param, bool& update)
+{
+ if (!new_param || !old_param) {
+ return -EINVAL;
+ }
+
+ // if params mismatch we should update parameters for camera adapter
+ if ((strcmp(new_param, old_param) != 0)) {
+ update = true;
+ }
+
+ return NO_ERROR;
+}
+
+status_t VirtualCamHal::parseResolution(const char *resStr, int &width, int &height)
+{
+ status_t ret = NO_ERROR;
+ char *ctx, *pWidth, *pHeight;
+ const char *sep = "x";
+ char *tmp = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == resStr )
+ {
+ return -EINVAL;
+ }
+
+ //This fixes "Invalid input resolution"
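+    //strtok_r modifies the string it parses, so work on a heap copy of the
+    //"WIDTHxHEIGHT" string instead of the caller's buffer.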
+ char *resStr_copy = (char *)malloc(strlen(resStr) + 1);
+ if ( NULL!=resStr_copy ) {
+ if ( NO_ERROR == ret )
+ {
+ strcpy(resStr_copy, resStr);
+ pWidth = strtok_r( (char *) resStr_copy, sep, &ctx);
+
+ if ( NULL != pWidth )
+ {
+ width = atoi(pWidth);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid input resolution %s", resStr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ pHeight = strtok_r(NULL, sep, &ctx);
+
+ if ( NULL != pHeight )
+ {
+ height = atoi(pHeight);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid input resolution %s", resStr);
+ ret = -EINVAL;
+ }
+ }
+
+ free(resStr_copy);
+ resStr_copy = NULL;
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void VirtualCamHal::insertSupportedParams()
+{
+ char tmpBuffer[PARAM_BUFFER + 1];
+
+ LOG_FUNCTION_NAME;
+
+ CameraParameters &p = mParameters;
+
+ ///Set the name of the camera
+ p.set(ExCameraParameters::KEY_CAMERA_NAME, mCameraProperties->get(CameraProperties::CAMERA_NAME));
+
+ mMaxZoomSupported = atoi(mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+
+ p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+ p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+ p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES));
+ p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE));
+ p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS));
+ p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+
+ const char *flashmode = mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES);
+ if(flashmode&&(flashmode[0]!=0)){
+ p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, flashmode);
+ }
+
+ p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES));
+ p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING));
+ p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX));
+ p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN));
+ p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP));
+ p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+ p.set(ExCameraParameters::KEY_SUPPORTED_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES));
+ p.set(ExCameraParameters::KEY_SUPPORTED_ISO_VALUES, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES));
+ p.set(CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS));
+ p.set(CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+ p.set(CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED));
+ p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED));
+ p.set(ExCameraParameters::KEY_SUPPORTED_IPP, mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES));
+ p.set(ExCameraParameters::KEY_S3D_SUPPORTED,mCameraProperties->get(CameraProperties::S3D_SUPPORTED));
+ p.set(ExCameraParameters::KEY_S3D2D_PREVIEW_MODE,mCameraProperties->get(CameraProperties::S3D2D_PREVIEW_MODES));
+ p.set(ExCameraParameters::KEY_AUTOCONVERGENCE_MODE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE));
+ p.set(ExCameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES));
+ p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+ p.set(ExCameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
+ p.set(ExCameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES));
+ p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED));
+ p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED));
+ p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, mCameraProperties->get(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED));
+
+ //p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+
+ p.set(CameraParameters::KEY_FOCUS_DISTANCES,"0.95,1.9,Infinity");
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+void VirtualCamHal::initDefaultParameters()
+{
+ //Purpose of this function is to initialize the default current and supported parameters for the currently
+ //selected camera.
+
+ CameraParameters &p = mParameters;
+ int currentRevision, adapterRevision;
+ status_t ret = NO_ERROR;
+ int width, height;
+
+ LOG_FUNCTION_NAME;
+
+ ret = parseResolution(mCameraProperties->get(CameraProperties::PREVIEW_SIZE), width, height);
+
+ if ( NO_ERROR == ret )
+ {
+ p.setPreviewSize(width, height);
+ }
+ else
+ {
+ p.setPreviewSize(MIN_WIDTH, MIN_HEIGHT);
+ }
+
+ ret = parseResolution(mCameraProperties->get(CameraProperties::PICTURE_SIZE), width, height);
+
+ if ( NO_ERROR == ret )
+ {
+ p.setPictureSize(width, height);
+ }
+ else
+ {
+ p.setPictureSize(PICTURE_WIDTH, PICTURE_HEIGHT);
+ }
+
+ ret = parseResolution(mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_SIZE), width, height);
+
+ if ( NO_ERROR == ret )
+ {
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
+ }
+ else
+ {
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH);
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT);
+ }
+
+ insertSupportedParams();
+
+ //Insert default values
+ p.setPreviewFrameRate(atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)));
+ p.setPreviewFormat(mCameraProperties->get(CameraProperties::PREVIEW_FORMAT));
+ p.setPictureFormat(mCameraProperties->get(CameraProperties::PICTURE_FORMAT));
+ p.set(CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY));
+ p.set(CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE));
+ p.set(CameraParameters::KEY_EFFECT, mCameraProperties->get(CameraProperties::EFFECT));
+ p.set(CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING));
+ p.set(CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE));
+ p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION));
+ p.set(CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE));
+
+ const char *flashmode = mCameraProperties->get(CameraProperties::FLASH_MODE);
+ if(flashmode&&(flashmode[0]!=0)){
+ p.set(CameraParameters::KEY_FLASH_MODE, flashmode);
+ }
+
+ p.set(CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM));
+ p.set(ExCameraParameters::KEY_CONTRAST, mCameraProperties->get(CameraProperties::CONTRAST));
+ p.set(ExCameraParameters::KEY_SATURATION, mCameraProperties->get(CameraProperties::SATURATION));
+ p.set(ExCameraParameters::KEY_BRIGHTNESS, mCameraProperties->get(CameraProperties::BRIGHTNESS));
+ p.set(ExCameraParameters::KEY_SHARPNESS, mCameraProperties->get(CameraProperties::SHARPNESS));
+ p.set(ExCameraParameters::KEY_EXPOSURE_MODE, mCameraProperties->get(CameraProperties::EXPOSURE_MODE));
+ p.set(ExCameraParameters::KEY_ISO, mCameraProperties->get(CameraProperties::ISO_MODE));
+ p.set(ExCameraParameters::KEY_IPP, mCameraProperties->get(CameraProperties::IPP));
+ p.set(ExCameraParameters::KEY_GBCE, mCameraProperties->get(CameraProperties::GBCE));
+ p.set(ExCameraParameters::KEY_S3D2D_PREVIEW, mCameraProperties->get(CameraProperties::S3D2D_PREVIEW));
+ p.set(ExCameraParameters::KEY_AUTOCONVERGENCE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE));
+ p.set(ExCameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES));
+ p.set(CameraParameters::KEY_VIDEO_STABILIZATION, mCameraProperties->get(CameraProperties::VSTAB));
+ p.set(CameraParameters::KEY_FOCAL_LENGTH, mCameraProperties->get(CameraProperties::FOCAL_LENGTH));
+ p.set(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE));
+ p.set(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE));
+ p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
+ p.set(ExCameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
+ p.set(ExCameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES));
+ p.set(ExCameraParameters::KEY_EXIF_MAKE, mCameraProperties->get(CameraProperties::EXIF_MAKE));
+ p.set(ExCameraParameters::KEY_EXIF_MODEL, mCameraProperties->get(CameraProperties::EXIF_MODEL));
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY));
+ p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP);
+ p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES));
+ p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES));
+
+ // Only one area a.k.a Touch AF for now.
+ // TODO: Add support for multiple focus areas.
+ p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS));
+ p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK));
+ p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK));
+ p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS));
+ p.set(CameraParameters::KEY_VIDEO_SIZE, mCameraProperties->get(CameraProperties::VIDEO_SIZE));
+ //p.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, mCameraProperties->get(CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO));
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Stop a previously started preview.
+ @param none
+ @return none
+
+ */
+void VirtualCamHal::forceStopPreview()
+{
+ LOG_FUNCTION_NAME;
+
+ // stop bracketing if it is running
+ stopImageBracketing();
+
+ if(mDisplayAdapter.get() != NULL) {
+ ///Stop the buffer display first
+ mDisplayAdapter->disableDisplay();
+ }
+
+ if(mAppCbNotifier.get() != NULL) {
+ //Stop the callback sending
+ mAppCbNotifier->stop();
+ mAppCbNotifier->flushAndReturnFrames();
+ mAppCbNotifier->stopPreviewCallbacks();
+ }
+
+ if ( NULL != mCameraAdapter ) {
+ // only need to send these control commands to state machine if we are
+        // past the LOADED_PREVIEW_STATE
+ if (mCameraAdapter->getState() > CameraAdapter::LOADED_PREVIEW_STATE) {
+ // according to javadoc...FD should be stopped in stopPreview
+            // and the application needs to call startFaceDetection again
+ // to restart FD
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_FD);
+ }
+
+ LOGD("rollback!!!!!!!!");
+ mCameraAdapter->rollbackToInitializedState();
+
+ }
+
+ freePreviewBufs();
+ freePreviewDataBufs();
+
+ mPreviewEnabled = false;
+ mDisplayPaused = false;
+ mPreviewStartInProgress = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Deallocates memory for all the resources held by Camera HAL.
+
+ Frees the following objects- CameraAdapter, AppCallbackNotifier, DisplayAdapter,
+ and Memory Manager
+
+ @param none
+ @return none
+
+ */
+void VirtualCamHal::deinitialize()
+{
+ LOG_FUNCTION_NAME;
+
+ if ( mPreviewEnabled || mDisplayPaused ) {
+ forceStopPreview();
+ }
+
+ mSetPreviewWindowCalled = false;
+
+#ifdef ENABLE_SENSOR_LISTENER
+ if (mSensorListener.get()) {
+ mSensorListener->disableSensor(SensorListener::SENSOR_ORIENTATION);
+ mSensorListener.clear();
+ mSensorListener = NULL;
+ }
+#endif
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+status_t VirtualCamHal::storeMetaDataInBuffers(bool enable)
+{
+ LOG_FUNCTION_NAME;
+
+    status_t ret = mAppCbNotifier->useMetaDataBufferMode(enable);
+
+    LOG_FUNCTION_NAME_EXIT;
+
+    return ret;
+}
+
+void VirtualCamHal::selectFPSRange(int framerate, int *min_fps, int *max_fps)
+{
+ char * ptr;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ int fpsrangeArray[2];
+ int i = 0;
+
+ LOG_FUNCTION_NAME;
+ size_t size = strlen(mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED))+1;
+ strncpy(supported, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED), size);
+
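+    // The supported range property is formatted like "(min1,max1),(min2,max2),...";
+    // walk the numbers pairwise and pick the first range that contains the requested framerate.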
+ ptr = strtok (supported," (,)");
+
+ while (ptr != NULL)
+ {
+ fpsrangeArray[i]= atoi(ptr)/VirtualCamHal::VFR_SCALE;
+ if (i == 1)
+ {
+ if ((framerate <= fpsrangeArray[i])&&(framerate >= fpsrangeArray[i-1]))
+ {
+ CAMHAL_LOGDB("SETTING FPS RANGE min = %d max = %d \n", fpsrangeArray[0], fpsrangeArray[1]);
+ *min_fps = fpsrangeArray[0]*VirtualCamHal::VFR_SCALE;
+ *max_fps = fpsrangeArray[1]*VirtualCamHal::VFR_SCALE;
+ break;
+ }
+ }
+ ptr = strtok (NULL, " (,)");
+ i++;
+ i%=2;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+void VirtualCamHal::setPreferredPreviewRes(int width, int height)
+{
+ LOG_FUNCTION_NAME;
+
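+    // Map QVGA (320x240) and QCIF (176x144) preview requests to 640x480 and 704x576 respectively.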
+ if ( (width == 320) && (height == 240)){
+ mParameters.setPreviewSize(640,480);
+ }
+ if ( (width == 176) && (height == 144)){
+ mParameters.setPreviewSize(704,576);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void VirtualCamHal::resetPreviewRes(CameraParameters *mParams, int width, int height)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( (width <= 320) && (height <= 240)){
+ mParams->setPreviewSize(mVideoWidth, mVideoHeight);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+};
+
+
diff --git a/vircam/inc/V4LCamAdpt.h b/vircam/inc/V4LCamAdpt.h
new file mode 100755
index 0000000..6426651
--- /dev/null
+++ b/vircam/inc/V4LCamAdpt.h
@@ -0,0 +1,257 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef V4L_CAM_ADPT_H
+#define V4L_CAM_ADPT_H
+
+#include "CameraHal.h"
+#include "BaseCameraAdapter.h"
+#include "DebugUtils.h"
+#include "Encoder_libjpeg.h"
+#include "V4LCameraAdapter.h"
+
+
+namespace android {
+
+#ifndef DEFAULT_PREVIEW_PIXEL_FORMAT
+#define DEFAULT_PREVIEW_PIXEL_FORMAT V4L2_PIX_FMT_NV21
+#define DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT V4L2_PIX_FMT_RGB24
+#endif
+#define NB_BUFFER 6
+
+/**
+ * Class which completely abstracts the camera hardware interaction from camera hal
+ * TODO: Need to list down here, all the message types that will be supported by this class
+ */
+class V4LCamAdpt : public BaseCameraAdapter
+{
+public:
+
+ /*--------------------Constant declarations----------------------------------------*/
+ static const int32_t MAX_NO_BUFFERS = 20;
+
+ //static const int MAX_NO_PORTS = 6;
+
+    ///Five second timeout (in microseconds)
+ static const int CAMERA_ADAPTER_TIMEOUT = 5000*1000;
+
+public:
+
+ V4LCamAdpt(size_t sensor_index);
+ ~V4LCamAdpt();
+
+ int SetExposure(int camera_fd,const char *sbn);
+ int SetExposureMode(int camera_fd, unsigned int mode);
+ int set_white_balance(int camera_fd,const char *swb);
+ int set_banding(int camera_fd,const char *snm);
+ int set_night_mode(int camera_fd,const char *snm);
+ int set_effect(int camera_fd,const char *sef);
+ int set_flash_mode(int camera_fd, const char *sfm);
+ bool get_flash_mode( char *flash_status,
+ char *def_flash_status);
+
+ int getValidFrameSize(int pixel_format, char *framesize);
+ int getCameraOrientation(bool frontcamera, char* property);
+ bool getCameraWhiteBalance(char* wb_modes, char*def_wb_mode);
+ bool getCameraBanding(char* banding_modes, char*def_banding_mode);
+ bool getCameraExposureValue(int &min, int &max,
+ int &step, int &def);
+ bool getCameraAutoFocus( char* focus_mode_str, char*def_focus_mode);
+ int set_hflip_mode(int camera_fd, bool mode);
+ int get_hflip_mode(int camera_fd);
+ int get_supported_zoom(int camera_fd, char * zoom_str);
+ int set_zoom_level(int camera_fd, int zoom);
+
+#ifdef AMLOGIC_VCAM_NONBLOCK_SUPPORT
+ int get_framerate (int camera_fd,int *fps, int *fps_num);
+ int enumFramerate ( int *fps, int *fps_num);
+#endif
+#if 1//ndef AMLOGIC_USB_CAMERA_SUPPORT
+ int set_rotate_value(int camera_fd, int value);
+#endif
+
+ bool isPreviewDevice(int camera_fd);
+ bool isFrontCam( int camera_id );
+ bool isVolatileCam();
+ bool getCameraHandle();
+
+    ///Initializes the camera adapter and creates any resources required
+ virtual status_t initialize(CameraProperties::Properties*);
+ //virtual status_t initialize(CameraProperties::Properties*, int sensor_index=0);
+
+ //APIs to configure Camera adapter and get the current parameter set
+ virtual status_t setParameters(const CameraParameters& params);
+ virtual void getParameters(CameraParameters& params);
+
+ // API
+ virtual status_t UseBuffersPreview(void* bufArr, int num);
+ virtual status_t UseBuffersCapture(void* bufArr, int num);
+
+ //API to flush the buffers for preview
+ status_t flushBuffers();
+
+protected:
+
+//----------Parent class method implementation------------------------------------
+ virtual status_t takePicture();
+ virtual status_t autoFocus();
+ virtual status_t cancelAutoFocus();
+ virtual status_t startPreview();
+ virtual status_t stopPreview();
+ virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
+ virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+ virtual status_t getFrameSize(size_t &width, size_t &height);
+ virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+ virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
+ virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+//-----------------------------------------------------------------------------
+ status_t disableMirror(bool bDisable);
+ status_t setMirrorEffect();
+ status_t getFocusMoveStatus();
+
+private:
+
+ class PreviewThread : public Thread {
+ V4LCamAdpt* mAdapter;
+ public:
+ PreviewThread(V4LCamAdpt* hw) :
+ Thread(false), mAdapter(hw) { }
+ virtual void onFirstRef() {
+ run("CameraPreviewThread", PRIORITY_URGENT_DISPLAY);
+ }
+ virtual bool threadLoop() {
+ mAdapter->previewThread();
+ // loop until we need to quit
+ return true;
+ }
+ };
+
+ status_t setBuffersFormat(int width, int height, int pixelformat);
+ status_t getBuffersFormat(int &width, int &height, int &pixelformat);
+
+ //Used for calculation of the average frame rate during preview
+ status_t recalculateFPS();
+
+ char * GetFrame(int &index);
+
+ int previewThread();
+
+ static int beginPictureThread(void *cookie);
+ int pictureThread();
+
+ static int beginAutoFocusThread(void *cookie);
+
+ int GenExif(ExifElementsTable* exiftable);
+
+ status_t IoctlStateProbe();
+
+public:
+
+private:
+ int mPreviewBufferCount;
+ KeyedVector<int, int> mPreviewBufs;
+ KeyedVector<int, int> mPreviewIdxs;
+ mutable Mutex mPreviewBufsLock;
+
+ //TODO use members from BaseCameraAdapter
+ camera_memory_t *mCaptureBuf;
+
+ CameraParameters mParams;
+
+ int mPreviewWidth;
+ int mPreviewHeight;
+ int mCaptureWidth;
+ int mCaptureHeight;
+
+ bool mPreviewing;
+ bool mCapturing;
+ Mutex mLock;
+
+ int mFrameCount;
+ int mLastFrameCount;
+ unsigned int mIter;
+ nsecs_t mLastFPSTime;
+
+ //variables holding the estimated framerate
+ float mFPS, mLastFPS;
+
+ int mSensorIndex;
+ bool mbFrontCamera;
+ bool mbDisableMirror;
+
+ // protected by mLock
+ sp<PreviewThread> mPreviewThread;
+
+ struct VideoInfo *mVideoInfo;
+ int mCameraHandle;
+
+#ifdef AMLOGIC_TWO_CH_UVC
+ int mCamEncodeHandle;
+ int mCamEncodeIndex;
+#endif
+
+ int nQueued;
+ int nDequeued;
+
+ int mZoomlevel;
+ unsigned int mPixelFormat;
+
+#if 0//def AMLOGIC_USB_CAMERA_SUPPORT
+ int mUsbCameraStatus;
+
+ bool mIsDequeuedEIOError;
+
+ enum UsbCameraStatus
+ {
+ USBCAMERA_NO_INIT,
+ USBCAMERA_INITED,
+ USBCAMERA_ACTIVED
+ };
+#endif
+ //int maxQueueable;//the max queued buffers in v4l
+
+ camera_focus_mode_t cur_focus_mode;
+ camera_focus_mode_t cur_focus_mode_for_conti;
+ bool bFocusMoveState;
+
+ bool mEnableContiFocus;
+ camera_flashlight_status_t mFlashMode;
+ unsigned int mIoctlSupport;
+
+ int mWhiteBalance;
+ int mEV;
+ int mEVdef;
+ int mEVmin;
+ int mEVmax;
+ int mAntiBanding;
+ int mFocusWaitCount;
+    //check roughly every 17 frames whether focus is still running
+    //in continuous focus mode
+ static const int FOCUS_PROCESS_FRAMES = 17;
+
+#ifdef AMLOGIC_VCAM_NONBLOCK_SUPPORT
+ int mPreviewFrameRate;
+ struct timeval previewTime1, previewTime2;
+#endif
+#if 1//ndef AMLOGIC_USB_CAMERA_SUPPORT
+ int mRotateValue;
+#endif
+};
+}; // namespace android
+#endif //V4L_CAM_ADPT_H
+
diff --git a/vircam/inc/VirtualCamHal.h b/vircam/inc/VirtualCamHal.h
new file mode 100755
index 0000000..03d9baa
--- /dev/null
+++ b/vircam/inc/VirtualCamHal.h
@@ -0,0 +1,666 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef ANDROID_VIRTUAL_CAMERA_HARDWARE_H
+#define ANDROID_VIRTUAL_CAMERA_HARDWARE_H
+#include "CameraHal.h"
+
+#define MIN_WIDTH 640
+#define MIN_HEIGHT 480
+#define PICTURE_WIDTH 3264 /* 5mp - 2560. 8mp - 3280 */ /* Make sure it is a multiple of 16. */
+#define PICTURE_HEIGHT 2448 /* 5mp - 2048. 8mp - 2464 */ /* Make sure it is a multiple of 16. */
+#define PREVIEW_WIDTH 176
+#define PREVIEW_HEIGHT 144
+//#define PIXEL_FORMAT V4L2_PIX_FMT_UYVY
+
+#define VIDEO_FRAME_COUNT_MAX 8 //NUM_OVERLAY_BUFFERS_REQUESTED
+#define MAX_CAMERA_BUFFERS 8 //NUM_OVERLAY_BUFFERS_REQUESTED
+#define MAX_ZOOM 3
+#define THUMB_WIDTH 80
+#define THUMB_HEIGHT 60
+#define PIX_YUV422I 0
+#define PIX_YUV420P 1
+
+#define SATURATION_OFFSET 100
+#define SHARPNESS_OFFSET 100
+#define CONTRAST_OFFSET 100
+
+#define CAMHAL_GRALLOC_USAGE GRALLOC_USAGE_HW_TEXTURE | \
+ GRALLOC_USAGE_HW_RENDER | \
+ GRALLOC_USAGE_SW_READ_RARELY | \
+ GRALLOC_USAGE_SW_WRITE_NEVER
+
+//Enables Absolute PPM measurements in logcat
+#ifndef PPM_INSTRUMENTATION_ABS
+#define PPM_INSTRUMENTATION_ABS 1
+#endif
+
+#define LOCK_BUFFER_TRIES 5
+//TODO this is wrong. fix this:
+#define HAL_PIXEL_FORMAT_NV12 HAL_PIXEL_FORMAT_YCrCb_420_SP
+
+//The sensor listener is unused for now; the camera does not need to know the orientation,
+//so it is disabled here.
+//#define ENABLE_SENSOR_LISTENER 1
+
+//Comment out DEBUG_LOG to disable the more verbose/debug logs
+#define DEBUG_LOG
+
+#define LOGE ALOGE
+#define LOGV ALOGV
+#define LOGI ALOGI
+#define LOGD ALOGD
+
+///Camera HAL Logging Functions
+#ifndef DEBUG_LOG
+
+#define CAMHAL_LOGDA(str)
+#define CAMHAL_LOGDB(str, ...)
+#define CAMHAL_LOGVA(str)
+#define CAMHAL_LOGVB(str, ...)
+
+#define CAMHAL_LOGEA LOGE
+#define CAMHAL_LOGEB LOGE
+
+#undef LOG_FUNCTION_NAME
+#undef LOG_FUNCTION_NAME_EXIT
+#define LOG_FUNCTION_NAME
+#define LOG_FUNCTION_NAME_EXIT
+
+#else
+
+#define CAMHAL_LOGDA DBGUTILS_LOGDA
+#define CAMHAL_LOGDB DBGUTILS_LOGDB
+#define CAMHAL_LOGVA DBGUTILS_LOGVA
+#define CAMHAL_LOGVB DBGUTILS_LOGVB
+
+#define CAMHAL_LOGEA DBGUTILS_LOGEA
+#define CAMHAL_LOGEB DBGUTILS_LOGEB
+
+#endif
+
+
+//#define AMLOGIC_CAMERA_OVERLAY_SUPPORT
+//#define AMLOGIC_USB_CAMERA_SUPPORT
+
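+//Assigns y = x only when x is non-negative. Note the macro expands to a bare if,
+//so avoid following its use with an else branch.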
+#define NONNEG_ASSIGN(x,y) \
+ if(x > -1) \
+ y = x
+
+namespace android {
+
+#define PARAM_BUFFER 6000
+
+///Forward declarations
+class VirtualCamHal;
+class CameraFrame;
+class VirtualCamHalEvent;
+class DisplayFrame;
+
+/**
+ * Class for handling data and notify callbacks to application
+ */
+class AppCbNotifier: public ErrorNotifier , public virtual RefBase
+{
+
+public:
+
+ ///Constants
+ static const int NOTIFIER_TIMEOUT;
+ static const int32_t MAX_BUFFERS = 8;
+
+ enum NotifierCommands
+ {
+ NOTIFIER_CMD_PROCESS_EVENT,
+ NOTIFIER_CMD_PROCESS_FRAME,
+ NOTIFIER_CMD_PROCESS_ERROR
+ };
+
+ enum NotifierState
+ {
+ NOTIFIER_STOPPED,
+ NOTIFIER_STARTED,
+ NOTIFIER_EXITED
+ };
+
+public:
+
+ ~AppCbNotifier();
+
+    ///Initializes the callback notifier and creates any resources required
+ status_t initialize();
+
+ ///Starts the callbacks to application
+ status_t start();
+
+ ///Stops the callbacks from going to application
+ status_t stop();
+
+ void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider);
+ void setFrameProvider(FrameNotifier *frameProvider);
+
+ //All sub-components of Camera HAL call this whenever any error happens
+ virtual void errorNotify(int error);
+
+ status_t startPreviewCallbacks(CameraParameters &params, void *buffers, uint32_t *offsets, int fd, size_t length, size_t count);
+ status_t stopPreviewCallbacks();
+
+ status_t enableMsgType(int32_t msgType);
+ status_t disableMsgType(int32_t msgType);
+
+ //API for enabling/disabling measurement data
+ void setMeasurements(bool enable);
+
+ //thread loops
+ bool notificationThread();
+
+ ///Notification callback functions
+ static void frameCallbackRelay(CameraFrame* caFrame);
+ static void eventCallbackRelay(CameraHalEvent* chEvt);
+ void frameCallback(CameraFrame* caFrame);
+ void eventCallback(CameraHalEvent* chEvt);
+ void flushAndReturnFrames();
+
+ void setCallbacks(VirtualCamHal *cameraHal,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ //Set Burst mode
+ void setBurst(bool burst);
+
+ //Notifications from CameraHal for video recording case
+ status_t startRecording();
+ status_t stopRecording();
+ status_t initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, size_t length, size_t count, void *vidBufs);
+ status_t releaseRecordingFrame(const void *opaque);
+
+ status_t useMetaDataBufferMode(bool enable);
+
+ void EncoderDoneCb(void*, void*, CameraFrame::FrameType type, void* cookie1, void* cookie2);
+
+ void useVideoBuffers(bool useVideoBuffers);
+
+ bool getUseVideoBuffers();
+ void setVideoRes(int width, int height);
+
+ void flushEventQueue();
+
+ //Internal class definitions
+ class NotificationThread : public Thread {
+ AppCbNotifier* mAppCbNotifier;
+ MSGUTILS::MessageQueue mNotificationThreadQ;
+ public:
+ enum NotificationThreadCommands
+ {
+ NOTIFIER_START,
+ NOTIFIER_STOP,
+ NOTIFIER_EXIT,
+ };
+ public:
+ NotificationThread(AppCbNotifier* nh)
+ : Thread(false), mAppCbNotifier(nh) { }
+ virtual bool threadLoop() {
+ return mAppCbNotifier->notificationThread();
+ }
+
+ MSGUTILS::MessageQueue &msgQ() { return mNotificationThreadQ;}
+ };
+
+ //Friend declarations
+ friend class NotificationThread;
+
+private:
+ void notifyEvent();
+ void notifyFrame();
+ bool processMessage();
+ void releaseSharedVideoBuffers();
+ status_t dummyRaw();
+ void copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType);
+ void copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType);
+
+private:
+ mutable Mutex mLock;
+ mutable Mutex mBurstLock;
+ VirtualCamHal* mCameraHal;
+ camera_notify_callback mNotifyCb;
+ camera_data_callback mDataCb;
+ camera_data_timestamp_callback mDataCbTimestamp;
+ camera_request_memory mRequestMemory;
+ void *mCallbackCookie;
+
+ //Keeps Video MemoryHeaps and Buffers within
+ //these objects
+ KeyedVector<unsigned int, unsigned int> mVideoHeaps;
+ KeyedVector<unsigned int, unsigned int> mVideoBuffers;
+ KeyedVector<unsigned int, unsigned int> mVideoMap;
+
+ //Keeps list of Gralloc handles and associated Video Metadata Buffers
+ KeyedVector<uint32_t, uint32_t> mVideoMetadataBufferMemoryMap;
+ KeyedVector<uint32_t, uint32_t> mVideoMetadataBufferReverseMap;
+
+ bool mBufferReleased;
+
+ sp< NotificationThread> mNotificationThread;
+ EventProvider *mEventProvider;
+ FrameProvider *mFrameProvider;
+ MSGUTILS::MessageQueue mEventQ;
+ MSGUTILS::MessageQueue mFrameQ;
+ NotifierState mNotifierState;
+
+ bool mPreviewing;
+ camera_memory_t* mPreviewMemory;
+ unsigned char* mPreviewBufs[MAX_BUFFERS];
+ int mPreviewBufCount;
+ const char *mPreviewPixelFormat;
+ KeyedVector<unsigned int, sp<MemoryHeapBase> > mSharedPreviewHeaps;
+ KeyedVector<unsigned int, sp<MemoryBase> > mSharedPreviewBuffers;
+
+ //Burst mode active
+ bool mBurst;
+ mutable Mutex mRecordingLock;
+ bool mRecording;
+ bool mMeasurementEnabled;
+
+ bool mUseMetaDataBufferMode;
+ bool mRawAvailable;
+
+ bool mUseVideoBuffers;
+
+ int mVideoWidth;
+ int mVideoHeight;
+
+};
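+//Callbacks registered with the camera adapter in VirtualCamHal::initialize() to release
+//image buffers and to signal the end of an image capture.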
+static void releaseImageBuffers(void *userData);
+
+static void endImageCapture(void *userData);
+
+ /**
+ Implementation of the Android Camera hardware abstraction layer
+
+*/
+class VirtualCamHal
+
+{
+
+public:
+ ///Constants
+ static const int NO_BUFFERS_PREVIEW;
+ static const int NO_BUFFERS_IMAGE_CAPTURE;
+ static const uint32_t VFR_SCALE = 1000;
+
+
+ /*--------------------Interface Methods---------------------------------*/
+
+ //@{
+public:
+
+ /** Set the notification and data callbacks */
+ void setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ /** Receives orientation events from SensorListener **/
+ void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+
+ /**
+ * The following three functions all take a msgtype,
+ * which is a bitmask of the messages defined in
+ * include/ui/Camera.h
+ */
+
+ /**
+ * Enable a message, or set of messages.
+ */
+ void enableMsgType(int32_t msgType);
+
+ /**
+ * Disable a message, or a set of messages.
+ */
+ void disableMsgType(int32_t msgType);
+
+ /**
+ * Query whether a message, or a set of messages, is enabled.
+     * Note that this operates as an AND: if any of the queried
+     * messages are off, this will return false.
+ */
+ int msgTypeEnabled(int32_t msgType);
+
+ /**
+ * Start preview mode.
+ */
+ int startPreview();
+
+ /**
+ * Only used if overlays are used for camera preview.
+ */
+ int setPreviewWindow(struct preview_stream_ops *window);
+
+ /**
+ * Stop a previously started preview.
+ */
+ void stopPreview();
+
+ /**
+ * Returns true if preview is enabled.
+ */
+ bool previewEnabled();
+
+ /**
+ * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
+ * message is sent with the corresponding frame. Every record frame must be released
+ * by calling releaseRecordingFrame().
+ */
+ int startRecording();
+
+ /**
+ * Stop a previously started recording.
+ */
+ void stopRecording();
+
+ /**
+ * Returns true if recording is enabled.
+ */
+ int recordingEnabled();
+
+ /**
+ * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+ */
+ void releaseRecordingFrame(const void *opaque);
+
+ /**
+ * Start auto focus, the notification callback routine is called
+ * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
+ * will be called again if another auto focus is needed.
+ */
+ int autoFocus();
+
+ /**
+ * Cancels auto-focus function. If the auto-focus is still in progress,
+ * this function will cancel it. Whether the auto-focus is in progress
+ * or not, this function will return the focus position to the default.
+ * If the camera does not support auto-focus, this is a no-op.
+ */
+ int cancelAutoFocus();
+
+ /**
+ * Take a picture.
+ */
+ int takePicture();
+
+ /**
+ * Cancel a picture that was started with takePicture. Calling this
+ * method when no picture is being taken is a no-op.
+ */
+ int cancelPicture();
+
+ /** Set the camera parameters. */
+ int setParameters(const char* params);
+ int setParameters(const CameraParameters& params);
+
+ /** Return the camera parameters. */
+ char* getParameters();
+ void putParameters(char *);
+
+ /**
+ * Send command to camera driver.
+ */
+ int sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+
+ /**
+ * Release the hardware resources owned by this object. Note that this is
+ * *not* done in the destructor.
+ */
+ void release();
+
+ /**
+ * Dump state of the camera hardware
+ */
+ int dump(int fd) const;
+
+
+ status_t storeMetaDataInBuffers(bool enable);
+
+ //@}
+
+/*--------------------Internal Member functions - Public---------------------------------*/
+
+public:
+ /** @name internalFunctionsPublic */
+ //@{
+
+ /** Constructor of VirtualCamHal */
+ VirtualCamHal(int cameraId);
+
+ // Destructor of VirtualCamHal
+ ~VirtualCamHal();
+
+ /** Initialize VirtualCamHal */
+ status_t initialize(CameraProperties::Properties*);
+
+ /** Deinitialize VirtualCamHal */
+ void deinitialize();
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //Uses the constructor timestamp as a reference to calculate the
+ // elapsed time
+ static void PPM(const char *);
+ //Uses a user-provided timestamp as a reference to calculate the
+ // elapsed time
+ static void PPM(const char *, struct timeval*, ...);
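+
+ /* Illustrative instrumentation calls (labels are arbitrary, and this usage is
+  * a sketch): PPM("after preview start") measures time elapsed since the
+  * constructor timestamp, while PPM("focus latency", &mStartFocus) measures
+  * from the supplied reference timestamp.
+  */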
+
+#endif
+
+ /** Free image bufs */
+ status_t freeImageBufs();
+
+ //Signals the end of image capture
+ status_t signalEndImageCapture();
+
+ //Events
+ static void eventCallbackRelay(CameraHalEvent* event);
+ void eventCallback(CameraHalEvent* event);
+ void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider);
+
+ //@}
+
+/*--------------------Internal Member functions - Private---------------------------------*/
+private:
+
+ /** @name internalFunctionsPrivate */
+ //@{
+
+ /** Set the camera parameters specific to Video Recording. */
+ bool setVideoModeParameters(const CameraParameters&);
+
+ /** Reset the camera parameters specific to Video Recording. */
+ bool resetVideoModeParameters();
+
+ /** Restart the preview after a setParameters() call. */
+ status_t restartPreview();
+
+ status_t parseResolution(const char *resStr, int &width, int &height);
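+
+ /* Presumed behaviour (a sketch, not confirmed by this header): the resolution
+  * string follows the usual "WxH" CameraParameters encoding, so
+  * parseResolution("1280x720", w, h) would yield w == 1280 and h == 720.
+  */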
+
+ void insertSupportedParams();
+
+ /** Allocate preview data buffers */
+ status_t allocPreviewDataBufs(size_t size, size_t bufferCount);
+
+ /** Free preview data buffers */
+ status_t freePreviewDataBufs();
+
+ /** Allocate preview buffers */
+ status_t allocPreviewBufs(int width, int height, const char* previewFormat, unsigned int bufferCount, unsigned int &max_queueable);
+
+ /** Allocate video buffers */
+ status_t allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount);
+
+ /** Allocate image capture buffers */
+ status_t allocImageBufs(unsigned int width, unsigned int height, size_t length, const char* previewFormat, unsigned int bufferCount);
+
+ /** Free preview buffers */
+ status_t freePreviewBufs();
+
+ /** Free video bufs */
+ status_t freeVideoBufs(void *bufs);
+
+ //Check if a given resolution is supported by the current camera
+ //instance
+ bool isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions);
+
+ //Check if a given parameter is supported by the current camera
+ // instance
+ bool isParameterValid(const char *param, const char *supportedParams);
+ bool isParameterValid(int param, const char *supportedParams);
+ bool isParameterInRange(int param, const char *supportedParams);
+ status_t doesSetParameterNeedUpdate(const char *new_param, const char *old_params, bool &update);
+
+ /** Initialize default parameters */
+ void initDefaultParameters();
+
+ void dumpProperties(CameraProperties::Properties& cameraProps);
+
+ status_t startImageBracketing();
+
+ status_t stopImageBracketing();
+
+ void setShutter(bool enable);
+
+ void forceStopPreview();
+
+ void selectFPSRange(int framerate, int *min_fps, int *max_fps);
+
+ void setPreferredPreviewRes(int width, int height);
+ void resetPreviewRes(CameraParameters *mParams, int width, int height);
+
+ //@}
+
+
+/*----------Member variables - Public ---------------------*/
+public:
+ int32_t mMsgEnabled;
+ bool mRecordEnabled;
+ nsecs_t mCurrentTime;
+ bool mFalsePreview;
+ bool mPreviewEnabled;
+ uint32_t mTakePictureQueue;
+ bool mBracketingEnabled;
+ bool mBracketingRunning;
+ //User shutter override
+ bool mShutterEnabled;
+ bool mMeasurementEnabled;
+ //Google's parameter delimiter
+ static const char PARAMS_DELIMITER[];
+
+ CameraAdapter *mCameraAdapter;
+ sp<AppCbNotifier> mAppCbNotifier;
+ sp<DisplayAdapter> mDisplayAdapter;
+ sp<MemoryManager> mMemoryManager;
+
+ sp<IMemoryHeap> mPictureHeap;
+
+ int* mGrallocHandles;
+ bool mFpsRangeChangedByApp;
+
+
+/// Static member variables
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //Timestamp from the VirtualCamHal constructor
+ static struct timeval ppm_start;
+ //Timestamp of the autoFocus command
+ static struct timeval mStartFocus;
+ //Timestamp of the startPreview command
+ static struct timeval mStartPreview;
+ //Timestamp of the takePicture command
+ static struct timeval mStartCapture;
+
+#endif
+
+/*----------Member variables - Private ---------------------*/
+private:
+ bool mDynamicPreviewSwitch;
+ //keeps paused state of display
+ bool mDisplayPaused;
+ //Index of current camera adapter
+ int mCameraIndex;
+
+ mutable Mutex mLock;
+
+#ifdef ENABLE_SENSOR_LISTENER
+ sp<SensorListener> mSensorListener;
+#endif
+ void* mCameraAdapterHandle;
+
+ CameraParameters mParameters;
+ bool mPreviewRunning;
+ bool mPreviewStateOld;
+ bool mRecordingEnabled;
+ EventProvider *mEventProvider;
+
+ int32_t *mPreviewDataBufs;
+ uint32_t *mPreviewDataOffsets;
+ int mPreviewDataFd;
+ int mPreviewDataLength;
+ int32_t *mImageBufs;
+ uint32_t *mImageOffsets;
+ int mImageFd;
+ int mImageLength;
+ int32_t *mPreviewBufs;
+ uint32_t *mPreviewOffsets;
+ int mPreviewLength;
+ int mPreviewFd;
+ int32_t *mVideoBufs;
+ uint32_t *mVideoOffsets;
+ int mVideoFd;
+ int mVideoLength;
+
+ int mBracketRangePositive;
+ int mBracketRangeNegative;
+
+ ///@todo Rename this as preview buffer provider
+ BufferProvider *mBufProvider;
+ BufferProvider *mVideoBufProvider;
+
+
+ CameraProperties::Properties* mCameraProperties;
+
+ bool mPreviewStartInProgress;
+
+ bool mSetPreviewWindowCalled;
+
+ uint32_t mPreviewWidth;
+ uint32_t mPreviewHeight;
+ int32_t mMaxZoomSupported;
+
+ int mVideoWidth;
+ int mVideoHeight;
+
+};
+
+
+}; // namespace android
+
+#endif